comment stringlengths 22 3.02k | method_body stringlengths 46 368k | target_code stringlengths 0 181 | method_body_after stringlengths 12 368k | context_before stringlengths 11 634k | context_after stringlengths 11 632k |
|---|---|---|---|---|---|
We can merge records and objects into one by casting the typeNode to `BStructureTypeNode` | private void createDummyTypeDefSymbol(BLangTypeDefinition typeDef, SymbolEnv env) {
typeDef.symbol = Symbols.createTypeSymbol(SymTag.TYPE_DEF, Flags.asMask(typeDef.flagSet),
names.fromIdNode(typeDef.name), env.enclPkg.symbol.pkgID, typeDef.typeNode.type, env.scope.owner);
typeDef.symbol.scope = env.scope;
switch (typeDef.typeNode.type.tag) {
case TypeTags.RECORD:
typeDef.symbol.kind = ((BLangRecordTypeNode) typeDef.typeNode).symbol.kind;
((BLangRecordTypeNode) typeDef.typeNode).symbol.scope = env.scope;
break;
case TypeTags.OBJECT:
typeDef.symbol.kind = ((BLangObjectTypeNode) typeDef.typeNode).symbol.kind;
((BLangObjectTypeNode) typeDef.typeNode).symbol.scope = env.scope;
break;
}
defineSymbol(typeDef.pos, typeDef.symbol, env);
} | case TypeTags.OBJECT: | private void createDummyTypeDefSymbol(BLangTypeDefinition typeDef, SymbolEnv env) {
typeDef.symbol = Symbols.createTypeSymbol(SymTag.TYPE_DEF, Flags.asMask(typeDef.flagSet),
names.fromIdNode(typeDef.name), env.enclPkg.symbol.pkgID, typeDef.typeNode.type, env.scope.owner);
typeDef.symbol.scope = env.scope;
switch (typeDef.typeNode.type.tag) {
case TypeTags.RECORD:
case TypeTags.OBJECT:
typeDef.symbol.kind = ((BLangStructureTypeNode) typeDef.typeNode).symbol.kind;
((BLangStructureTypeNode) typeDef.typeNode).symbol.scope = env.scope;
break;
}
defineSymbol(typeDef.pos, typeDef.symbol, env);
} | class SymbolEnter extends BLangNodeVisitor {
private static final CompilerContext.Key<SymbolEnter> SYMBOL_ENTER_KEY =
new CompilerContext.Key<>();
private final PackageLoader pkgLoader;
private final SymbolTable symTable;
private final Names names;
private final SymbolResolver symResolver;
private final BLangDiagnosticLog dlog;
private final Types types;
private List<BLangTypeDefinition> unresolvedTypes;
private int typePrecedence;
private SymbolEnv env;
/**
 * Returns the {@code SymbolEnter} registered against the given compiler context,
 * creating (and thereby registering) a new instance on first use.
 *
 * @param context per-compilation context acting as the instance registry
 * @return the shared symbol-enter instance for this context
 */
public static SymbolEnter getInstance(CompilerContext context) {
    SymbolEnter instance = context.get(SYMBOL_ENTER_KEY);
    // The constructor stores itself in the context, so a plain `new` both creates and registers.
    return instance != null ? instance : new SymbolEnter(context);
}
/**
 * Creates a symbol-enter instance bound to the given compiler context and resolves
 * its collaborator components from that context.
 *
 * @param context compiler context providing the shared component instances
 */
public SymbolEnter(CompilerContext context) {
    // Register before resolving collaborators so components fetched below that in turn
    // ask the context for SymbolEnter receive this (partially constructed) instance
    // instead of recursing into a second construction.
    context.put(SYMBOL_ENTER_KEY, this);
    this.pkgLoader = PackageLoader.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.names = Names.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.types = Types.getInstance(context);
}
/**
 * Populates the package node from its compilation units and defines all of its
 * symbols, rooted in the built-in package's environment.
 *
 * @param pkgNode package whose symbols should be entered
 * @return the same package node, for chaining
 */
public BLangPackage definePackage(BLangPackage pkgNode) {
    populatePackageNode(pkgNode);
    SymbolEnv builtInEnv = this.symTable.pkgEnvMap.get(symTable.builtInPackageSymbol);
    defineNode(pkgNode, builtInEnv);
    return pkgNode;
}
/**
 * Visits the given node under the supplied environment, restoring the previously
 * active environment afterwards.
 *
 * @param node node to define symbols for
 * @param env  environment to make current while visiting
 */
public void defineNode(BLangNode node, SymbolEnv env) {
    final SymbolEnv saved = this.env;
    this.env = env;
    node.accept(this);
    // NOTE: restored unconditionally on the normal path only, matching the original
    // contract — an exception during accept() leaves `env` pointing at the new scope.
    this.env = saved;
}
/**
 * Populates and defines a testable (test) package under the enclosing package's
 * environment, excluding imports already present in the enclosing package.
 *
 * @param pkgNode        testable package node
 * @param env            enclosing package's environment
 * @param enclPkgImports imports of the enclosing package (removed from the testable package)
 * @return the same testable package node
 */
public BLangPackage defineTestablePackage(BLangTestablePackage pkgNode, SymbolEnv env,
                                          List<BLangImportPackage> enclPkgImports) {
    populatePackageNode(pkgNode, enclPkgImports);
    defineNode(pkgNode, env);
    return pkgNode;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Skip packages whose symbols were already entered by an earlier DEFINE pass.
    if (pkgNode.completedPhases.contains(CompilerPhase.DEFINE)) {
        return;
    }
    BPackageSymbol pkgSymbol;
    if (Symbols.isFlagOn(Flags.asMask(pkgNode.flagSet), Flags.TESTABLE)) {
        // Testable packages propagate their flag mask into the package symbol.
        pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, Flags.asMask(pkgNode.flagSet));
    } else {
        pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable);
    }
    pkgNode.symbol = pkgSymbol;
    // Create the package-level scope and register it so later phases can look it up by symbol.
    SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(pkgNode, pkgSymbol.scope, this.env);
    this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);
    defineConstructs(pkgNode, pkgEnv);
    // Testable sub-packages are defined against this package's env and inherit its imports.
    pkgNode.getTestablePkgs().forEach(testablePackage -> defineTestablePackage(testablePackage, pkgEnv,
            pkgNode.imports));
    pkgNode.completedPhases.add(CompilerPhase.DEFINE);
}
/**
 * Defines all top-level constructs of a package in dependency order:
 * imports, constants, type definitions, constant types, global variables,
 * then error details, structure fields/members, services, functions and annotations.
 * The ordering matters — e.g. constant type nodes can only be resolved after
 * all type definitions have been entered.
 */
private void defineConstructs(BLangPackage pkgNode, SymbolEnv pkgEnv) {
    pkgNode.imports.forEach(importNode -> defineNode(importNode, pkgEnv));
    this.typePrecedence = 0;
    pkgNode.constants.forEach(constant -> defineNode(constant, pkgEnv));
    defineTypeNodes(pkgNode.typeDefinitions, pkgEnv);
    resolveConstantTypeNode(pkgNode.constants, pkgEnv);
    pkgNode.globalVars.forEach(var -> defineNode(var, pkgEnv));
    pkgEnv.logErrors = true;
    // Sort by the precedence assigned during type definition so dependents come after
    // the types they reference.
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    defineErrorDetails(pkgNode.typeDefinitions, pkgEnv);
    defineFields(pkgNode.typeDefinitions, pkgEnv);
    defineMembers(pkgNode.typeDefinitions, pkgEnv);
    pkgNode.services.forEach(service -> defineNode(service, pkgEnv));
    pkgNode.functions.forEach(func -> defineNode(func, pkgEnv));
    pkgNode.annotations.forEach(annot -> defineNode(annot, pkgEnv));
    // Re-tag global variables of client-flagged types as endpoints.
    pkgNode.globalVars.stream().filter(var -> var.symbol.type.tsymbol != null && Symbols
            .isFlagOn(var.symbol.type.tsymbol.flags, Flags.CLIENT)).map(varNode -> varNode.symbol)
            .forEach(varSymbol -> varSymbol.tag = SymTag.ENDPOINT);
}
/**
 * Defines the symbol for an annotation declaration and, when an attached type node
 * is present, resolves it — rejecting anything that is not a record type.
 */
public void visit(BLangAnnotation annotationNode) {
    BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(Flags.asMask(annotationNode.flagSet),
            AttachPoints.asMask(annotationNode.attachPoints), names.fromIdNode(annotationNode.name),
            env.enclPkg.symbol.pkgID, null, env.scope.owner);
    annotationSymbol.markdownDocumentation =
            getMarkdownDocAttachment(annotationNode.markdownDocumentationAttachment);
    annotationSymbol.type = new BAnnotationType(annotationSymbol);
    annotationNode.symbol = annotationSymbol;
    defineSymbol(annotationNode.name.pos, annotationSymbol);
    SymbolEnv annotationEnv = SymbolEnv.createAnnotationEnv(annotationNode, annotationSymbol.scope, env);
    if (annotationNode.typeNode != null) {
        BType recordType = this.symResolver.resolveTypeNode(annotationNode.typeNode, annotationEnv);
        annotationSymbol.attachedType = recordType.tsymbol;
        // Only record types may be attached; semantic errors were already reported elsewhere,
        // so they are not double-reported here.
        if (recordType != symTable.semanticError && recordType.tag != TypeTags.RECORD) {
            dlog.error(annotationNode.typeNode.pos, DiagnosticCode.ANNOTATION_REQUIRE_RECORD, recordType);
        }
    }
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    Name pkgAlias = names.fromIdNode(importPkgNode.alias);
    // Reject a second import under the same alias in this scope.
    if (symResolver.lookupSymbol(env, pkgAlias, SymTag.IMPORT) != symTable.notFoundSymbol) {
        dlog.error(importPkgNode.pos, DiagnosticCode.REDECLARED_SYMBOL, pkgAlias);
        return;
    }
    Name orgName;
    Name version;
    PackageID enclPackageID = env.enclPkg.packageID;
    if (importPkgNode.orgName.value == null || importPkgNode.orgName.value.isEmpty()) {
        // No org given: inherit the enclosing package's org; drop the version when the
        // enclosing package uses the default version.
        orgName = enclPackageID.orgName;
        version = (Names.DEFAULT_VERSION.equals(enclPackageID.version)) ? new Name("") : enclPackageID.version;
    } else if (importPkgNode.orgName.value.equals(enclPackageID.orgName.value)) {
        // Same org as the enclosing package: version is left empty.
        orgName = names.fromIdNode(importPkgNode.orgName);
        version = new Name("");
    } else {
        // Foreign org: take org and version exactly as written.
        orgName = names.fromIdNode(importPkgNode.orgName);
        version = names.fromIdNode(importPkgNode.version);
    }
    List<Name> nameComps = importPkgNode.pkgNameComps.stream()
            .map(identifier -> names.fromIdNode(identifier))
            .collect(Collectors.toList());
    PackageID pkgId = new PackageID(orgName, nameComps, version);
    // The builtin module namespace is reserved and cannot be imported explicitly.
    if (pkgId.name.getValue().startsWith(Names.BUILTIN_PACKAGE.value)) {
        dlog.error(importPkgNode.pos, DiagnosticCode.MODULE_NOT_FOUND,
                importPkgNode.getQualifiedPackageName());
        return;
    }
    BPackageSymbol pkgSymbol = pkgLoader.loadPackageSymbol(pkgId, enclPackageID, this.env.enclPkg.repos);
    if (pkgSymbol == null) {
        dlog.error(importPkgNode.pos, DiagnosticCode.MODULE_NOT_FOUND,
                importPkgNode.getQualifiedPackageName());
        return;
    }
    importPkgNode.symbol = pkgSymbol;
    // Record the dependency on the owning package and expose the alias in the current scope.
    ((BPackageSymbol) this.env.scope.owner).imports.add(pkgSymbol);
    this.env.scope.define(pkgAlias, pkgSymbol);
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
    String nsURI = (String) ((BLangLiteral) xmlnsNode.namespaceURI).value;
    // A prefixed declaration must carry a non-empty namespace URI.
    if (xmlnsNode.prefix.value != null && nsURI.isEmpty()) {
        dlog.error(xmlnsNode.pos, DiagnosticCode.INVALID_NAMESPACE_DECLARATION, xmlnsNode.prefix);
    }
    // A missing prefix means the default namespace declaration.
    if (xmlnsNode.prefix.value == null) {
        xmlnsNode.prefix.value = XMLConstants.DEFAULT_NS_PREFIX;
    }
    BXMLNSSymbol xmlnsSymbol = Symbols.createXMLNSSymbol(names.fromIdNode(xmlnsNode.prefix), nsURI,
            env.enclPkg.symbol.pkgID, env.scope.owner);
    xmlnsNode.symbol = xmlnsSymbol;
    // The prefix must not clash with any existing package-level symbol of the same name.
    BSymbol foundSym = symResolver.lookupSymbol(env, xmlnsSymbol.name, SymTag.PACKAGE);
    if (foundSym != symTable.notFoundSymbol) {
        dlog.error(xmlnsNode.pos, DiagnosticCode.REDECLARED_SYMBOL, xmlnsSymbol.name);
        return;
    }
    defineSymbol(xmlnsNode.pos, xmlnsSymbol);
}
/**
 * Defines the namespace declaration wrapped by an XMLNS statement in the current environment.
 */
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    defineNode(xmlnsStmtNode.xmlnsDecl, this.env);
}
/**
 * Defines type definitions iteratively until a fixed point. Each pass retries only
 * the definitions that could not be resolved in the previous pass. When a pass makes
 * no progress (the unresolved set did not shrink), the remainder must be cyclic or
 * reference unknown types: errors are reported, then dummy symbols are created so the
 * rest of the compilation can continue.
 */
private void defineTypeNodes(List<BLangTypeDefinition> typeDefs, SymbolEnv env) {
    if (typeDefs.size() == 0) {
        return;
    }
    this.unresolvedTypes = new ArrayList<>();
    for (BLangTypeDefinition typeDef : typeDefs) {
        defineNode(typeDef, env);
    }
    // No progress in this pass: report cycles / unknown types for everything left.
    if (typeDefs.size() <= unresolvedTypes.size()) {
        LinkedList<LocationData> unknownTypes = new LinkedList<>();
        for (BLangTypeDefinition unresolvedType : unresolvedTypes) {
            LinkedList<String> references = new LinkedList<>();
            references.add(unresolvedType.name.value);
            checkErrors(unresolvedType, unresolvedType.typeNode, references, unknownTypes);
        }
        // Dummy symbols keep later phases from crashing on the unresolved definitions.
        unresolvedTypes.forEach(type -> createDummyTypeDefSymbol(type, env));
        unresolvedTypes.forEach(type -> defineNode(type, env));
        return;
    }
    // Progress was made; retry the remaining unresolved definitions.
    defineTypeNodes(unresolvedTypes, env);
}
/**
 * Walks the type-reference graph rooted at {@code currentTypeNode} looking for the
 * cause of {@code unresolvedType} being unresolvable, reporting either a cyclic type
 * reference (with the reference chain) or an unknown type (deduplicated by location).
 *
 * @param unresolvedType          the definition being diagnosed
 * @param currentTypeNode         type node currently being inspected
 * @param visitedNodes            reference chain from the unresolved type to here
 * @param encounteredUnknownTypes unknown-type locations already reported, to avoid duplicates
 */
private void checkErrors(BLangTypeDefinition unresolvedType, BLangType currentTypeNode, List<String> visitedNodes,
                         List<LocationData> encounteredUnknownTypes) {
    String unresolvedTypeNodeName = unresolvedType.name.value;
    if (currentTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
        List<BLangType> memberTypeNodes = ((BLangUnionTypeNode) currentTypeNode).memberTypeNodes;
        for (BLangType memberTypeNode : memberTypeNodes) {
            if (memberTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
                checkErrors(unresolvedType, memberTypeNode, visitedNodes, encounteredUnknownTypes);
            } else if (memberTypeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
                String memberTypeNodeName = ((BLangUserDefinedType) memberTypeNode).typeName.value;
                // Compiler-generated ($-prefixed) names are skipped.
                if (memberTypeNodeName.startsWith("$")) {
                    continue;
                }
                if (unresolvedTypeNodeName.equals(memberTypeNodeName)) {
                    // Direct self-reference: report the cycle, then undo the temporary
                    // addition so the shared chain stays consistent for siblings.
                    visitedNodes.add(memberTypeNodeName);
                    dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, visitedNodes);
                    visitedNodes.remove(visitedNodes.lastIndexOf(memberTypeNodeName));
                } else {
                    checkErrors(unresolvedType, memberTypeNode, visitedNodes, encounteredUnknownTypes);
                }
            }
        }
    } else if (currentTypeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
        String currentTypeNodeName = ((BLangUserDefinedType) currentTypeNode).typeName.value;
        if (unresolvedTypeNodeName.equals(currentTypeNodeName)) {
            // Cycle closes back at the type we started from.
            visitedNodes.add(currentTypeNodeName);
            dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, visitedNodes);
            visitedNodes.remove(visitedNodes.lastIndexOf(currentTypeNodeName));
        } else if (visitedNodes.contains(currentTypeNodeName)) {
            // Cycle through an intermediate type: report only the sub-chain from the
            // first occurrence of that type onward.
            List<String> dependencyList = new LinkedList<>();
            for (int i = visitedNodes.indexOf(currentTypeNodeName); i < visitedNodes.size(); i++) {
                dependencyList.add(visitedNodes.get(i));
            }
            dependencyList.add(currentTypeNodeName);
            dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, dependencyList);
        } else {
            // Not a cycle so far: follow all unresolved definitions with this name.
            List<BLangTypeDefinition> typeDefinitions = unresolvedTypes.stream()
                    .filter(typeDefinition -> typeDefinition.name.value.equals(currentTypeNodeName))
                    .collect(Collectors.toList());
            if (typeDefinitions.isEmpty()) {
                // The name does not match any unresolved definition: unknown type.
                LocationData locationData = new LocationData(currentTypeNodeName, currentTypeNode.pos.sLine,
                        currentTypeNode.pos.sCol);
                if (!encounteredUnknownTypes.contains(locationData)) {
                    dlog.error(currentTypeNode.pos, DiagnosticCode.UNKNOWN_TYPE, currentTypeNodeName);
                    encounteredUnknownTypes.add(locationData);
                }
            } else {
                for (BLangTypeDefinition typeDefinition : typeDefinitions) {
                    String typeName = typeDefinition.name.value;
                    // Push, recurse, pop — depth-first traversal of the reference chain.
                    visitedNodes.add(typeName);
                    checkErrors(unresolvedType, typeDefinition.typeNode, visitedNodes, encounteredUnknownTypes);
                    visitedNodes.remove(visitedNodes.lastIndexOf(typeName));
                }
            }
        }
    }
}
@Override
public void visit(BLangTypeDefinition typeDefinition) {
    BType definedType = symResolver.resolveTypeNode(typeDefinition.typeNode, env);
    // Unresolvable so far: queue for the next defineTypeNodes pass and bail out.
    if (definedType == symTable.noType) {
        if (!this.unresolvedTypes.contains(typeDefinition)) {
            this.unresolvedTypes.add(typeDefinition);
        }
        return;
    }
    // Structures with type references (record/object inclusion) also need every
    // referenced type resolved before the definition can be completed.
    if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE ||
            typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) {
        BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;
        for (BLangType typeRef : structureTypeNode.typeRefs) {
            BType referencedType = symResolver.resolveTypeNode(typeRef, env);
            if (referencedType == symTable.noType) {
                if (!this.unresolvedTypes.contains(typeDefinition)) {
                    this.unresolvedTypes.add(typeDefinition);
                }
                return;
            }
        }
    }
    // Precedence records resolution order; used later to sort definitions dependency-first.
    typeDefinition.precedence = this.typePrecedence++;
    BTypeSymbol typeDefSymbol;
    if (definedType.tsymbol.name != Names.EMPTY) {
        // The resolved type already has a named symbol: create a label symbol so this
        // definition gets its own identity without clobbering the original.
        typeDefSymbol = definedType.tsymbol.createLabelSymbol();
    } else {
        typeDefSymbol = definedType.tsymbol;
    }
    typeDefSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);
    typeDefSymbol.name = names.fromIdNode(typeDefinition.getName());
    typeDefSymbol.pkgID = env.enclPkg.packageID;
    typeDefSymbol.flags |= Flags.asMask(typeDefinition.flagSet);
    typeDefinition.symbol = typeDefSymbol;
    defineSymbol(typeDefinition.name.pos, typeDefSymbol);
}
/**
 * Creates and defines the invokable symbol backing a worker declaration,
 * attaching any markdown documentation found on the node.
 */
@Override
public void visit(BLangWorker workerNode) {
    Name workerName = names.fromIdNode(workerNode.name);
    long flagMask = Flags.asMask(workerNode.flagSet);
    BInvokableSymbol workerSymbol =
            Symbols.createWorkerSymbol(flagMask, workerName, env.enclPkg.symbol.pkgID, null, env.scope.owner);
    workerSymbol.markdownDocumentation = getMarkdownDocAttachment(workerNode.markdownDocumentationAttachment);
    workerNode.symbol = workerSymbol;
    // Workers are defined against the current environment's owner rather than the scope owner.
    defineSymbolWithCurrentEnvOwner(workerNode.pos, workerSymbol);
}
/**
 * Defines the symbol for a service declaration. The service's type is a
 * {@code BServiceType} wrapping the underlying service-object type, and any
 * resource functions of that object are collected onto the service node.
 */
@Override
public void visit(BLangService serviceNode) {
    BServiceSymbol serviceSymbol = Symbols.createServiceSymbol(Flags.asMask(serviceNode.flagSet),
            names.fromIdNode(serviceNode.name), env.enclPkg.symbol.pkgID, serviceNode.type, env.scope.owner);
    serviceSymbol.markdownDocumentation = getMarkdownDocAttachment(serviceNode.markdownDocumentationAttachment);
    BType serviceObjectType = serviceNode.serviceTypeDefinition.symbol.type;
    serviceNode.symbol = serviceSymbol;
    serviceNode.symbol.type = new BServiceType(serviceObjectType.tsymbol);
    defineSymbol(serviceNode.name.pos, serviceSymbol);
    if (serviceNode.serviceTypeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {
        BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) serviceNode.serviceTypeDefinition.typeNode;
        // Gather the object's resource-flagged functions as the service's resources.
        for (BLangFunction memberFunc : objectTypeNode.functions) {
            if (memberFunc.flagSet.contains(Flag.RESOURCE)) {
                serviceNode.resourceFunctions.add(memberFunc);
            }
        }
    }
}
@Override
public void visit(BLangFunction funcNode) {
    boolean validAttachedFunc = validateFuncReceiver(funcNode);
    boolean remoteFlagSetOnNode = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE);
    // Case 1: outer function attached to an object defined elsewhere (obj-name.func-name syntax).
    if (funcNode.attachedOuterFunction) {
        if (Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.PUBLIC)) {
            dlog.error(funcNode.pos, DiagnosticCode.ATTACHED_FUNC_CANT_HAVE_VISIBILITY_MODIFIERS, funcNode.name);
        }
        // Records cannot carry attached functions; create a dummy symbol so later
        // phases still have something to work with.
        if (funcNode.receiver.type.tsymbol.kind == SymbolKind.RECORD) {
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_ATTACH_FUNCTIONS_TO_RECORDS, funcNode.name,
                    funcNode.receiver.type.tsymbol.name);
            createDummyFunctionSymbol(funcNode);
            visitObjectAttachedFunction(funcNode);
            return;
        }
        // Look up the declared (interface) function symbol on the receiver object.
        BSymbol funcSymbol = symTable.notFoundSymbol;
        if (funcNode.receiver.type.tag == TypeTags.OBJECT) {
            SymbolEnv objectEnv = SymbolEnv.createObjectMethodsEnv(null, (BObjectTypeSymbol) funcNode.receiver.type.
                    tsymbol, env);
            funcSymbol = symResolver.lookupSymbol(objectEnv, getFuncSymbolName(funcNode), SymTag.FUNCTION);
        }
        if (funcSymbol == symTable.notFoundSymbol) {
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_FUNCTION, funcNode.name,
                    funcNode.receiver.type.tsymbol.name);
            createDummyFunctionSymbol(funcNode);
            visitObjectAttachedFunction(funcNode);
            return;
        }
        funcNode.symbol = (BInvokableSymbol) funcSymbol;
        // Only one implementation is allowed per declared function.
        if (funcNode.symbol.bodyExist) {
            dlog.error(funcNode.pos, DiagnosticCode.IMPLEMENTATION_ALREADY_EXIST, funcNode.name);
        }
        // The remote modifier must match between declaration and implementation.
        if (remoteFlagSetOnNode && !Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(funcNode.pos, DiagnosticCode.REMOTE_ON_NON_REMOTE_FUNCTION, funcNode.name.value);
        }
        if (!remoteFlagSetOnNode && Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(funcNode.pos, DiagnosticCode.REMOTE_REQUIRED_ON_REMOTE_FUNCTION);
        }
        validateAttachedFunction(funcNode, funcNode.receiver.type.tsymbol.name);
        visitObjectAttachedFunction(funcNode);
        return;
    }
    // Case 2: free-standing function — the remote modifier is only valid on object functions.
    if (funcNode.receiver == null && !funcNode.attachedFunction && remoteFlagSetOnNode) {
        dlog.error(funcNode.pos, DiagnosticCode.REMOTE_IN_NON_OBJECT_FUNCTION, funcNode.name.value);
    }
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            getFuncSymbolName(funcNode), env.enclPkg.symbol.pkgID, null, env.scope.owner, funcNode.body != null);
    funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    if (funcNode.receiver != null) {
        defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);
    }
}
/**
 * Creates a placeholder function symbol (with its own empty scope) for a function
 * node whose real symbol could not be resolved, so later phases can proceed.
 */
private void createDummyFunctionSymbol(BLangFunction funcNode) {
    long flagMask = Flags.asMask(funcNode.flagSet);
    BInvokableSymbol dummy = Symbols.createFunctionSymbol(flagMask, getFuncSymbolName(funcNode),
            env.enclPkg.symbol.pkgID, null, env.scope.owner, true);
    dummy.scope = new Scope(dummy);
    funcNode.symbol = dummy;
}
/**
 * Completes the definition of an object-attached function: defines its parameter
 * symbols, registers it in the package's attached-function list (rejecting duplicate
 * implementations), and wires up the receiver symbol.
 */
private void visitObjectAttachedFunction(BLangFunction funcNode) {
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    invokableEnv.scope = funcNode.symbol.scope;
    defineObjectAttachedInvokableSymbolParams(funcNode, invokableEnv);
    // A symbol already in the package's attached-function list means a second implementation.
    if (env.enclPkg.objAttachedFunctions.contains(funcNode.symbol)) {
        dlog.error(funcNode.pos, DiagnosticCode.IMPLEMENTATION_ALREADY_EXIST, funcNode.name);
        return;
    }
    // Object init functions are tracked separately and are not added here.
    if (!funcNode.objInitFunction) {
        env.enclPkg.objAttachedFunctions.add(funcNode.symbol);
    }
    funcNode.receiver.symbol = funcNode.symbol.receiverSymbol;
}
/**
 * Validates that an outer attached-function implementation matches the declared
 * interface function on the object: parameter types and names (required, defaultable
 * and rest) and return type must agree. On success the INTERFACE flag is cleared
 * from the symbol; on any mismatch a CANNOT_FIND_MATCHING_INTERFACE error is logged.
 */
private void validateAttachedFunction(BLangFunction funcNode, Name objName) {
    SymbolEnv invokableEnv = SymbolEnv.createDummyEnv(funcNode, funcNode.symbol.scope, env);
    // peek() resolves and caches each parameter's type as a side effect of building the list.
    List<BType> paramTypes = funcNode.requiredParams.stream()
            .peek(varNode -> varNode.type = symResolver.resolveTypeNode(varNode.typeNode, invokableEnv))
            .map(varNode -> varNode.type)
            .collect(Collectors.toList());
    funcNode.defaultableParams.forEach(p -> paramTypes.add(symResolver
            .resolveTypeNode(p.var.typeNode, invokableEnv)));
    if (!funcNode.desugaredReturnType) {
        symResolver.resolveTypeNode(funcNode.returnTypeNode, invokableEnv);
    }
    if (funcNode.restParam != null) {
        // The rest parameter's name must match the declared one exactly.
        if (!funcNode.symbol.restParam.name.equals(names.fromIdNode(funcNode.restParam.name))) {
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
            return;
        }
        BType restParamType = symResolver.resolveTypeNode(funcNode.restParam.typeNode, invokableEnv);
        paramTypes.add(restParamType);
    }
    BInvokableType sourceType = (BInvokableType) funcNode.symbol.type;
    if (typesMissMatch(paramTypes, sourceType.paramTypes)
            || namesMissMatch(funcNode.requiredParams, funcNode.symbol.params)
            || namesMissMatchDef(funcNode.defaultableParams, funcNode.symbol.defaultableParams)) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    // Return types: both absent is a match; exactly one absent is a mismatch.
    if (funcNode.returnTypeNode.type == null && sourceType.retType == null) {
        return;
    } else if (funcNode.returnTypeNode.type == null || sourceType.retType == null) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    if (funcNode.returnTypeNode.type.tag != sourceType.retType.tag) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    // All checks passed: flip off the INTERFACE bit — the declaration now has a body.
    // NOTE(review): XOR assumes the flag is currently set; confirm callers guarantee that.
    funcNode.symbol.flags = funcNode.symbol.flags ^ Flags.INTERFACE;
}
/**
 * Returns true when the two type lists differ in length or any positional
 * pair fails the same-type check.
 */
private boolean typesMissMatch(List<BType> lhs, List<BType> rhs) {
    final int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    int idx = 0;
    while (idx < count) {
        if (!types.isSameType(lhs.get(idx), rhs.get(idx))) {
            return true;
        }
        idx++;
    }
    return false;
}
/**
 * Returns true when the variable list and symbol list differ in length or
 * any positional pair has differing names.
 */
private boolean namesMissMatch(List<BLangSimpleVariable> lhs, List<BVarSymbol> rhs) {
    final int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    int idx = 0;
    while (idx < count) {
        Name expected = names.fromIdNode(lhs.get(idx).name);
        if (!rhs.get(idx).name.equals(expected)) {
            return true;
        }
        idx++;
    }
    return false;
}
/**
 * Returns true when the defaultable-parameter definitions and symbols differ in
 * length or any positional pair has differing names.
 */
private boolean namesMissMatchDef(List<BLangSimpleVariableDef> lhs, List<BVarSymbol> rhs) {
    final int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    int idx = 0;
    while (idx < count) {
        Name expected = names.fromIdNode(lhs.get(idx).var.name);
        if (!rhs.get(idx).name.equals(expected)) {
            return true;
        }
        idx++;
    }
    return false;
}
/**
 * Defines the parameter symbols (required, defaultable and rest) of an
 * object-attached invokable and resolves its return type node if present.
 */
private void defineObjectAttachedInvokableSymbolParams(BLangInvokableNode invokableNode, SymbolEnv invokableEnv) {
    for (BLangSimpleVariable param : invokableNode.requiredParams) {
        visitObjectAttachedFunctionParam(param, invokableEnv);
    }
    for (BLangSimpleVariableDef defaultableParam : invokableNode.defaultableParams) {
        visitObjectAttachedFunctionParam(defaultableParam.var, invokableEnv);
    }
    if (invokableNode.returnTypeNode != null) {
        // The return type is resolved against the outer env, not the invokable env.
        invokableNode.returnTypeNode.type = symResolver.resolveTypeNode(invokableNode.returnTypeNode, env);
    }
    if (invokableNode.restParam != null) {
        visitObjectAttachedFunctionParam(invokableNode.restParam, invokableEnv);
    }
}
/**
 * Ensures the parameter's type is resolved (types already set are reused as-is),
 * then defines or reuses its symbol in the invokable's environment.
 */
void visitObjectAttachedFunctionParam(BLangSimpleVariable variable, SymbolEnv invokableEnv) {
    if (null == variable.type) {
        // Resolved against the outer env, matching how attached-function params are typed.
        variable.type = symResolver.resolveTypeNode(variable.typeNode, env);
    }
    visitObjectAttachedFunctionParamSymbol(variable, invokableEnv);
}
/**
 * Defines (or reuses an already-declared) symbol for an attached-function parameter
 * and records its default value, which must be a literal expression.
 */
void visitObjectAttachedFunctionParamSymbol(BLangSimpleVariable variable, SymbolEnv invokableEnv) {
    BSymbol varSymbol = symResolver.lookupSymbol(invokableEnv, names.fromIdNode(variable.name),
            SymTag.VARIABLE);
    if (varSymbol == symTable.notFoundSymbol) {
        defineNode(variable, invokableEnv);
    } else {
        // The declaration already defined this parameter; reuse its symbol.
        variable.symbol = (BVarSymbol) varSymbol;
    }
    if (variable.expr == null) {
        return;
    }
    // Defaultable parameter values are restricted to literals.
    if (variable.expr.getKind() != NodeKind.LITERAL) {
        this.dlog.error(variable.expr.pos, DiagnosticCode.INVALID_DEFAULT_PARAM_VALUE, variable.name);
        return;
    }
    BLangLiteral literal = (BLangLiteral) variable.expr;
    variable.symbol.defaultValue = new DefaultValueLiteral(literal.value, literal.typeTag);
}
@Override
public void visit(BLangResource resourceNode) {
    // Intentionally a no-op: no symbols are entered for resources at this phase.
}
@Override
public void visit(BLangConstant constant) {
    Name name = names.fromIdNode(constant.name);
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    // Types start as semanticError and are filled in later (resolveConstantTypeNode).
    BConstantSymbol constantSymbol = new BConstantSymbol(Flags.asMask(constant.flagSet), name, pkgID,
            symTable.semanticError, symTable.semanticError, env.scope.owner);
    constant.symbol = constantSymbol;
    // Non-literal constant values: define the (uniquely named) symbol and stop —
    // no literal value or associated type definition to record.
    if (((BLangExpression) constant.value).getKind() != NodeKind.LITERAL) {
        if (symResolver.checkForUniqueSymbol(constant.pos, env, constantSymbol, SymTag.VARIABLE_NAME)) {
            env.scope.define(constantSymbol.name, constantSymbol);
        }
        return;
    }
    // Literal constants: define the associated (finite) type and capture the literal value.
    defineNode(constant.associatedTypeDefinition, env);
    constantSymbol.type = constant.associatedTypeDefinition.symbol.type;
    constantSymbol.literalValue = ((BLangLiteral) constant.value).value;
    constantSymbol.literalValueTypeTag = ((BLangLiteral) constant.value).typeTag;
    constantSymbol.markdownDocumentation = getMarkdownDocAttachment(constant.markdownDocumentationAttachment);
    if (!symResolver.checkForUniqueSymbol(constant.pos, env, constantSymbol, SymTag.VARIABLE_NAME)) {
        return;
    }
    env.scope.define(constantSymbol.name, constantSymbol);
}
@Override
public void visit(BLangSimpleVariable varNode) {
    // Resolve the declared type lazily; without a type node the type stays open (noType).
    if (varNode.type == null) {
        if (varNode.typeNode != null) {
            varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
        } else {
            varNode.type = symTable.noType;
        }
    }
    Name varName = names.fromIdNode(varNode.name);
    // Anonymous and `_` (ignore) variables get no symbol.
    if (varName == Names.EMPTY || varName == Names.IGNORE) {
        return;
    }
    BVarSymbol varSymbol = defineVarSymbol(varNode.pos, varNode.flagSet, varNode.type, varName, env);
    varSymbol.markdownDocumentation = getMarkdownDocAttachment(varNode.markdownDocumentationAttachment);
    varNode.symbol = varSymbol;
    // Variables of client-flagged types are re-tagged as endpoints.
    if (varNode.symbol.type.tsymbol != null && Symbols.isFlagOn(varNode.symbol.type.tsymbol.flags, Flags.CLIENT)) {
        varSymbol.tag = SymTag.ENDPOINT;
    }
    // Worker-derived futures must also be visible inside later lambda (worker) bodies
    // owned by the same enclosing invokable, so inject the symbol into their cached scopes.
    if (varSymbol.type.tag == TypeTags.FUTURE && ((BFutureType) varSymbol.type).workerDerivative) {
        Iterator<BLangLambdaFunction> lambdaFunctions = env.enclPkg.lambdaFunctions.iterator();
        while (lambdaFunctions.hasNext()) {
            BLangLambdaFunction lambdaFunction = lambdaFunctions.next();
            // hasNext() here skips the last lambda in the list — presumably the current
            // worker itself; NOTE(review): confirm that is the intent.
            if (lambdaFunctions.hasNext() &&
                    varSymbol.owner == lambdaFunction.cachedEnv.enclInvokable.symbol) {
                lambdaFunction.cachedEnv.scope.define(varSymbol.name, varSymbol);
            }
        }
    }
}
/**
 * Resolves the declared type of a tuple variable when it has not been resolved yet.
 */
@Override
public void visit(BLangTupleVariable varNode) {
    // An already-set type means resolution happened earlier; nothing to do.
    if (varNode.type != null) {
        return;
    }
    varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
/**
 * Resolves the declared type of a record variable when it has not been resolved yet.
 */
@Override
public void visit(BLangRecordVariable varNode) {
    // An already-set type means resolution happened earlier; nothing to do.
    if (varNode.type != null) {
        return;
    }
    varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
@Override
public void visit(BLangEndpoint endpoint) {
    // Intentionally a no-op: no symbols are entered for endpoints at this phase.
}
/**
 * Defines the symbol for an XML attribute. Regular attributes become
 * {@code BXMLAttributeSymbol}s; inline namespace declarations (xmlns attributes)
 * become {@code BXMLNSSymbol}s in the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    // Only qualified-name attributes get symbols.
    if (!(bLangXMLAttribute.name.getKind() == NodeKind.XML_QNAME)) {
        return;
    }
    BLangXMLQName qname = (BLangXMLQName) bLangXMLAttribute.name;
    if (!bLangXMLAttribute.isNamespaceDeclr) {
        BXMLAttributeSymbol attrSymbol = new BXMLAttributeSymbol(qname.localname.value, qname.namespaceURI,
                env.enclPkg.symbol.pkgID, env.scope.owner);
        if (symResolver.checkForUniqueMemberSymbol(bLangXMLAttribute.pos, env, attrSymbol)) {
            env.scope.define(attrSymbol.name, attrSymbol);
            bLangXMLAttribute.symbol = attrSymbol;
        }
        return;
    }
    // Namespace declaration: the URI is only known statically when the value is a
    // single literal fragment; otherwise it stays null.
    List<BLangExpression> exprs = bLangXMLAttribute.value.textFragments;
    String nsURI = null;
    if (exprs.size() == 1 && exprs.get(0).getKind() == NodeKind.LITERAL) {
        nsURI = (String) ((BLangLiteral) exprs.get(0)).value;
    }
    String symbolName = qname.localname.value;
    // A bare `xmlns` declares the default namespace.
    if (symbolName.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        symbolName = XMLConstants.DEFAULT_NS_PREFIX;
    }
    BXMLNSSymbol xmlnsSymbol =
            new BXMLNSSymbol(names.fromString(symbolName), nsURI, env.enclPkg.symbol.pkgID, env.scope.owner);
    if (symResolver.checkForUniqueMemberSymbol(bLangXMLAttribute.pos, env, xmlnsSymbol)) {
        env.scope.define(xmlnsSymbol.name, xmlnsSymbol);
        bLangXMLAttribute.symbol = xmlnsSymbol;
    }
}
/**
 * Resolves the literal-value type of each constant, either from its declared type
 * node or from the literal's type tag, and rejects types constants cannot have.
 * Run after all type definitions are entered so declared types can resolve.
 */
private void resolveConstantTypeNode(List<BLangConstant> constants, SymbolEnv env) {
    for (BLangConstant constant : constants) {
        // Constants whose definition already failed are skipped.
        if (constant.symbol.type == symTable.semanticError) {
            continue;
        }
        if (constant.typeNode != null) {
            constant.symbol.literalValueType = symResolver.resolveTypeNode(constant.typeNode, env);
        } else {
            // No declared type: infer from the literal's type tag.
            constant.symbol.literalValueType = symTable.getTypeFromTag(constant.symbol.literalValueTypeTag);
        }
        if (!isAllowedConstantType(constant.symbol)) {
            dlog.error(constant.typeNode.pos, DiagnosticCode.CANNOT_DEFINE_CONSTANT_WITH_TYPE, constant.typeNode);
        }
    }
}
/**
 * Returns true when the constant's literal-value type is one of the simple basic
 * types permitted for constants: boolean, int, byte, float, decimal or string.
 */
private boolean isAllowedConstantType(BConstantSymbol symbol) {
    int tag = symbol.literalValueType.tag;
    return tag == TypeTags.BOOLEAN
            || tag == TypeTags.INT
            || tag == TypeTags.BYTE
            || tag == TypeTags.FLOAT
            || tag == TypeTags.DECIMAL
            || tag == TypeTags.STRING;
}
/**
 * Checks whether any annotation attachment in the list carries the given annotation name.
 *
 * @param annotationAttachmentList attachments to search
 * @param expectedAnnotation       annotation name to look for
 * @return true if at least one attachment matches
 */
private boolean hasAnnotation(List<BLangAnnotationAttachment> annotationAttachmentList, String expectedAnnotation) {
    // anyMatch short-circuits on the first hit, unlike filter(...).count() > 0
    // which always traverses the entire list.
    return annotationAttachmentList.stream()
            .anyMatch(annotation -> annotation.annotationName.value.equals(expectedAnnotation));
}
/**
 * Adds every top-level node from each compilation unit (.bal file) of the package
 * to the appropriate top-level list on the package node.
 *
 * @param pkgNode current package node
 */
private void populatePackageNode(BLangPackage pkgNode) {
    for (BLangCompilationUnit compUnit : pkgNode.getCompilationUnits()) {
        populateCompilationUnit(pkgNode, compUnit);
    }
}
/**
 * Populates a testable package node from its compilation units, then removes any
 * imports that are already declared by the enclosing package so they are not
 * processed twice.
 *
 * @param pkgNode        current testable package node
 * @param enclPkgImports imports of the enclosing package
 */
private void populatePackageNode(BLangTestablePackage pkgNode, List<BLangImportPackage> enclPkgImports) {
    populatePackageNode(pkgNode);
    // Drop imports duplicated from the enclosing package.
    pkgNode.getImports().removeIf(enclPkgImports::contains);
}
/**
 * Adds each top-level node of one compilation unit to the package node.
 *
 * @param pkgNode  current package node
 * @param compUnit current compilation unit
 */
private void populateCompilationUnit(BLangPackage pkgNode, BLangCompilationUnit compUnit) {
    for (TopLevelNode topLevelNode : compUnit.getTopLevelNodes()) {
        addTopLevelNode(pkgNode, topLevelNode);
    }
}
/**
 * Routes one top-level node into the package node's kind-specific list, and also
 * into the generic topLevelNodes list (except package declarations and imports).
 */
private void addTopLevelNode(BLangPackage pkgNode, TopLevelNode node) {
    NodeKind kind = node.getKind();
    // Package declarations and imports are metadata, not regular top-level constructs.
    if (kind != NodeKind.PACKAGE_DECLARATION && kind != IMPORT) {
        pkgNode.topLevelNodes.add(node);
    }
    switch (kind) {
        case IMPORT:
            // The same import may appear in multiple compilation units; keep one copy.
            if (!pkgNode.imports.contains(node)) {
                pkgNode.imports.add((BLangImportPackage) node);
            }
            break;
        case FUNCTION:
            pkgNode.functions.add((BLangFunction) node);
            break;
        case TYPE_DEFINITION:
            pkgNode.typeDefinitions.add((BLangTypeDefinition) node);
            break;
        case SERVICE:
            pkgNode.services.add((BLangService) node);
            break;
        case VARIABLE:
            pkgNode.globalVars.add((BLangSimpleVariable) node);
            break;
        case ANNOTATION:
            pkgNode.annotations.add((BLangAnnotation) node);
            break;
        case XMLNS:
            pkgNode.xmlnsList.add((BLangXMLNS) node);
            break;
        case CONSTANT:
            pkgNode.constants.add((BLangConstant) node);
            break;
    }
}
/**
 * Resolves reason and detail types for every error type definition. Omitted parts
 * default to string (reason) and map (detail); a fully-default error definition is
 * collapsed onto the built-in error type instead of keeping its own BErrorType.
 */
private void defineErrorDetails(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        if (typeDef.typeNode.getKind() != NodeKind.ERROR_TYPE) {
            continue;
        }
        BLangErrorType errorTypeNode = (BLangErrorType) typeDef.typeNode;
        SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(errorTypeNode, typeDef.symbol.scope, pkgEnv);
        BType reasonType = Optional.ofNullable(errorTypeNode.reasonType)
                .map(bLangType -> symResolver.resolveTypeNode(bLangType, typeDefEnv))
                .orElse(symTable.stringType);
        BType detailType = Optional.ofNullable(errorTypeNode.detailType)
                .map(bLangType -> symResolver.resolveTypeNode(bLangType, typeDefEnv))
                .orElse(symTable.mapType);
        // Both parts defaulted: reuse the canonical built-in error type.
        if (reasonType == symTable.stringType && detailType == symTable.mapType) {
            typeDef.symbol.type = symTable.errorType;
            continue;
        }
        BErrorType errorType = (BErrorType) typeDef.symbol.type;
        errorType.reasonType = reasonType;
        errorType.detailType = detailType;
    }
}
/**
 * Defines the fields of every record/object type definition (own plus referenced
 * fields brought in via type inclusion) and, for records, resolves the rest-field
 * type and sealedness.
 */
private void defineFields(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        // Only direct record/object definitions carry fields; aliases and other kinds don't.
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE ||
                (typeDef.symbol.type.tag != TypeTags.OBJECT && typeDef.symbol.type.tag != TypeTags.RECORD)) {
            continue;
        }
        BStructureType structureType = (BStructureType) typeDef.symbol.type;
        BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode;
        SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, typeDef.symbol.scope, pkgEnv);
        // Pull in fields from referenced (included) types before defining everything.
        resolveReferencedFields(structureTypeNode, typeDefEnv);
        // peek() defines each field's symbol as a side effect; fields whose type failed
        // to resolve are dropped from the structure's field list.
        structureType.fields =
                Stream.concat(structureTypeNode.fields.stream(), structureTypeNode.referencedFields.stream())
                        .peek(field -> defineNode(field, typeDefEnv))
                        .filter(field -> field.symbol.type != symTable.semanticError)
                        .map(field -> new BField(names.fromIdNode(field.name), field.symbol))
                        .collect(Collectors.toList());
        // The remainder applies to records only.
        if (typeDef.symbol.kind != SymbolKind.RECORD) {
            continue;
        }
        BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;
        BRecordType recordType = (BRecordType) structureType;
        recordType.sealed = recordTypeNode.sealed;
        // A sealed record cannot also declare an explicit rest-field type.
        if (recordTypeNode.sealed && recordTypeNode.restFieldType != null) {
            dlog.error(recordTypeNode.restFieldType.pos, DiagnosticCode.REST_FIELD_NOT_ALLOWED_IN_SEALED_RECORDS);
            continue;
        }
        if (recordTypeNode.restFieldType == null) {
            // Sealed: no rest field at all; open: rest fields default to anydata.
            if (recordTypeNode.sealed) {
                recordType.restFieldType = symTable.noType;
                continue;
            }
            recordType.restFieldType = symTable.anydataType;
            continue;
        }
        recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv);
    }
}
/**
 * Defines the member functions of object type definitions (including methods pulled
 * in through type references) and the generated init function of record type
 * definitions. User-defined type aliases are skipped.
 */
private void defineMembers(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.kind == SymbolKind.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            SymbolEnv methodsEnv =
                    SymbolEnv.createObjectMethodsEnv(objectTypeNode, (BObjectTypeSymbol) objectTypeNode.symbol,
                            pkgEnv);
            defineObjectInitFunction(objectTypeNode, methodsEnv);
            for (var func : objectTypeNode.functions) {
                func.setReceiver(ASTBuilderUtil.createReceiver(typeDef.pos, typeDef.symbol.type));
                defineNode(func, methodsEnv);
            }
            // Pull in the methods of every referenced object type as well.
            for (BLangType typeRef : objectTypeNode.typeRefs) {
                if (typeRef.type.tsymbol.kind != SymbolKind.OBJECT) {
                    continue;
                }
                for (BAttachedFunction referencedFunc : ((BObjectTypeSymbol) typeRef.type.tsymbol).attachedFuncs) {
                    defineReferencedFunction(typeDef, methodsEnv, typeRef, referencedFunc);
                }
            }
        } else if (typeDef.symbol.kind == SymbolKind.RECORD) {
            BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
            SymbolEnv recordEnv = SymbolEnv.createPkgLevelSymbolEnv(recordTypeNode, typeDef.symbol.scope, pkgEnv);
            defineRecordInitFunction(typeDef, recordEnv);
        }
    }
}
/**
 * Binds the given symbol to the invokable node, defines it in the current scope,
 * and then defines its parameters inside the invokable's own scope.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    defineSymbol(invokableNode.name.pos, funcSymbol);
    // Parameters must be defined in the function's own scope, not the enclosing one.
    invokableEnv.scope = funcSymbol.scope;
    defineInvokableSymbolParams(invokableNode, funcSymbol, invokableEnv);
}
/**
 * Defines the required, defaultable and rest parameters of an invokable inside its
 * scope, resolves the return type (unless already desugared), and builds the
 * function's BInvokableType from the collected parameter types.
 * Only literal default values are accepted for defaultable parameters here.
 */
private void defineInvokableSymbolParams(BLangInvokableNode invokableNode, BInvokableSymbol invokableSymbol,
                                         SymbolEnv invokableEnv) {
    List<BVarSymbol> requiredParamSymbols = new ArrayList<>();
    for (var param : invokableNode.requiredParams) {
        defineNode(param, invokableEnv);
        requiredParamSymbols.add(param.symbol);
    }
    List<BVarSymbol> defaultableParamSymbols = new ArrayList<>();
    for (var paramDef : invokableNode.defaultableParams) {
        defineNode(paramDef.var, invokableEnv);
        var paramSymbol = paramDef.var.symbol;
        if (paramDef.var.expr.getKind() != NodeKind.LITERAL) {
            // Non-literal defaults are not supported at this stage.
            this.dlog.error(paramDef.var.expr.pos, DiagnosticCode.INVALID_DEFAULT_PARAM_VALUE,
                    paramDef.var.name);
        } else {
            BLangLiteral literal = (BLangLiteral) paramDef.var.expr;
            paramSymbol.defaultValue = new DefaultValueLiteral(literal.value, literal.typeTag);
        }
        defaultableParamSymbols.add(paramSymbol);
    }
    if (!invokableNode.desugaredReturnType) {
        symResolver.resolveTypeNode(invokableNode.returnTypeNode, invokableEnv);
    }
    invokableSymbol.params = requiredParamSymbols;
    invokableSymbol.retType = invokableNode.returnTypeNode.type;
    invokableSymbol.defaultableParams = defaultableParamSymbols;
    // Overall type: required params, then defaultable params, then the rest param.
    List<BType> paramTypes = new ArrayList<>();
    for (BVarSymbol paramSymbol : requiredParamSymbols) {
        paramTypes.add(paramSymbol.type);
    }
    for (BVarSymbol paramSymbol : defaultableParamSymbols) {
        paramTypes.add(paramSymbol.type);
    }
    if (invokableNode.restParam != null) {
        defineNode(invokableNode.restParam, invokableEnv);
        invokableSymbol.restParam = invokableNode.restParam.symbol;
        paramTypes.add(invokableSymbol.restParam.type);
    }
    invokableSymbol.type = new BInvokableType(paramTypes, invokableNode.returnTypeNode.type, null);
}
/**
 * Defines the given symbol in the current environment's scope.
 * The symbol gets a fresh scope of its own; it is added to the environment
 * only when its name is unique there.
 *
 * @param pos    position used for duplicate-symbol diagnostics
 * @param symbol symbol to define
 */
private void defineSymbol(DiagnosticPos pos, BSymbol symbol) {
    // Delegate to the env-taking overload so the define logic lives in one place.
    defineSymbol(pos, symbol, env);
}
/**
 * Defines the given symbol in the provided environment's scope.
 * The symbol gets a fresh scope of its own; it is added to the environment
 * only when its name is unique there.
 *
 * @param pos    position used for duplicate-symbol diagnostics
 * @param symbol symbol to define
 * @param env    environment whose scope receives the symbol
 */
public void defineSymbol(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) {
    symbol.scope = new Scope(symbol);
    if (symResolver.checkForUniqueSymbol(pos, env, symbol, symbol.tag)) {
        env.scope.define(symbol.name, symbol);
    }
}
/**
 * Define a symbol that is unique only for the current scope; it may shadow
 * a symbol of the same name in an enclosing scope.
 *
 * @param pos Line number information of the source file
 * @param symbol Symbol to be defined
 * @param env Environment to define the symbol
 */
public void defineShadowedSymbol(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) {
    symbol.scope = new Scope(symbol);
    // Uniqueness is checked in the current scope only, allowing shadowing.
    if (symResolver.checkForUniqueSymbolInCurrentScope(pos, env, symbol, symbol.tag)) {
        env.scope.define(symbol.name, symbol);
    }
}
/**
 * Defines the given symbol in the current environment's scope, but makes the
 * symbol's own scope owned by the current environment's scope owner rather
 * than by the symbol itself (used e.g. for workers).
 */
private void defineSymbolWithCurrentEnvOwner(DiagnosticPos pos, BSymbol symbol) {
    symbol.scope = new Scope(env.scope.owner);
    if (symResolver.checkForUniqueSymbol(pos, env, symbol, symbol.tag)) {
        env.scope.define(symbol.name, symbol);
    }
}
/**
 * Creates a variable symbol and defines it in the given environment.
 * On a name clash the symbol's type is downgraded to a semantic error, but the
 * symbol is still placed in the scope to allow error recovery to continue.
 */
public BVarSymbol defineVarSymbol(DiagnosticPos pos, Set<Flag> flagSet, BType varType, Name varName,
                                  SymbolEnv env) {
    BVarSymbol symbol = createVarSymbol(flagSet, varType, varName, env);
    if (!symResolver.checkForUniqueSymbol(pos, env, symbol, SymTag.VARIABLE_NAME)) {
        symbol.type = symTable.semanticError;
    }
    env.scope.define(symbol.name, symbol);
    return symbol;
}
/**
 * Convenience overload: converts the flag set to its bit mask and delegates
 * to {@link #createVarSymbol(int, BType, Name, SymbolEnv)}.
 */
public BVarSymbol createVarSymbol(Set<Flag> flagSet, BType varType, Name varName, SymbolEnv env) {
    return createVarSymbol(Flags.asMask(flagSet), varType, varName, env);
}
/**
 * Creates the appropriate variable symbol for the given type: an invokable
 * symbol (function kind) when the safe type is invokable, otherwise a plain
 * variable symbol, tagged as an endpoint when the type is a client object.
 */
public BVarSymbol createVarSymbol(int flags, BType varType, Name varName, SymbolEnv env) {
    BType safeType = types.getSafeType(varType, false);
    if (safeType.tag == TypeTags.INVOKABLE) {
        BVarSymbol symbol = new BInvokableSymbol(SymTag.VARIABLE, flags, varName, env.enclPkg.symbol.pkgID,
                varType, env.scope.owner);
        symbol.kind = SymbolKind.FUNCTION;
        return symbol;
    }
    BVarSymbol symbol = new BVarSymbol(flags, varName, env.enclPkg.symbol.pkgID, varType, env.scope.owner);
    if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) {
        // Client-typed variables act as endpoints.
        symbol.tag = SymTag.ENDPOINT;
    }
    return symbol;
}
/**
 * Defines the object's init function, if one was declared, after attaching the
 * receiver and marking it as an attached function.
 */
private void defineObjectInitFunction(BLangObjectTypeNode object, SymbolEnv conEnv) {
    BLangFunction initFunc = object.initFunction;
    if (initFunc != null) {
        initFunc.receiver = ASTBuilderUtil.createReceiver(object.pos, object.type);
        initFunc.attachedFunction = true;
        initFunc.flagSet.add(Flag.ATTACHED);
        defineNode(initFunc, conEnv);
    }
}
/**
 * Creates and defines the compiler-generated init function for a record type
 * definition, with a self receiver and the ATTACHED flag set.
 */
private void defineRecordInitFunction(BLangTypeDefinition typeDef, SymbolEnv conEnv) {
    BLangRecordTypeNode recordNode = (BLangRecordTypeNode) typeDef.typeNode;
    var initFunc = ASTBuilderUtil.createInitFunction(typeDef.pos, "", Names.INIT_FUNCTION_SUFFIX);
    initFunc.receiver = createReceiver(typeDef.pos, typeDef.name);
    initFunc.attachedFunction = true;
    initFunc.flagSet.add(Flag.ATTACHED);
    recordNode.initFunction = initFunc;
    defineNode(recordNode.initFunction, conEnv);
}
/**
 * Validates an attached function against its receiver's type (object or record)
 * when the receiver was valid, then defines the receiver variable and records
 * its symbol on the function symbol.
 */
private void defineAttachedFunctions(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                     SymbolEnv invokableEnv, boolean isValidAttachedFunc) {
    BTypeSymbol receiverTypeSymbol = funcNode.receiver.type.tsymbol;
    if (isValidAttachedFunc && receiverTypeSymbol.tag == SymTag.OBJECT) {
        validateFunctionsAttachedToObject(funcNode, funcSymbol, invokableEnv);
    } else if (isValidAttachedFunc && receiverTypeSymbol.tag == SymTag.RECORD) {
        validateFunctionsAttachedToRecords(funcNode, funcSymbol, invokableEnv);
    }
    defineNode(funcNode.receiver, invokableEnv);
    funcSymbol.receiverSymbol = funcNode.receiver.symbol;
}
/**
 * Registers the given function as the initializer of the record it is attached to.
 * NOTE(review): invokableEnv is currently unused; kept for signature symmetry with
 * the object counterpart.
 */
private void validateFunctionsAttachedToRecords(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                                SymbolEnv invokableEnv) {
    BInvokableType funcType = (BInvokableType) funcSymbol.type;
    BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) funcNode.receiver.type.tsymbol;
    recordSymbol.initializerFunc = new BAttachedFunction(
            names.fromIdNode(funcNode.name), funcSymbol, funcType);
}
/**
 * Validates a function attached to an object: rejects name clashes with object
 * fields, checks remote/resource modifiers, and registers the function either as
 * a regular attached function or as the object's initializer.
 */
private void validateFunctionsAttachedToObject(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                               SymbolEnv invokableEnv) {
    BInvokableType funcType = (BInvokableType) funcSymbol.type;
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) funcNode.receiver.type.tsymbol;
    // A method may not share its name with a field of the same object.
    BSymbol symbol = symResolver.lookupMemberSymbol(funcNode.receiver.pos, objectSymbol.scope, invokableEnv,
            names.fromIdNode(funcNode.name), SymTag.VARIABLE);
    if (symbol != symTable.notFoundSymbol) {
        dlog.error(funcNode.pos, DiagnosticCode.OBJECT_FIELD_AND_FUNC_WITH_SAME_NAME,
                funcNode.name.value, funcNode.receiver.type.toString());
        return;
    }
    BAttachedFunction attachedFunc = new BAttachedFunction(
            names.fromIdNode(funcNode.name), funcSymbol, funcType);
    validateRemoteFunctionAttachedToObject(funcNode, objectSymbol);
    validateResourceFunctionAttachedToObject(funcNode, objectSymbol);
    if (!funcNode.objInitFunction) {
        objectSymbol.attachedFuncs.add(attachedFunc);
        return;
    }
    // The init function may not declare a return value other than nil.
    if (funcNode.returnTypeNode.type != symTable.nilType) {
        dlog.error(funcNode.pos, DiagnosticCode.INVALID_OBJECT_CONSTRUCTOR,
                funcNode.name.value, funcNode.receiver.type.toString());
    }
    objectSymbol.initializerFunc = attachedFunc;
}
/**
 * Propagates the REMOTE flag from the function node to its symbol and reports
 * an error when a remote function is attached to a non-client object.
 */
private void validateRemoteFunctionAttachedToObject(BLangFunction funcNode, BObjectTypeSymbol objectSymbol) {
    if (Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE)) {
        funcNode.symbol.flags |= Flags.REMOTE;
        // Remote functions are only valid on client objects.
        if (!Symbols.isFlagOn(objectSymbol.flags, Flags.CLIENT)) {
            this.dlog.error(funcNode.pos, DiagnosticCode.REMOTE_FUNCTION_IN_NON_CLIENT_OBJECT);
        }
    }
}
/**
 * Propagates the RESOURCE flag from the function node to its symbol and reports
 * an error when a resource function is attached to a non-service object.
 */
private void validateResourceFunctionAttachedToObject(BLangFunction funcNode, BObjectTypeSymbol objectSymbol) {
    if (Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.RESOURCE)) {
        funcNode.symbol.flags |= Flags.RESOURCE;
        // Resource functions are only valid on service objects.
        if (!Symbols.isFlagOn(objectSymbol.flags, Flags.SERVICE)) {
            this.dlog.error(funcNode.pos, DiagnosticCode.RESOURCE_FUNCTION_IN_NON_SERVICE_OBJECT);
        }
    }
}
/**
 * Builds an assignment statement "field = param" used inside generated init
 * functions: the LHS references the struct field, the RHS references the
 * init-function parameter that carries the value.
 */
private StatementNode createAssignmentStmt(BLangSimpleVariable variable, BVarSymbol varSym, BSymbol fieldVar) {
    // LHS: reference to the field being initialized.
    BLangSimpleVarRef fieldRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    fieldRef.pos = variable.pos;
    fieldRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    fieldRef.variableName = (BLangIdentifier) createIdentifier(fieldVar.name.getValue());
    fieldRef.symbol = fieldVar;
    fieldRef.type = fieldVar.type;
    // RHS: reference to the parameter holding the value.
    BLangSimpleVarRef valueRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    valueRef.pos = variable.pos;
    valueRef.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    valueRef.variableName = (BLangIdentifier) createIdentifier(varSym.name.getValue());
    valueRef.symbol = varSym;
    valueRef.type = varSym.type;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.setVariable(fieldRef);
    assignment.expr = valueRef;
    return assignment;
}
/**
 * Creates a "self" receiver variable whose type is a user-defined type node
 * referring to the given type name.
 */
private BLangSimpleVariable createReceiver(DiagnosticPos pos, BLangIdentifier name) {
    // Type node: an unqualified reference to the enclosing type.
    BLangUserDefinedType receiverTypeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    receiverTypeNode.pkgAlias = new BLangIdentifier();
    receiverTypeNode.typeName = name;
    BLangSimpleVariable receiver = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    receiver.pos = pos;
    receiver.setName(createIdentifier(Names.SELF.getValue()));
    receiver.setTypeNode(receiverTypeNode);
    return receiver;
}
/**
 * Creates an identifier node, setting its value only when a non-null value is given.
 */
private IdentifierNode createIdentifier(String value) {
    IdentifierNode identifier = TreeBuilder.createIdentifierNode();
    if (value == null) {
        return identifier;
    }
    identifier.setValue(value);
    return identifier;
}
/**
 * Validates the receiver of an attached function. Returns true when the function
 * has no receiver, the receiver type is already erroneous (error recovery), or
 * the receiver is a supported locally-defined type; false otherwise (after
 * logging the appropriate diagnostic).
 */
private boolean validateFuncReceiver(BLangFunction funcNode) {
    if (funcNode.receiver == null) {
        return true;
    }
    if (funcNode.receiver.type == null) {
        funcNode.receiver.type = symResolver.resolveTypeNode(funcNode.receiver.typeNode, env);
    }
    int receiverTypeTag = funcNode.receiver.type.tag;
    if (receiverTypeTag == TypeTags.SEMANTIC_ERROR) {
        // Already reported elsewhere; treat as valid to avoid cascading errors.
        return true;
    }
    switch (receiverTypeTag) {
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.INT:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.TABLE:
        case TypeTags.STREAM:
        case TypeTags.FUTURE:
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            break;
        default:
            dlog.error(funcNode.receiver.pos, DiagnosticCode.FUNC_DEFINED_ON_NOT_SUPPORTED_TYPE,
                    funcNode.name.value, funcNode.receiver.type.toString());
            return false;
    }
    // Functions may only be attached to types defined in the same module.
    if (!this.env.enclPkg.symbol.pkgID.equals(funcNode.receiver.type.tsymbol.pkgID)) {
        dlog.error(funcNode.receiver.pos, DiagnosticCode.FUNC_DEFINED_ON_NON_LOCAL_TYPE,
                funcNode.name.value, funcNode.receiver.type.toString());
        return false;
    }
    return true;
}
/**
 * Returns the symbol name of a function: plain name for free functions,
 * the mangled "typeName.funcName" form for attached functions.
 */
private Name getFuncSymbolName(BLangFunction funcNode) {
    if (funcNode.receiver == null) {
        return names.fromIdNode(funcNode.name);
    }
    return names.fromString(Symbols.getAttachedFuncSymbolName(
            funcNode.receiver.type.tsymbol.name.value, funcNode.name.value));
}
/**
 * Returns the mangled "typeName.fieldName" symbol name for a field of the
 * receiver's type.
 */
private Name getFieldSymbolName(BLangSimpleVariable receiver, BLangSimpleVariable variable) {
    return names.fromString(Symbols.getAttachedFuncSymbolName(
            receiver.type.tsymbol.name.value, variable.name.value));
}
/**
 * Converts a markdown documentation node to a MarkdownDocAttachment; returns an
 * empty attachment when the node is absent.
 */
private MarkdownDocAttachment getMarkdownDocAttachment(BLangMarkdownDocumentation docNode) {
    MarkdownDocAttachment attachment = new MarkdownDocAttachment();
    if (docNode == null) {
        return attachment;
    }
    attachment.description = docNode.getDocumentation();
    for (var param : docNode.getParameters()) {
        attachment.parameters.add(new MarkdownDocAttachment.Parameter(
                param.parameterName.value, param.getParameterDocumentation()));
    }
    attachment.returnValueDescription = docNode.getReturnParameterDocumentation();
    return attachment;
}
/**
 * Resolves the type references of a record/object type node and collects the
 * fields contributed by each referenced type into {@code referencedFields}.
 * Duplicate references, non-abstract-object references in objects, and
 * non-record references in records are rejected with diagnostics.
 */
private void resolveReferencedFields(BLangStructureTypeNode structureTypeNode, SymbolEnv typeDefEnv) {
    // Tracks which type symbols were already included, to flag duplicates.
    List<BSymbol> referencedTypes = new ArrayList<>();
    structureTypeNode.referencedFields = structureTypeNode.typeRefs.stream().flatMap(typeRef -> {
        BType referredType = symResolver.resolveTypeNode(typeRef, typeDefEnv);
        if (referredType == symTable.semanticError) {
            return Stream.empty();
        }
        if (referencedTypes.contains(referredType.tsymbol)) {
            dlog.error(typeRef.pos, DiagnosticCode.REDECLARED_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        // Objects may only reference abstract objects.
        if (structureTypeNode.type.tag == TypeTags.OBJECT && (referredType.tag != TypeTags.OBJECT || !Symbols
                .isFlagOn(referredType.tsymbol.flags, Flags.ABSTRACT))) {
            dlog.error(typeRef.pos, DiagnosticCode.INCOMPATIBLE_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        // Records may only reference other records.
        if (structureTypeNode.type.tag == TypeTags.RECORD && referredType.tag != TypeTags.RECORD) {
            dlog.error(typeRef.pos, DiagnosticCode.INCOMPATIBLE_RECORD_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        referencedTypes.add(referredType.tsymbol);
        // Materialize each referenced field as a variable node carrying its flags.
        return ((BStructureType) referredType).fields.stream().map(field -> {
            BLangSimpleVariable var = ASTBuilderUtil.createVariable(typeRef.pos, field.name.value, field.type);
            var.flagSet = field.symbol.getFlags();
            return var;
        });
    }).collect(Collectors.toList());
}
/**
 * Defines a function inherited by an object through a type reference: duplicates
 * the referenced function's symbol under this type's mangled name, defines its
 * parameters and receiver, and registers it as an attached (and referenced)
 * function of the object.
 */
private void defineReferencedFunction(BLangTypeDefinition typeDef, SymbolEnv objEnv, BLangType typeRef,
                                      BAttachedFunction function) {
    Name funcName = names.fromString(
            Symbols.getAttachedFuncSymbolName(typeDef.symbol.name.value, function.funcName.value));
    BSymbol foundSymbol = symResolver.lookupSymbol(objEnv, funcName, SymTag.VARIABLE);
    if (foundSymbol != symTable.notFoundSymbol) {
        // A clash is an error only when both the existing and referenced functions
        // are interface (bodyless) declarations.
        if (Symbols.isFlagOn(foundSymbol.flags, Flags.INTERFACE) &&
                Symbols.isFlagOn(function.symbol.flags, Flags.INTERFACE)) {
            dlog.error(typeRef.pos, DiagnosticCode.REDECLARED_FUNCTION_FROM_TYPE_REFERENCE, function.funcName,
                    typeRef);
        }
        return;
    }
    BInvokableSymbol funcSymbol = ASTBuilderUtil.duplicateInvokableSymbol(function.symbol, typeDef.symbol, funcName,
            typeDef.symbol.pkgID);
    defineSymbol(typeRef.pos, funcSymbol, objEnv);
    // Re-define all parameter symbols (and self) inside the duplicated function's scope.
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(null, funcSymbol.scope, objEnv);
    funcSymbol.params.forEach(param -> defineSymbol(typeRef.pos, param, funcEnv));
    funcSymbol.defaultableParams.forEach(param -> defineSymbol(typeRef.pos, param, funcEnv));
    if (funcSymbol.restParam != null) {
        defineSymbol(typeRef.pos, funcSymbol.restParam, funcEnv);
    }
    funcSymbol.receiverSymbol =
            defineVarSymbol(typeDef.pos, typeDef.flagSet, typeDef.symbol.type, Names.SELF, funcEnv);
    BAttachedFunction attachedFunc =
            new BAttachedFunction(function.funcName, funcSymbol, (BInvokableType) funcSymbol.type);
    ((BObjectTypeSymbol) typeDef.symbol).attachedFuncs.add(attachedFunc);
    ((BObjectTypeSymbol) typeDef.symbol).referencedFunctions.add(attachedFunc);
}
/**
 * Defines a parameter of a generated object init function. The parameter must
 * correspond to a field of the enclosing object; its symbol shadows the field,
 * and an assignment "field = param" is queued on the init function.
 * Fixes: renamed the misspelled local ("objectTypeSumbol") and removed a
 * redundant trailing {@code return;} from this void method.
 */
private void defineInitFunctionParam(BLangSimpleVariable varNode) {
    Name varName = names.fromIdNode(varNode.name);
    BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) env.enclType;
    BTypeSymbol objectTypeSymbol = objectTypeNode.type.tsymbol;
    BSymbol fieldSymbol = symResolver.resolveObjectField(varNode.pos, env, varName, objectTypeSymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        // Report and continue with the not-found symbol for error recovery.
        dlog.error(varNode.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, varName,
                env.enclType.type.getKind().typeName(), env.enclType.type.tsymbol.name);
    }
    varNode.type = fieldSymbol.type;
    BVarSymbol paramSymbol;
    if (fieldSymbol.kind == SymbolKind.FUNCTION) {
        // Function-typed fields need a duplicated invokable symbol owned by the init function.
        paramSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol,
                objectTypeNode.initFunction.symbol, fieldSymbol.name, objectTypeSymbol.pkgID);
    } else {
        paramSymbol = new BVarSymbol(Flags.asMask(varNode.flagSet), varName, env.enclPkg.symbol.pkgID, varNode.type,
                env.scope.owner);
    }
    // The parameter deliberately shadows the field of the same name.
    defineShadowedSymbol(varNode.pos, paramSymbol, env);
    objectTypeNode.initFunction.initFunctionStmts.put(fieldSymbol,
            (BLangStatement) createAssignmentStmt(varNode, paramSymbol, fieldSymbol));
    varNode.symbol = paramSymbol;
}
/**
* Used to store location data for encountered unknown types in `checkErrors` method.
*
* @since 0.985.0
*/
class LocationData {
private String name;
private int row;
private int column;
LocationData(String name, int row, int column) {
this.name = name;
this.row = row;
this.column = column;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof LocationData)) {
return false;
}
LocationData data = (LocationData) o;
return name.equals(data.name) && row == data.row && column == data.column;
}
}
} | class SymbolEnter extends BLangNodeVisitor {
private static final CompilerContext.Key<SymbolEnter> SYMBOL_ENTER_KEY =
        new CompilerContext.Key<>();
// Compiler services resolved once from the CompilerContext in the constructor.
private final PackageLoader pkgLoader;
private final SymbolTable symTable;
private final Names names;
private final SymbolResolver symResolver;
private final BLangDiagnosticLog dlog;
private final Types types;
// Type definitions that could not be resolved in the current pass; retried by defineTypeNodes.
private List<BLangTypeDefinition> unresolvedTypes;
// Monotonic counter recording the order in which type definitions were resolved.
private int typePrecedence;
// Current symbol environment; swapped in/out by defineNode while visiting.
private SymbolEnv env;
/**
 * Returns the SymbolEnter registered in the given context, creating and
 * registering one (via the constructor) on first use.
 */
public static SymbolEnter getInstance(CompilerContext context) {
    SymbolEnter instance = context.get(SYMBOL_ENTER_KEY);
    // The constructor registers the new instance in the context itself.
    return instance != null ? instance : new SymbolEnter(context);
}
/**
 * Registers this instance in the context and resolves the compiler services
 * it depends on.
 */
public SymbolEnter(CompilerContext context) {
    // Register first so cyclic getInstance calls during construction see this instance.
    context.put(SYMBOL_ENTER_KEY, this);
    this.pkgLoader = PackageLoader.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.names = Names.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.types = Types.getInstance(context);
}
/**
 * Entry point of the define phase: populates the package node's construct lists
 * and defines the package under the built-in package's environment.
 */
public BLangPackage definePackage(BLangPackage pkgNode) {
    populatePackageNode(pkgNode);
    defineNode(pkgNode, this.symTable.pkgEnvMap.get(symTable.builtInPackageSymbol));
    return pkgNode;
}
/**
 * Visits the given node with {@code env} as the active environment, restoring
 * the previous environment afterwards.
 */
public void defineNode(BLangNode node, SymbolEnv env) {
    SymbolEnv prevEnv = this.env;
    this.env = env;
    node.accept(this);
    this.env = prevEnv;
}
/**
 * Defines a testable package, additionally inheriting the enclosing package's
 * imports while populating its construct lists.
 */
public BLangPackage defineTestablePackage(BLangTestablePackage pkgNode, SymbolEnv env,
                                          List<BLangImportPackage> enclPkgImports) {
    populatePackageNode(pkgNode, enclPkgImports);
    defineNode(pkgNode, env);
    return pkgNode;
}
/**
 * Creates the package symbol and environment, defines all top-level constructs,
 * and recurses into any testable packages. Idempotent: does nothing if the
 * define phase already completed for this package.
 */
@Override
public void visit(BLangPackage pkgNode) {
    if (pkgNode.completedPhases.contains(CompilerPhase.DEFINE)) {
        return;
    }
    BPackageSymbol pkgSymbol;
    if (Symbols.isFlagOn(Flags.asMask(pkgNode.flagSet), Flags.TESTABLE)) {
        pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable, Flags.asMask(pkgNode.flagSet));
    } else {
        pkgSymbol = Symbols.createPackageSymbol(pkgNode.packageID, this.symTable);
    }
    pkgNode.symbol = pkgSymbol;
    SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(pkgNode, pkgSymbol.scope, this.env);
    this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);
    defineConstructs(pkgNode, pkgEnv);
    pkgNode.getTestablePkgs().forEach(testablePackage -> defineTestablePackage(testablePackage, pkgEnv,
            pkgNode.imports));
    pkgNode.completedPhases.add(CompilerPhase.DEFINE);
}
/**
 * Defines all top-level constructs of a package in dependency order:
 * imports, constants, type definitions (iteratively), constant types, global
 * variables, then — with types sorted by resolution precedence — error details,
 * structure fields and members, and finally services, functions and annotations.
 */
private void defineConstructs(BLangPackage pkgNode, SymbolEnv pkgEnv) {
    pkgNode.imports.forEach(importNode -> defineNode(importNode, pkgEnv));
    this.typePrecedence = 0;
    pkgNode.constants.forEach(constant -> defineNode(constant, pkgEnv));
    defineTypeNodes(pkgNode.typeDefinitions, pkgEnv);
    resolveConstantTypeNode(pkgNode.constants, pkgEnv);
    pkgNode.globalVars.forEach(var -> defineNode(var, pkgEnv));
    // From here on, resolution failures are real errors, not retry candidates.
    pkgEnv.logErrors = true;
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    defineErrorDetails(pkgNode.typeDefinitions, pkgEnv);
    defineFields(pkgNode.typeDefinitions, pkgEnv);
    defineMembers(pkgNode.typeDefinitions, pkgEnv);
    pkgNode.services.forEach(service -> defineNode(service, pkgEnv));
    pkgNode.functions.forEach(func -> defineNode(func, pkgEnv));
    pkgNode.annotations.forEach(annot -> defineNode(annot, pkgEnv));
    // Tag client-typed globals as endpoints.
    pkgNode.globalVars.stream().filter(var -> var.symbol.type.tsymbol != null && Symbols
            .isFlagOn(var.symbol.type.tsymbol.flags, Flags.CLIENT)).map(varNode -> varNode.symbol)
            .forEach(varSymbol -> varSymbol.tag = SymTag.ENDPOINT);
}
/**
 * Defines an annotation declaration: creates its symbol with attach points,
 * attaches documentation, and resolves its (optional) attached type, which must
 * be a record type.
 */
public void visit(BLangAnnotation annotationNode) {
    BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(Flags.asMask(annotationNode.flagSet),
            AttachPoints.asMask(annotationNode.attachPoints), names.fromIdNode(annotationNode.name),
            env.enclPkg.symbol.pkgID, null, env.scope.owner);
    annotationSymbol.markdownDocumentation =
            getMarkdownDocAttachment(annotationNode.markdownDocumentationAttachment);
    annotationSymbol.type = new BAnnotationType(annotationSymbol);
    annotationNode.symbol = annotationSymbol;
    defineSymbol(annotationNode.name.pos, annotationSymbol);
    SymbolEnv annotationEnv = SymbolEnv.createAnnotationEnv(annotationNode, annotationSymbol.scope, env);
    if (annotationNode.typeNode != null) {
        BType recordType = this.symResolver.resolveTypeNode(annotationNode.typeNode, annotationEnv);
        annotationSymbol.attachedType = recordType.tsymbol;
        // Only record types may be attached to annotations.
        if (recordType != symTable.semanticError && recordType.tag != TypeTags.RECORD) {
            dlog.error(annotationNode.typeNode.pos, DiagnosticCode.ANNOTATION_REQUIRE_RECORD, recordType);
        }
    }
}
/**
 * Resolves an import declaration: determines the effective org name and version
 * (falling back to the enclosing package's), loads the imported package symbol,
 * and defines it under the import alias. Duplicate aliases and unresolvable
 * modules are reported.
 */
@Override
public void visit(BLangImportPackage importPkgNode) {
    Name pkgAlias = names.fromIdNode(importPkgNode.alias);
    if (symResolver.lookupSymbol(env, pkgAlias, SymTag.IMPORT) != symTable.notFoundSymbol) {
        dlog.error(importPkgNode.pos, DiagnosticCode.REDECLARED_SYMBOL, pkgAlias);
        return;
    }
    Name orgName;
    Name version;
    PackageID enclPackageID = env.enclPkg.packageID;
    if (importPkgNode.orgName.value == null || importPkgNode.orgName.value.isEmpty()) {
        // No org name given: inherit from the enclosing package.
        orgName = enclPackageID.orgName;
        version = (Names.DEFAULT_VERSION.equals(enclPackageID.version)) ? new Name("") : enclPackageID.version;
    } else if (importPkgNode.orgName.value.equals(enclPackageID.orgName.value)) {
        // Same org as the enclosing package: version is resolved by the loader.
        orgName = names.fromIdNode(importPkgNode.orgName);
        version = new Name("");
    } else {
        orgName = names.fromIdNode(importPkgNode.orgName);
        version = names.fromIdNode(importPkgNode.version);
    }
    List<Name> nameComps = importPkgNode.pkgNameComps.stream()
            .map(identifier -> names.fromIdNode(identifier))
            .collect(Collectors.toList());
    PackageID pkgId = new PackageID(orgName, nameComps, version);
    // The built-in package cannot be imported explicitly.
    if (pkgId.name.getValue().startsWith(Names.BUILTIN_PACKAGE.value)) {
        dlog.error(importPkgNode.pos, DiagnosticCode.MODULE_NOT_FOUND,
                importPkgNode.getQualifiedPackageName());
        return;
    }
    BPackageSymbol pkgSymbol = pkgLoader.loadPackageSymbol(pkgId, enclPackageID, this.env.enclPkg.repos);
    if (pkgSymbol == null) {
        dlog.error(importPkgNode.pos, DiagnosticCode.MODULE_NOT_FOUND,
                importPkgNode.getQualifiedPackageName());
        return;
    }
    importPkgNode.symbol = pkgSymbol;
    ((BPackageSymbol) this.env.scope.owner).imports.add(pkgSymbol);
    this.env.scope.define(pkgAlias, pkgSymbol);
}
/**
 * Defines an XML namespace declaration: validates the URI/prefix combination,
 * defaults a missing prefix, and defines the namespace symbol unless the name
 * is already taken.
 */
@Override
public void visit(BLangXMLNS xmlnsNode) {
    String nsURI = (String) ((BLangLiteral) xmlnsNode.namespaceURI).value;
    // A prefixed declaration may not bind to the empty URI.
    if (xmlnsNode.prefix.value != null && nsURI.isEmpty()) {
        dlog.error(xmlnsNode.pos, DiagnosticCode.INVALID_NAMESPACE_DECLARATION, xmlnsNode.prefix);
    }
    if (xmlnsNode.prefix.value == null) {
        xmlnsNode.prefix.value = XMLConstants.DEFAULT_NS_PREFIX;
    }
    BXMLNSSymbol xmlnsSymbol = Symbols.createXMLNSSymbol(names.fromIdNode(xmlnsNode.prefix), nsURI,
            env.enclPkg.symbol.pkgID, env.scope.owner);
    xmlnsNode.symbol = xmlnsSymbol;
    BSymbol foundSym = symResolver.lookupSymbol(env, xmlnsSymbol.name, SymTag.PACKAGE);
    if (foundSym != symTable.notFoundSymbol) {
        dlog.error(xmlnsNode.pos, DiagnosticCode.REDECLARED_SYMBOL, xmlnsSymbol.name);
        return;
    }
    defineSymbol(xmlnsNode.pos, xmlnsSymbol);
}
/** Delegates to the wrapped namespace declaration. */
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    defineNode(xmlnsStmtNode.xmlnsDecl, env);
}
/**
 * Defines type definitions iteratively until a fixed point: definitions that
 * could not be resolved in a pass are retried with the rest resolved. When a
 * pass makes no progress, the remaining definitions are diagnosed (cycles or
 * unknown types) and defined with dummy symbols for error recovery.
 */
private void defineTypeNodes(List<BLangTypeDefinition> typeDefs, SymbolEnv env) {
    if (typeDefs.size() == 0) {
        return;
    }
    this.unresolvedTypes = new ArrayList<>();
    for (BLangTypeDefinition typeDef : typeDefs) {
        defineNode(typeDef, env);
    }
    if (typeDefs.size() <= unresolvedTypes.size()) {
        // No progress this pass: report the root cause for each leftover definition.
        LinkedList<LocationData> unknownTypes = new LinkedList<>();
        for (BLangTypeDefinition unresolvedType : unresolvedTypes) {
            LinkedList<String> references = new LinkedList<>();
            references.add(unresolvedType.name.value);
            checkErrors(unresolvedType, unresolvedType.typeNode, references, unknownTypes);
        }
        unresolvedTypes.forEach(type -> createDummyTypeDefSymbol(type, env));
        unresolvedTypes.forEach(type -> defineNode(type, env));
        return;
    }
    // Some definitions resolved; retry the rest against the enlarged symbol space.
    defineTypeNodes(unresolvedTypes, env);
}
/**
 * Walks the type node of an unresolved type definition looking for the cause of
 * the failure: either a cyclic type reference (reported with the reference chain)
 * or a genuinely unknown type (reported once per source location).
 *
 * @param unresolvedType          the type definition that failed to resolve
 * @param currentTypeNode         the type node currently being inspected
 * @param visitedNodes            chain of type names visited so far (cycle detection)
 * @param encounteredUnknownTypes locations already reported as unknown, to avoid duplicates
 */
private void checkErrors(BLangTypeDefinition unresolvedType, BLangType currentTypeNode, List<String> visitedNodes,
                         List<LocationData> encounteredUnknownTypes) {
    String unresolvedTypeNodeName = unresolvedType.name.value;
    if (currentTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
        List<BLangType> memberTypeNodes = ((BLangUnionTypeNode) currentTypeNode).memberTypeNodes;
        for (BLangType memberTypeNode : memberTypeNodes) {
            if (memberTypeNode.getKind() == NodeKind.UNION_TYPE_NODE) {
                checkErrors(unresolvedType, memberTypeNode, visitedNodes, encounteredUnknownTypes);
            } else if (memberTypeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
                String memberTypeNodeName = ((BLangUserDefinedType) memberTypeNode).typeName.value;
                // Compiler-generated (anonymous) type names start with '$'; ignore them.
                if (memberTypeNodeName.startsWith("$")) {
                    continue;
                }
                if (unresolvedTypeNodeName.equals(memberTypeNodeName)) {
                    // Self reference inside a union: report the cycle.
                    visitedNodes.add(memberTypeNodeName);
                    dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, visitedNodes);
                    visitedNodes.remove(visitedNodes.lastIndexOf(memberTypeNodeName));
                } else {
                    checkErrors(unresolvedType, memberTypeNode, visitedNodes, encounteredUnknownTypes);
                }
            }
        }
    } else if (currentTypeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
        String currentTypeNodeName = ((BLangUserDefinedType) currentTypeNode).typeName.value;
        if (unresolvedTypeNodeName.equals(currentTypeNodeName)) {
            // Direct self reference.
            visitedNodes.add(currentTypeNodeName);
            dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, visitedNodes);
            visitedNodes.remove(visitedNodes.lastIndexOf(currentTypeNodeName));
        } else if (visitedNodes.contains(currentTypeNodeName)) {
            // Indirect cycle: report only the sub-chain from the repeated name onwards.
            List<String> dependencyList = new LinkedList<>();
            for (int i = visitedNodes.indexOf(currentTypeNodeName); i < visitedNodes.size(); i++) {
                dependencyList.add(visitedNodes.get(i));
            }
            dependencyList.add(currentTypeNodeName);
            dlog.error(unresolvedType.pos, DiagnosticCode.CYCLIC_TYPE_REFERENCE, dependencyList);
        } else {
            // Not (yet) a cycle: recurse into unresolved definitions with this name.
            List<BLangTypeDefinition> typeDefinitions = unresolvedTypes.stream()
                    .filter(typeDefinition -> typeDefinition.name.value.equals(currentTypeNodeName))
                    .collect(Collectors.toList());
            if (typeDefinitions.isEmpty()) {
                // Name matches no unresolved definition: it is genuinely unknown.
                LocationData locationData = new LocationData(currentTypeNodeName, currentTypeNode.pos.sLine,
                        currentTypeNode.pos.sCol);
                if (!encounteredUnknownTypes.contains(locationData)) {
                    dlog.error(currentTypeNode.pos, DiagnosticCode.UNKNOWN_TYPE, currentTypeNodeName);
                    encounteredUnknownTypes.add(locationData);
                }
            } else {
                for (BLangTypeDefinition typeDefinition : typeDefinitions) {
                    String typeName = typeDefinition.name.value;
                    visitedNodes.add(typeName);
                    checkErrors(unresolvedType, typeDefinition.typeNode, visitedNodes, encounteredUnknownTypes);
                    visitedNodes.remove(visitedNodes.lastIndexOf(typeName));
                }
            }
        }
    }
}
/**
 * Defines a type definition. If the type (or any type it references) cannot be
 * resolved yet, the definition is queued in {@code unresolvedTypes} for a later
 * pass. Otherwise its symbol is (re)labeled with the definition's name, flags
 * and documentation and defined in the current scope.
 */
@Override
public void visit(BLangTypeDefinition typeDefinition) {
    BType definedType = symResolver.resolveTypeNode(typeDefinition.typeNode, env);
    if (definedType == symTable.noType) {
        // Depends on a not-yet-defined type; retry in the next pass.
        if (!this.unresolvedTypes.contains(typeDefinition)) {
            this.unresolvedTypes.add(typeDefinition);
        }
        return;
    }
    if (typeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE ||
            typeDefinition.typeNode.getKind() == NodeKind.RECORD_TYPE) {
        // All referenced types must also resolve before this definition is usable.
        BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDefinition.typeNode;
        for (BLangType typeRef : structureTypeNode.typeRefs) {
            BType referencedType = symResolver.resolveTypeNode(typeRef, env);
            if (referencedType == symTable.noType) {
                if (!this.unresolvedTypes.contains(typeDefinition)) {
                    this.unresolvedTypes.add(typeDefinition);
                }
                return;
            }
        }
    }
    typeDefinition.precedence = this.typePrecedence++;
    BTypeSymbol typeDefSymbol;
    if (definedType.tsymbol.name != Names.EMPTY) {
        // Already-named types get a label symbol so this definition acts as an alias.
        typeDefSymbol = definedType.tsymbol.createLabelSymbol();
    } else {
        typeDefSymbol = definedType.tsymbol;
    }
    typeDefSymbol.markdownDocumentation = getMarkdownDocAttachment(typeDefinition.markdownDocumentationAttachment);
    typeDefSymbol.name = names.fromIdNode(typeDefinition.getName());
    typeDefSymbol.pkgID = env.enclPkg.packageID;
    typeDefSymbol.flags |= Flags.asMask(typeDefinition.flagSet);
    typeDefinition.symbol = typeDefSymbol;
    defineSymbol(typeDefinition.name.pos, typeDefSymbol);
}
/**
 * Defines a worker: creates its symbol (owned by the current scope owner),
 * attaches documentation, and defines it in the enclosing scope.
 */
@Override
public void visit(BLangWorker workerNode) {
    BInvokableSymbol symbol = Symbols.createWorkerSymbol(Flags.asMask(workerNode.flagSet),
            names.fromIdNode(workerNode.name), env.enclPkg.symbol.pkgID, null, env.scope.owner);
    symbol.markdownDocumentation = getMarkdownDocAttachment(workerNode.markdownDocumentationAttachment);
    workerNode.symbol = symbol;
    defineSymbolWithCurrentEnvOwner(workerNode.pos, symbol);
}
/**
 * Defines a service: creates its symbol, types it as a BServiceType over the
 * underlying service object type, and collects the object's resource functions
 * onto the service node.
 */
@Override
public void visit(BLangService serviceNode) {
    BServiceSymbol serviceSymbol = Symbols.createServiceSymbol(Flags.asMask(serviceNode.flagSet),
            names.fromIdNode(serviceNode.name), env.enclPkg.symbol.pkgID, serviceNode.type, env.scope.owner);
    serviceSymbol.markdownDocumentation = getMarkdownDocAttachment(serviceNode.markdownDocumentationAttachment);
    BType serviceObjectType = serviceNode.serviceTypeDefinition.symbol.type;
    serviceNode.symbol = serviceSymbol;
    serviceNode.symbol.type = new BServiceType(serviceObjectType.tsymbol);
    defineSymbol(serviceNode.name.pos, serviceSymbol);
    if (serviceNode.serviceTypeDefinition.typeNode.getKind() == NodeKind.OBJECT_TYPE) {
        // Expose the object's resource functions directly on the service node.
        BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) serviceNode.serviceTypeDefinition.typeNode;
        objectTypeNode.functions.stream().filter(func -> func.flagSet.contains(Flag.RESOURCE))
                .forEach(func -> serviceNode.resourceFunctions.add(func));
    }
}
/**
 * Defines the symbol for a function.
 * <p>
 * For an "attached outer function" (implementation written outside the object body),
 * the matching declaration inside the object is looked up and validated; on failure a
 * dummy symbol is fabricated so later phases can continue. For all other functions a
 * fresh invokable symbol is created and its parameters defined.
 */
@Override
public void visit(BLangFunction funcNode) {
    boolean validAttachedFunc = validateFuncReceiver(funcNode);
    boolean remoteFlagSetOnNode = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE);
    if (funcNode.attachedOuterFunction) {
        // Outer implementations may not carry visibility modifiers; visibility is
        // taken from the declaration inside the object.
        if (Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.PUBLIC)) {
            dlog.error(funcNode.pos, DiagnosticCode.ATTACHED_FUNC_CANT_HAVE_VISIBILITY_MODIFIERS, funcNode.name);
        }
        if (funcNode.receiver.type.tsymbol.kind == SymbolKind.RECORD) {
            // Records cannot have outer attached functions; recover with a dummy symbol.
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_ATTACH_FUNCTIONS_TO_RECORDS, funcNode.name,
                    funcNode.receiver.type.tsymbol.name);
            createDummyFunctionSymbol(funcNode);
            visitObjectAttachedFunction(funcNode);
            return;
        }
        // Look up the matching declaration inside the receiver object's method scope.
        BSymbol funcSymbol = symTable.notFoundSymbol;
        if (funcNode.receiver.type.tag == TypeTags.OBJECT) {
            SymbolEnv objectEnv = SymbolEnv.createObjectMethodsEnv(null, (BObjectTypeSymbol) funcNode.receiver.type.
                    tsymbol, env);
            funcSymbol = symResolver.lookupSymbol(objectEnv, getFuncSymbolName(funcNode), SymTag.FUNCTION);
        }
        if (funcSymbol == symTable.notFoundSymbol) {
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_FUNCTION, funcNode.name,
                    funcNode.receiver.type.tsymbol.name);
            createDummyFunctionSymbol(funcNode);
            visitObjectAttachedFunction(funcNode);
            return;
        }
        funcNode.symbol = (BInvokableSymbol) funcSymbol;
        // Only one implementation body per declaration is allowed.
        if (funcNode.symbol.bodyExist) {
            dlog.error(funcNode.pos, DiagnosticCode.IMPLEMENTATION_ALREADY_EXIST, funcNode.name);
        }
        // The 'remote' modifier must agree between declaration and implementation.
        if (remoteFlagSetOnNode && !Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(funcNode.pos, DiagnosticCode.REMOTE_ON_NON_REMOTE_FUNCTION, funcNode.name.value);
        }
        if (!remoteFlagSetOnNode && Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
            dlog.error(funcNode.pos, DiagnosticCode.REMOTE_REQUIRED_ON_REMOTE_FUNCTION);
        }
        validateAttachedFunction(funcNode, funcNode.receiver.type.tsymbol.name);
        visitObjectAttachedFunction(funcNode);
        return;
    }
    // 'remote' is only meaningful on object-attached functions.
    if (funcNode.receiver == null && !funcNode.attachedFunction && remoteFlagSetOnNode) {
        dlog.error(funcNode.pos, DiagnosticCode.REMOTE_IN_NON_OBJECT_FUNCTION, funcNode.name.value);
    }
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            getFuncSymbolName(funcNode), env.enclPkg.symbol.pkgID, null, env.scope.owner, funcNode.body != null);
    funcSymbol.markdownDocumentation = getMarkdownDocAttachment(funcNode.markdownDocumentationAttachment);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    if (funcNode.receiver != null) {
        defineAttachedFunctions(funcNode, funcSymbol, invokableEnv, validAttachedFunc);
    }
}
// Fabricates a placeholder function symbol (with its own scope) so later
// phases can keep processing a function whose real symbol could not be found.
private void createDummyFunctionSymbol(BLangFunction funcNode) {
    BInvokableSymbol placeholder = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            getFuncSymbolName(funcNode), env.enclPkg.symbol.pkgID, null, env.scope.owner, true);
    placeholder.scope = new Scope(placeholder);
    funcNode.symbol = placeholder;
}
/**
 * Defines the parameter symbols of an object-attached function and registers the
 * function in the package's attached-function list (object init functions excluded).
 * Reports an error if an implementation was already registered.
 */
private void visitObjectAttachedFunction(BLangFunction funcNode) {
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    invokableEnv.scope = funcNode.symbol.scope;
    defineObjectAttachedInvokableSymbolParams(funcNode, invokableEnv);
    // A second registration of the same symbol means a duplicate implementation.
    if (env.enclPkg.objAttachedFunctions.contains(funcNode.symbol)) {
        dlog.error(funcNode.pos, DiagnosticCode.IMPLEMENTATION_ALREADY_EXIST, funcNode.name);
        return;
    }
    if (!funcNode.objInitFunction) {
        env.enclPkg.objAttachedFunctions.add(funcNode.symbol);
    }
    // Bind the receiver ('self') variable to the symbol recorded on the function symbol.
    funcNode.receiver.symbol = funcNode.symbol.receiverSymbol;
}
/**
 * Validates that an outer attached-function implementation matches the interface
 * declared inside the object {@code objName}: parameter types and names, defaultable
 * parameter names, rest parameter name, and return type tag must all agree.
 * On success the INTERFACE flag is cleared from the symbol (implementation provided).
 */
private void validateAttachedFunction(BLangFunction funcNode, Name objName) {
    SymbolEnv invokableEnv = SymbolEnv.createDummyEnv(funcNode, funcNode.symbol.scope, env);
    // Resolve each required parameter's type while collecting the type list.
    List<BType> paramTypes = funcNode.requiredParams.stream()
            .peek(varNode -> varNode.type = symResolver.resolveTypeNode(varNode.typeNode, invokableEnv))
            .map(varNode -> varNode.type)
            .collect(Collectors.toList());
    funcNode.defaultableParams.forEach(p -> paramTypes.add(symResolver
            .resolveTypeNode(p.var.typeNode, invokableEnv)));
    if (!funcNode.desugaredReturnType) {
        symResolver.resolveTypeNode(funcNode.returnTypeNode, invokableEnv);
    }
    if (funcNode.restParam != null) {
        // Rest parameter names must match before the type is even considered.
        if (!funcNode.symbol.restParam.name.equals(names.fromIdNode(funcNode.restParam.name))) {
            dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
            return;
        }
        BType restParamType = symResolver.resolveTypeNode(funcNode.restParam.typeNode, invokableEnv);
        paramTypes.add(restParamType);
    }
    BInvokableType sourceType = (BInvokableType) funcNode.symbol.type;
    if (typesMissMatch(paramTypes, sourceType.paramTypes)
            || namesMissMatch(funcNode.requiredParams, funcNode.symbol.params)
            || namesMissMatchDef(funcNode.defaultableParams, funcNode.symbol.defaultableParams)) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    // Return types: both absent is fine; exactly one absent is a mismatch.
    if (funcNode.returnTypeNode.type == null && sourceType.retType == null) {
        return;
    } else if (funcNode.returnTypeNode.type == null || sourceType.retType == null) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    // NOTE(review): return types are compared only by type tag here, not by full
    // type identity — confirm this is intentional.
    if (funcNode.returnTypeNode.type.tag != sourceType.retType.tag) {
        dlog.error(funcNode.pos, DiagnosticCode.CANNOT_FIND_MATCHING_INTERFACE, funcNode.name, objName);
        return;
    }
    // Implementation found: clear the INTERFACE flag on the declaration symbol.
    funcNode.symbol.flags = funcNode.symbol.flags ^ Flags.INTERFACE;
}
// Returns true when the two type lists differ in length or any pair of
// corresponding entries is not the same type.
private boolean typesMissMatch(List<BType> lhs, List<BType> rhs) {
    int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    for (int idx = 0; idx < count; idx++) {
        boolean same = types.isSameType(lhs.get(idx), rhs.get(idx));
        if (!same) {
            return true;
        }
    }
    return false;
}
// Returns true when the parameter name lists differ in length or any pair of
// corresponding names differs.
private boolean namesMissMatch(List<BLangSimpleVariable> lhs, List<BVarSymbol> rhs) {
    int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    for (int idx = 0; idx < count; idx++) {
        Name declared = names.fromIdNode(lhs.get(idx).name);
        if (!rhs.get(idx).name.equals(declared)) {
            return true;
        }
    }
    return false;
}
// Same as namesMissMatch, but for defaultable parameters (variable-definition nodes).
private boolean namesMissMatchDef(List<BLangSimpleVariableDef> lhs, List<BVarSymbol> rhs) {
    int count = lhs.size();
    if (count != rhs.size()) {
        return true;
    }
    for (int idx = 0; idx < count; idx++) {
        Name declared = names.fromIdNode(lhs.get(idx).var.name);
        if (!rhs.get(idx).name.equals(declared)) {
            return true;
        }
    }
    return false;
}
// Defines the parameter symbols (required, defaultable, rest) of an
// object-attached invokable and resolves its return type.
private void defineObjectAttachedInvokableSymbolParams(BLangInvokableNode invokableNode, SymbolEnv invokableEnv) {
    for (BLangSimpleVariable param : invokableNode.requiredParams) {
        visitObjectAttachedFunctionParam(param, invokableEnv);
    }
    for (BLangSimpleVariableDef paramDef : invokableNode.defaultableParams) {
        visitObjectAttachedFunctionParam(paramDef.var, invokableEnv);
    }
    // The return type is resolved against the enclosing env, not the invokable env.
    if (invokableNode.returnTypeNode != null) {
        invokableNode.returnTypeNode.type = symResolver.resolveTypeNode(invokableNode.returnTypeNode, env);
    }
    if (invokableNode.restParam != null) {
        visitObjectAttachedFunctionParam(invokableNode.restParam, invokableEnv);
    }
}
/**
 * Resolves a parameter's declared type (if not already resolved) and defines
 * or rebinds its symbol. Note: the type is resolved against the enclosing env.
 */
void visitObjectAttachedFunctionParam(BLangSimpleVariable variable, SymbolEnv invokableEnv) {
    if (variable.type == null) {
        variable.type = symResolver.resolveTypeNode(variable.typeNode, env);
    }
    visitObjectAttachedFunctionParamSymbol(variable, invokableEnv);
}
/**
 * Defines the symbol for an attached-function parameter, reusing an already-defined
 * symbol from the invokable scope if present. If the parameter has a default value,
 * it must be a literal; the literal is recorded on the symbol.
 */
void visitObjectAttachedFunctionParamSymbol(BLangSimpleVariable variable, SymbolEnv invokableEnv) {
    BSymbol varSymbol = symResolver.lookupSymbol(invokableEnv, names.fromIdNode(variable.name),
            SymTag.VARIABLE);
    if (varSymbol == symTable.notFoundSymbol) {
        defineNode(variable, invokableEnv);
    } else {
        // Already defined (e.g. by the declaration inside the object) — rebind it.
        variable.symbol = (BVarSymbol) varSymbol;
    }
    if (variable.expr == null) {
        return;
    }
    // Default values are restricted to literals at this phase.
    if (variable.expr.getKind() != NodeKind.LITERAL) {
        this.dlog.error(variable.expr.pos, DiagnosticCode.INVALID_DEFAULT_PARAM_VALUE, variable.name);
        return;
    }
    BLangLiteral literal = (BLangLiteral) variable.expr;
    variable.symbol.defaultValue = new DefaultValueLiteral(literal.value, literal.typeTag);
}
/**
 * No-op: resources do not get their own symbols here; they are handled through
 * the enclosing service's object type (see {@code visit(BLangService)}).
 */
@Override
public void visit(BLangResource resourceNode) {
}
/**
 * Defines the symbol for a constant declaration. For non-literal values the
 * symbol is defined with error types (resolved later); for literal values the
 * associated type definition is defined first and the literal value recorded.
 */
@Override
public void visit(BLangConstant constant) {
    Name name = names.fromIdNode(constant.name);
    PackageID pkgID = env.enclPkg.symbol.pkgID;
    // Start with semanticError types; they are filled in below for literal values.
    BConstantSymbol constantSymbol = new BConstantSymbol(Flags.asMask(constant.flagSet), name, pkgID,
            symTable.semanticError, symTable.semanticError, env.scope.owner);
    constant.symbol = constantSymbol;
    if (((BLangExpression) constant.value).getKind() != NodeKind.LITERAL) {
        // Non-literal constant value: only define the (error-typed) symbol for now.
        if (symResolver.checkForUniqueSymbol(constant.pos, env, constantSymbol, SymTag.VARIABLE_NAME)) {
            env.scope.define(constantSymbol.name, constantSymbol);
        }
        return;
    }
    defineNode(constant.associatedTypeDefinition, env);
    constantSymbol.type = constant.associatedTypeDefinition.symbol.type;
    constantSymbol.literalValue = ((BLangLiteral) constant.value).value;
    constantSymbol.literalValueTypeTag = ((BLangLiteral) constant.value).typeTag;
    constantSymbol.markdownDocumentation = getMarkdownDocAttachment(constant.markdownDocumentationAttachment);
    if (!symResolver.checkForUniqueSymbol(constant.pos, env, constantSymbol, SymTag.VARIABLE_NAME)) {
        return;
    }
    env.scope.define(constantSymbol.name, constantSymbol);
}
/**
 * Defines the symbol for a simple variable. Variables named {@code _} (ignore) or
 * with an empty name are skipped. Client-typed variables are tagged as endpoints,
 * and worker-derived futures are made visible inside the package's lambda functions.
 */
@Override
public void visit(BLangSimpleVariable varNode) {
    if (varNode.type == null) {
        if (varNode.typeNode != null) {
            varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
        } else {
            varNode.type = symTable.noType;
        }
    }
    Name varName = names.fromIdNode(varNode.name);
    if (varName == Names.EMPTY || varName == Names.IGNORE) {
        // Anonymous/ignored variables get no symbol.
        return;
    }
    BVarSymbol varSymbol = defineVarSymbol(varNode.pos, varNode.flagSet, varNode.type, varName, env);
    varSymbol.markdownDocumentation = getMarkdownDocAttachment(varNode.markdownDocumentationAttachment);
    varNode.symbol = varSymbol;
    if (varNode.symbol.type.tsymbol != null && Symbols.isFlagOn(varNode.symbol.type.tsymbol.flags, Flags.CLIENT)) {
        varSymbol.tag = SymTag.ENDPOINT;
    }
    if (varSymbol.type.tag == TypeTags.FUTURE && ((BFutureType) varSymbol.type).workerDerivative) {
        Iterator<BLangLambdaFunction> lambdaFunctions = env.enclPkg.lambdaFunctions.iterator();
        while (lambdaFunctions.hasNext()) {
            BLangLambdaFunction lambdaFunction = lambdaFunctions.next();
            // NOTE(review): the extra hasNext() here means the LAST lambda in the
            // list is never considered — looks like an off-by-one; confirm intent.
            if (lambdaFunctions.hasNext() &&
                    varSymbol.owner == lambdaFunction.cachedEnv.enclInvokable.symbol) {
                lambdaFunction.cachedEnv.scope.define(varSymbol.name, varSymbol);
            }
        }
    }
}
/**
 * Resolves the declared type of a tuple variable if it is not set yet.
 */
@Override
public void visit(BLangTupleVariable varNode) {
    if (varNode.type != null) {
        return;
    }
    varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
/**
 * Resolves the declared type of a record variable if it is not set yet.
 */
@Override
public void visit(BLangRecordVariable varNode) {
    if (varNode.type != null) {
        return;
    }
    varNode.type = symResolver.resolveTypeNode(varNode.typeNode, env);
}
/**
 * No-op: endpoints are not entered as symbols in this phase.
 */
@Override
public void visit(BLangEndpoint endpoint) {
}
/**
 * Defines the symbol for an XML attribute. A plain attribute becomes a
 * {@code BXMLAttributeSymbol}; an {@code xmlns} declaration becomes a
 * {@code BXMLNSSymbol} (the default namespace uses the empty prefix).
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    if (!(bLangXMLAttribute.name.getKind() == NodeKind.XML_QNAME)) {
        return;
    }
    BLangXMLQName qname = (BLangXMLQName) bLangXMLAttribute.name;
    if (!bLangXMLAttribute.isNamespaceDeclr) {
        // Plain attribute: define it as a member symbol of the current scope.
        BXMLAttributeSymbol attrSymbol = new BXMLAttributeSymbol(qname.localname.value, qname.namespaceURI,
                env.enclPkg.symbol.pkgID, env.scope.owner);
        if (symResolver.checkForUniqueMemberSymbol(bLangXMLAttribute.pos, env, attrSymbol)) {
            env.scope.define(attrSymbol.name, attrSymbol);
            bLangXMLAttribute.symbol = attrSymbol;
        }
        return;
    }
    // Namespace declaration: the URI is only known statically for a single literal value.
    List<BLangExpression> exprs = bLangXMLAttribute.value.textFragments;
    String nsURI = null;
    if (exprs.size() == 1 && exprs.get(0).getKind() == NodeKind.LITERAL) {
        nsURI = (String) ((BLangLiteral) exprs.get(0)).value;
    }
    String symbolName = qname.localname.value;
    if (symbolName.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        // 'xmlns=...' declares the default namespace (empty prefix).
        symbolName = XMLConstants.DEFAULT_NS_PREFIX;
    }
    BXMLNSSymbol xmlnsSymbol =
            new BXMLNSSymbol(names.fromString(symbolName), nsURI, env.enclPkg.symbol.pkgID, env.scope.owner);
    if (symResolver.checkForUniqueMemberSymbol(bLangXMLAttribute.pos, env, xmlnsSymbol)) {
        env.scope.define(xmlnsSymbol.name, xmlnsSymbol);
        bLangXMLAttribute.symbol = xmlnsSymbol;
    }
}
/**
 * Resolves the literal-value type of each constant: from the declared type node
 * when present, otherwise derived from the literal's type tag. Disallowed
 * constant types are reported.
 */
private void resolveConstantTypeNode(List<BLangConstant> constants, SymbolEnv env) {
    for (BLangConstant constant : constants) {
        if (constant.symbol.type == symTable.semanticError) {
            // Symbol definition already failed for this constant; skip it.
            continue;
        }
        if (constant.typeNode != null) {
            constant.symbol.literalValueType = symResolver.resolveTypeNode(constant.typeNode, env);
        } else {
            constant.symbol.literalValueType = symTable.getTypeFromTag(constant.symbol.literalValueTypeTag);
        }
        if (!isAllowedConstantType(constant.symbol)) {
            // NOTE(review): constant.typeNode may be null on this path (else-branch
            // above) — potential NPE when a tag-derived type is disallowed; confirm.
            dlog.error(constant.typeNode.pos, DiagnosticCode.CANNOT_DEFINE_CONSTANT_WITH_TYPE, constant.typeNode);
        }
    }
}
// Constants may only be declared with the simple basic types listed below.
private boolean isAllowedConstantType(BConstantSymbol symbol) {
    switch (symbol.literalValueType.tag) {
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether an annotation with the given simple name is attached.
 *
 * @param annotationAttachmentList annotation attachments to search
 * @param expectedAnnotation       simple name of the annotation to look for
 * @return true if at least one attachment has the expected name
 */
private boolean hasAnnotation(List<BLangAnnotationAttachment> annotationAttachmentList, String expectedAnnotation) {
    // anyMatch short-circuits on the first hit, unlike filter(...).count() > 0
    // which always scans the whole list.
    return annotationAttachmentList.stream()
            .anyMatch(annotation -> annotation.annotationName.value.equals(expectedAnnotation));
}
/**
 * Visit each compilation unit (.bal file) and add each top-level node
 * in the compilation unit to the package node.
 *
 * @param pkgNode current package node
 */
private void populatePackageNode(BLangPackage pkgNode) {
    for (BLangCompilationUnit compUnit : pkgNode.getCompilationUnits()) {
        populateCompilationUnit(pkgNode, compUnit);
    }
}
/**
 * Visit each compilation unit (.bal file) and add each top-level node in the compilation unit to the
 * testable package node.
 *
 * @param pkgNode current package node
 * @param enclPkgImports imports of the enclosed package
 */
private void populatePackageNode(BLangTestablePackage pkgNode, List<BLangImportPackage> enclPkgImports) {
    populatePackageNode(pkgNode);
    // Drop imports already declared by the enclosing package to avoid duplicates.
    pkgNode.getImports().removeIf(enclPkgImports::contains);
}
/**
 * Visit each top-level node and add it to the package node.
 *
 * @param pkgNode current package node
 * @param compUnit current compilation unit
 */
private void populateCompilationUnit(BLangPackage pkgNode, BLangCompilationUnit compUnit) {
    for (TopLevelNode topLevelNode : compUnit.getTopLevelNodes()) {
        addTopLevelNode(pkgNode, topLevelNode);
    }
}
/**
 * Routes a top-level node into the matching typed list on the package node.
 * Every node except package declarations and imports also goes into the
 * generic {@code topLevelNodes} list.
 */
private void addTopLevelNode(BLangPackage pkgNode, TopLevelNode node) {
    NodeKind kind = node.getKind();
    if (kind != NodeKind.PACKAGE_DECLARATION && kind != IMPORT) {
        pkgNode.topLevelNodes.add(node);
    }
    switch (kind) {
        case IMPORT:
            // Imports are de-duplicated; the same import may appear in several units.
            if (!pkgNode.imports.contains(node)) {
                pkgNode.imports.add((BLangImportPackage) node);
            }
            break;
        case FUNCTION:
            pkgNode.functions.add((BLangFunction) node);
            break;
        case TYPE_DEFINITION:
            pkgNode.typeDefinitions.add((BLangTypeDefinition) node);
            break;
        case SERVICE:
            pkgNode.services.add((BLangService) node);
            break;
        case VARIABLE:
            pkgNode.globalVars.add((BLangSimpleVariable) node);
            break;
        case ANNOTATION:
            pkgNode.annotations.add((BLangAnnotation) node);
            break;
        case XMLNS:
            pkgNode.xmlnsList.add((BLangXMLNS) node);
            break;
        case CONSTANT:
            pkgNode.constants.add((BLangConstant) node);
            break;
    }
}
/**
 * Resolves the reason and detail types of each error type definition. When both
 * default to {@code string}/{@code map}, the built-in error type is reused instead
 * of populating a distinct {@code BErrorType}.
 */
private void defineErrorDetails(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        if (typeDef.typeNode.getKind() != NodeKind.ERROR_TYPE) {
            continue;
        }
        BLangErrorType errorTypeNode = (BLangErrorType) typeDef.typeNode;
        SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(errorTypeNode, typeDef.symbol.scope, pkgEnv);
        // Reason defaults to string, detail defaults to map when not declared.
        BType reasonType = Optional.ofNullable(errorTypeNode.reasonType)
                .map(bLangType -> symResolver.resolveTypeNode(bLangType, typeDefEnv))
                .orElse(symTable.stringType);
        BType detailType = Optional.ofNullable(errorTypeNode.detailType)
                .map(bLangType -> symResolver.resolveTypeNode(bLangType, typeDefEnv))
                .orElse(symTable.mapType);
        if (reasonType == symTable.stringType && detailType == symTable.mapType) {
            // Fully-default error definitions alias the built-in error type.
            typeDef.symbol.type = symTable.errorType;
            continue;
        }
        BErrorType errorType = (BErrorType) typeDef.symbol.type;
        errorType.reasonType = reasonType;
        errorType.detailType = detailType;
    }
}
/**
 * Defines the fields (own and referenced) of each object/record type definition,
 * and resolves record-specific properties: sealed-ness and the rest-field type.
 */
private void defineFields(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE ||
                (typeDef.symbol.type.tag != TypeTags.OBJECT && typeDef.symbol.type.tag != TypeTags.RECORD)) {
            continue;
        }
        BStructureType structureType = (BStructureType) typeDef.symbol.type;
        BLangStructureTypeNode structureTypeNode = (BLangStructureTypeNode) typeDef.typeNode;
        SymbolEnv typeDefEnv = SymbolEnv.createTypeEnv(structureTypeNode, typeDef.symbol.scope, pkgEnv);
        // Pull in fields contributed by type references before defining everything.
        resolveReferencedFields(structureTypeNode, typeDefEnv);
        structureType.fields =
                Stream.concat(structureTypeNode.fields.stream(), structureTypeNode.referencedFields.stream())
                        .peek(field -> defineNode(field, typeDefEnv))
                        .filter(field -> field.symbol.type != symTable.semanticError)
                        .map(field -> new BField(names.fromIdNode(field.name), field.symbol))
                        .collect(Collectors.toList());
        // Everything below applies to records only.
        if (typeDef.symbol.kind != SymbolKind.RECORD) {
            continue;
        }
        BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) structureTypeNode;
        BRecordType recordType = (BRecordType) structureType;
        recordType.sealed = recordTypeNode.sealed;
        if (recordTypeNode.sealed && recordTypeNode.restFieldType != null) {
            // A sealed record cannot also declare a rest field descriptor.
            dlog.error(recordTypeNode.restFieldType.pos, DiagnosticCode.REST_FIELD_NOT_ALLOWED_IN_SEALED_RECORDS);
            continue;
        }
        if (recordTypeNode.restFieldType == null) {
            if (recordTypeNode.sealed) {
                recordType.restFieldType = symTable.noType;
                continue;
            }
            // Open record without an explicit rest type defaults to anydata.
            recordType.restFieldType = symTable.anydataType;
            continue;
        }
        recordType.restFieldType = symResolver.resolveTypeNode(recordTypeNode.restFieldType, typeDefEnv);
    }
}
/**
 * Defines member functions of object type definitions (init function, declared
 * functions, and functions inherited through type references) and the synthetic
 * init function of record type definitions.
 */
private void defineMembers(List<BLangTypeDefinition> typeDefNodes, SymbolEnv pkgEnv) {
    for (BLangTypeDefinition typeDef : typeDefNodes) {
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.kind == SymbolKind.OBJECT) {
            BLangObjectTypeNode objTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            SymbolEnv objMethodsEnv =
                    SymbolEnv.createObjectMethodsEnv(objTypeNode, (BObjectTypeSymbol) objTypeNode.symbol, pkgEnv);
            defineObjectInitFunction(objTypeNode, objMethodsEnv);
            objTypeNode.functions.forEach(f -> {
                // Every member function receives the object itself as receiver.
                f.setReceiver(ASTBuilderUtil.createReceiver(typeDef.pos, typeDef.symbol.type));
                defineNode(f, objMethodsEnv);
            });
            // Bring in functions from referenced (abstract) object types.
            for (BLangType typeRef : objTypeNode.typeRefs) {
                if (typeRef.type.tsymbol.kind != SymbolKind.OBJECT) {
                    continue;
                }
                List<BAttachedFunction> functions = ((BObjectTypeSymbol) typeRef.type.tsymbol).attachedFuncs;
                for (BAttachedFunction function : functions) {
                    defineReferencedFunction(typeDef, objMethodsEnv, typeRef, function);
                }
            }
        } else if (typeDef.symbol.kind == SymbolKind.RECORD) {
            BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
            SymbolEnv typeDefEnv = SymbolEnv.createPkgLevelSymbolEnv(recordTypeNode, typeDef.symbol.scope, pkgEnv);
            defineRecordInitFunction(typeDef, typeDefEnv);
        }
    }
}
/**
 * Defines an invokable's symbol in the current scope, then points the invokable
 * env at the symbol's scope and defines the parameters inside it.
 * The ordering matters: defineSymbol creates the symbol's scope first.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    defineSymbol(invokableNode.name.pos, funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
    defineInvokableSymbolParams(invokableNode, funcSymbol, invokableEnv);
}
/**
 * Defines the required, defaultable and rest parameter symbols of an invokable,
 * resolves its return type, and builds the resulting {@code BInvokableType}.
 * Defaultable parameter values must be literals at this phase.
 */
private void defineInvokableSymbolParams(BLangInvokableNode invokableNode, BInvokableSymbol invokableSymbol,
                                         SymbolEnv invokableEnv) {
    List<BVarSymbol> paramSymbols =
            invokableNode.requiredParams.stream()
                    .peek(varNode -> defineNode(varNode, invokableEnv))
                    .map(varNode -> varNode.symbol)
                    .collect(Collectors.toList());
    List<BVarSymbol> namedParamSymbols =
            invokableNode.defaultableParams.stream()
                    .peek(varDefNode -> defineNode(varDefNode.var, invokableEnv))
                    .map(varDefNode -> {
                        BVarSymbol varSymbol = varDefNode.var.symbol;
                        // Default values are restricted to literals.
                        if (varDefNode.var.expr.getKind() != NodeKind.LITERAL) {
                            this.dlog.error(varDefNode.var.expr.pos, DiagnosticCode.INVALID_DEFAULT_PARAM_VALUE,
                                    varDefNode.var.name);
                        } else {
                            BLangLiteral literal = (BLangLiteral) varDefNode.var.expr;
                            varSymbol.defaultValue = new DefaultValueLiteral(literal.value, literal.typeTag);
                        }
                        return varSymbol;
                    })
                    .collect(Collectors.toList());
    if (!invokableNode.desugaredReturnType) {
        symResolver.resolveTypeNode(invokableNode.returnTypeNode, invokableEnv);
    }
    invokableSymbol.params = paramSymbols;
    invokableSymbol.retType = invokableNode.returnTypeNode.type;
    invokableSymbol.defaultableParams = namedParamSymbols;
    // The invokable type's parameter list is required + defaultable (+ rest).
    List<BType> paramTypes = paramSymbols.stream()
            .map(paramSym -> paramSym.type)
            .collect(Collectors.toList());
    namedParamSymbols.forEach(paramSymbol -> paramTypes.add(paramSymbol.type));
    if (invokableNode.restParam != null) {
        defineNode(invokableNode.restParam, invokableEnv);
        invokableSymbol.restParam = invokableNode.restParam.symbol;
        paramTypes.add(invokableSymbol.restParam.type);
    }
    invokableSymbol.type = new BInvokableType(paramTypes, invokableNode.returnTypeNode.type, null);
}
// Gives the symbol a fresh scope and enters it into the current env's scope
// when the name is unique there.
private void defineSymbol(DiagnosticPos pos, BSymbol symbol) {
    symbol.scope = new Scope(symbol);
    boolean unique = symResolver.checkForUniqueSymbol(pos, env, symbol, symbol.tag);
    if (unique) {
        env.scope.define(symbol.name, symbol);
    }
}
// Gives the symbol a fresh scope and enters it into the given env's scope
// when the name is unique there.
public void defineSymbol(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) {
    symbol.scope = new Scope(symbol);
    boolean unique = symResolver.checkForUniqueSymbol(pos, env, symbol, symbol.tag);
    if (unique) {
        env.scope.define(symbol.name, symbol);
    }
}
/**
 * Define a symbol that is unique only for the current scope.
 *
 * @param pos Line number information of the source file
 * @param symbol Symbol to be defines
 * @param env Environment to define the symbol
 */
public void defineShadowedSymbol(DiagnosticPos pos, BSymbol symbol, SymbolEnv env) {
    symbol.scope = new Scope(symbol);
    // Uniqueness is checked only against the current scope, allowing shadowing
    // of symbols from outer scopes.
    boolean uniqueHere = symResolver.checkForUniqueSymbolInCurrentScope(pos, env, symbol, symbol.tag);
    if (uniqueHere) {
        env.scope.define(symbol.name, symbol);
    }
}
// Like defineSymbol, but the new scope is owned by the current env's owner
// rather than the symbol itself.
private void defineSymbolWithCurrentEnvOwner(DiagnosticPos pos, BSymbol symbol) {
    symbol.scope = new Scope(env.scope.owner);
    boolean unique = symResolver.checkForUniqueSymbol(pos, env, symbol, symbol.tag);
    if (unique) {
        env.scope.define(symbol.name, symbol);
    }
}
/**
 * Creates and defines a variable symbol in the given environment.
 * On a name clash the symbol's type is set to semanticError, but the symbol is
 * still entered into the scope (error-recovery so later phases can proceed).
 *
 * @return the (possibly error-typed) variable symbol
 */
public BVarSymbol defineVarSymbol(DiagnosticPos pos, Set<Flag> flagSet, BType varType, Name varName,
                                  SymbolEnv env) {
    Scope enclScope = env.scope;
    BVarSymbol varSymbol = createVarSymbol(flagSet, varType, varName, env);
    if (!symResolver.checkForUniqueSymbol(pos, env, varSymbol, SymTag.VARIABLE_NAME)) {
        varSymbol.type = symTable.semanticError;
    }
    // Defined unconditionally — duplicates are reported above but still entered.
    enclScope.define(varSymbol.name, varSymbol);
    return varSymbol;
}
// Convenience overload: converts the flag set to a mask and delegates.
public BVarSymbol createVarSymbol(Set<Flag> flagSet, BType varType, Name varName, SymbolEnv env) {
    int flagMask = Flags.asMask(flagSet);
    return createVarSymbol(flagMask, varType, varName, env);
}
// Creates a variable symbol. Function-typed variables get an invokable symbol;
// client-typed variables are tagged as endpoints.
public BVarSymbol createVarSymbol(int flags, BType varType, Name varName, SymbolEnv env) {
    BType safeType = types.getSafeType(varType, false);
    if (safeType.tag == TypeTags.INVOKABLE) {
        BInvokableSymbol fnVarSymbol = new BInvokableSymbol(SymTag.VARIABLE, flags, varName,
                env.enclPkg.symbol.pkgID, varType, env.scope.owner);
        fnVarSymbol.kind = SymbolKind.FUNCTION;
        return fnVarSymbol;
    }
    BVarSymbol varSymbol = new BVarSymbol(flags, varName, env.enclPkg.symbol.pkgID, varType, env.scope.owner);
    if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) {
        varSymbol.tag = SymTag.ENDPOINT;
    }
    return varSymbol;
}
// Defines the object's initializer (if any) as an attached function whose
// receiver is the object itself.
private void defineObjectInitFunction(BLangObjectTypeNode object, SymbolEnv conEnv) {
    BLangFunction init = object.initFunction;
    if (init == null) {
        return;
    }
    init.receiver = ASTBuilderUtil.createReceiver(object.pos, object.type);
    init.attachedFunction = true;
    init.flagSet.add(Flag.ATTACHED);
    defineNode(init, conEnv);
}
// Synthesizes and defines the record's init function, attached to the record type.
private void defineRecordInitFunction(BLangTypeDefinition typeDef, SymbolEnv conEnv) {
    BLangRecordTypeNode recordNode = (BLangRecordTypeNode) typeDef.typeNode;
    BLangFunction init = ASTBuilderUtil.createInitFunction(typeDef.pos, "", Names.INIT_FUNCTION_SUFFIX);
    recordNode.initFunction = init;
    init.receiver = createReceiver(typeDef.pos, typeDef.name);
    init.attachedFunction = true;
    init.flagSet.add(Flag.ATTACHED);
    defineNode(init, conEnv);
}
// Validates an attached function against its receiver's object/record type (when
// the receiver itself validated), then defines the receiver and records it on
// the function symbol.
private void defineAttachedFunctions(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                     SymbolEnv invokableEnv, boolean isValidAttachedFunc) {
    if (isValidAttachedFunc) {
        int receiverTag = funcNode.receiver.type.tsymbol.tag;
        if (receiverTag == SymTag.OBJECT) {
            validateFunctionsAttachedToObject(funcNode, funcSymbol, invokableEnv);
        } else if (receiverTag == SymTag.RECORD) {
            validateFunctionsAttachedToRecords(funcNode, funcSymbol, invokableEnv);
        }
    }
    defineNode(funcNode.receiver, invokableEnv);
    funcSymbol.receiverSymbol = funcNode.receiver.symbol;
}
// Records the function as the record's initializer function.
private void validateFunctionsAttachedToRecords(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                                SymbolEnv invokableEnv) {
    BRecordTypeSymbol receiverSymbol = (BRecordTypeSymbol) funcNode.receiver.type.tsymbol;
    BInvokableType attachedType = (BInvokableType) funcSymbol.type;
    receiverSymbol.initializerFunc = new BAttachedFunction(
            names.fromIdNode(funcNode.name), funcSymbol, attachedType);
}
/**
 * Validates and registers a function attached to an object: rejects a name clash
 * with a field, checks remote/resource modifiers, and records the function either
 * as a regular attached function or as the object's initializer.
 */
private void validateFunctionsAttachedToObject(BLangFunction funcNode, BInvokableSymbol funcSymbol,
                                               SymbolEnv invokableEnv) {
    BInvokableType funcType = (BInvokableType) funcSymbol.type;
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) funcNode.receiver.type.tsymbol;
    // A function may not share a name with an object field.
    BSymbol symbol = symResolver.lookupMemberSymbol(funcNode.receiver.pos, objectSymbol.scope, invokableEnv,
            names.fromIdNode(funcNode.name), SymTag.VARIABLE);
    if (symbol != symTable.notFoundSymbol) {
        dlog.error(funcNode.pos, DiagnosticCode.OBJECT_FIELD_AND_FUNC_WITH_SAME_NAME,
                funcNode.name.value, funcNode.receiver.type.toString());
        return;
    }
    BAttachedFunction attachedFunc = new BAttachedFunction(
            names.fromIdNode(funcNode.name), funcSymbol, funcType);
    validateRemoteFunctionAttachedToObject(funcNode, objectSymbol);
    validateResourceFunctionAttachedToObject(funcNode, objectSymbol);
    if (!funcNode.objInitFunction) {
        objectSymbol.attachedFuncs.add(attachedFunc);
        return;
    }
    // Object constructors must not declare a return type other than nil.
    if (funcNode.returnTypeNode.type != symTable.nilType) {
        dlog.error(funcNode.pos, DiagnosticCode.INVALID_OBJECT_CONSTRUCTOR,
                funcNode.name.value, funcNode.receiver.type.toString());
    }
    objectSymbol.initializerFunc = attachedFunc;
}
// Propagates the REMOTE flag onto the symbol and rejects remote functions on
// non-client objects.
private void validateRemoteFunctionAttachedToObject(BLangFunction funcNode, BObjectTypeSymbol objectSymbol) {
    boolean isRemote = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.REMOTE);
    if (!isRemote) {
        return;
    }
    funcNode.symbol.flags |= Flags.REMOTE;
    if (!Symbols.isFlagOn(objectSymbol.flags, Flags.CLIENT)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.REMOTE_FUNCTION_IN_NON_CLIENT_OBJECT);
    }
}
// Propagates the RESOURCE flag onto the symbol and rejects resource functions
// on non-service objects.
private void validateResourceFunctionAttachedToObject(BLangFunction funcNode, BObjectTypeSymbol objectSymbol) {
    boolean isResource = Symbols.isFlagOn(Flags.asMask(funcNode.flagSet), Flags.RESOURCE);
    if (!isResource) {
        return;
    }
    funcNode.symbol.flags |= Flags.RESOURCE;
    if (!Symbols.isFlagOn(objectSymbol.flags, Flags.SERVICE)) {
        this.dlog.error(funcNode.pos, DiagnosticCode.RESOURCE_FUNCTION_IN_NON_SERVICE_OBJECT);
    }
}
// Builds an assignment statement `fieldVar = varSym`, with both sides as simple
// variable references positioned at the given variable's location.
private StatementNode createAssignmentStmt(BLangSimpleVariable variable, BVarSymbol varSym, BSymbol fieldVar) {
    BLangSimpleVarRef target = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    target.pos = variable.pos;
    target.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    target.variableName = (BLangIdentifier) createIdentifier(fieldVar.name.getValue());
    target.symbol = fieldVar;
    target.type = fieldVar.type;
    BLangSimpleVarRef source = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    source.pos = variable.pos;
    source.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    source.variableName = (BLangIdentifier) createIdentifier(varSym.name.getValue());
    source.symbol = varSym;
    source.type = varSym.type;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.expr = source;
    assignment.setVariable(target);
    return assignment;
}
// Builds a synthetic `self` receiver variable whose type is a user-defined
// type node referring to the given type name.
private BLangSimpleVariable createReceiver(DiagnosticPos pos, BLangIdentifier name) {
    BLangSimpleVariable selfVar = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
    selfVar.pos = pos;
    selfVar.setName(createIdentifier(Names.SELF.getValue()));
    BLangUserDefinedType receiverTypeNode = (BLangUserDefinedType) TreeBuilder.createUserDefinedTypeNode();
    receiverTypeNode.pkgAlias = new BLangIdentifier();
    receiverTypeNode.typeName = name;
    selfVar.setTypeNode(receiverTypeNode);
    return selfVar;
}
// Builds an identifier node; a null value leaves the node's value unset.
private IdentifierNode createIdentifier(String value) {
    IdentifierNode identifier = TreeBuilder.createIdentifierNode();
    if (value == null) {
        return identifier;
    }
    identifier.setValue(value);
    return identifier;
}
// Validates the receiver of an attached function: the receiver type must be one
// of the supported built-in/structured types and must be declared in the current
// package. Returns false (with a diagnostic) when either check fails.
private boolean validateFuncReceiver(BLangFunction funcNode) {
    if (funcNode.receiver == null) {
        return true;
    }
    if (funcNode.receiver.type == null) {
        funcNode.receiver.type = symResolver.resolveTypeNode(funcNode.receiver.typeNode, env);
    }
    int receiverTag = funcNode.receiver.type.tag;
    // An already-errored receiver type is treated as valid so we don't cascade errors.
    if (receiverTag == TypeTags.SEMANTIC_ERROR) {
        return true;
    }
    switch (receiverTag) {
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.INT:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.TABLE:
        case TypeTags.STREAM:
        case TypeTags.FUTURE:
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            break;
        default:
            dlog.error(funcNode.receiver.pos, DiagnosticCode.FUNC_DEFINED_ON_NOT_SUPPORTED_TYPE,
                    funcNode.name.value, funcNode.receiver.type.toString());
            return false;
    }
    if (!this.env.enclPkg.symbol.pkgID.equals(funcNode.receiver.type.tsymbol.pkgID)) {
        dlog.error(funcNode.receiver.pos, DiagnosticCode.FUNC_DEFINED_ON_NON_LOCAL_TYPE,
                funcNode.name.value, funcNode.receiver.type.toString());
        return false;
    }
    return true;
}
// Returns the symbol name for a function: mangled with the receiver type's name
// for attached functions, plain otherwise.
private Name getFuncSymbolName(BLangFunction funcNode) {
    if (funcNode.receiver == null) {
        return names.fromIdNode(funcNode.name);
    }
    String attachedName = Symbols.getAttachedFuncSymbolName(
            funcNode.receiver.type.tsymbol.name.value, funcNode.name.value);
    return names.fromString(attachedName);
}
// Returns the mangled symbol name for a field, qualified by the receiver type's name.
private Name getFieldSymbolName(BLangSimpleVariable receiver, BLangSimpleVariable variable) {
    String mangled = Symbols.getAttachedFuncSymbolName(
            receiver.type.tsymbol.name.value, variable.name.value);
    return names.fromString(mangled);
}
// Converts a markdown documentation node into a MarkdownDocAttachment.
// A missing doc node yields an empty attachment (never null).
private MarkdownDocAttachment getMarkdownDocAttachment(BLangMarkdownDocumentation docNode) {
    MarkdownDocAttachment attachment = new MarkdownDocAttachment();
    if (docNode == null) {
        return attachment;
    }
    attachment.description = docNode.getDocumentation();
    docNode.getParameters().forEach(param -> attachment.parameters.add(
            new MarkdownDocAttachment.Parameter(param.parameterName.value,
                    param.getParameterDocumentation())));
    attachment.returnValueDescription = docNode.getReturnParameterDocumentation();
    return attachment;
}
/**
 * Resolves each type reference of a structure and collects the fields it
 * contributes. Duplicate references, non-abstract-object references inside
 * objects, and non-record references inside records are rejected with errors
 * and contribute no fields.
 */
private void resolveReferencedFields(BLangStructureTypeNode structureTypeNode, SymbolEnv typeDefEnv) {
    // Tracks already-seen referenced type symbols to reject duplicates.
    List<BSymbol> referencedTypes = new ArrayList<>();
    structureTypeNode.referencedFields = structureTypeNode.typeRefs.stream().flatMap(typeRef -> {
        BType referredType = symResolver.resolveTypeNode(typeRef, typeDefEnv);
        if (referredType == symTable.semanticError) {
            return Stream.empty();
        }
        if (referencedTypes.contains(referredType.tsymbol)) {
            dlog.error(typeRef.pos, DiagnosticCode.REDECLARED_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        // Objects may only reference abstract objects.
        if (structureTypeNode.type.tag == TypeTags.OBJECT && (referredType.tag != TypeTags.OBJECT || !Symbols
                .isFlagOn(referredType.tsymbol.flags, Flags.ABSTRACT))) {
            dlog.error(typeRef.pos, DiagnosticCode.INCOMPATIBLE_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        // Records may only reference records.
        if (structureTypeNode.type.tag == TypeTags.RECORD && referredType.tag != TypeTags.RECORD) {
            dlog.error(typeRef.pos, DiagnosticCode.INCOMPATIBLE_RECORD_TYPE_REFERENCE, typeRef);
            return Stream.empty();
        }
        referencedTypes.add(referredType.tsymbol);
        // Materialize the referenced type's fields as variables at the reference site.
        return ((BStructureType) referredType).fields.stream().map(field -> {
            BLangSimpleVariable var = ASTBuilderUtil.createVariable(typeRef.pos, field.name.value, field.type);
            var.flagSet = field.symbol.getFlags();
            return var;
        });
    }).collect(Collectors.toList());
}
private void defineReferencedFunction(BLangTypeDefinition typeDef, SymbolEnv objEnv, BLangType typeRef,
BAttachedFunction function) {
Name funcName = names.fromString(
Symbols.getAttachedFuncSymbolName(typeDef.symbol.name.value, function.funcName.value));
BSymbol foundSymbol = symResolver.lookupSymbol(objEnv, funcName, SymTag.VARIABLE);
if (foundSymbol != symTable.notFoundSymbol) {
if (Symbols.isFlagOn(foundSymbol.flags, Flags.INTERFACE) &&
Symbols.isFlagOn(function.symbol.flags, Flags.INTERFACE)) {
dlog.error(typeRef.pos, DiagnosticCode.REDECLARED_FUNCTION_FROM_TYPE_REFERENCE, function.funcName,
typeRef);
}
return;
}
BInvokableSymbol funcSymbol = ASTBuilderUtil.duplicateInvokableSymbol(function.symbol, typeDef.symbol, funcName,
typeDef.symbol.pkgID);
defineSymbol(typeRef.pos, funcSymbol, objEnv);
SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(null, funcSymbol.scope, objEnv);
funcSymbol.params.forEach(param -> defineSymbol(typeRef.pos, param, funcEnv));
funcSymbol.defaultableParams.forEach(param -> defineSymbol(typeRef.pos, param, funcEnv));
if (funcSymbol.restParam != null) {
defineSymbol(typeRef.pos, funcSymbol.restParam, funcEnv);
}
funcSymbol.receiverSymbol =
defineVarSymbol(typeDef.pos, typeDef.flagSet, typeDef.symbol.type, Names.SELF, funcEnv);
BAttachedFunction attachedFunc =
new BAttachedFunction(function.funcName, funcSymbol, (BInvokableType) funcSymbol.type);
((BObjectTypeSymbol) typeDef.symbol).attachedFuncs.add(attachedFunc);
((BObjectTypeSymbol) typeDef.symbol).referencedFunctions.add(attachedFunc);
}
private void defineInitFunctionParam(BLangSimpleVariable varNode) {
Name varName = names.fromIdNode(varNode.name);
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) env.enclType;
BTypeSymbol objectTypeSumbol = objectTypeNode.type.tsymbol;
BSymbol fieldSymbol = symResolver.resolveObjectField(varNode.pos, env, varName, objectTypeSumbol);
if (fieldSymbol == symTable.notFoundSymbol) {
dlog.error(varNode.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD, varName,
env.enclType.type.getKind().typeName(), env.enclType.type.tsymbol.name);
}
varNode.type = fieldSymbol.type;
BVarSymbol paramSymbol;
if (fieldSymbol.kind == SymbolKind.FUNCTION) {
paramSymbol = ASTBuilderUtil.duplicateInvokableSymbol((BInvokableSymbol) fieldSymbol,
objectTypeNode.initFunction.symbol, fieldSymbol.name, objectTypeSumbol.pkgID);
} else {
paramSymbol = new BVarSymbol(Flags.asMask(varNode.flagSet), varName, env.enclPkg.symbol.pkgID, varNode.type,
env.scope.owner);
}
defineShadowedSymbol(varNode.pos, paramSymbol, env);
objectTypeNode.initFunction.initFunctionStmts.put(fieldSymbol,
(BLangStatement) createAssignmentStmt(varNode, paramSymbol, fieldSymbol));
varNode.symbol = paramSymbol;
return;
}
/**
* Used to store location data for encountered unknown types in `checkErrors` method.
*
* @since 0.985.0
*/
class LocationData {
private String name;
private int row;
private int column;
LocationData(String name, int row, int column) {
this.name = name;
this.row = row;
this.column = column;
}
@Override
public boolean equals(Object o) {
if (!(o instanceof LocationData)) {
return false;
}
LocationData data = (LocationData) o;
return name.equals(data.name) && row == data.row && column == data.column;
}
}
} |
@geoand is it the expected behavior? I'm surprised that RestEasy Reactive does not decode the path parameter. | void shouldDetermineUrlViaStorkWhenUsingTarget() throws URISyntaxException {
String greeting = ClientBuilder.newClient().target("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target(new URI("stork:
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target(UriBuilder.fromUri("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, big%20bird");
} | assertThat(greeting).isEqualTo("Hello, big%20bird"); | void shouldDetermineUrlViaStorkWhenUsingTarget() throws URISyntaxException {
String greeting = ClientBuilder.newClient().target("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target(new URI("stork:
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target(UriBuilder.fromUri("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, World!");
greeting = ClientBuilder.newClient().target("stork:
.get(String.class);
assertThat(greeting).isEqualTo("Hello, big bird");
} | class StorkIntegrationTest {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(HelloClient.class, HelloResource.class))
.withConfigurationResource("stork-application.properties");
@RestClient
HelloClient client;
@Test
void shouldDetermineUrlViaStork() {
String greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork:
.build(HelloClient.class)
.echo("black and white bird");
assertThat(greeting).isEqualTo("hello, black and white bird");
greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork:
.build(HelloClient.class)
.helloWithPathParam("black and white bird");
assertThat(greeting).isEqualTo("Hello, black%20and%20white%20bird");
}
@Test
@Test
void shouldDetermineUrlViaStorkCDI() {
String greeting = client.echo("big bird");
assertThat(greeting).isEqualTo("hello, big bird");
greeting = client.helloWithPathParam("big bird");
assertThat(greeting).isEqualTo("Hello, big%20bird");
}
@Test
@Timeout(20)
void shouldFailOnUnknownService() {
HelloClient client = RestClientBuilder.newBuilder()
.baseUri(URI.create("stork:
.build(HelloClient.class);
assertThatThrownBy(() -> client.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class);
}
} | class StorkIntegrationTest {
@RegisterExtension
static final QuarkusUnitTest TEST = new QuarkusUnitTest()
.withApplicationRoot((jar) -> jar
.addClasses(HelloClient.class, HelloResource.class))
.withConfigurationResource("stork-application.properties");
@RestClient
HelloClient client;
@Test
void shouldDetermineUrlViaStork() {
String greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork:
.build(HelloClient.class)
.echo("black and white bird");
assertThat(greeting).isEqualTo("hello, black and white bird");
greeting = RestClientBuilder.newBuilder().baseUri(URI.create("stork:
.build(HelloClient.class)
.helloWithPathParam("black and white bird");
assertThat(greeting).isEqualTo("Hello, black and white bird");
}
@Test
@Test
void shouldDetermineUrlViaStorkCDI() {
String greeting = client.echo("big bird");
assertThat(greeting).isEqualTo("hello, big bird");
greeting = client.helloWithPathParam("big bird");
assertThat(greeting).isEqualTo("Hello, big bird");
}
@Test
@Timeout(20)
void shouldFailOnUnknownService() {
HelloClient client = RestClientBuilder.newBuilder()
.baseUri(URI.create("stork:
.build(HelloClient.class);
assertThatThrownBy(() -> client.echo("foo")).isInstanceOf(NoSuchServiceDefinitionException.class);
}
} |
The only thing I ask you @mkouba to think about is that in logging, when receiving null in the handler, to send to the log and `Thread.currentThread().getStackTrace()` so that you can roughly understand which extension had an error. Please think about it and let me know | public StartupContext() {
ShutdownContext shutdownContext = new ShutdownContext() {
@Override
public void addShutdownTask(Runnable runnable) {
if (Objects.nonNull(runnable)) {
shutdownTasks.addFirst(runnable);
} else {
LOG.warn("Shutdown task turned out to be null!");
}
}
@Override
public void addLastShutdownTask(Runnable runnable) {
if (Objects.nonNull(runnable)) {
lastShutdownTasks.addFirst(runnable);
} else {
LOG.warn("Last shutdown task turned out to be null!");
}
}
};
values.put(ShutdownContext.class.getName(), shutdownContext);
values.put(RAW_COMMAND_LINE_ARGS, new Supplier<String[]>() {
@Override
public String[] get() {
if (commandLineArgs == null) {
throw new RuntimeException("Command line arguments not available during static init");
}
return commandLineArgs;
}
});
} | if (Objects.nonNull(runnable)) { | public StartupContext() {
ShutdownContext shutdownContext = new ShutdownContext() {
@Override
public void addShutdownTask(Runnable runnable) {
if (runnable != null) {
shutdownTasks.addFirst(runnable);
} else {
throw new IllegalArgumentException("Extension passed an invalid shutdown handler");
}
}
@Override
public void addLastShutdownTask(Runnable runnable) {
if (runnable != null) {
lastShutdownTasks.addFirst(runnable);
} else {
throw new IllegalArgumentException("Extension passed an invalid last shutdown handler");
}
}
};
values.put(ShutdownContext.class.getName(), shutdownContext);
values.put(RAW_COMMAND_LINE_ARGS, new Supplier<String[]>() {
@Override
public String[] get() {
if (commandLineArgs == null) {
throw new RuntimeException("Command line arguments not available during static init");
}
return commandLineArgs;
}
});
} | class StartupContext implements Closeable {
public static final String RAW_COMMAND_LINE_ARGS = StartupContext.class.getName() + ".raw-command-line-args";
private static final Logger LOG = Logger.getLogger(StartupContext.class);
private final Map<String, Object> values = new HashMap<>();
private Object lastValue;
private boolean lastValueSet = false;
private final Deque<Runnable> shutdownTasks = new ConcurrentLinkedDeque<>();
private final Deque<Runnable> lastShutdownTasks = new ConcurrentLinkedDeque<>();
private String[] commandLineArgs;
private String currentBuildStepName;
public void putValue(String name, Object value) {
values.put(name, value);
lastValueSet = true;
this.lastValue = value;
}
public Object getValue(String name) {
return values.get(name);
}
public Object getLastValue() {
return lastValue;
}
public boolean isLastValueSet() {
return lastValueSet;
}
@Override
public void close() {
runAll(shutdownTasks);
runAll(lastShutdownTasks);
}
private void runAll(Deque<Runnable> tasks) {
while (!tasks.isEmpty()) {
try {
var runnable = tasks.remove();
runnable.run();
} catch (Throwable ex) {
LOG.error("Running a shutdown task failed", ex);
}
}
}
@SuppressWarnings("unused")
public void setCommandLineArguments(String[] commandLineArguments) {
this.commandLineArgs = commandLineArguments;
}
@SuppressWarnings("unused")
public String getCurrentBuildStepName() {
return currentBuildStepName;
}
@SuppressWarnings("unused")
public void setCurrentBuildStepName(String currentBuildStepName) {
this.currentBuildStepName = currentBuildStepName;
}
} | class StartupContext implements Closeable {
public static final String RAW_COMMAND_LINE_ARGS = StartupContext.class.getName() + ".raw-command-line-args";
private static final Logger LOG = Logger.getLogger(StartupContext.class);
private final Map<String, Object> values = new HashMap<>();
private Object lastValue;
private boolean lastValueSet = false;
private final Deque<Runnable> shutdownTasks = new ConcurrentLinkedDeque<>();
private final Deque<Runnable> lastShutdownTasks = new ConcurrentLinkedDeque<>();
private String[] commandLineArgs;
private String currentBuildStepName;
public void putValue(String name, Object value) {
values.put(name, value);
lastValueSet = true;
this.lastValue = value;
}
public Object getValue(String name) {
return values.get(name);
}
public Object getLastValue() {
return lastValue;
}
public boolean isLastValueSet() {
return lastValueSet;
}
@Override
public void close() {
runAllAndClear(shutdownTasks);
runAllAndClear(lastShutdownTasks);
}
private void runAllAndClear(Deque<Runnable> tasks) {
while (!tasks.isEmpty()) {
try {
var runnable = tasks.remove();
runnable.run();
} catch (Throwable ex) {
LOG.error("Running a shutdown task failed", ex);
}
}
}
@SuppressWarnings("unused")
public void setCommandLineArguments(String[] commandLineArguments) {
this.commandLineArgs = commandLineArguments;
}
@SuppressWarnings("unused")
public String getCurrentBuildStepName() {
return currentBuildStepName;
}
@SuppressWarnings("unused")
public void setCurrentBuildStepName(String currentBuildStepName) {
this.currentBuildStepName = currentBuildStepName;
}
} |
The builder methods can be chained instead. ```java ServiceBusProcessorClient processor = new ServiceBusClientBuilder() .connectionString(connectionString) .processor() .queueName(queueName) .processMessage(System.out::println) .processError(context -> System.err.println(context.getErrorSource())) .buildProcessorClient(); ``` | public void instantiateProcessor() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusProcessorClient processor = builder
.processor()
.queueName(queueName)
.processMessage(System.out::println)
.processError(context -> System.err.println(context.getErrorSource()))
.buildProcessorClient();
processor.start();
processor.stop();
} | ServiceBusProcessorClient processor = builder | public void instantiateProcessor() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusProcessorClient processor = builder
.processor()
.queueName(queueName)
.processMessage(System.out::println)
.processError(context -> System.err.println(context.getErrorSource()))
.buildProcessorClient();
processor.start();
processor.stop();
} | class ServiceBusClientBuilderJavaDocCodeSamples {
String connectionString = System.getenv("AZURE_SERVICEBUS_NAMESPACE_CONNECTION_STRING");
String queueName = System.getenv("AZURE_SERVICEBUS_SAMPLE_QUEUE_NAME");
String topicName = System.getenv("AZURE_SERVICEBUS_SAMPLE_TOPIC_NAME");
String subscriptionName = System.getenv("AZURE_SERVICEBUS_SAMPLE_SUBSCRIPTION_NAME");
@Test
public void instantiateReceiverAsync() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusReceiverAsyncClient receiver = builder
.receiver()
.disableAutoComplete()
.topicName(topicName)
.subscriptionName(subscriptionName)
.buildAsyncClient();
receiver.receiveMessages().blockFirst(Duration.ofSeconds(1));
}
public void instantiateSessionReceiver() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusSessionReceiverAsyncClient sessionReceiver = builder
.sessionReceiver()
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.topicName(topicName)
.subscriptionName(subscriptionName)
.buildAsyncClient();
Mono<ServiceBusReceiverAsyncClient> receiverMono = sessionReceiver.acceptNextSession();
Disposable subscription = Flux.usingWhen(receiverMono,
receiver -> receiver.receiveMessages(),
receiver -> Mono.fromRunnable(receiver::close))
.subscribe(message -> System.out.println(message.getBody().toString()));
subscription.dispose();
}
@Test
public void instantiateSenderSync() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusSenderClient sender = builder
.sender()
.queueName(queueName)
.buildClient();
sender.sendMessage(new ServiceBusMessage("payload"));
}
@Test
@Test
public void connectionSharingAcrossClients() {
ServiceBusClientBuilder sharedConnectionBuilder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusReceiverClient receiver = sharedConnectionBuilder
.receiver()
.queueName(queueName)
.buildClient();
ServiceBusSenderClient sender = sharedConnectionBuilder
.sender()
.queueName(queueName)
.buildClient();
sender.sendMessage(new ServiceBusMessage("payload"));
receiver.receiveMessages(1);
}
} | class ServiceBusClientBuilderJavaDocCodeSamples {
String connectionString = System.getenv("AZURE_SERVICEBUS_NAMESPACE_CONNECTION_STRING");
String queueName = System.getenv("AZURE_SERVICEBUS_SAMPLE_QUEUE_NAME");
String topicName = System.getenv("AZURE_SERVICEBUS_SAMPLE_TOPIC_NAME");
String subscriptionName = System.getenv("AZURE_SERVICEBUS_SAMPLE_SUBSCRIPTION_NAME");
@Test
public void instantiateReceiverAsync() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusReceiverAsyncClient receiver = builder
.receiver()
.disableAutoComplete()
.topicName(topicName)
.subscriptionName(subscriptionName)
.buildAsyncClient();
receiver.receiveMessages().blockFirst(Duration.ofSeconds(1));
}
public void instantiateSessionReceiver() {
ServiceBusSessionReceiverAsyncClient sessionReceiver = new ServiceBusClientBuilder()
.connectionString(connectionString)
.sessionReceiver()
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.topicName(topicName)
.subscriptionName(subscriptionName)
.buildAsyncClient();
Mono<ServiceBusReceiverAsyncClient> receiverMono = sessionReceiver.acceptNextSession();
Flux.usingWhen(receiverMono,
receiver -> receiver.receiveMessages(),
receiver -> Mono.fromRunnable(receiver::close))
.subscribe(message -> System.out.println(message.getBody().toString()));
}
@Test
public void instantiateSenderSync() {
ServiceBusClientBuilder builder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusSenderClient sender = builder
.sender()
.queueName(queueName)
.buildClient();
sender.sendMessage(new ServiceBusMessage("payload"));
}
@Test
@Test
public void connectionSharingAcrossClients() {
ServiceBusClientBuilder sharedConnectionBuilder = new ServiceBusClientBuilder()
.connectionString(connectionString);
ServiceBusReceiverClient receiver = sharedConnectionBuilder
.receiver()
.queueName(queueName)
.buildClient();
ServiceBusSenderClient sender = sharedConnectionBuilder
.sender()
.queueName(queueName)
.buildClient();
sender.sendMessage(new ServiceBusMessage("payload"));
receiver.receiveMessages(1);
}
} |
Sounds reasonable. I'll change it. | DevConsoleRouteBuildItem handlePost() {
return new DevConsoleRouteBuildItem("config", "POST", new DevConsolePostHandler() {
@Override
protected void handlePost(RoutingContext event, MultiMap form) throws Exception {
String key = event.request().getFormAttribute("name");
String value = event.request().getFormAttribute("value");
List<Path> resourcesDir = DevConsoleManager.getHotReplacementContext().getResourcesDir();
if (resourcesDir.isEmpty()) {
throw new IllegalStateException("Unable to manage configurations - no resource directory found");
}
Path path = resourcesDir.get(0);
Path configPath = path.resolve("application.properties");
if (!Files.exists(configPath)) {
configPath = Files.createFile(path.resolve("application.properties"));
}
String profileKey = "%" + ProfileManager.getActiveProfile() + "." + key;
List<String> lines = Files.readAllLines(configPath);
int profileKeyLine = -1;
int generatedLine = -1;
for (int i = 0, linesSize = lines.size(); i < linesSize; i++) {
final String line = lines.get(i);
if (line.startsWith(profileKey)) {
profileKeyLine = i;
}
if (line.startsWith("
generatedLine = i;
}
if (profileKeyLine != -1 && generatedLine != -1) {
break;
}
}
if (profileKeyLine != -1) {
lines.set(profileKeyLine, profileKey + "=" + value);
} else if (generatedLine != -1) {
lines.add(generatedLine + 1, profileKey + "=" + value);
} else {
lines.add("");
lines.add("
lines.add(profileKey + "=" + value);
}
try (BufferedWriter writer = Files.newBufferedWriter(configPath)) {
for (String i : lines) {
writer.write(i);
writer.newLine();
}
}
DevConsoleManager.getHotReplacementContext().doScan(true);
flashMessage(event, "Configuration updated");
}
});
} | lines.add(generatedLine + 1, profileKey + "=" + value); | DevConsoleRouteBuildItem handlePost() {
return new DevConsoleRouteBuildItem("config", "POST", new DevConsolePostHandler() {
@Override
protected void handlePost(RoutingContext event, MultiMap form) throws Exception {
String name = event.request().getFormAttribute("name");
String value = event.request().getFormAttribute("value");
List<Path> resourcesDir = DevConsoleManager.getHotReplacementContext().getResourcesDir();
if (resourcesDir.isEmpty()) {
throw new IllegalStateException("Unable to manage configurations - no resource directory found");
}
Path path = resourcesDir.get(0);
Path configPath = path.resolve("application.properties");
if (!Files.exists(configPath)) {
configPath = Files.createFile(path.resolve("application.properties"));
}
String profile = ProfileManager.getActiveProfile();
name = !profile.equals(DEVELOPMENT.getDefaultProfile()) ? "%" + profile + "." + name : name;
List<String> lines = Files.readAllLines(configPath);
int nameLine = -1;
for (int i = 0, linesSize = lines.size(); i < linesSize; i++) {
final String line = lines.get(i);
if (line.startsWith(name + "=")) {
nameLine = i;
break;
}
}
if (nameLine != -1) {
if (value.isEmpty()) {
lines.remove(nameLine);
} else {
lines.set(nameLine, name + "=" + value);
}
} else {
if (!value.isEmpty()) {
lines.add(name + "=" + value);
}
}
try (BufferedWriter writer = Files.newBufferedWriter(configPath)) {
for (String i : lines) {
writer.write(i);
writer.newLine();
}
}
DevConsoleManager.getHotReplacementContext().doScan(true);
flashMessage(event, "Configuration updated");
}
});
} | class ConfigEditorProcessor {
@BuildStep(onlyIf = IsDevelopment.class)
@Record(ExecutionTime.RUNTIME_INIT)
public DevConsoleRuntimeTemplateInfoBuildItem config(ConfigRecorder recorder,
List<ConfigDescriptionBuildItem> configDescriptionBuildItems) {
List<ConfigDescription> configDescriptions = new ArrayList<>();
for (ConfigDescriptionBuildItem item : configDescriptionBuildItems) {
configDescriptions.add(
new ConfigDescription(item.getPropertyName(), item.getDocs(), item.getDefaultValue()));
}
return new DevConsoleRuntimeTemplateInfoBuildItem("config", new ConfigDescriptionsSupplier(configDescriptions));
}
@BuildStep
} | class ConfigEditorProcessor {
@BuildStep(onlyIf = IsDevelopment.class)
@Record(ExecutionTime.RUNTIME_INIT)
public DevConsoleRuntimeTemplateInfoBuildItem config(ConfigRecorder recorder,
List<ConfigDescriptionBuildItem> configDescriptionBuildItems) {
List<ConfigDescription> configDescriptions = new ArrayList<>();
for (ConfigDescriptionBuildItem item : configDescriptionBuildItems) {
configDescriptions.add(
new ConfigDescription(item.getPropertyName(), item.getDocs(), item.getDefaultValue()));
}
return new DevConsoleRuntimeTemplateInfoBuildItem("config", new ConfigDescriptionsSupplier(configDescriptions));
}
@BuildStep
} |
Why cannot change the parallelism of a job running in reactive mode | public static RestHandlerConfiguration fromConfiguration(Configuration configuration) {
final long refreshInterval = configuration.getLong(WebOptions.REFRESH_INTERVAL);
final int maxCheckpointStatisticCacheEntries =
configuration.getInteger(WebOptions.CHECKPOINTS_HISTORY_SIZE);
final Time timeout = Time.milliseconds(configuration.getLong(WebOptions.TIMEOUT));
final String rootDir = "flink-web-ui";
final File webUiDir = new File(configuration.getString(WebOptions.TMP_DIR), rootDir);
final boolean webSubmitEnabled = configuration.getBoolean(WebOptions.SUBMIT_ENABLE);
final boolean webCancelEnabled = configuration.getBoolean(WebOptions.CANCEL_ENABLE);
final boolean webRescaleSupported =
ClusterOptions.isAdaptiveSchedulerEnabled(configuration)
&& !ClusterOptions.isReactiveModeEnabled(configuration);
final boolean webRescaleEnabled =
webRescaleSupported && configuration.getBoolean(WebOptions.RESCALE_ENABLE);
return new RestHandlerConfiguration(
refreshInterval,
maxCheckpointStatisticCacheEntries,
timeout,
webUiDir,
webSubmitEnabled,
webCancelEnabled,
webRescaleEnabled);
} | && !ClusterOptions.isReactiveModeEnabled(configuration); | public static RestHandlerConfiguration fromConfiguration(Configuration configuration) {
final long refreshInterval = configuration.getLong(WebOptions.REFRESH_INTERVAL);
final int maxCheckpointStatisticCacheEntries =
configuration.getInteger(WebOptions.CHECKPOINTS_HISTORY_SIZE);
final Time timeout = Time.milliseconds(configuration.getLong(WebOptions.TIMEOUT));
final String rootDir = "flink-web-ui";
final File webUiDir = new File(configuration.getString(WebOptions.TMP_DIR), rootDir);
final boolean webSubmitEnabled = configuration.getBoolean(WebOptions.SUBMIT_ENABLE);
final boolean webCancelEnabled = configuration.getBoolean(WebOptions.CANCEL_ENABLE);
final boolean webRescaleSupported =
ClusterOptions.isAdaptiveSchedulerEnabled(configuration)
&& !ClusterOptions.isReactiveModeEnabled(configuration);
final boolean webRescaleEnabled =
webRescaleSupported && configuration.getBoolean(WebOptions.RESCALE_ENABLE);
return new RestHandlerConfiguration(
refreshInterval,
maxCheckpointStatisticCacheEntries,
timeout,
webUiDir,
webSubmitEnabled,
webCancelEnabled,
webRescaleEnabled);
} | class RestHandlerConfiguration {
private final long refreshInterval;
private final int maxCheckpointStatisticCacheEntries;
private final Time timeout;
private final File webUiDir;
private final boolean webSubmitEnabled;
private final boolean webCancelEnabled;
private final boolean webRescaleEnabled;
public RestHandlerConfiguration(
long refreshInterval,
int maxCheckpointStatisticCacheEntries,
Time timeout,
File webUiDir,
boolean webSubmitEnabled,
boolean webCancelEnabled,
boolean webRescaleEnabled) {
Preconditions.checkArgument(
refreshInterval > 0L, "The refresh interval (ms) should be larger than 0.");
this.refreshInterval = refreshInterval;
this.maxCheckpointStatisticCacheEntries = maxCheckpointStatisticCacheEntries;
this.timeout = Preconditions.checkNotNull(timeout);
this.webUiDir = Preconditions.checkNotNull(webUiDir);
this.webSubmitEnabled = webSubmitEnabled;
this.webCancelEnabled = webCancelEnabled;
this.webRescaleEnabled = webRescaleEnabled;
}
public long getRefreshInterval() {
return refreshInterval;
}
public int getMaxCheckpointStatisticCacheEntries() {
return maxCheckpointStatisticCacheEntries;
}
public Time getTimeout() {
return timeout;
}
public File getWebUiDir() {
return webUiDir;
}
public boolean isWebSubmitEnabled() {
return webSubmitEnabled;
}
public boolean isWebCancelEnabled() {
return webCancelEnabled;
}
public boolean isWebRescaleEnabled() {
return webRescaleEnabled;
}
} | class RestHandlerConfiguration {
private final long refreshInterval;
private final int maxCheckpointStatisticCacheEntries;
private final Time timeout;
private final File webUiDir;
private final boolean webSubmitEnabled;
private final boolean webCancelEnabled;
private final boolean webRescaleEnabled;
public RestHandlerConfiguration(
long refreshInterval,
int maxCheckpointStatisticCacheEntries,
Time timeout,
File webUiDir,
boolean webSubmitEnabled,
boolean webCancelEnabled,
boolean webRescaleEnabled) {
Preconditions.checkArgument(
refreshInterval > 0L, "The refresh interval (ms) should be larger than 0.");
this.refreshInterval = refreshInterval;
this.maxCheckpointStatisticCacheEntries = maxCheckpointStatisticCacheEntries;
this.timeout = Preconditions.checkNotNull(timeout);
this.webUiDir = Preconditions.checkNotNull(webUiDir);
this.webSubmitEnabled = webSubmitEnabled;
this.webCancelEnabled = webCancelEnabled;
this.webRescaleEnabled = webRescaleEnabled;
}
public long getRefreshInterval() {
return refreshInterval;
}
public int getMaxCheckpointStatisticCacheEntries() {
return maxCheckpointStatisticCacheEntries;
}
public Time getTimeout() {
return timeout;
}
public File getWebUiDir() {
return webUiDir;
}
public boolean isWebSubmitEnabled() {
return webSubmitEnabled;
}
public boolean isWebCancelEnabled() {
return webCancelEnabled;
}
public boolean isWebRescaleEnabled() {
return webRescaleEnabled;
}
} |
Okay, thanks for the hint. Changed | void assertFind() {
ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration();
ShardingTableRuleConfiguration shardingTableRuleConfiguration = getShardingTableRuleConfiguration();
Map<String, AlgorithmConfiguration> allAlgorithms = getAlgorithms();
ruleConfig.getTables().add(shardingTableRuleConfiguration);
ruleConfig.getShardingAlgorithms().putAll(allAlgorithms);
ruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM));
ruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_DEFAULT_ALGORITHM));
Collection<String> unusedAlgorithmsCollection = UnusedAlgorithmFinder.find(ruleConfig);
assertNotNull(unusedAlgorithmsCollection);
assertThat(unusedAlgorithmsCollection.size(), is(1));
assertTrue(unusedAlgorithmsCollection.contains(UNUSED_ALGORITHM));
} | Map<String, AlgorithmConfiguration> allAlgorithms = getAlgorithms(); | void assertFind() {
ShardingRuleConfiguration ruleConfig = new ShardingRuleConfiguration();
ShardingTableRuleConfiguration shardingTableRuleConfig = getShardingTableRuleConfiguration();
ruleConfig.getTables().add(shardingTableRuleConfig);
ruleConfig.getShardingAlgorithms().putAll(getAlgorithms());
ruleConfig.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_DEFAULT_ALGORITHM));
ruleConfig.setDefaultTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_DEFAULT_ALGORITHM));
Collection<String> actual = UnusedAlgorithmFinder.find(ruleConfig);
assertNotNull(actual);
assertThat(actual.size(), is(1));
assertTrue(actual.contains(UNUSED_ALGORITHM));
} | class UnusedAlgorithmFinderTest {
private static final String USED_TABLE_SHARDING_ALGORITHM = "used_table_sharding_algorithm";
private static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = "used_table_sharding_default_algorithm";
private static final String USED_DATABASE_SHARDING_ALGORITHM = "used_database_sharding_algorithm";
private static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = "used_database_sharding_default_algorithm";
private static final String UNUSED_ALGORITHM = "unused_algorithm";
@Test
private ShardingTableRuleConfiguration getShardingTableRuleConfiguration() {
ShardingTableRuleConfiguration shardingTableRuleConfiguration = new ShardingTableRuleConfiguration("t_order", null);
shardingTableRuleConfiguration.setTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_ALGORITHM));
shardingTableRuleConfiguration.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_ALGORITHM));
return shardingTableRuleConfiguration;
}
private Map<String, AlgorithmConfiguration> getAlgorithms() {
return ImmutableMap.of(
USED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
UNUSED_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()));
}
} | class UnusedAlgorithmFinderTest {
private static final String USED_TABLE_SHARDING_ALGORITHM = "used_table_sharding_algorithm";
private static final String USED_TABLE_SHARDING_DEFAULT_ALGORITHM = "used_table_sharding_default_algorithm";
private static final String USED_DATABASE_SHARDING_ALGORITHM = "used_database_sharding_algorithm";
private static final String USED_DATABASE_SHARDING_DEFAULT_ALGORITHM = "used_database_sharding_default_algorithm";
private static final String UNUSED_ALGORITHM = "unused_algorithm";
@Test
private ShardingTableRuleConfiguration getShardingTableRuleConfiguration() {
ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration("t_order", null);
result.setTableShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_TABLE_SHARDING_ALGORITHM));
result.setDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", USED_DATABASE_SHARDING_ALGORITHM));
return result;
}
private Map<String, AlgorithmConfiguration> getAlgorithms() {
return ImmutableMap.of(
USED_DATABASE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_DATABASE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_TABLE_SHARDING_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
USED_TABLE_SHARDING_DEFAULT_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()),
UNUSED_ALGORITHM, new AlgorithmConfiguration("INLINE", new Properties()));
}
} |
@lirui-apache I thought that `SHOW PARTITIONS` is DQL. I will modify `getDQLOpExecuteErrorMsg` to `getDDLOpExecuteErrorMsg`. | private TableResult executeOperation(Operation operation) {
if (operation instanceof ModifyOperation) {
return executeInternal(Collections.singletonList((ModifyOperation) operation));
} else if (operation instanceof CreateTableOperation) {
CreateTableOperation createTableOperation = (CreateTableOperation) operation;
if (createTableOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropTableOperation) {
DropTableOperation dropTableOperation = (DropTableOperation) operation;
if (dropTableOperation.isTemporary()) {
catalogManager.dropTemporaryTable(
dropTableOperation.getTableIdentifier(),
dropTableOperation.isIfExists());
} else {
catalogManager.dropTable(
dropTableOperation.getTableIdentifier(),
dropTableOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterTableOperation) {
AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterTableOperation.getTableIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
try {
if (alterTableOperation instanceof AlterTableRenameOperation) {
AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getTableIdentifier().toObjectPath(),
alterTableRenameOp.getNewTableIdentifier().getObjectName(),
false);
} else if (alterTableOperation instanceof AlterTablePropertiesOperation) {
AlterTablePropertiesOperation alterTablePropertiesOp = (AlterTablePropertiesOperation) operation;
catalog.alterTable(
alterTablePropertiesOp.getTableIdentifier().toObjectPath(),
alterTablePropertiesOp.getCatalogTable(),
false);
} else if (alterTableOperation instanceof AlterTableAddConstraintOperation){
AlterTableAddConstraintOperation addConstraintOP =
(AlterTableAddConstraintOperation) operation;
CatalogTable oriTable = (CatalogTable) catalogManager
.getTable(addConstraintOP.getTableIdentifier())
.get()
.getTable();
TableSchema.Builder builder = TableSchemaUtils
.builderWithGivenSchema(oriTable.getSchema());
if (addConstraintOP.getConstraintName().isPresent()) {
builder.primaryKey(
addConstraintOP.getConstraintName().get(),
addConstraintOP.getColumnNames());
} else {
builder.primaryKey(addConstraintOP.getColumnNames());
}
CatalogTable newTable = new CatalogTableImpl(
builder.build(),
oriTable.getPartitionKeys(),
oriTable.getOptions(),
oriTable.getComment());
catalog.alterTable(
addConstraintOP.getTableIdentifier().toObjectPath(),
newTable,
false);
} else if (alterTableOperation instanceof AlterTableDropConstraintOperation){
AlterTableDropConstraintOperation dropConstraintOperation =
(AlterTableDropConstraintOperation) operation;
CatalogTable oriTable = (CatalogTable) catalogManager
.getTable(dropConstraintOperation.getTableIdentifier())
.get()
.getTable();
CatalogTable newTable = new CatalogTableImpl(
TableSchemaUtils.dropConstraint(
oriTable.getSchema(),
dropConstraintOperation.getConstraintName()),
oriTable.getPartitionKeys(),
oriTable.getOptions(),
oriTable.getComment());
catalog.alterTable(
dropConstraintOperation.getTableIdentifier().toObjectPath(),
newTable,
false);
} else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation;
catalog.alterPartition(alterPartPropsOp.getTableIdentifier().toObjectPath(),
alterPartPropsOp.getPartitionSpec(),
alterPartPropsOp.getCatalogPartition(),
false);
} else if (alterTableOperation instanceof AlterTableSchemaOperation) {
AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation;
catalog.alterTable(alterTableSchemaOperation.getTableIdentifier().toObjectPath(),
alterTableSchemaOperation.getCatalogTable(),
false);
} else if (alterTableOperation instanceof AddPartitionsOperation) {
AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation;
List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions();
boolean ifNotExists = addPartitionsOperation.ifNotExists();
ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath();
for (int i = 0; i < specs.size(); i++) {
catalog.createPartition(tablePath, specs.get(i), partitions.get(i), ifNotExists);
}
} else if (alterTableOperation instanceof DropPartitionsOperation) {
DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation;
ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath();
boolean ifExists = dropPartitionsOperation.ifExists();
for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
catalog.dropPartition(tablePath, spec, ifExists);
}
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateViewOperation) {
CreateViewOperation createViewOperation = (CreateViewOperation) operation;
if (createViewOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropViewOperation) {
DropViewOperation dropViewOperation = (DropViewOperation) operation;
if (dropViewOperation.isTemporary()) {
catalogManager.dropTemporaryView(
dropViewOperation.getViewIdentifier(),
dropViewOperation.isIfExists());
} else {
catalogManager.dropView(
dropViewOperation.getViewIdentifier(),
dropViewOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterViewOperation) {
AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterViewOperation.getViewIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
try {
if (alterViewOperation instanceof AlterViewRenameOperation) {
AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getViewIdentifier().toObjectPath(),
alterTableRenameOp.getNewViewIdentifier().getObjectName(),
false);
} else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation;
catalog.alterTable(
alterTablePropertiesOp.getViewIdentifier().toObjectPath(),
alterTablePropertiesOp.getCatalogView(),
false);
} else if (alterViewOperation instanceof AlterViewAsOperation) {
AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation;
catalog.alterTable(alterViewAsOperation.getViewIdentifier().toObjectPath(),
alterViewAsOperation.getNewView(),
false);
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateDatabaseOperation) {
CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
try {
catalog.createDatabase(
createDatabaseOperation.getDatabaseName(),
createDatabaseOperation.getCatalogDatabase(),
createDatabaseOperation.isIgnoreIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseAlreadyExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof DropDatabaseOperation) {
DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
try {
catalog.dropDatabase(
dropDatabaseOperation.getDatabaseName(),
dropDatabaseOperation.isIfExists(),
dropDatabaseOperation.isCascade());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof AlterDatabaseOperation) {
AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
try {
catalog.alterDatabase(
alterDatabaseOperation.getDatabaseName(),
alterDatabaseOperation.getCatalogDatabase(),
false);
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateCatalogFunctionOperation) {
return createCatalogFunction((CreateCatalogFunctionOperation) operation);
} else if (operation instanceof CreateTempSystemFunctionOperation) {
return createSystemFunction((CreateTempSystemFunctionOperation) operation);
} else if (operation instanceof DropCatalogFunctionOperation) {
return dropCatalogFunction((DropCatalogFunctionOperation) operation);
} else if (operation instanceof DropTempSystemFunctionOperation) {
return dropSystemFunction((DropTempSystemFunctionOperation) operation);
} else if (operation instanceof AlterCatalogFunctionOperation) {
return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
} else if (operation instanceof CreateCatalogOperation) {
return createCatalog((CreateCatalogOperation) operation);
} else if (operation instanceof DropCatalogOperation) {
DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
try {
catalogManager.unregisterCatalog(dropCatalogOperation.getCatalogName(),
dropCatalogOperation.isIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
} else if (operation instanceof UseCatalogOperation) {
UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof UseDatabaseOperation) {
UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof ShowCatalogsOperation) {
return buildShowResult("catalog name", listCatalogs());
} else if (operation instanceof ShowCurrentCatalogOperation){
return buildShowResult("current catalog name", new String[]{catalogManager.getCurrentCatalog()});
} else if (operation instanceof ShowDatabasesOperation) {
return buildShowResult("database name", listDatabases());
} else if (operation instanceof ShowCurrentDatabaseOperation) {
return buildShowResult("current database name", new String[]{catalogManager.getCurrentDatabase()});
} else if (operation instanceof ShowTablesOperation) {
return buildShowResult("table name", listTables());
} else if (operation instanceof ShowFunctionsOperation) {
return buildShowResult("function name", listFunctions());
} else if (operation instanceof ShowViewsOperation) {
return buildShowResult("view name", listViews());
} else if (operation instanceof ShowPartitionsOperation) {
String exMsg = getDQLOpExecuteErrorMsg(operation.asSummaryString());
try {
ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation;
Catalog catalog = getCatalogOrThrowException(showPartitionsOperation.getTableIdentifier().getCatalogName());
ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec);
List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
for (CatalogPartitionSpec spec: partitionSpecs) {
List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
for (Map.Entry<String, String> partitionKV: spec.getPartitionSpec().entrySet()) {
partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
}
partitionNames.add(String.join("/", partitionKVs));
}
return buildShowResult("partition name", partitionNames.toArray(new String[0]));
} catch (TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof ExplainOperation) {
String explanation = planner.explain(Collections.singletonList(((ExplainOperation) operation).getChild()));
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
.data(Collections.singletonList(Row.of(explanation)))
.setPrintStyle(TableResultImpl.PrintStyle.rawContent())
.build();
} else if (operation instanceof DescribeTableOperation) {
DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
Optional<CatalogManager.TableLookupResult> result =
catalogManager.getTable(describeTableOperation.getSqlIdentifier());
if (result.isPresent()) {
return buildDescribeResult(result.get().getResolvedSchema());
} else {
throw new ValidationException(String.format(
"Tables or views with the identifier '%s' doesn't exist",
describeTableOperation.getSqlIdentifier().asSummaryString()));
}
} else if (operation instanceof QueryOperation) {
return executeInternal((QueryOperation) operation);
} else {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
} | String exMsg = getDQLOpExecuteErrorMsg(operation.asSummaryString()); | private TableResult executeOperation(Operation operation) {
if (operation instanceof ModifyOperation) {
return executeInternal(Collections.singletonList((ModifyOperation) operation));
} else if (operation instanceof CreateTableOperation) {
CreateTableOperation createTableOperation = (CreateTableOperation) operation;
if (createTableOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createTableOperation.getCatalogTable(),
createTableOperation.getTableIdentifier(),
createTableOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropTableOperation) {
DropTableOperation dropTableOperation = (DropTableOperation) operation;
if (dropTableOperation.isTemporary()) {
catalogManager.dropTemporaryTable(
dropTableOperation.getTableIdentifier(),
dropTableOperation.isIfExists());
} else {
catalogManager.dropTable(
dropTableOperation.getTableIdentifier(),
dropTableOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterTableOperation) {
AlterTableOperation alterTableOperation = (AlterTableOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterTableOperation.getTableIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterTableOperation.asSummaryString());
try {
if (alterTableOperation instanceof AlterTableRenameOperation) {
AlterTableRenameOperation alterTableRenameOp = (AlterTableRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getTableIdentifier().toObjectPath(),
alterTableRenameOp.getNewTableIdentifier().getObjectName(),
false);
} else if (alterTableOperation instanceof AlterTablePropertiesOperation) {
AlterTablePropertiesOperation alterTablePropertiesOp = (AlterTablePropertiesOperation) operation;
catalog.alterTable(
alterTablePropertiesOp.getTableIdentifier().toObjectPath(),
alterTablePropertiesOp.getCatalogTable(),
false);
} else if (alterTableOperation instanceof AlterTableAddConstraintOperation){
AlterTableAddConstraintOperation addConstraintOP =
(AlterTableAddConstraintOperation) operation;
CatalogTable oriTable = (CatalogTable) catalogManager
.getTable(addConstraintOP.getTableIdentifier())
.get()
.getTable();
TableSchema.Builder builder = TableSchemaUtils
.builderWithGivenSchema(oriTable.getSchema());
if (addConstraintOP.getConstraintName().isPresent()) {
builder.primaryKey(
addConstraintOP.getConstraintName().get(),
addConstraintOP.getColumnNames());
} else {
builder.primaryKey(addConstraintOP.getColumnNames());
}
CatalogTable newTable = new CatalogTableImpl(
builder.build(),
oriTable.getPartitionKeys(),
oriTable.getOptions(),
oriTable.getComment());
catalog.alterTable(
addConstraintOP.getTableIdentifier().toObjectPath(),
newTable,
false);
} else if (alterTableOperation instanceof AlterTableDropConstraintOperation){
AlterTableDropConstraintOperation dropConstraintOperation =
(AlterTableDropConstraintOperation) operation;
CatalogTable oriTable = (CatalogTable) catalogManager
.getTable(dropConstraintOperation.getTableIdentifier())
.get()
.getTable();
CatalogTable newTable = new CatalogTableImpl(
TableSchemaUtils.dropConstraint(
oriTable.getSchema(),
dropConstraintOperation.getConstraintName()),
oriTable.getPartitionKeys(),
oriTable.getOptions(),
oriTable.getComment());
catalog.alterTable(
dropConstraintOperation.getTableIdentifier().toObjectPath(),
newTable,
false);
} else if (alterTableOperation instanceof AlterPartitionPropertiesOperation) {
AlterPartitionPropertiesOperation alterPartPropsOp = (AlterPartitionPropertiesOperation) operation;
catalog.alterPartition(alterPartPropsOp.getTableIdentifier().toObjectPath(),
alterPartPropsOp.getPartitionSpec(),
alterPartPropsOp.getCatalogPartition(),
false);
} else if (alterTableOperation instanceof AlterTableSchemaOperation) {
AlterTableSchemaOperation alterTableSchemaOperation = (AlterTableSchemaOperation) alterTableOperation;
catalog.alterTable(alterTableSchemaOperation.getTableIdentifier().toObjectPath(),
alterTableSchemaOperation.getCatalogTable(),
false);
} else if (alterTableOperation instanceof AddPartitionsOperation) {
AddPartitionsOperation addPartitionsOperation = (AddPartitionsOperation) alterTableOperation;
List<CatalogPartitionSpec> specs = addPartitionsOperation.getPartitionSpecs();
List<CatalogPartition> partitions = addPartitionsOperation.getCatalogPartitions();
boolean ifNotExists = addPartitionsOperation.ifNotExists();
ObjectPath tablePath = addPartitionsOperation.getTableIdentifier().toObjectPath();
for (int i = 0; i < specs.size(); i++) {
catalog.createPartition(tablePath, specs.get(i), partitions.get(i), ifNotExists);
}
} else if (alterTableOperation instanceof DropPartitionsOperation) {
DropPartitionsOperation dropPartitionsOperation = (DropPartitionsOperation) alterTableOperation;
ObjectPath tablePath = dropPartitionsOperation.getTableIdentifier().toObjectPath();
boolean ifExists = dropPartitionsOperation.ifExists();
for (CatalogPartitionSpec spec : dropPartitionsOperation.getPartitionSpecs()) {
catalog.dropPartition(tablePath, spec, ifExists);
}
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateViewOperation) {
CreateViewOperation createViewOperation = (CreateViewOperation) operation;
if (createViewOperation.isTemporary()) {
catalogManager.createTemporaryTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
} else {
catalogManager.createTable(
createViewOperation.getCatalogView(),
createViewOperation.getViewIdentifier(),
createViewOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof DropViewOperation) {
DropViewOperation dropViewOperation = (DropViewOperation) operation;
if (dropViewOperation.isTemporary()) {
catalogManager.dropTemporaryView(
dropViewOperation.getViewIdentifier(),
dropViewOperation.isIfExists());
} else {
catalogManager.dropView(
dropViewOperation.getViewIdentifier(),
dropViewOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof AlterViewOperation) {
AlterViewOperation alterViewOperation = (AlterViewOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterViewOperation.getViewIdentifier().getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterViewOperation.asSummaryString());
try {
if (alterViewOperation instanceof AlterViewRenameOperation) {
AlterViewRenameOperation alterTableRenameOp = (AlterViewRenameOperation) operation;
catalog.renameTable(
alterTableRenameOp.getViewIdentifier().toObjectPath(),
alterTableRenameOp.getNewViewIdentifier().getObjectName(),
false);
} else if (alterViewOperation instanceof AlterViewPropertiesOperation) {
AlterViewPropertiesOperation alterTablePropertiesOp = (AlterViewPropertiesOperation) operation;
catalog.alterTable(
alterTablePropertiesOp.getViewIdentifier().toObjectPath(),
alterTablePropertiesOp.getCatalogView(),
false);
} else if (alterViewOperation instanceof AlterViewAsOperation) {
AlterViewAsOperation alterViewAsOperation = (AlterViewAsOperation) alterViewOperation;
catalog.alterTable(alterViewAsOperation.getViewIdentifier().toObjectPath(),
alterViewAsOperation.getNewView(),
false);
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (TableAlreadyExistException | TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateDatabaseOperation) {
CreateDatabaseOperation createDatabaseOperation = (CreateDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(createDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(createDatabaseOperation.asSummaryString());
try {
catalog.createDatabase(
createDatabaseOperation.getDatabaseName(),
createDatabaseOperation.getCatalogDatabase(),
createDatabaseOperation.isIgnoreIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseAlreadyExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof DropDatabaseOperation) {
DropDatabaseOperation dropDatabaseOperation = (DropDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(dropDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(dropDatabaseOperation.asSummaryString());
try {
catalog.dropDatabase(
dropDatabaseOperation.getDatabaseName(),
dropDatabaseOperation.isIfExists(),
dropDatabaseOperation.isCascade());
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException | DatabaseNotEmptyException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof AlterDatabaseOperation) {
AlterDatabaseOperation alterDatabaseOperation = (AlterDatabaseOperation) operation;
Catalog catalog = getCatalogOrThrowException(alterDatabaseOperation.getCatalogName());
String exMsg = getDDLOpExecuteErrorMsg(alterDatabaseOperation.asSummaryString());
try {
catalog.alterDatabase(
alterDatabaseOperation.getDatabaseName(),
alterDatabaseOperation.getCatalogDatabase(),
false);
return TableResultImpl.TABLE_RESULT_OK;
} catch (DatabaseNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof CreateCatalogFunctionOperation) {
return createCatalogFunction((CreateCatalogFunctionOperation) operation);
} else if (operation instanceof CreateTempSystemFunctionOperation) {
return createSystemFunction((CreateTempSystemFunctionOperation) operation);
} else if (operation instanceof DropCatalogFunctionOperation) {
return dropCatalogFunction((DropCatalogFunctionOperation) operation);
} else if (operation instanceof DropTempSystemFunctionOperation) {
return dropSystemFunction((DropTempSystemFunctionOperation) operation);
} else if (operation instanceof AlterCatalogFunctionOperation) {
return alterCatalogFunction((AlterCatalogFunctionOperation) operation);
} else if (operation instanceof CreateCatalogOperation) {
return createCatalog((CreateCatalogOperation) operation);
} else if (operation instanceof DropCatalogOperation) {
DropCatalogOperation dropCatalogOperation = (DropCatalogOperation) operation;
String exMsg = getDDLOpExecuteErrorMsg(dropCatalogOperation.asSummaryString());
try {
catalogManager.unregisterCatalog(dropCatalogOperation.getCatalogName(),
dropCatalogOperation.isIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
} else if (operation instanceof UseCatalogOperation) {
UseCatalogOperation useCatalogOperation = (UseCatalogOperation) operation;
catalogManager.setCurrentCatalog(useCatalogOperation.getCatalogName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof UseDatabaseOperation) {
UseDatabaseOperation useDatabaseOperation = (UseDatabaseOperation) operation;
catalogManager.setCurrentCatalog(useDatabaseOperation.getCatalogName());
catalogManager.setCurrentDatabase(useDatabaseOperation.getDatabaseName());
return TableResultImpl.TABLE_RESULT_OK;
} else if (operation instanceof ShowCatalogsOperation) {
return buildShowResult("catalog name", listCatalogs());
} else if (operation instanceof ShowCurrentCatalogOperation){
return buildShowResult("current catalog name", new String[]{catalogManager.getCurrentCatalog()});
} else if (operation instanceof ShowDatabasesOperation) {
return buildShowResult("database name", listDatabases());
} else if (operation instanceof ShowCurrentDatabaseOperation) {
return buildShowResult("current database name", new String[]{catalogManager.getCurrentDatabase()});
} else if (operation instanceof ShowTablesOperation) {
return buildShowResult("table name", listTables());
} else if (operation instanceof ShowFunctionsOperation) {
return buildShowResult("function name", listFunctions());
} else if (operation instanceof ShowViewsOperation) {
return buildShowResult("view name", listViews());
} else if (operation instanceof ShowPartitionsOperation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
ShowPartitionsOperation showPartitionsOperation = (ShowPartitionsOperation) operation;
Catalog catalog = getCatalogOrThrowException(showPartitionsOperation.getTableIdentifier().getCatalogName());
ObjectPath tablePath = showPartitionsOperation.getTableIdentifier().toObjectPath();
CatalogPartitionSpec partitionSpec = showPartitionsOperation.getPartitionSpec();
List<CatalogPartitionSpec> partitionSpecs = partitionSpec == null ? catalog.listPartitions(tablePath) : catalog.listPartitions(tablePath, partitionSpec);
List<String> partitionNames = new ArrayList<>(partitionSpecs.size());
for (CatalogPartitionSpec spec: partitionSpecs) {
List<String> partitionKVs = new ArrayList<>(spec.getPartitionSpec().size());
for (Map.Entry<String, String> partitionKV: spec.getPartitionSpec().entrySet()) {
partitionKVs.add(partitionKV.getKey() + "=" + partitionKV.getValue());
}
partitionNames.add(String.join("/", partitionKVs));
}
return buildShowResult("partition name", partitionNames.toArray(new String[0]));
} catch (TableNotExistException e) {
throw new ValidationException(exMsg, e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
} else if (operation instanceof ExplainOperation) {
String explanation = planner.explain(Collections.singletonList(((ExplainOperation) operation).getChild()));
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(TableSchema.builder().field("result", DataTypes.STRING()).build())
.data(Collections.singletonList(Row.of(explanation)))
.setPrintStyle(TableResultImpl.PrintStyle.rawContent())
.build();
} else if (operation instanceof DescribeTableOperation) {
DescribeTableOperation describeTableOperation = (DescribeTableOperation) operation;
Optional<CatalogManager.TableLookupResult> result =
catalogManager.getTable(describeTableOperation.getSqlIdentifier());
if (result.isPresent()) {
return buildDescribeResult(result.get().getResolvedSchema());
} else {
throw new ValidationException(String.format(
"Tables or views with the identifier '%s' doesn't exist",
describeTableOperation.getSqlIdentifier().asSummaryString()));
}
} else if (operation instanceof QueryOperation) {
return executeInternal((QueryOperation) operation);
} else {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
} | class TableEnvironmentImpl implements TableEnvironmentInternal {
private static final boolean IS_STREAM_TABLE = true;
private final CatalogManager catalogManager;
private final ModuleManager moduleManager;
private final OperationTreeBuilder operationTreeBuilder;
private final List<ModifyOperation> bufferedModifyOperations = new ArrayList<>();
protected final TableConfig tableConfig;
protected final Executor execEnv;
protected final FunctionCatalog functionCatalog;
protected final Planner planner;
protected final Parser parser;
private final boolean isStreamingMode;
private final ClassLoader userClassLoader;
private static final String UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG =
"Unsupported SQL query! sqlUpdate() only accepts a single SQL statement of type " +
"INSERT, CREATE TABLE, DROP TABLE, ALTER TABLE, USE CATALOG, USE [CATALOG.]DATABASE, " +
"CREATE DATABASE, DROP DATABASE, ALTER DATABASE, CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, " +
"CREATE CATALOG, DROP CATALOG, CREATE VIEW, DROP VIEW.";
private static final String UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG =
"Unsupported SQL query! executeSql() only accepts a single SQL statement of type " +
"CREATE TABLE, DROP TABLE, ALTER TABLE, CREATE DATABASE, DROP DATABASE, ALTER DATABASE, " +
"CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, CREATE CATALOG, DROP CATALOG, " +
"USE CATALOG, USE [CATALOG.]DATABASE, SHOW CATALOGS, SHOW DATABASES, SHOW TABLES, SHOW FUNCTIONS, SHOW PARTITIONS" +
"CREATE VIEW, DROP VIEW, SHOW VIEWS, INSERT, DESCRIBE.";
/**
* Provides necessary methods for {@link ConnectTableDescriptor}.
*/
private final Registration registration = new Registration() {
@Override
public void createTemporaryTable(String path, CatalogBaseTable table) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(
unresolvedIdentifier);
catalogManager.createTemporaryTable(table, objectIdentifier, false);
}
};
/**
 * Wires together all collaborators of the table environment.
 *
 * @param catalogManager  registry of catalogs/databases/tables
 * @param moduleManager   registry of loaded modules
 * @param tableConfig     configuration shared with the planner
 * @param executor        translates/executes pipelines on the target runtime
 * @param functionCatalog registry of user-defined functions
 * @param planner         SQL/Table planner; also supplies the {@link Parser}
 * @param isStreamingMode true for streaming, false for batch
 * @param userClassLoader class loader used to discover user factories
 */
protected TableEnvironmentImpl(
CatalogManager catalogManager,
ModuleManager moduleManager,
TableConfig tableConfig,
Executor executor,
FunctionCatalog functionCatalog,
Planner planner,
boolean isStreamingMode,
ClassLoader userClassLoader) {
this.catalogManager = catalogManager;
// The schema resolver needs the parser, so it can only be installed here.
this.catalogManager.setCatalogTableSchemaResolver(
new CatalogTableSchemaResolver(planner.getParser(), isStreamingMode));
this.moduleManager = moduleManager;
this.execEnv = executor;
this.tableConfig = tableConfig;
this.functionCatalog = functionCatalog;
this.planner = planner;
this.parser = planner.getParser();
this.isStreamingMode = isStreamingMode;
this.userClassLoader = userClassLoader;
this.operationTreeBuilder = OperationTreeBuilder.create(
tableConfig,
functionCatalog.asLookup(parser::parseIdentifier),
catalogManager.getDataTypeFactory(),
// Table-reference lookup used by the Table API expression resolver:
// an unparsable or unknown path simply yields an empty Optional.
path -> {
try {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
Optional<CatalogQueryOperation> catalogQueryOperation = scanInternal(unresolvedIdentifier);
return catalogQueryOperation.map(t -> ApiExpressionUtils.tableRef(path, t));
} catch (SqlParserException ex) {
return Optional.empty();
}
},
isStreamingMode
);
}
/**
 * Factory that assembles a standalone {@link TableEnvironmentImpl} from
 * {@link EnvironmentSettings}: it discovers the executor and planner via
 * service loading and builds default catalog/function managers.
 */
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
// Use the context class loader so user jars on the classpath are visible.
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
TableConfig tableConfig = new TableConfig();
ModuleManager moduleManager = new ModuleManager();
CatalogManager catalogManager = CatalogManager.newBuilder()
.classLoader(classLoader)
.config(tableConfig.getConfiguration())
.defaultCatalog(
settings.getBuiltInCatalogName(),
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName(),
settings.getBuiltInDatabaseName()))
.build();
FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
// Executor and planner implementations are located through their factory SPIs.
Map<String, String> executorProperties = settings.toExecutorProperties();
Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
.create(executorProperties);
Map<String, String> plannerProperties = settings.toPlannerProperties();
Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
.create(
plannerProperties,
executor,
tableConfig,
functionCatalog,
catalogManager);
return new TableEnvironmentImpl(
catalogManager,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
settings.isStreamingMode(),
classLoader
);
}
/** Creates a table from the given Java objects; delegates to the expression overload. */
@Override
public Table fromValues(Object... values) {
    return fromValues(Arrays.asList(values));
}

/** Creates a table from the given objects, coerced to the explicit row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Object... values) {
    return fromValues(rowType, Arrays.asList(values));
}

/** Creates a table whose rows are built from the given expressions. */
@Override
public Table fromValues(Expression... values) {
    QueryOperation valuesOperation = operationTreeBuilder.values(values);
    return createTable(valuesOperation);
}

/** Creates a table from expressions, coerced to the explicit row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Expression... values) {
    // Resolve the user-facing AbstractDataType into a concrete DataType first.
    final DataType resolvedDataType = catalogManager.getDataTypeFactory().createDataType(rowType);
    QueryOperation valuesOperation = operationTreeBuilder.values(resolvedDataType, values);
    return createTable(valuesOperation);
}

/** Creates a table from arbitrary objects, converting each element to an expression. */
@Override
public Table fromValues(Iterable<?> values) {
    List<Expression> expressions = new ArrayList<>();
    for (Object value : values) {
        expressions.add(ApiExpressionUtils.objectToExpression(value));
    }
    return fromValues(expressions.toArray(new Expression[0]));
}

/** Creates a table from arbitrary objects with an explicit row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Iterable<?> values) {
    List<Expression> expressions = new ArrayList<>();
    for (Object value : values) {
        expressions.add(ApiExpressionUtils.objectToExpression(value));
    }
    return fromValues(rowType, expressions.toArray(new Expression[0]));
}
// Exposed for tests only; production code should not reach into the planner directly.
@VisibleForTesting
public Planner getPlanner() {
return planner;
}
// Wraps a TableSource in an inline (unregistered) query operation.
@Override
public Table fromTableSource(TableSource<?> source) {
return createTable(new TableSourceQueryOperation<>(source, !IS_STREAM_TABLE));
}
// Registers an external catalog under the given name.
@Override
public void registerCatalog(String catalogName, Catalog catalog) {
catalogManager.registerCatalog(catalogName, catalog);
}
// Looks up a previously registered catalog, if any.
@Override
public Optional<Catalog> getCatalog(String catalogName) {
return catalogManager.getCatalog(catalogName);
}
// Loads a module contributing functions (and possibly other objects).
@Override
public void loadModule(String moduleName, Module module) {
moduleManager.loadModule(moduleName, module);
}
// Removes a previously loaded module.
@Override
public void unloadModule(String moduleName) {
moduleManager.unloadModule(moduleName);
}
// Legacy registration API: registers a scalar function as a temporary system function.
@Override
public void registerFunction(String name, ScalarFunction function) {
functionCatalog.registerTempSystemScalarFunction(
name,
function);
}
// Instantiates the function class reflectively, then delegates to the instance overload.
@Override
public void createTemporarySystemFunction(String name, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporarySystemFunction(name, functionInstance);
}
// Registers a temporary system function; fails if the name already exists (ignoreIfExists=false).
@Override
public void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance) {
functionCatalog.registerTemporarySystemFunction(
name,
functionInstance,
false);
}
// Drops a temporary system function; returns whether it existed (ifExists=true suppresses errors).
@Override
public boolean dropTemporarySystemFunction(String name) {
return functionCatalog.dropTemporarySystemFunction(
name,
true);
}
// Creates a permanent catalog function; fails if it already exists.
@Override
public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass) {
createFunction(path, functionClass, false);
}
// Creates a permanent catalog function at the (possibly partial) path.
@Override
public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
functionCatalog.registerCatalogFunction(
unresolvedIdentifier,
functionClass,
ignoreIfExists);
}
// Drops a permanent catalog function; returns whether it existed.
@Override
public boolean dropFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return functionCatalog.dropCatalogFunction(
unresolvedIdentifier,
true);
}
// Instantiates the class reflectively, then delegates to the instance overload.
@Override
public void createTemporaryFunction(String path, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporaryFunction(path, functionInstance);
}
// Registers a temporary catalog function; fails if the identifier is taken.
@Override
public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
functionCatalog.registerTemporaryCatalogFunction(
unresolvedIdentifier,
functionInstance,
false);
}
// Drops a temporary catalog function; returns whether it existed.
@Override
public boolean dropTemporaryFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return functionCatalog.dropTemporaryCatalogFunction(
unresolvedIdentifier,
true);
}
// Legacy API: registers a Table under a single (unparsed) name as a temporary view.
@Override
public void registerTable(String name, Table table) {
UnresolvedIdentifier identifier = UnresolvedIdentifier.of(name);
createTemporaryView(identifier, table);
}
// Parses the dotted path and registers the Table as a temporary view.
@Override
public void createTemporaryView(String path, Table view) {
UnresolvedIdentifier identifier = parser.parseIdentifier(path);
createTemporaryView(identifier, view);
}
// Shared implementation: validates ownership, qualifies the identifier, and stores the view.
private void createTemporaryView(UnresolvedIdentifier identifier, Table view) {
// Tables carry planner/catalog state of the environment they were created in,
// so cross-environment registration would silently misbehave.
if (((TableImpl) view).getTableEnvironment() != this) {
throw new TableException(
"Only table API objects that belong to this TableEnvironment can be registered.");
}
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
// Give subclasses a chance to bind the now-known identifier into the operation tree.
QueryOperation queryOperation = qualifyQueryOperation(tableIdentifier, view.getQueryOperation());
CatalogBaseTable tableTable = new QueryOperationCatalogView(queryOperation);
catalogManager.createTemporaryTable(tableTable, tableIdentifier, false);
}
// Legacy lookup API: each vararg element is one segment of the identifier.
@Override
public Table scan(String... tablePath) {
UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(() -> new ValidationException(String.format(
"Table %s was not found.",
unresolvedIdentifier)));
}
// Looks up a table by its dotted path (parsed, then qualified against current catalog/db).
@Override
public Table from(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(() -> new ValidationException(String.format(
"Table %s was not found.",
unresolvedIdentifier)));
}
// Buffers an INSERT INTO the parsed target path; executed later by execute().
@Override
public void insertInto(String targetPath, Table table) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(targetPath);
insertIntoInternal(unresolvedIdentifier, table);
}
// Legacy overload: path segments are given separately and re-assembled in order.
@Override
public void insertInto(Table table, String sinkPath, String... sinkPathContinued) {
List<String> fullPath = new ArrayList<>(Arrays.asList(sinkPathContinued));
fullPath.add(0, sinkPath);
UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(fullPath);
insertIntoInternal(unresolvedIdentifier, table);
}
// Qualifies the sink identifier and buffers the modify operation (no execution here).
private void insertIntoInternal(UnresolvedIdentifier unresolvedIdentifier, Table table) {
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
List<ModifyOperation> modifyOperations = Collections.singletonList(
new CatalogSinkModifyOperation(
objectIdentifier,
table.getQueryOperation()));
buffer(modifyOperations);
}
// Qualifies the identifier and wraps a catalog lookup into a query operation;
// empty if no table/view is registered under the qualified identifier.
private Optional<CatalogQueryOperation> scanInternal(UnresolvedIdentifier identifier) {
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
return catalogManager.getTable(tableIdentifier)
.map(t -> new CatalogQueryOperation(tableIdentifier, t.getResolvedSchema()));
}
// Entry point of the descriptor-based DDL API; registration callback feeds createTemporaryTable.
@Override
public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
return new StreamTableDescriptor(registration, connectorDescriptor);
}
/** Returns the names of all registered catalogs, sorted alphabetically. */
@Override
public String[] listCatalogs() {
return catalogManager.listCatalogs()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all loaded modules in load order. */
@Override
public String[] listModules() {
return moduleManager.listModules().toArray(new String[0]);
}
/** Returns the names of all databases in the current catalog. */
@Override
public String[] listDatabases() {
// Fix: the original called Optional.get() unchecked; a missing current catalog
// would surface as a bare NoSuchElementException without any context.
return catalogManager.getCatalog(catalogManager.getCurrentCatalog())
.orElseThrow(() -> new TableException(String.format(
"The current catalog %s does not exist.", catalogManager.getCurrentCatalog())))
.listDatabases()
.toArray(new String[0]);
}
/** Returns the names of all tables (and views) visible in the current database, sorted. */
@Override
public String[] listTables() {
return catalogManager.listTables()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all views visible in the current database, sorted. */
@Override
public String[] listViews() {
return catalogManager.listViews()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all temporary tables, sorted. */
@Override
public String[] listTemporaryTables() {
return catalogManager.listTemporaryTables()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all temporary views, sorted. */
@Override
public String[] listTemporaryViews() {
return catalogManager.listTemporaryViews()
.stream()
.sorted()
.toArray(String[]::new);
}
// Drops a temporary table; returns false (instead of throwing) when it does not exist.
@Override
public boolean dropTemporaryTable(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryTable(identifier, false);
return true;
} catch (ValidationException e) {
// Deliberate best-effort semantics: "not found" is reported via the return value.
return false;
}
}
// Drops a temporary view; returns false (instead of throwing) when it does not exist.
@Override
public boolean dropTemporaryView(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryView(identifier, false);
return true;
} catch (ValidationException e) {
return false;
}
}
// Names of user-defined functions only.
@Override
public String[] listUserDefinedFunctions() {
return functionCatalog.getUserDefinedFunctions();
}
// Names of all functions, including built-ins provided by modules.
@Override
public String[] listFunctions() {
return functionCatalog.getFunctions();
}
// Explains a single table without extended details.
@Override
public String explain(Table table) {
return explain(table, false);
}
// Explains a single table; extended=true adds mode-dependent details (see getExplainDetails).
@Override
public String explain(Table table, boolean extended) {
return planner.explain(Collections.singletonList(table.getQueryOperation()), getExplainDetails(extended));
}
// Explains all currently buffered INSERT operations (does not clear the buffer).
@Override
public String explain(boolean extended) {
List<Operation> operations = bufferedModifyOperations.stream()
.map(o -> (Operation) o).collect(Collectors.toList());
return planner.explain(operations, getExplainDetails(extended));
}
// Parses and explains a single SQL statement with the caller-chosen details.
@Override
public String explainSql(String statement, ExplainDetail... extraDetails) {
List<Operation> operations = parser.parse(statement);
if (operations.size() != 1) {
throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
}
return planner.explain(operations, extraDetails);
}
// Internal hook: explains already-parsed operations.
@Override
public String explainInternal(List<Operation> operations, ExplainDetail... extraDetails) {
return planner.explain(operations, extraDetails);
}
// SQL-completion support (e.g. for CLI clients), delegated to the planner.
@Override
public String[] getCompletionHints(String statement, int position) {
return planner.getCompletionHints(statement, position);
}
/**
 * Parses the given SQL text and returns it as a {@link Table}.
 *
 * <p>Exactly one statement is accepted, and it must be a pure query
 * (SELECT/UNION/INTERSECT/EXCEPT/VALUES/ORDER BY) — DML/DDL is rejected.
 */
@Override
public Table sqlQuery(String query) {
    List<Operation> parsed = parser.parse(query);
    // Guard: reject multi-statement input up front.
    if (parsed.size() != 1) {
        throw new ValidationException(
            "Unsupported SQL query! sqlQuery() only accepts a single SQL query.");
    }
    Operation single = parsed.get(0);
    // A ModifyOperation may also implement QueryOperation, so both checks are needed.
    boolean isPureQuery = single instanceof QueryOperation && !(single instanceof ModifyOperation);
    if (!isPureQuery) {
        throw new ValidationException(
            "Unsupported SQL query! sqlQuery() only accepts a single SQL query of type " +
            "SELECT, UNION, INTERSECT, EXCEPT, VALUES, and ORDER_BY.");
    }
    return createTable((QueryOperation) single);
}
// Parses and immediately executes a single SQL statement (DDL, DML, SHOW, ...).
@Override
public TableResult executeSql(String statement) {
List<Operation> operations = parser.parse(statement);
if (operations.size() != 1) {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
return executeOperation(operations.get(0));
}
// Creates an empty statement set for batching multiple INSERTs into one job.
@Override
public StatementSet createStatementSet() {
return new StatementSetImpl(this);
}
/**
 * Translates the given INSERT operations into a pipeline and submits it asynchronously.
 * The returned TableResult reports one BIGINT "affected rows" column per sink;
 * the counts are -1 placeholders because the job may still be running.
 */
@Override
public TableResult executeInternal(List<ModifyOperation> operations) {
List<Transformation<?>> transformations = translate(operations);
List<String> sinkIdentifierNames = extractSinkIdentifierNames(operations);
// Job name encodes all sink names, e.g. "insert-into_cat.db.t1,cat.db.t2".
String jobName = "insert-into_" + String.join(",", sinkIdentifierNames);
Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, jobName);
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
TableSchema.Builder builder = TableSchema.builder();
Object[] affectedRowCounts = new Long[operations.size()];
for (int i = 0; i < operations.size(); ++i) {
// One result column per sink, named after the (uniquified) sink identifier.
builder.field(sinkIdentifierNames.get(i), DataTypes.BIGINT());
affectedRowCounts[i] = -1L;
}
return TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(builder.build())
.data(Collections.singletonList(Row.of(affectedRowCounts)))
.build();
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
/**
 * Executes a SELECT-style query by attaching a collecting sink and submitting
 * the job asynchronously; the result iterator streams rows back to the client.
 */
@Override
public TableResult executeInternal(QueryOperation operation) {
SelectSinkOperation sinkOperation = new SelectSinkOperation(operation);
List<Transformation<?>> transformations = translate(Collections.singletonList(sinkOperation));
Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, "collect");
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
SelectResultProvider resultProvider = sinkOperation.getSelectResultProvider();
// The provider needs the job client to fetch results / track job status.
resultProvider.setJobClient(jobClient);
return TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(operation.getTableSchema())
.data(resultProvider.getResultIterator())
.setPrintStyle(TableResultImpl.PrintStyle.tableau(
PrintUtils.MAX_COLUMN_WIDTH, PrintUtils.NULL_COLUMN, true, isStreamingMode))
.build();
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
/**
 * Legacy update API: INSERTs are buffered until execute(); supported DDL
 * statements are executed eagerly; everything else is rejected.
 */
@Override
public void sqlUpdate(String stmt) {
List<Operation> operations = parser.parse(stmt);
if (operations.size() != 1) {
throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG);
}
Operation operation = operations.get(0);
if (operation instanceof ModifyOperation) {
// INSERT: deferred until execute()/explain().
buffer(Collections.singletonList((ModifyOperation) operation));
// Whitelist of DDL operation types that sqlUpdate() may execute immediately.
} else if (operation instanceof CreateTableOperation ||
operation instanceof DropTableOperation ||
operation instanceof AlterTableOperation ||
operation instanceof CreateViewOperation ||
operation instanceof DropViewOperation ||
operation instanceof CreateDatabaseOperation ||
operation instanceof DropDatabaseOperation ||
operation instanceof AlterDatabaseOperation ||
operation instanceof CreateCatalogFunctionOperation ||
operation instanceof CreateTempSystemFunctionOperation ||
operation instanceof DropCatalogFunctionOperation ||
operation instanceof DropTempSystemFunctionOperation ||
operation instanceof AlterCatalogFunctionOperation ||
operation instanceof CreateCatalogOperation ||
operation instanceof DropCatalogOperation ||
operation instanceof UseCatalogOperation ||
operation instanceof UseDatabaseOperation) {
executeOperation(operation);
} else {
throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG);
}
}
// Executes CREATE CATALOG: discovers a CatalogFactory matching the WITH properties
// (via the user class loader) and registers the created catalog.
private TableResult createCatalog(CreateCatalogOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
String catalogName = operation.getCatalogName();
Map<String, String> properties = operation.getProperties();
final CatalogFactory factory = TableFactoryService.find(
CatalogFactory.class,
properties,
userClassLoader);
Catalog catalog = factory.createCatalog(catalogName, properties);
catalogManager.registerCatalog(catalogName, catalog);
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
}
/** Builds a single-STRING-column SHOW-style result, one row per object name. */
private TableResult buildShowResult(String columnName, String[] objects) {
    String[][] rows = new String[objects.length][];
    for (int i = 0; i < objects.length; i++) {
        rows[i] = new String[]{objects[i]};
    }
    return buildResult(
        new String[]{columnName},
        new DataType[]{DataTypes.STRING()},
        rows);
}
// Builds the DESCRIBE result: one row per column with name, type, nullability,
// primary-key membership, computed-column expression and watermark expression.
private TableResult buildDescribeResult(TableSchema schema) {
// rowtime attribute -> watermark expression, for quick per-column lookup.
Map<String, String> fieldToWatermark =
schema.getWatermarkSpecs()
.stream()
.collect(Collectors.toMap(WatermarkSpec::getRowtimeAttribute, WatermarkSpec::getWatermarkExpr));
// Every PK column maps to the same "PRI(col1, col2, ...)" label.
Map<String, String> fieldToPrimaryKey = new HashMap<>();
schema.getPrimaryKey().ifPresent((p) -> {
List<String> columns = p.getColumns();
columns.forEach((c) -> fieldToPrimaryKey.put(c, String.format("PRI(%s)", String.join(", ", columns))));
});
Object[][] rows =
schema.getTableColumns()
.stream()
.map((c) -> {
LogicalType logicalType = c.getType().getLogicalType();
return new Object[]{
c.getName(),
// Nullability is shown in its own column, so strip " NOT NULL" from the type string.
StringUtils.removeEnd(logicalType.toString(), " NOT NULL"),
logicalType.isNullable(),
fieldToPrimaryKey.getOrDefault(c.getName(), null),
c.getExpr().orElse(null),
fieldToWatermark.getOrDefault(c.getName(), null)};
}).toArray(Object[][]::new);
return buildResult(
new String[]{"name", "type", "null", "key", "computed column", "watermark"},
new DataType[]{DataTypes.STRING(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()},
rows);
}
// Assembles an in-memory TableResult from headers, column types and row data,
// printed tableau-style with unlimited column width.
private TableResult buildResult(String[] headers, DataType[] types, Object[][] rows) {
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(
TableSchema.builder().fields(
headers,
types).build())
.data(Arrays.stream(rows).map(Row::of).collect(Collectors.toList()))
.setPrintStyle(TableResultImpl.PrintStyle.tableau(Integer.MAX_VALUE, "", false, false))
.build();
}
/**
 * Extracts the sink identifier name of every {@link ModifyOperation}.
 *
 * <p>If several operations target a table with the same full name, an index
 * suffix ({@code _1}, {@code _2}, ...) is appended so each returned name is unique.
 */
private List<String> extractSinkIdentifierNames(List<ModifyOperation> operations) {
    List<String> rawNames = new ArrayList<>(operations.size());
    Map<String, Integer> occurrences = new HashMap<>();
    // First pass: collect the full sink names and count duplicates.
    for (ModifyOperation operation : operations) {
        if (!(operation instanceof CatalogSinkModifyOperation)) {
            throw new UnsupportedOperationException("Unsupported operation: " + operation);
        }
        String fullName = ((CatalogSinkModifyOperation) operation)
            .getTableIdentifier()
            .asSummaryString();
        rawNames.add(fullName);
        occurrences.merge(fullName, 1, Integer::sum);
    }
    // Second pass: keep unique names as-is, suffix duplicates with a running index.
    List<String> uniqueNames = new ArrayList<>(rawNames.size());
    Map<String, Integer> nextIndex = new HashMap<>();
    for (String name : rawNames) {
        if (occurrences.get(name) == 1) {
            uniqueNames.add(name);
        } else {
            int index = nextIndex.getOrDefault(name, 0) + 1;
            nextIndex.put(name, index);
            uniqueNames.add(name + "_" + index);
        }
    }
    return uniqueNames;
}
/** Get catalog from catalogName or throw a ValidationException if the catalog not exists. */
private Catalog getCatalogOrThrowException(String catalogName) {
return getCatalog(catalogName)
.orElseThrow(() -> new ValidationException(String.format("Catalog %s does not exist", catalogName)));
}
// Error-message prefix for failed DDL operations.
private String getDDLOpExecuteErrorMsg(String action) {
return String.format("Could not execute %s", action);
}
// Error-message prefix for failed DQL operations.
// NOTE(review): currently identical to getDDLOpExecuteErrorMsg; kept separate,
// presumably so the wording can diverge later — confirm before merging them.
private String getDQLOpExecuteErrorMsg(String action) {
return String.format("Could not execute %s", action);
}
// Name of the catalog used to resolve unqualified identifiers.
@Override
public String getCurrentCatalog() {
return catalogManager.getCurrentCatalog();
}
// Switches the current catalog (USE CATALOG).
@Override
public void useCatalog(String catalogName) {
catalogManager.setCurrentCatalog(catalogName);
}
// Name of the database used to resolve unqualified identifiers.
@Override
public String getCurrentDatabase() {
return catalogManager.getCurrentDatabase();
}
// Switches the current database (USE [catalog.]database).
@Override
public void useDatabase(String databaseName) {
catalogManager.setCurrentDatabase(databaseName);
}
// Mutable configuration shared with the planner.
@Override
public TableConfig getConfig() {
return tableConfig;
}
// Translates all buffered INSERTs into one pipeline and runs it synchronously.
// The buffer is cleared even if translation fails (see translateAndClearBuffer).
@Override
public JobExecutionResult execute(String jobName) throws Exception {
Pipeline pipeline = execEnv.createPipeline(translateAndClearBuffer(), tableConfig, jobName);
return execEnv.execute(pipeline);
}
// SQL parser supplied by the planner.
@Override
public Parser getParser() {
return parser;
}
// Exposes the catalog manager to internal collaborators.
@Override
public CatalogManager getCatalogManager() {
return catalogManager;
}
/**
 * Subclasses can override this method to transform the given QueryOperation to a new one with
 * the qualified object identifier. This is needed for some QueryOperations, e.g. JavaDataStreamQueryOperation,
 * which doesn't know the registered identifier when created ({@code fromDataStream(DataStream)}.
 * But the identifier is required when converting this QueryOperation to RelNode.
 */
protected QueryOperation qualifyQueryOperation(ObjectIdentifier identifier, QueryOperation queryOperation) {
// Default: identity — the base implementation needs no qualification.
return queryOperation;
}
/**
 * Subclasses can override this method to add additional checks.
 *
 * @param tableSource tableSource to validate
 */
protected void validateTableSource(TableSource<?> tableSource) {
TableSourceValidation.validateTableSource(tableSource, tableSource.getTableSchema());
}
/**
 * Translate the buffered operations to Transformations, and clear the buffer.
 *
 * <p>The buffer will be clear even if the `translate` fails. In most cases,
 * the failure is not retryable (e.g. type mismatch, can't generate physical plan).
 * If the buffer is not clear after failure, the following `translate` will also fail.
 */
protected List<Transformation<?>> translateAndClearBuffer() {
List<Transformation<?>> transformations;
try {
transformations = translate(bufferedModifyOperations);
} finally {
// Always clear, even on failure — a stale buffer would poison every later translate.
bufferedModifyOperations.clear();
}
return transformations;
}
// Delegates planning of modify operations to the planner.
private List<Transformation<?>> translate(List<ModifyOperation> modifyOperations) {
return planner.translate(modifyOperations);
}
// Appends operations to the deferred-execution buffer.
private void buffer(List<ModifyOperation> modifyOperations) {
bufferedModifyOperations.addAll(modifyOperations);
}
/**
 * Maps the legacy {@code extended} flag to concrete explain details:
 * nothing when not extended; estimated cost always, plus changelog mode
 * in streaming mode, when extended.
 */
@VisibleForTesting
protected ExplainDetail[] getExplainDetails(boolean extended) {
    if (!extended) {
        return new ExplainDetail[0];
    }
    return isStreamingMode
        ? new ExplainDetail[] { ExplainDetail.ESTIMATED_COST, ExplainDetail.CHANGELOG_MODE }
        : new ExplainDetail[] { ExplainDetail.ESTIMATED_COST };
}
// Registers a TableSource as a temporary ConnectorCatalogTable. If a sink-only
// entry with the same name exists, both are merged into one source-and-sink table;
// any other name clash is rejected.
@Override
public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
validateTableSource(tableSource);
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
if (table.isPresent()) {
if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
if (sourceSinkTable.getTableSource().isPresent()) {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
} else {
// Sink-only entry: merge into a combined source-and-sink table by
// dropping the old registration and re-creating it.
ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink(
tableSource,
sourceSinkTable.getTableSink().get(),
!IS_STREAM_TABLE);
catalogManager.dropTemporaryTable(objectIdentifier, false);
catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
}
} else {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
}
} else {
ConnectorCatalogTable source = ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE);
catalogManager.createTemporaryTable(source, objectIdentifier, false);
}
}
// Mirror of registerTableSourceInternal for sinks: merges with an existing
// source-only entry, otherwise registers a sink-only temporary table.
@Override
public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
if (table.isPresent()) {
if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
if (sourceSinkTable.getTableSink().isPresent()) {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
} else {
ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable
.sourceAndSink(sourceSinkTable.getTableSource().get(), tableSink, !IS_STREAM_TABLE);
catalogManager.dropTemporaryTable(objectIdentifier, false);
catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
}
} else {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
}
} else {
ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
catalogManager.createTemporaryTable(sink, objectIdentifier, false);
}
}
// Looks up a table only if it is registered as temporary (permanent tables are ignored).
private Optional<CatalogBaseTable> getTemporaryTable(ObjectIdentifier identifier) {
return catalogManager.getTable(identifier)
.filter(CatalogManager.TableLookupResult::isTemporary)
.map(CatalogManager.TableLookupResult::getTable);
}
// Executes CREATE [TEMPORARY] FUNCTION: temporary functions go to the function
// catalog, permanent ones to the owning external catalog.
private TableResult createCatalogFunction(
CreateCatalogFunctionOperation createCatalogFunctionOperation) {
String exMsg = getDDLOpExecuteErrorMsg(createCatalogFunctionOperation.asSummaryString());
try {
if (createCatalogFunctionOperation.isTemporary()) {
functionCatalog.registerTemporaryCatalogFunction(
UnresolvedIdentifier.of(createCatalogFunctionOperation.getFunctionIdentifier().toList()),
createCatalogFunctionOperation.getCatalogFunction(),
createCatalogFunctionOperation.isIgnoreIfExists());
} else {
Catalog catalog = getCatalogOrThrowException(
createCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
catalog.createFunction(
createCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
createCatalogFunctionOperation.getCatalogFunction(),
createCatalogFunctionOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
// User-facing validation errors pass through untouched.
throw e;
} catch (FunctionAlreadyExistException e) {
throw new ValidationException(e.getMessage(), e);
} catch (Exception e) {
// Everything else is wrapped with the DDL context message.
throw new TableException(exMsg, e);
}
}
// Executes ALTER FUNCTION; altering temporary functions is explicitly unsupported.
private TableResult alterCatalogFunction(AlterCatalogFunctionOperation alterCatalogFunctionOperation) {
String exMsg = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString());
try {
CatalogFunction function = alterCatalogFunctionOperation.getCatalogFunction();
if (alterCatalogFunctionOperation.isTemporary()) {
throw new ValidationException(
"Alter temporary catalog function is not supported");
} else {
Catalog catalog = getCatalogOrThrowException(
alterCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
catalog.alterFunction(
alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
function,
alterCatalogFunctionOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw e;
} catch (FunctionNotExistException e) {
throw new ValidationException(e.getMessage(), e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
/**
 * Executes DROP [TEMPORARY] FUNCTION: temporary functions are removed from the
 * function catalog, permanent ones from their owning external catalog.
 * Validation and not-found errors become {@link ValidationException}; anything
 * else is wrapped in a {@link TableException} with the DDL context message.
 */
private TableResult dropCatalogFunction(DropCatalogFunctionOperation dropCatalogFunctionOperation) {
    String exMsg = getDDLOpExecuteErrorMsg(dropCatalogFunctionOperation.asSummaryString());
    try {
        ObjectIdentifier identifier = dropCatalogFunctionOperation.getFunctionIdentifier();
        boolean ifExists = dropCatalogFunctionOperation.isIfExists();
        if (dropCatalogFunctionOperation.isTemporary()) {
            functionCatalog.dropTempCatalogFunction(identifier, ifExists);
        } else {
            Catalog catalog = getCatalogOrThrowException(identifier.getCatalogName());
            catalog.dropFunction(identifier.toObjectPath(), ifExists);
        }
        return TableResultImpl.TABLE_RESULT_OK;
    } catch (ValidationException e) {
        throw e;
    } catch (FunctionNotExistException e) {
        throw new ValidationException(e.getMessage(), e);
    } catch (Exception e) {
        throw new TableException(exMsg, e);
    }
}
// Executes CREATE TEMPORARY SYSTEM FUNCTION via the function catalog.
private TableResult createSystemFunction(CreateTempSystemFunctionOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
functionCatalog.registerTemporarySystemFunction(
operation.getFunctionName(),
operation.getFunctionClass(),
operation.getFunctionLanguage(),
operation.isIgnoreIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
// User-facing validation errors pass through untouched.
throw e;
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
// Executes DROP TEMPORARY SYSTEM FUNCTION via the function catalog.
private TableResult dropSystemFunction(DropTempSystemFunctionOperation operation) {
try {
functionCatalog.dropTemporarySystemFunction(
operation.getFunctionName(),
operation.isIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw e;
} catch (Exception e) {
throw new TableException(getDDLOpExecuteErrorMsg(operation.asSummaryString()), e);
}
}
// Wraps a QueryOperation into a TableImpl bound to this environment.
protected TableImpl createTable(QueryOperation tableOperation) {
return TableImpl.createTable(
this,
tableOperation,
operationTreeBuilder,
functionCatalog.asLookup(parser::parseIdentifier));
}
} | class TableEnvironmentImpl implements TableEnvironmentInternal {
private static final boolean IS_STREAM_TABLE = true;
private final CatalogManager catalogManager;
private final ModuleManager moduleManager;
private final OperationTreeBuilder operationTreeBuilder;
private final List<ModifyOperation> bufferedModifyOperations = new ArrayList<>();
protected final TableConfig tableConfig;
protected final Executor execEnv;
protected final FunctionCatalog functionCatalog;
protected final Planner planner;
protected final Parser parser;
private final boolean isStreamingMode;
private final ClassLoader userClassLoader;
private static final String UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG =
"Unsupported SQL query! sqlUpdate() only accepts a single SQL statement of type " +
"INSERT, CREATE TABLE, DROP TABLE, ALTER TABLE, USE CATALOG, USE [CATALOG.]DATABASE, " +
"CREATE DATABASE, DROP DATABASE, ALTER DATABASE, CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, " +
"CREATE CATALOG, DROP CATALOG, CREATE VIEW, DROP VIEW.";
private static final String UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG =
"Unsupported SQL query! executeSql() only accepts a single SQL statement of type " +
"CREATE TABLE, DROP TABLE, ALTER TABLE, CREATE DATABASE, DROP DATABASE, ALTER DATABASE, " +
"CREATE FUNCTION, DROP FUNCTION, ALTER FUNCTION, CREATE CATALOG, DROP CATALOG, " +
"USE CATALOG, USE [CATALOG.]DATABASE, SHOW CATALOGS, SHOW DATABASES, SHOW TABLES, SHOW FUNCTIONS, SHOW PARTITIONS" +
"CREATE VIEW, DROP VIEW, SHOW VIEWS, INSERT, DESCRIBE.";
/**
* Provides necessary methods for {@link ConnectTableDescriptor}.
*/
private final Registration registration = new Registration() {
@Override
public void createTemporaryTable(String path, CatalogBaseTable table) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(
unresolvedIdentifier);
catalogManager.createTemporaryTable(table, objectIdentifier, false);
}
};
/**
 * Wires the environment to the given services. Installs a schema resolver on the catalog
 * manager and takes the parser from the planner so both share one SQL dialect.
 */
protected TableEnvironmentImpl(
CatalogManager catalogManager,
ModuleManager moduleManager,
TableConfig tableConfig,
Executor executor,
FunctionCatalog functionCatalog,
Planner planner,
boolean isStreamingMode,
ClassLoader userClassLoader) {
this.catalogManager = catalogManager;
this.catalogManager.setCatalogTableSchemaResolver(
new CatalogTableSchemaResolver(planner.getParser(), isStreamingMode));
this.moduleManager = moduleManager;
this.execEnv = executor;
this.tableConfig = tableConfig;
this.functionCatalog = functionCatalog;
this.planner = planner;
this.parser = planner.getParser();
this.isStreamingMode = isStreamingMode;
this.userClassLoader = userClassLoader;
// The tree builder resolves table references lazily: an unknown or unparsable path yields
// Optional.empty() instead of failing immediately.
this.operationTreeBuilder = OperationTreeBuilder.create(
tableConfig,
functionCatalog.asLookup(parser::parseIdentifier),
catalogManager.getDataTypeFactory(),
path -> {
try {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
Optional<CatalogQueryOperation> catalogQueryOperation = scanInternal(unresolvedIdentifier);
return catalogQueryOperation.map(t -> ApiExpressionUtils.tableRef(path, t));
} catch (SqlParserException ex) {
return Optional.empty();
}
},
isStreamingMode
);
}
/**
 * Creates a {@link TableEnvironmentImpl} from the given settings, discovering the
 * {@link Executor} and {@link Planner} implementations via component factory lookup.
 */
public static TableEnvironmentImpl create(EnvironmentSettings settings) {
// Catalog and factory discovery use the calling thread's context class loader.
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
TableConfig tableConfig = new TableConfig();
ModuleManager moduleManager = new ModuleManager();
CatalogManager catalogManager = CatalogManager.newBuilder()
.classLoader(classLoader)
.config(tableConfig.getConfiguration())
.defaultCatalog(
settings.getBuiltInCatalogName(),
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName(),
settings.getBuiltInDatabaseName()))
.build();
FunctionCatalog functionCatalog = new FunctionCatalog(tableConfig, catalogManager, moduleManager);
Map<String, String> executorProperties = settings.toExecutorProperties();
Executor executor = ComponentFactoryService.find(ExecutorFactory.class, executorProperties)
.create(executorProperties);
Map<String, String> plannerProperties = settings.toPlannerProperties();
Planner planner = ComponentFactoryService.find(PlannerFactory.class, plannerProperties)
.create(
plannerProperties,
executor,
tableConfig,
functionCatalog,
catalogManager);
return new TableEnvironmentImpl(
catalogManager,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
settings.isStreamingMode(),
classLoader
);
}
/** Creates a table from the given row values; the row type is inferred. */
@Override
public Table fromValues(Object... values) {
return fromValues(Arrays.asList(values));
}
/** Creates a table from the given row values with an explicitly given row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Object... values) {
return fromValues(rowType, Arrays.asList(values));
}
/** Creates a table from the given value expressions. */
@Override
public Table fromValues(Expression... values) {
return createTable(operationTreeBuilder.values(values));
}
/** Creates a table from the given value expressions, resolved against the given row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Expression... values) {
final DataType resolvedDataType = catalogManager.getDataTypeFactory().createDataType(rowType);
return createTable(operationTreeBuilder.values(resolvedDataType, values));
}
/** Iterable variant: converts each element to an expression, then delegates. */
@Override
public Table fromValues(Iterable<?> values) {
Expression[] exprs = StreamSupport.stream(values.spliterator(), false)
.map(ApiExpressionUtils::objectToExpression)
.toArray(Expression[]::new);
return fromValues(exprs);
}
/** Iterable variant with an explicit row type. */
@Override
public Table fromValues(AbstractDataType<?> rowType, Iterable<?> values) {
Expression[] exprs = StreamSupport.stream(values.spliterator(), false)
.map(ApiExpressionUtils::objectToExpression)
.toArray(Expression[]::new);
return fromValues(rowType, exprs);
}
/** Exposes the planner for tests only. */
@VisibleForTesting
public Planner getPlanner() {
return planner;
}
/** Creates a table backed directly by the given {@link TableSource}. */
@Override
public Table fromTableSource(TableSource<?> source) {
return createTable(new TableSourceQueryOperation<>(source, !IS_STREAM_TABLE));
}
/** Registers a catalog under the given name; delegates to the {@link CatalogManager}. */
@Override
public void registerCatalog(String catalogName, Catalog catalog) {
catalogManager.registerCatalog(catalogName, catalog);
}
/** Returns the catalog registered under the given name, if any. */
@Override
public Optional<Catalog> getCatalog(String catalogName) {
return catalogManager.getCatalog(catalogName);
}
/** Loads a module under the given name; delegates to the {@link ModuleManager}. */
@Override
public void loadModule(String moduleName, Module module) {
moduleManager.loadModule(moduleName, module);
}
/** Unloads the module registered under the given name. */
@Override
public void unloadModule(String moduleName) {
moduleManager.unloadModule(moduleName);
}
/** Legacy API: registers a scalar function as a temporary system function. */
@Override
public void registerFunction(String name, ScalarFunction function) {
functionCatalog.registerTempSystemScalarFunction(
name,
function);
}
/** Instantiates the function class and registers it as a temporary system function. */
@Override
public void createTemporarySystemFunction(String name, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporarySystemFunction(name, functionInstance);
}
/** Registers the given instance as a temporary system function. */
@Override
public void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance) {
// false: presumably "do not ignore if exists" — confirm against FunctionCatalog.
functionCatalog.registerTemporarySystemFunction(
name,
functionInstance,
false);
}
/** Drops a temporary system function, returning whether one was removed. */
@Override
public boolean dropTemporarySystemFunction(String name) {
// true: presumably "ignore if not exists" — confirm against FunctionCatalog.
return functionCatalog.dropTemporarySystemFunction(
name,
true);
}
/** Creates a permanent catalog function, failing if one already exists at the path. */
@Override
public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass) {
createFunction(path, functionClass, false);
}
/** Creates a permanent catalog function with configurable if-exists behavior. */
@Override
public void createFunction(String path, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
functionCatalog.registerCatalogFunction(
unresolvedIdentifier,
functionClass,
ignoreIfExists);
}
/** Drops a permanent catalog function, returning whether one was removed. */
@Override
public boolean dropFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return functionCatalog.dropCatalogFunction(
unresolvedIdentifier,
true);
}
/** Instantiates the function class and registers it as a temporary catalog function. */
@Override
public void createTemporaryFunction(String path, Class<? extends UserDefinedFunction> functionClass) {
final UserDefinedFunction functionInstance = UserDefinedFunctionHelper.instantiateFunction(functionClass);
createTemporaryFunction(path, functionInstance);
}
/** Registers the given instance as a temporary catalog function at the parsed path. */
@Override
public void createTemporaryFunction(String path, UserDefinedFunction functionInstance) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
functionCatalog.registerTemporaryCatalogFunction(
unresolvedIdentifier,
functionInstance,
false);
}
/** Drops a temporary catalog function, returning whether one was removed. */
@Override
public boolean dropTemporaryFunction(String path) {
final UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return functionCatalog.dropTemporaryCatalogFunction(
unresolvedIdentifier,
true);
}
/** Legacy registration API: treats {@code name} as a single-part identifier. */
@Override
public void registerTable(String name, Table table) {
UnresolvedIdentifier identifier = UnresolvedIdentifier.of(name);
createTemporaryView(identifier, table);
}
/** Registers the given {@link Table} as a temporary view under the parsed path. */
@Override
public void createTemporaryView(String path, Table view) {
UnresolvedIdentifier identifier = parser.parseIdentifier(path);
createTemporaryView(identifier, view);
}
/** Shared implementation: rejects tables from other environments, then stores the view. */
private void createTemporaryView(UnresolvedIdentifier identifier, Table view) {
if (((TableImpl) view).getTableEnvironment() != this) {
throw new TableException(
"Only table API objects that belong to this TableEnvironment can be registered.");
}
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
// Gives subclasses a chance to attach the identifier to the query operation.
QueryOperation queryOperation = qualifyQueryOperation(tableIdentifier, view.getQueryOperation());
CatalogBaseTable tableTable = new QueryOperationCatalogView(queryOperation);
catalogManager.createTemporaryTable(tableTable, tableIdentifier, false);
}
/** Resolves a multi-part table path and returns it as a {@link Table}, or fails. */
@Override
public Table scan(String... tablePath) {
UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(tablePath);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(() -> new ValidationException(String.format(
"Table %s was not found.",
unresolvedIdentifier)));
}
/** Parses a SQL-style path and returns the referenced table, or fails if absent. */
@Override
public Table from(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
return scanInternal(unresolvedIdentifier)
.map(this::createTable)
.orElseThrow(() -> new ValidationException(String.format(
"Table %s was not found.",
unresolvedIdentifier)));
}
/** Buffers an INSERT of {@code table} into the sink identified by the parsed path. */
@Override
public void insertInto(String targetPath, Table table) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(targetPath);
insertIntoInternal(unresolvedIdentifier, table);
}
/**
 * Buffers an INSERT of {@code table} into the sink identified by the given
 * (possibly multi-part) path, e.g. {@code insertInto(t, "cat", "db", "sink")}.
 */
@Override
public void insertInto(Table table, String sinkPath, String... sinkPathContinued) {
// Assemble the full identifier path with the first segment up front.
List<String> fullPath = new ArrayList<>(1 + sinkPathContinued.length);
fullPath.add(sinkPath);
fullPath.addAll(Arrays.asList(sinkPathContinued));
UnresolvedIdentifier unresolvedIdentifier = UnresolvedIdentifier.of(fullPath);
insertIntoInternal(unresolvedIdentifier, table);
}
/** Wraps the table's query in a sink modify operation and buffers it for later execution. */
private void insertIntoInternal(UnresolvedIdentifier unresolvedIdentifier, Table table) {
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
List<ModifyOperation> modifyOperations = Collections.singletonList(
new CatalogSinkModifyOperation(
objectIdentifier,
table.getQueryOperation()));
// Buffered, not executed: translated later by execute()/translateAndClearBuffer().
buffer(modifyOperations);
}
/** Looks up a table and wraps it in a {@link CatalogQueryOperation}, if present. */
private Optional<CatalogQueryOperation> scanInternal(UnresolvedIdentifier identifier) {
ObjectIdentifier tableIdentifier = catalogManager.qualifyIdentifier(identifier);
return catalogManager.getTable(tableIdentifier)
.map(t -> new CatalogQueryOperation(tableIdentifier, t.getResolvedSchema()));
}
/** Starts a descriptor-based table definition backed by this environment's registration. */
@Override
public ConnectTableDescriptor connect(ConnectorDescriptor connectorDescriptor) {
return new StreamTableDescriptor(registration, connectorDescriptor);
}
/** Returns the names of all registered catalogs, sorted alphabetically. */
@Override
public String[] listCatalogs() {
// Copy, sort in place, and return as array — equivalent to stream().sorted().toArray().
List<String> catalogs = new ArrayList<>(catalogManager.listCatalogs());
Collections.sort(catalogs);
return catalogs.toArray(new String[0]);
}
/** Returns the names of loaded modules. Note: not sorted, unlike the other list* methods. */
@Override
public String[] listModules() {
return moduleManager.listModules().toArray(new String[0]);
}
/** Returns the names of all databases in the current catalog. */
@Override
public String[] listDatabases() {
// NOTE(review): unchecked Optional.get() — relies on the invariant that the current
// catalog always exists in the CatalogManager; confirm, or switch to orElseThrow.
return catalogManager.getCatalog(catalogManager.getCurrentCatalog())
.get()
.listDatabases()
.toArray(new String[0]);
}
/** Returns the names of all tables visible in the current database, sorted alphabetically. */
@Override
public String[] listTables() {
// Copy, sort in place, and return as array — equivalent to stream().sorted().toArray().
List<String> tables = new ArrayList<>(catalogManager.listTables());
Collections.sort(tables);
return tables.toArray(new String[0]);
}
/** Returns the names of all views in the current database, sorted alphabetically. */
@Override
public String[] listViews() {
return catalogManager.listViews()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all temporary tables, sorted alphabetically. */
@Override
public String[] listTemporaryTables() {
return catalogManager.listTemporaryTables()
.stream()
.sorted()
.toArray(String[]::new);
}
/** Returns the names of all temporary views, sorted alphabetically. */
@Override
public String[] listTemporaryViews() {
return catalogManager.listTemporaryViews()
.stream()
.sorted()
.toArray(String[]::new);
}
/**
 * Drops the temporary table at the given path.
 *
 * @return true if a table was dropped; false if none existed (the ValidationException
 *     from the catalog manager is deliberately swallowed to implement this contract)
 */
@Override
public boolean dropTemporaryTable(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryTable(identifier, false);
return true;
} catch (ValidationException e) {
return false;
}
}
/**
 * Drops the temporary view at the given path.
 *
 * @return true if a view was dropped; false if none existed (exception swallowed, as above)
 */
@Override
public boolean dropTemporaryView(String path) {
UnresolvedIdentifier unresolvedIdentifier = parser.parseIdentifier(path);
ObjectIdentifier identifier = catalogManager.qualifyIdentifier(unresolvedIdentifier);
try {
catalogManager.dropTemporaryView(identifier, false);
return true;
} catch (ValidationException e) {
return false;
}
}
/** Returns the names of user-defined functions known to the function catalog. */
@Override
public String[] listUserDefinedFunctions() {
return functionCatalog.getUserDefinedFunctions();
}
/** Returns the names of all functions (built-in and user-defined). */
@Override
public String[] listFunctions() {
return functionCatalog.getFunctions();
}
/** Explains the plan of the given table without extended details. */
@Override
public String explain(Table table) {
return explain(table, false);
}
/** Explains the plan of the given table; {@code extended} adds planner-specific details. */
@Override
public String explain(Table table, boolean extended) {
return planner.explain(Collections.singletonList(table.getQueryOperation()), getExplainDetails(extended));
}
/** Explains the plan of all currently buffered modify operations. */
@Override
public String explain(boolean extended) {
// Upcast ModifyOperation -> Operation via a plain copy instead of a stream map;
// element order and contents are identical.
List<Operation> operations = new ArrayList<>(bufferedModifyOperations);
return planner.explain(operations, getExplainDetails(extended));
}
/** Parses and explains a single SQL statement with the requested extra details. */
@Override
public String explainSql(String statement, ExplainDetail... extraDetails) {
List<Operation> operations = parser.parse(statement);
if (operations.size() != 1) {
throw new TableException("Unsupported SQL query! explainSql() only accepts a single SQL query.");
}
return planner.explain(operations, extraDetails);
}
/** Internal variant that explains already-parsed operations. */
@Override
public String explainInternal(List<Operation> operations, ExplainDetail... extraDetails) {
return planner.explain(operations, extraDetails);
}
/** Returns SQL completion hints for the statement at the given cursor position. */
@Override
public String[] getCompletionHints(String statement, int position) {
return planner.getCompletionHints(statement, position);
}
/**
 * Parses a single SELECT-style SQL query and returns it as a {@link Table}.
 *
 * @throws ValidationException if the text contains more than one statement or the
 *     statement is not a pure query
 */
@Override
public Table sqlQuery(String query) {
List<Operation> operations = parser.parse(query);
if (operations.size() != 1) {
throw new ValidationException(
"Unsupported SQL query! sqlQuery() only accepts a single SQL query.");
}
Operation operation = operations.get(0);
// Defensive: accept only pure queries, explicitly excluding modify operations.
if (operation instanceof QueryOperation && !(operation instanceof ModifyOperation)) {
return createTable((QueryOperation) operation);
} else {
throw new ValidationException(
"Unsupported SQL query! sqlQuery() only accepts a single SQL query of type " +
"SELECT, UNION, INTERSECT, EXCEPT, VALUES, and ORDER_BY.");
}
}
/** Parses and immediately executes a single SQL statement. */
@Override
public TableResult executeSql(String statement) {
List<Operation> operations = parser.parse(statement);
if (operations.size() != 1) {
throw new TableException(UNSUPPORTED_QUERY_IN_EXECUTE_SQL_MSG);
}
return executeOperation(operations.get(0));
}
/** Creates a new, empty statement set bound to this environment. */
@Override
public StatementSet createStatementSet() {
return new StatementSetImpl(this);
}
/**
 * Translates the given modify operations, submits them asynchronously as one job, and
 * returns a result with one BIGINT affected-row-count column per sink.
 */
@Override
public TableResult executeInternal(List<ModifyOperation> operations) {
List<Transformation<?>> transformations = translate(operations);
List<String> sinkIdentifierNames = extractSinkIdentifierNames(operations);
String jobName = "insert-into_" + String.join(",", sinkIdentifierNames);
Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, jobName);
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
TableSchema.Builder builder = TableSchema.builder();
// -1L is a placeholder: the real count is not known at submission time.
Object[] affectedRowCounts = new Long[operations.size()];
for (int i = 0; i < operations.size(); ++i) {
builder.field(sinkIdentifierNames.get(i), DataTypes.BIGINT());
affectedRowCounts[i] = -1L;
}
return TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(builder.build())
.data(Collections.singletonList(Row.of(affectedRowCounts)))
.build();
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
/**
 * Executes a query by attaching a collecting sink, submitting asynchronously, and exposing
 * the collected rows through the returned {@link TableResult}.
 */
@Override
public TableResult executeInternal(QueryOperation operation) {
SelectSinkOperation sinkOperation = new SelectSinkOperation(operation);
List<Transformation<?>> transformations = translate(Collections.singletonList(sinkOperation));
Pipeline pipeline = execEnv.createPipeline(transformations, tableConfig, "collect");
try {
JobClient jobClient = execEnv.executeAsync(pipeline);
// The provider needs the job client to fetch results from the running job.
SelectResultProvider resultProvider = sinkOperation.getSelectResultProvider();
resultProvider.setJobClient(jobClient);
return TableResultImpl.builder()
.jobClient(jobClient)
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(operation.getTableSchema())
.data(resultProvider.getResultIterator())
.setPrintStyle(TableResultImpl.PrintStyle.tableau(
PrintUtils.MAX_COLUMN_WIDTH, PrintUtils.NULL_COLUMN, true, isStreamingMode))
.build();
} catch (Exception e) {
throw new TableException("Failed to execute sql", e);
}
}
/**
 * Legacy API: buffers INSERT-style statements for later {@code execute()} and runs DDL
 * statements eagerly; anything else is rejected.
 */
@Override
public void sqlUpdate(String stmt) {
List<Operation> operations = parser.parse(stmt);
if (operations.size() != 1) {
throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG);
}
Operation operation = operations.get(0);
if (operation instanceof ModifyOperation) {
// INSERT etc. — deferred until the job is executed.
buffer(Collections.singletonList((ModifyOperation) operation));
} else if (operation instanceof CreateTableOperation ||
operation instanceof DropTableOperation ||
operation instanceof AlterTableOperation ||
operation instanceof CreateViewOperation ||
operation instanceof DropViewOperation ||
operation instanceof CreateDatabaseOperation ||
operation instanceof DropDatabaseOperation ||
operation instanceof AlterDatabaseOperation ||
operation instanceof CreateCatalogFunctionOperation ||
operation instanceof CreateTempSystemFunctionOperation ||
operation instanceof DropCatalogFunctionOperation ||
operation instanceof DropTempSystemFunctionOperation ||
operation instanceof AlterCatalogFunctionOperation ||
operation instanceof CreateCatalogOperation ||
operation instanceof DropCatalogOperation ||
operation instanceof UseCatalogOperation ||
operation instanceof UseDatabaseOperation) {
// DDL / catalog switching — executed immediately.
executeOperation(operation);
} else {
throw new TableException(UNSUPPORTED_QUERY_IN_SQL_UPDATE_MSG);
}
}
/**
 * Executes CREATE CATALOG: discovers a {@link CatalogFactory} matching the statement's
 * properties using the user class loader and registers the created catalog.
 */
private TableResult createCatalog(CreateCatalogOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
String catalogName = operation.getCatalogName();
Map<String, String> properties = operation.getProperties();
final CatalogFactory factory = TableFactoryService.find(
CatalogFactory.class,
properties,
userClassLoader);
Catalog catalog = factory.createCatalog(catalogName, properties);
catalogManager.registerCatalog(catalogName, catalog);
return TableResultImpl.TABLE_RESULT_OK;
} catch (CatalogException e) {
throw new ValidationException(exMsg, e);
}
}
/** Builds a single-STRING-column SHOW-style result, one row per object name. */
private TableResult buildShowResult(String columnName, String[] objects) {
// Wrap each name in its own one-element row (loop form of the original stream map).
String[][] rows = new String[objects.length][];
for (int i = 0; i < objects.length; i++) {
rows[i] = new String[] {objects[i]};
}
return buildResult(
new String[] {columnName},
new DataType[] {DataTypes.STRING()},
rows);
}
/**
 * Builds the DESCRIBE result for a table schema: one row per column with name, type,
 * nullability, primary-key membership, computed-column expression and watermark.
 */
private TableResult buildDescribeResult(TableSchema schema) {
// rowtime attribute -> its watermark expression.
Map<String, String> fieldToWatermark =
schema.getWatermarkSpecs()
.stream()
.collect(Collectors.toMap(WatermarkSpec::getRowtimeAttribute, WatermarkSpec::getWatermarkExpr));
// column -> "PRI(<all pk columns>)" for every column that is part of the primary key.
Map<String, String> fieldToPrimaryKey = new HashMap<>();
schema.getPrimaryKey().ifPresent((p) -> {
List<String> columns = p.getColumns();
columns.forEach((c) -> fieldToPrimaryKey.put(c, String.format("PRI(%s)", String.join(", ", columns))));
});
Object[][] rows =
schema.getTableColumns()
.stream()
.map((c) -> {
LogicalType logicalType = c.getType().getLogicalType();
return new Object[]{
c.getName(),
// Nullability is reported in its own column, so strip " NOT NULL" from the type.
StringUtils.removeEnd(logicalType.toString(), " NOT NULL"),
logicalType.isNullable(),
fieldToPrimaryKey.getOrDefault(c.getName(), null),
c.getExpr().orElse(null),
fieldToWatermark.getOrDefault(c.getName(), null)};
}).toArray(Object[][]::new);
return buildResult(
new String[]{"name", "type", "null", "key", "computed column", "watermark"},
new DataType[]{DataTypes.STRING(), DataTypes.STRING(), DataTypes.BOOLEAN(), DataTypes.STRING(), DataTypes.STRING(), DataTypes.STRING()},
rows);
}
/** Assembles a local (no job) TableResult from headers, column types and row data. */
private TableResult buildResult(String[] headers, DataType[] types, Object[][] rows) {
return TableResultImpl.builder()
.resultKind(ResultKind.SUCCESS_WITH_CONTENT)
.tableSchema(
TableSchema.builder().fields(
headers,
types).build())
.data(Arrays.stream(rows).map(Row::of).collect(Collectors.toList()))
.setPrintStyle(TableResultImpl.PrintStyle.tableau(Integer.MAX_VALUE, "", false, false))
.build();
}
/**
 * Extracts sink identifier names from {@link ModifyOperation}s.
 *
 * <p>If multiple ModifyOperations target tables with the same fully-qualified name, a
 * 1-based index suffix ({@code name_1}, {@code name_2}, ...) is appended to each duplicate
 * so every returned name is unique. Order matches the input order.
 *
 * @throws UnsupportedOperationException for operations other than CatalogSinkModifyOperation
 */
private List<String> extractSinkIdentifierNames(List<ModifyOperation> operations) {
List<String> tableNames = new ArrayList<>(operations.size());
// Count occurrences of each fully-qualified sink name.
Map<String, Integer> tableNameToCount = new HashMap<>();
for (ModifyOperation operation : operations) {
if (operation instanceof CatalogSinkModifyOperation) {
ObjectIdentifier identifier = ((CatalogSinkModifyOperation) operation).getTableIdentifier();
String fullName = identifier.asSummaryString();
tableNames.add(fullName);
// Idiomatic counting instead of getOrDefault + put.
tableNameToCount.merge(fullName, 1, Integer::sum);
} else {
throw new UnsupportedOperationException("Unsupported operation: " + operation);
}
}
// Disambiguate duplicated names with a running per-name index.
Map<String, Integer> tableNameToIndex = new HashMap<>();
List<String> result = new ArrayList<>(tableNames.size());
for (String tableName : tableNames) {
if (tableNameToCount.get(tableName) == 1) {
result.add(tableName);
} else {
int index = tableNameToIndex.merge(tableName, 1, Integer::sum);
result.add(tableName + "_" + index);
}
}
return result;
}
/** Gets the catalog for catalogName or throws a ValidationException if it does not exist. */
private Catalog getCatalogOrThrowException(String catalogName) {
return getCatalog(catalogName)
.orElseThrow(() -> new ValidationException(String.format("Catalog %s does not exist", catalogName)));
}
/** Builds the standard "Could not execute <action>" message for failed DDL operations. */
private String getDDLOpExecuteErrorMsg(String action) {
return String.format("Could not execute %s", action);
}
/** Returns the name of the current default catalog. */
@Override
public String getCurrentCatalog() {
return catalogManager.getCurrentCatalog();
}
/** Switches the current default catalog. */
@Override
public void useCatalog(String catalogName) {
catalogManager.setCurrentCatalog(catalogName);
}
/** Returns the name of the current default database. */
@Override
public String getCurrentDatabase() {
return catalogManager.getCurrentDatabase();
}
/** Switches the current default database. */
@Override
public void useDatabase(String databaseName) {
catalogManager.setCurrentDatabase(databaseName);
}
/** Returns the mutable configuration of this environment. */
@Override
public TableConfig getConfig() {
return tableConfig;
}
/** Translates all buffered modify operations (clearing the buffer) and runs them as one job. */
@Override
public JobExecutionResult execute(String jobName) throws Exception {
Pipeline pipeline = execEnv.createPipeline(translateAndClearBuffer(), tableConfig, jobName);
return execEnv.execute(pipeline);
}
/** Returns the SQL parser shared with the planner. */
@Override
public Parser getParser() {
return parser;
}
/** Returns the catalog manager backing this environment. */
@Override
public CatalogManager getCatalogManager() {
return catalogManager;
}
/**
 * Subclasses can override this method to transform the given QueryOperation to a new one with
 * the qualified object identifier. This is needed for some QueryOperations, e.g. JavaDataStreamQueryOperation,
 * which doesn't know the registered identifier when created ({@code fromDataStream(DataStream)}.
 * But the identifier is required when converting this QueryOperation to RelNode.
 */
protected QueryOperation qualifyQueryOperation(ObjectIdentifier identifier, QueryOperation queryOperation) {
// Default: identity — the base environment has nothing to qualify.
return queryOperation;
}
/**
 * Subclasses can override this method to add additional checks.
 *
 * @param tableSource tableSource to validate
 */
protected void validateTableSource(TableSource<?> tableSource) {
TableSourceValidation.validateTableSource(tableSource, tableSource.getTableSchema());
}
/**
 * Translate the buffered operations to Transformations, and clear the buffer.
 *
 * <p>The buffer will be clear even if the `translate` fails. In most cases,
 * the failure is not retryable (e.g. type mismatch, can't generate physical plan).
 * If the buffer is not clear after failure, the following `translate` will also fail.
 */
protected List<Transformation<?>> translateAndClearBuffer() {
List<Transformation<?>> transformations;
try {
transformations = translate(bufferedModifyOperations);
} finally {
// Always clear: a stale buffer would poison every subsequent translate call.
bufferedModifyOperations.clear();
}
return transformations;
}
/** Delegates planning of modify operations to the planner. */
private List<Transformation<?>> translate(List<ModifyOperation> modifyOperations) {
return planner.translate(modifyOperations);
}
/** Appends modify operations to the buffer consumed by execute(). */
private void buffer(List<ModifyOperation> modifyOperations) {
bufferedModifyOperations.addAll(modifyOperations);
}
/**
 * Maps the legacy {@code extended} flag to planner explain details: nothing when false;
 * estimated cost (plus changelog mode in streaming) when true.
 */
@VisibleForTesting
protected ExplainDetail[] getExplainDetails(boolean extended) {
if (!extended) {
return new ExplainDetail[0];
}
return isStreamingMode
? new ExplainDetail[] {ExplainDetail.ESTIMATED_COST, ExplainDetail.CHANGELOG_MODE}
: new ExplainDetail[] {ExplainDetail.ESTIMATED_COST};
}
/**
 * Registers a TableSource under the given name. If a ConnectorCatalogTable with only a sink
 * already exists there, the source is merged into it; any other existing table is an error.
 */
@Override
public void registerTableSourceInternal(String name, TableSource<?> tableSource) {
validateTableSource(tableSource);
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
if (table.isPresent()) {
if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
if (sourceSinkTable.getTableSource().isPresent()) {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
} else {
// Sink-only entry: replace it with a combined source-and-sink table.
ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable.sourceAndSink(
tableSource,
sourceSinkTable.getTableSink().get(),
!IS_STREAM_TABLE);
catalogManager.dropTemporaryTable(objectIdentifier, false);
catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
}
} else {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
}
} else {
ConnectorCatalogTable source = ConnectorCatalogTable.source(tableSource, !IS_STREAM_TABLE);
catalogManager.createTemporaryTable(source, objectIdentifier, false);
}
}
/**
 * Registers a TableSink under the given name. Mirror image of
 * {@link #registerTableSourceInternal}: merges into a source-only ConnectorCatalogTable.
 */
@Override
public void registerTableSinkInternal(String name, TableSink<?> tableSink) {
ObjectIdentifier objectIdentifier = catalogManager.qualifyIdentifier(UnresolvedIdentifier.of(name));
Optional<CatalogBaseTable> table = getTemporaryTable(objectIdentifier);
if (table.isPresent()) {
if (table.get() instanceof ConnectorCatalogTable<?, ?>) {
ConnectorCatalogTable<?, ?> sourceSinkTable = (ConnectorCatalogTable<?, ?>) table.get();
if (sourceSinkTable.getTableSink().isPresent()) {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
} else {
// Source-only entry: replace it with a combined source-and-sink table.
ConnectorCatalogTable sourceAndSink = ConnectorCatalogTable
.sourceAndSink(sourceSinkTable.getTableSource().get(), tableSink, !IS_STREAM_TABLE);
catalogManager.dropTemporaryTable(objectIdentifier, false);
catalogManager.createTemporaryTable(sourceAndSink, objectIdentifier, false);
}
} else {
throw new ValidationException(String.format(
"Table '%s' already exists. Please choose a different name.", name));
}
} else {
ConnectorCatalogTable sink = ConnectorCatalogTable.sink(tableSink, !IS_STREAM_TABLE);
catalogManager.createTemporaryTable(sink, objectIdentifier, false);
}
}
/** Returns the table at the identifier only if it exists AND is temporary. */
private Optional<CatalogBaseTable> getTemporaryTable(ObjectIdentifier identifier) {
return catalogManager.getTable(identifier)
.filter(CatalogManager.TableLookupResult::isTemporary)
.map(CatalogManager.TableLookupResult::getTable);
}
/**
 * Executes CREATE [TEMPORARY] FUNCTION: temporary functions go to the function catalog,
 * permanent ones to the owning catalog.
 */
private TableResult createCatalogFunction(
CreateCatalogFunctionOperation createCatalogFunctionOperation) {
String exMsg = getDDLOpExecuteErrorMsg(createCatalogFunctionOperation.asSummaryString());
try {
if (createCatalogFunctionOperation.isTemporary()) {
functionCatalog.registerTemporaryCatalogFunction(
UnresolvedIdentifier.of(createCatalogFunctionOperation.getFunctionIdentifier().toList()),
createCatalogFunctionOperation.getCatalogFunction(),
createCatalogFunctionOperation.isIgnoreIfExists());
} else {
Catalog catalog = getCatalogOrThrowException(
createCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
catalog.createFunction(
createCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
createCatalogFunctionOperation.getCatalogFunction(),
createCatalogFunctionOperation.isIgnoreIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
// Already user-facing; rethrow unchanged.
throw e;
} catch (FunctionAlreadyExistException e) {
throw new ValidationException(e.getMessage(), e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
/** Executes ALTER FUNCTION; altering temporary catalog functions is not supported. */
private TableResult alterCatalogFunction(AlterCatalogFunctionOperation alterCatalogFunctionOperation) {
String exMsg = getDDLOpExecuteErrorMsg(alterCatalogFunctionOperation.asSummaryString());
try {
CatalogFunction function = alterCatalogFunctionOperation.getCatalogFunction();
if (alterCatalogFunctionOperation.isTemporary()) {
throw new ValidationException(
"Alter temporary catalog function is not supported");
} else {
Catalog catalog = getCatalogOrThrowException(
alterCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
catalog.alterFunction(
alterCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
function,
alterCatalogFunctionOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw e;
} catch (FunctionNotExistException e) {
throw new ValidationException(e.getMessage(), e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
/** Executes DROP [TEMPORARY] FUNCTION against the function catalog or the owning catalog. */
private TableResult dropCatalogFunction(DropCatalogFunctionOperation dropCatalogFunctionOperation) {
String exMsg = getDDLOpExecuteErrorMsg(dropCatalogFunctionOperation.asSummaryString());
try {
if (dropCatalogFunctionOperation.isTemporary()) {
functionCatalog.dropTempCatalogFunction(
dropCatalogFunctionOperation.getFunctionIdentifier(),
dropCatalogFunctionOperation.isIfExists());
} else {
Catalog catalog = getCatalogOrThrowException
(dropCatalogFunctionOperation.getFunctionIdentifier().getCatalogName());
catalog.dropFunction(
dropCatalogFunctionOperation.getFunctionIdentifier().toObjectPath(),
dropCatalogFunctionOperation.isIfExists());
}
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw e;
} catch (FunctionNotExistException e) {
throw new ValidationException(e.getMessage(), e);
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
/** Executes CREATE TEMPORARY SYSTEM FUNCTION via the function catalog. */
private TableResult createSystemFunction(CreateTempSystemFunctionOperation operation) {
String exMsg = getDDLOpExecuteErrorMsg(operation.asSummaryString());
try {
functionCatalog.registerTemporarySystemFunction(
operation.getFunctionName(),
operation.getFunctionClass(),
operation.getFunctionLanguage(),
operation.isIgnoreIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
// Already user-facing; rethrow unchanged.
throw e;
} catch (Exception e) {
throw new TableException(exMsg, e);
}
}
/** Executes DROP TEMPORARY SYSTEM FUNCTION via the function catalog. */
private TableResult dropSystemFunction(DropTempSystemFunctionOperation operation) {
try {
functionCatalog.dropTemporarySystemFunction(
operation.getFunctionName(),
operation.isIfExists());
return TableResultImpl.TABLE_RESULT_OK;
} catch (ValidationException e) {
throw e;
} catch (Exception e) {
throw new TableException(getDDLOpExecuteErrorMsg(operation.asSummaryString()), e);
}
}
/** Wraps a query operation in a {@link TableImpl} bound to this environment. */
protected TableImpl createTable(QueryOperation tableOperation) {
return TableImpl.createTable(
this,
tableOperation,
operationTreeBuilder,
functionCatalog.asLookup(parser::parseIdentifier));
}
} |
Can we combine queries with `;`? E.g.: ```java BeamSqlLine.testMain( new String[] { "-e", "CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR) TYPE 'test'; \n" + "INSERT INTO table_test SELECT '3', 'foo'; \n" + "INSERT INTO table_test SELECT '3', 'bar'; \n" + "SELECT col_a, count(*) FROM table_test GROUP BY col_a;" }); ``` | public void testSqlLine_GroupBy() throws Exception {
BeamSqlLine.testMain(
new String[] {
"-e",
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR) TYPE 'test';",
"-e",
"INSERT INTO table_test SELECT '3', 'foo';",
"-e",
"INSERT INTO table_test SELECT '3', 'bar';",
"-e",
"INSERT INTO table_test SELECT '4', 'foo';",
"-e",
"SELECT col_a, count(*) FROM table_test GROUP BY col_a;"
},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(byteArrayOutputStream.toString(), "3", "2");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "4", "1");
} | "INSERT INTO table_test SELECT '4', 'foo';", | public void testSqlLine_GroupBy() throws Exception {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
String[] args =
buildArgs(
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR) TYPE 'test';",
"INSERT INTO table_test SELECT '3', 'foo';",
"INSERT INTO table_test SELECT '3', 'bar';",
"INSERT INTO table_test SELECT '4', 'foo';",
"SELECT col_a, count(*) FROM table_test GROUP BY col_a;");
BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);
List<List<String>> lines = toLines(byteArrayOutputStream);
assertThat(
Arrays.asList(Arrays.asList("3", "2"), Arrays.asList("4", "1")),
everyItem(IsIn.isOneOf(lines.toArray())));
} | class BeamSqlLineTest {
// Temporary directory for tests that create real table files (see classLoader_readFile).
@Rule public TemporaryFolder folder = new TemporaryFolder();
// Captures sqlline console output for assertions; recreated for every test in setUp().
public ByteArrayOutputStream byteArrayOutputStream;
/** Fresh output buffer per test so assertions never see a previous test's output. */
@Before
public void setUp() {
byteArrayOutputStream = new ByteArrayOutputStream();
}
/** Smoke test: no arguments must not throw. */
@Test
public void testSqlLine_emptyArgs() throws Exception {
BeamSqlLine.main(new String[] {});
}
/** Smoke test: an empty -e command must not throw. */
@Test
public void testSqlLine_nullCommand() throws Exception {
BeamSqlLine.main(new String[] {"-e", ""});
}
/** Smoke test: a trivial SELECT must run without throwing. */
@Test
public void testSqlLine_simple() throws Exception {
BeamSqlLine.main(new String[] {"-e", "SELECT 1;"});
}
/** Smoke test: a string-literal SELECT must parse and run without throwing. */
@Test
public void testSqlLine_parse() throws Exception {
BeamSqlLine.main(new String[] {"-e", "SELECT 'beam';"});
}
/** Smoke test: CREATE TABLE followed by DROP TABLE must run without throwing. */
@Test
public void testSqlLine_ddl() throws Exception {
BeamSqlLine.main(
new String[] {
"-e", "CREATE TABLE test (id INTEGER) TYPE 'text';", "-e", "DROP TABLE test;"
});
}
/** Exercises a text-backed table whose LOCATION points at a real (empty) temp file. */
@Test
public void classLoader_readFile() throws Exception {
File simpleTable = folder.newFile();
BeamSqlLine.main(
new String[] {
"-e",
"CREATE TABLE test (id INTEGER) TYPE 'text' LOCATION '"
+ simpleTable.getAbsolutePath()
+ "';",
"-e",
"SELECT * FROM test;",
"-e",
"DROP TABLE test;"
});
}
// Projects three constant literals and checks the captured output contains exactly those
// values (positive check) and not near-miss variants (negative check).
@Test
public void testSqlLine_select() throws Exception {
BeamSqlLine.testMain(
new String[] {"-e", "SELECT 3, 'hello', DATE '2018-05-28';"},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(byteArrayOutputStream.toString(), "3", "hello", "2018-05-28");
verifyStringOutputFalse(byteArrayOutputStream.toString(), "4", "hella", "2018-09-09");
}
// Inserts one row into a 'test'-type table and checks both the column headers and the
// row values appear in the CLI output.
@Test
public void testSqlLine_selectFromTable() throws Exception {
BeamSqlLine.testMain(
new String[] {
"-e",
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR, col_c VARCHAR, col_x TINYINT, col_y INT, col_z BIGINT) TYPE 'test';",
"-e",
"INSERT INTO table_test VALUES ('a', 'b', 'c', 1, 2, 3);",
"-e",
"SELECT * FROM table_test;"
},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(
byteArrayOutputStream.toString(), "col_a", "col_b", "col_c", "col_x", "col_y", "col_z");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "a", "b", "c", "1", "2", "3");
}
// Exercises INSERT ... SELECT into a table and verifies the inserted row is queryable.
@Test
public void testSqlLine_insertSelect() throws Exception {
BeamSqlLine.testMain(
new String[] {
"-e",
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR) TYPE 'test';",
"-e",
"INSERT INTO table_test SELECT '3', 'hello';",
"-e",
"SELECT * FROM table_test;"
},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(byteArrayOutputStream.toString(), "3", "hello");
}
// Exercises TUMBLE (fixed) windowing: two rows one second apart must land in two
// distinct one-second windows, each with a count of 1.
// NOTE(review): the original carried a duplicated @Test annotation, which is a
// compile-time error for a non-repeatable annotation in Java; exactly one is kept.
@Test
public void testSqlLine_fixedWindow() throws Exception {
BeamSqlLine.testMain(
new String[] {
"-e",
"CREATE TABLE table_test (col_a VARCHAR, col_b TIMESTAMP) TYPE 'test';",
"-e",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:06';",
"-e",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:07';",
"-e",
"SELECT TUMBLE_START(col_b, INTERVAL '1' SECOND), count(*) FROM table_test GROUP BY TUMBLE(col_b, INTERVAL '1' SECOND);"
},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:06", "1");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:07", "1");
}
// Exercises HOP (sliding) windowing with a 1s hop and 2s width: each row contributes
// to two overlapping windows, so interior window-ends see counts of 2 and the edges 1.
@Test
public void testSqlLine_slidingWindow() throws Exception {
BeamSqlLine.testMain(
new String[] {
"-e",
"CREATE TABLE table_test (col_a VARCHAR, col_b TIMESTAMP) TYPE 'test';",
"-e",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:06';",
"-e",
"INSERT INTO table_test SELECT '4', TIMESTAMP '2018-07-01 21:26:07';",
"-e",
"INSERT INTO table_test SELECT '6', TIMESTAMP '2018-07-01 21:26:08';",
"-e",
"INSERT INTO table_test SELECT '7', TIMESTAMP '2018-07-01 21:26:09';",
"-e",
"SELECT HOP_END(col_b, INTERVAL '1' SECOND, INTERVAL '2' SECOND), count(*) FROM table_test GROUP BY HOP(col_b, INTERVAL '1' SECOND, INTERVAL '2' SECOND);"
},
new PrintStream(byteArrayOutputStream));
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:07", "1");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:08", "2");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:09", "2");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:10", "2");
verifyStringOutputTrue(byteArrayOutputStream.toString(), "2018-07-01 21:26:11", "1");
}
// Asserts that every expected token occurs somewhere in the captured query output.
private void verifyStringOutputTrue(String queryOutput, String... strs) {
    for (int i = 0; i < strs.length; i++) {
        Assert.assertTrue(queryOutput.contains(strs[i]));
    }
}
// Asserts that none of the given tokens occur anywhere in the captured query output.
private void verifyStringOutputFalse(String queryOutput, String... strs) {
    for (int idx = 0; idx < strs.length; idx++) {
        Assert.assertFalse(queryOutput.contains(strs[idx]));
    }
}
} | class BeamSqlLineTest {
private static final String QUERY_ARG = "-e";
@Rule public TemporaryFolder folder = new TemporaryFolder();
// Smoke test: the CLI entry point must tolerate an empty argument list without throwing.
@Test
public void testSqlLine_emptyArgs() throws Exception {
BeamSqlLine.main(new String[] {});
}
// Smoke test: an empty "-e" command string must be handled gracefully.
@Test
public void testSqlLine_nullCommand() throws Exception {
BeamSqlLine.main(new String[] {"-e", ""});
}
// Smoke test: the simplest possible SELECT must execute end-to-end.
@Test
public void testSqlLine_simple() throws Exception {
BeamSqlLine.main(new String[] {"-e", "SELECT 1;"});
}
// Smoke test: a string-literal projection must parse and execute.
@Test
public void testSqlLine_parse() throws Exception {
BeamSqlLine.main(new String[] {"-e", "SELECT 'beam';"});
}
// Smoke test: CREATE TABLE followed by DROP TABLE must round-trip without error.
@Test
public void testSqlLine_ddl() throws Exception {
BeamSqlLine.main(
new String[] {
"-e", "CREATE TABLE test (id INTEGER) TYPE 'text';", "-e", "DROP TABLE test;"
});
}
// Verifies a 'text'-type table backed by a real (empty) temp file can be created,
// scanned, and dropped — i.e. the table provider's class loading and file access work.
@Test
public void classLoader_readFile() throws Exception {
File simpleTable = folder.newFile();
BeamSqlLine.main(
new String[] {
"-e",
"CREATE TABLE test (id INTEGER) TYPE 'text' LOCATION '"
+ simpleTable.getAbsolutePath()
+ "';",
"-e",
"SELECT * FROM test;",
"-e",
"DROP TABLE test;"
});
}
// Runs a constant-literal SELECT and checks that the projected row appears in the
// parsed CLI output.
@Test
public void testSqlLine_select() throws Exception {
    ByteArrayOutputStream output = new ByteArrayOutputStream();
    BeamSqlLine.runSqlLine(
        buildArgs("SELECT 3, 'hello', DATE '2018-05-28';"), null, output, null);
    List<List<String>> parsed = toLines(output);
    assertThat(
        Arrays.asList(Arrays.asList("3", "hello", "2018-05-28")),
        everyItem(IsIn.isOneOf(parsed.toArray())));
}
// Inserts one row into a 'test'-type table and checks both the header line and the
// row values appear among the parsed output lines.
@Test
public void testSqlLine_selectFromTable() throws Exception {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
String[] args =
buildArgs(
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR, "
+ "col_c VARCHAR, col_x TINYINT, col_y INT, col_z BIGINT) TYPE 'test';",
"INSERT INTO table_test VALUES ('a', 'b', 'c', 1, 2, 3);",
"SELECT * FROM table_test;");
BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);
List<List<String>> lines = toLines(byteArrayOutputStream);
assertThat(
Arrays.asList(
Arrays.asList("col_a", "col_b", "col_c", "col_x", "col_y", "col_z"),
Arrays.asList("a", "b", "c", "1", "2", "3")),
everyItem(IsIn.isOneOf(lines.toArray())));
}
// Exercises INSERT ... SELECT into a table and verifies the inserted row is queryable.
@Test
public void testSqlLine_insertSelect() throws Exception {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
String[] args =
buildArgs(
"CREATE TABLE table_test (col_a VARCHAR, col_b VARCHAR) TYPE 'test';",
"INSERT INTO table_test SELECT '3', 'hello';",
"SELECT * FROM table_test;");
BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);
List<List<String>> lines = toLines(byteArrayOutputStream);
assertThat(
Arrays.asList(Arrays.asList("3", "hello")), everyItem(IsIn.isOneOf(lines.toArray())));
}
// Exercises TUMBLE (fixed) windowing: two rows one second apart must land in two
// distinct one-second windows, each with a count of 1.
// NOTE(review): the original carried a duplicated @Test annotation, which is a
// compile-time error for a non-repeatable annotation in Java; exactly one is kept.
@Test
public void testSqlLine_fixedWindow() throws Exception {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
String[] args =
buildArgs(
"CREATE TABLE table_test (col_a VARCHAR, col_b TIMESTAMP) TYPE 'test';",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:06';",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:07';",
"SELECT TUMBLE_START(col_b, INTERVAL '1' SECOND), count(*) FROM table_test "
+ "GROUP BY TUMBLE(col_b, INTERVAL '1' SECOND);");
BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);
List<List<String>> lines = toLines(byteArrayOutputStream);
assertThat(
Arrays.asList(
Arrays.asList("2018-07-01 21:26:06", "1"), Arrays.asList("2018-07-01 21:26:07", "1")),
everyItem(IsIn.isOneOf(lines.toArray())));
}
// Exercises HOP (sliding) windowing with a 1s hop and 2s width: each row contributes
// to two overlapping windows, so interior window-ends see counts of 2 and the edges 1.
@Test
public void testSqlLine_slidingWindow() throws Exception {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
String[] args =
buildArgs(
"CREATE TABLE table_test (col_a VARCHAR, col_b TIMESTAMP) TYPE 'test';",
"INSERT INTO table_test SELECT '3', TIMESTAMP '2018-07-01 21:26:06';",
"INSERT INTO table_test SELECT '4', TIMESTAMP '2018-07-01 21:26:07';",
"INSERT INTO table_test SELECT '6', TIMESTAMP '2018-07-01 21:26:08';",
"INSERT INTO table_test SELECT '7', TIMESTAMP '2018-07-01 21:26:09';",
"SELECT HOP_END(col_b, INTERVAL '1' SECOND, INTERVAL '2' SECOND), count(*) FROM "
+ "table_test GROUP BY HOP(col_b, INTERVAL '1' SECOND, INTERVAL '2' SECOND);");
BeamSqlLine.runSqlLine(args, null, byteArrayOutputStream, null);
List<List<String>> lines = toLines(byteArrayOutputStream);
assertThat(
Arrays.asList(
Arrays.asList("2018-07-01 21:26:07", "1"),
Arrays.asList("2018-07-01 21:26:08", "2"),
Arrays.asList("2018-07-01 21:26:09", "2"),
Arrays.asList("2018-07-01 21:26:10", "2"),
Arrays.asList("2018-07-01 21:26:11", "1")),
everyItem(IsIn.isOneOf(lines.toArray())));
}
/**
 * Prefixes each SQL statement with the {@code -e} flag expected by sqlline, producing
 * the interleaved argv array {@code [-e, stmt, -e, stmt, ...]} passed to BeamSqlLine.
 *
 * @param strs SQL statements, one per {@code -e} occurrence
 * @return the CLI argument array
 */
private String[] buildArgs(String... strs) {
    // Fix: the original used a raw ArrayList (unchecked warning); parameterize and
    // pre-size it, and use the idiomatic zero-length array for toArray.
    List<String> argsList = new ArrayList<>(strs.length * 2);
    for (String str : strs) {
        argsList.add(QUERY_ARG);
        argsList.add(str);
    }
    return argsList.toArray(new String[0]);
}
// Splits the captured CLI output into lines, then parses each line into its
// pipe-delimited fields via splitFields.
private List<List<String>> toLines(ByteArrayOutputStream outputStream) {
    String captured = outputStream.toString();
    return Arrays.stream(captured.split("\n"))
        .map(BeamSqlLineTest::splitFields)
        .collect(toList());
}
/**
 * Splits one sqlline output row on {@code |} separators, trimming whitespace from each
 * cell and dropping empty cells (leading/trailing separators yield empty strings).
 *
 * @param outputLine a single raw output line
 * @return the non-empty, trimmed field values in order
 */
private static List<String> splitFields(String outputLine) {
    return Arrays.stream(outputLine.split("\\|"))
        .map(String::trim)                  // method reference over the trivial lambda
        .filter(field -> !field.isEmpty())  // clearer than field.length() != 0
        .collect(toList());
}
} |
I don't think so. If the key is available locally and is valid the client will always try to perform the operation locally. Might want to change that behavior to what you suggested in a future PR. | private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key endpoint in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key name in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key version in key identifier is invalid."));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e));
}
} | throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e)); | private void unpackAndValidateId(String keyId) {
if (CoreUtils.isNullOrEmpty(keyId)) {
throw logger.logExceptionAsError(new IllegalArgumentException("Key Id is invalid"));
}
try {
URL url = new URL(keyId);
String[] tokens = url.getPath().split("/");
String endpoint = url.getProtocol() + ":
String keyName = (tokens.length >= 3 ? tokens[2] : null);
String version = (tokens.length >= 4 ? tokens[3] : null);
this.keyCollection = (tokens.length >= 2 ? tokens[1] : null);
if (Strings.isNullOrEmpty(endpoint)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key endpoint in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(keyName)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key name in key identifier is invalid."));
} else if (Strings.isNullOrEmpty(version)) {
throw logger.logExceptionAsError(
new IllegalArgumentException("Key version in key identifier is invalid."));
}
} catch (MalformedURLException e) {
throw logger.logExceptionAsError(new IllegalArgumentException("The key identifier is malformed.", e));
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
static final String KEYVAULT_TRACING_NAMESPACE_VALUE = "Microsoft.KeyVault";
JsonWebKey key;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private final CryptographyService service;
private final String keyId;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private String keyCollection;
/**
* Creates a {@link CryptographyAsyncClient} that uses a given {@link HttpPipeline pipeline} to service requests.
*
* @param keyId The Azure Key Vault key identifier to use for cryptography operations.
* @param pipeline {@link HttpPipeline} that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
// Fails fast on a malformed identifier; also records the key collection segment.
unpackAndValidateId(keyId);
this.keyId = keyId;
this.service = RestProxy.create(CryptographyService.class, pipeline);
this.cryptographyServiceClient = new CryptographyServiceClient(keyId, service, version);
// No local JSON Web Key in this mode — it stays null until populated elsewhere.
this.key = null;
}
/**
* Creates a {@link CryptographyAsyncClient} that uses a {@link JsonWebKey} to perform local cryptography
* operations.
*
* @param jsonWebKey The {@link JsonWebKey} to use for local cryptography operations.
*/
CryptographyAsyncClient(JsonWebKey jsonWebKey) {
    // Null-check first: a missing key is a programming error (NullPointerException),
    // while a malformed key is reported as IllegalArgumentException below.
    Objects.requireNonNull(jsonWebKey, "The JSON Web Key is required.");

    // Consistency: route validation failures through logger.logExceptionAsError like
    // the rest of this class (e.g. initializeCryptoClients); messages are unchanged.
    if (!jsonWebKey.isValid()) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("The JSON Web Key is not valid."));
    }
    if (jsonWebKey.getKeyOps() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("The JSON Web Key's key operations property is not configured."));
    }
    if (jsonWebKey.getKeyType() == null) {
        throw logger.logExceptionAsError(
            new IllegalArgumentException("The JSON Web Key's key type property is not configured."));
    }

    // Local-only mode: no key identifier and no service-backed client are configured.
    this.key = jsonWebKey;
    this.keyId = null;
    this.service = null;
    this.cryptographyServiceClient = null;
    // Select the local crypto implementation that matches the key's type.
    initializeCryptoClients();
}
// Selects the local cryptography implementation matching the key's type (RSA, EC, or
// symmetric/OCT). Idempotent: returns immediately if a local client already exists.
private void initializeCryptoClients() {
if (localKeyCryptographyClient != null) {
return;
}
if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
this.localKeyCryptographyClient = new RsaKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
this.localKeyCryptographyClient = new EcKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else if (key.getKeyType().equals(OCT) || key.getKeyType().equals(OCT_HSM)) {
this.localKeyCryptographyClient =
new SymmetricKeyCryptographyClient(this.key, this.cryptographyServiceClient);
} else {
// Unknown key type: reject, logging the exception before it propagates.
throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
"The JSON Web Key type: %s is not supported.", this.key.getKeyType().toString())));
}
}
// Exposes the configured key identifier reactively. Mono.defer postpones evaluation to
// subscription time; note keyId is null in local-JsonWebKey mode, in which case
// Mono.just presumably surfaces a NullPointerException to the subscriber — TODO confirm.
Mono<String> getKeyId() {
return Mono.defer(() -> Mono.just(this.keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<KeyVaultKey> getKey() {
try {
// Delegate to the Response-returning variant and unwrap just the value.
return getKeyWithResponse().flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
// Convert assembly-time failures into an error Mono instead of throwing.
return monoError(logger, ex);
}
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<KeyVaultKey>> getKeyWithResponse() {
try {
// withContext supplies the reactor Context to the package-private overload.
return withContext(this::getKeyWithResponse);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Service-backed key fetch. Only possible when the client was built from a key
// identifier; the local-JsonWebKey constructor leaves cryptographyServiceClient null,
// in which case this operation is rejected.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
if (cryptographyServiceClient != null) {
return cryptographyServiceClient.getKey(context);
} else {
throw logger.logExceptionAsError(new UnsupportedOperationException(
"Operation not supported when an Azure Key Vault key identifier was not provided when creating this "
+ "client"));
}
}
// Fetches key material stored in the secrets collection as a JsonWebKey.
// NOTE(review): unlike getKeyWithResponse(Context), this does not guard against a null
// cryptographyServiceClient (local-JsonWebKey mode) — confirm it is never reached there.
Mono<JsonWebKey> getSecretKey() {
try {
return withContext(context -> cryptographyServiceClient.getSecretKey(context))
.flatMap(FluxUtil::toMono);
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting
* the specified {@code plainText}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plainText The content to be encrypted.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plainText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plainText) {
// Convenience overload: wraps the arguments in EncryptOptions and delegates to the
// package-private (options, context) variant with a null Context.
// NOTE(review): this bypasses withContext and the null-check in encrypt(EncryptOptions),
// unlike the analogous decrypt(algorithm, cipherText) overload — confirm intentional.
return encrypt(new EncryptOptions(algorithm, plainText, null, null), null);
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting
* the specified {@code plainText}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param encryptOptions The parameters to use in the encryption operation.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plainText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptOptions encryptOptions) {
// Eager null-check so the caller gets a synchronous NPE for a programming error.
Objects.requireNonNull(encryptOptions, "'encryptOptions' cannot be null.");
try {
return withContext(context -> encrypt(encryptOptions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Core encrypt path: use the local key when ensureValidKeyAvailable reports one;
// otherwise fall through to the remote service. Rejects the call when the key's
// declared operations do not include ENCRYPT.
Mono<EncryptResult> encrypt(EncryptOptions encryptOptions, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
// No usable local key: perform the operation service-side.
return cryptographyServiceClient.encrypt(encryptOptions, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Encrypt operation is missing permission/not supported for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.encryptAsync(encryptOptions, context, key);
});
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param cipherText The content to be decrypted.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code cipherText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] cipherText) {
// Convenience overload: wraps the arguments and delegates to the public
// DecryptOptions variant (which applies withContext and the null-check).
return decrypt(new DecryptOptions(algorithm, cipherText, null, null, null));
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param decryptOptions The parameters to use in the decryption operation.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code cipherText} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(DecryptOptions decryptOptions) {
// Eager null-check so the caller gets a synchronous NPE for a programming error.
Objects.requireNonNull(decryptOptions, "'decryptOptions' cannot be null.");
try {
return withContext(context -> decrypt(decryptOptions, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Core decrypt path: use the local key when ensureValidKeyAvailable reports one;
// otherwise fall through to the remote service. Rejects the call when the key's
// declared operations do not include DECRYPT.
Mono<DecryptResult> decrypt(DecryptOptions decryptOptions, Context context) {
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.decrypt(decryptOptions, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Decrypt operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.decryptAsync(decryptOptions, context, key);
});
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Sings the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code digest} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
try {
// Argument null-checks happen in the (algorithm, digest, context) overload.
return withContext(context -> sign(algorithm, digest, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Core sign path: validates arguments, then signs locally when a valid key is
// available and the key's operations include SIGN; otherwise uses the service.
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.sign(algorithm, digest, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Sign operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
});
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature. This operation
* requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include: {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature was created.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code digest} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
try {
// Argument null-checks happen in the Context-taking overload.
return withContext(context -> verify(algorithm, digest, signature, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Core verify path: validates arguments, then verifies locally when a valid key is
// available and the key's operations include VERIFY; otherwise uses the service.
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
Objects.requireNonNull(digest, "Digest content cannot be null.");
Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
return ensureValidKeyAvailable().flatMap(available -> {
if (!available) {
return cryptographyServiceClient.verify(algorithm, digest, signature, context);
}
if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
"Verify operation is not allowed for key with id: %s", key.getId()))));
}
return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
});
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the {@code keys/wrapKey} permission for non-local
* operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped.
*
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* contains the wrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code key} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for wrap operation.
* @throws UnsupportedOperationException If the wrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
try {
// Argument null-checks happen in the Context-taking overload.
return withContext(context -> wrapKey(algorithm, key, context));
} catch (RuntimeException ex) {
return monoError(logger, ex);
}
}
// Context-aware wrap: uses the cached local key when it is available and valid,
// otherwise falls back to the Key Vault service.
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (!localKeyReady) {
            // No usable local key material; perform the operation service-side.
            return cryptographyServiceClient.wrapKey(algorithm, key, context);
        }
        boolean permitted = checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY);
        if (!permitted) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Wrap Key operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
    });
}
/**
* Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
* is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap. This
* operation requires the {@code keys/unwrapKey} permission for non-local operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
* specified encrypted key content. Possible values for asymmetric keys include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param encryptedKey The encrypted key content to unwrap.
*
* @return A {@link Mono} containing an {@link UnwrapResult} whose {@link UnwrapResult
* key} contains the unwrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code encryptedKey} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for wrap operation.
* @throws UnsupportedOperationException If the unwrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(ctx -> unwrapKey(algorithm, encryptedKey, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware unwrap: local client when a valid key is cached, service otherwise.
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
    Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
    Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (!localKeyReady) {
            return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
        }
        boolean permitted = checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY);
        if (!permitted) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Unwrap Key operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
    });
}
/**
* Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
* and symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
* Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData
*
* @param algorithm The algorithm to use for signing.
* @param data The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code data} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(ctx -> signData(algorithm, data, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware sign-over-raw-data: local client when a valid key is cached, service otherwise.
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (!localKeyReady) {
            return cryptographyServiceClient.signData(algorithm, data, context);
        }
        boolean permitted = checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN);
        if (!permitted) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign Operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
    });
}
/**
* Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
* keys and asymmetric keys. In case of asymmetric keys public portion of the key is used to verify the signature.
* This operation requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData
*
* @param algorithm The algorithm to use for signing.
* @param data The raw content against which signature is to be verified.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code data} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(ctx -> verifyData(algorithm, data, signature, ctx));
    } catch (RuntimeException e) {
        return monoError(logger, e);
    }
}
// Context-aware verify-over-raw-data: local client when a valid key is cached, service otherwise.
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyReady -> {
        if (!localKeyReady) {
            return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
        }
        boolean permitted = checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY);
        if (!permitted) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
    });
}
// Returns true when the requested operation appears in the key's permitted operations list.
// NOTE(review): assumes 'operations' is non-null — callers pass this.key.getKeyOps(); verify upstream.
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
    return operations.contains(keyOperation);
}
/**
 * Ensures the local {@code key} material is present and valid before a local crypto operation.
 * Fetches the key (from the secrets or keys collection) when it is missing or invalid, and
 * initializes the local crypto clients on success.
 *
 * @return A {@link Mono} emitting {@code true} when a valid local key is available (local path may
 * be used), or {@code false} when the caller should fall back to the service client.
 */
private Mono<Boolean> ensureValidKeyAvailable() {
    boolean keyNotAvailable = (key == null && keyCollection != null);
    boolean keyNotValid = (key != null && !key.isValid());
    if (!(keyNotAvailable || keyNotValid)) {
        // Key is already valid, or this client is service-only; nothing to fetch.
        return Mono.defer(() -> Mono.just(true));
    }
    // Null-safe comparison: keyCollection may be null (e.g. client built directly from a
    // JsonWebKey), and the original keyCollection.equals(...) would have thrown an NPE when the
    // cached key was present but invalid.
    if (SECRETS_COLLECTION.equals(keyCollection)) {
        return getSecretKey().map(jsonWebKey -> {
            key = jsonWebKey;
            return initializeIfKeyValid();
        });
    }
    return getKey().map(keyVaultKey -> {
        key = keyVaultKey.getKey();
        return initializeIfKeyValid();
    });
}

// Initializes the local crypto clients when the freshly retrieved key is valid; returns whether
// the local path may now be used.
private boolean initializeIfKeyValid() {
    if (key.isValid()) {
        initializeCryptoClients();
        return true;
    }
    return false;
}
// Package-private accessor for the service-backed client (used by sibling clients/tests).
CryptographyServiceClient getCryptographyServiceClient() {
    return cryptographyServiceClient;
}
// Package-private mutator allowing the service-backed client to be swapped (e.g. in tests).
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
    this.cryptographyServiceClient = serviceClient;
}
} | class CryptographyAsyncClient {
static final String KEY_VAULT_SCOPE = "https:
static final String SECRETS_COLLECTION = "secrets";
static final String KEYVAULT_TRACING_NAMESPACE_VALUE = "Microsoft.KeyVault";
JsonWebKey key;
private final ClientLogger logger = new ClientLogger(CryptographyAsyncClient.class);
private final CryptographyService service;
private final HttpPipeline pipeline;
private final String keyId;
private CryptographyServiceClient cryptographyServiceClient;
private LocalKeyCryptographyClient localKeyCryptographyClient;
private String keyCollection;
/**
* Creates a {@link CryptographyAsyncClient} that uses a given {@link HttpPipeline pipeline} to service requests.
*
* @param keyId The Azure Key Vault key identifier to use for cryptography operations.
* @param pipeline {@link HttpPipeline} that the HTTP requests and responses flow through.
* @param version {@link CryptographyServiceVersion} of the service to be used when making requests.
*/
CryptographyAsyncClient(String keyId, HttpPipeline pipeline, CryptographyServiceVersion version) {
    // Presumably validates the identifier and derives keyCollection — TODO confirm; helper not in view.
    unpackAndValidateId(keyId);
    this.keyId = keyId;
    this.pipeline = pipeline;
    this.service = RestProxy.create(CryptographyService.class, pipeline);
    this.cryptographyServiceClient = new CryptographyServiceClient(keyId, service, version);
    // Key material is fetched lazily on first cryptographic operation.
    this.key = null;
}
/**
* Creates a {@link CryptographyAsyncClient} that uses a {@link JsonWebKey} to perform local cryptography
* operations.
*
* @param jsonWebKey The {@link JsonWebKey} to use for local cryptography operations.
*/
CryptographyAsyncClient(JsonWebKey jsonWebKey) {
    // Local-only construction: all operations run against the provided key; no HTTP pipeline.
    Objects.requireNonNull(jsonWebKey, "The JSON Web Key is required.");
    if (!jsonWebKey.isValid()) {
        throw new IllegalArgumentException("The JSON Web Key is not valid.");
    }
    if (jsonWebKey.getKeyOps() == null) {
        throw new IllegalArgumentException("The JSON Web Key's key operations property is not configured.");
    }
    if (jsonWebKey.getKeyType() == null) {
        throw new IllegalArgumentException("The JSON Web Key's key type property is not configured.");
    }
    this.key = jsonWebKey;
    this.keyId = jsonWebKey.getId();
    // No service backing in this mode; service-dependent operations will be rejected.
    this.pipeline = null;
    this.service = null;
    this.cryptographyServiceClient = null;
    initializeCryptoClients();
}
// Selects the local cryptography implementation matching the key type (RSA/EC/AES-oct).
// Idempotent: returns immediately if a local client has already been created.
private void initializeCryptoClients() {
    if (localKeyCryptographyClient != null) {
        return;
    }
    if (key.getKeyType().equals(RSA) || key.getKeyType().equals(RSA_HSM)) {
        this.localKeyCryptographyClient = new RsaKeyCryptographyClient(this.key, this.cryptographyServiceClient);
    } else if (key.getKeyType().equals(EC) || key.getKeyType().equals(EC_HSM)) {
        this.localKeyCryptographyClient = new EcKeyCryptographyClient(this.key, this.cryptographyServiceClient);
    } else if (key.getKeyType().equals(OCT) || key.getKeyType().equals(OCT_HSM)) {
        this.localKeyCryptographyClient = new AesKeyCryptographyClient(this.key, this.cryptographyServiceClient);
    } else {
        throw logger.logExceptionAsError(new IllegalArgumentException(String.format(
            "The JSON Web Key type: %s is not supported.", this.key.getKeyType().toString())));
    }
}
/**
* Gets the {@link HttpPipeline} powering this client.
*
* @return The pipeline.
*/
// Returns the pipeline backing this client; null when operating in local-only mode.
HttpPipeline getHttpPipeline() {
    return this.pipeline;
}
// Wraps the key identifier in a deferred Mono so emission happens at subscription time.
Mono<String> getKeyId() {
    return Mono.defer(() -> Mono.just(this.keyId));
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKey}
*
* @return A {@link Mono} containing the requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<KeyVaultKey> getKey() {
    // Unwrap the Response<KeyVaultKey> to its value; synchronous failures become error Monos.
    try {
        return getKeyWithResponse().flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Gets the public part of the configured key. The get key operation is applicable to all key types and it requires
* the {@code keys/get} permission for non-local operations.
*
* <p><strong>Code Samples</strong></p>
* <p>Gets the configured key in the client. Subscribes to the call asynchronously and prints out the returned key
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.getKeyWithResponse}
*
* @return A {@link Mono} containing a {@link Response} whose {@link Response
* requested {@link KeyVaultKey key}.
*
* @throws ResourceNotFoundException When the configured key doesn't exist in the key vault.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<KeyVaultKey>> getKeyWithResponse() {
    // Delegate to the context-aware overload via withContext.
    try {
        return withContext(this::getKeyWithResponse);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware key retrieval; only possible when a service-backed client exists.
Mono<Response<KeyVaultKey>> getKeyWithResponse(Context context) {
    if (cryptographyServiceClient == null) {
        // Clients built directly from a JsonWebKey have no service backing, so the key cannot be
        // fetched. Message grammar fixed ("when in operating" -> "when operating in").
        throw logger.logExceptionAsError(new UnsupportedOperationException(
            "Operation not supported when operating in local-only mode"));
    }
    return cryptographyServiceClient.getKey(context);
}
// Retrieves key material stored in the secrets collection and unwraps the response to its value.
Mono<JsonWebKey> getSecretKey() {
    try {
        return withContext(context -> cryptographyServiceClient.getSecretKey(context))
            .flatMap(FluxUtil::toMono);
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param algorithm The algorithm to be used for encryption.
* @param plaintext The content to be encrypted.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plaintext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptionAlgorithm algorithm, byte[] plaintext) {
    // Route through the public EncryptParameters overload — mirroring decrypt(algorithm,
    // ciphertext) — so the null-parameter check and withContext-based context propagation apply,
    // instead of handing a null Context straight to the package-private overload.
    return encrypt(new EncryptParameters(algorithm, plaintext, null, null));
}
/**
* Encrypts an arbitrary sequence of bytes using the configured key. Note that the encrypt operation only supports
* a single block of data, the size of which is dependent on the target key and the encryption algorithm to be used.
* The encrypt operation is supported for both symmetric keys and asymmetric keys. In case of asymmetric keys, the
* public portion of the key is used for encryption. This operation requires the {@code keys/encrypt} permission
* for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for encrypting the
* specified {@code plaintext}. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Encrypts the content. Subscribes to the call asynchronously and prints out the encrypted content details when
* a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.encrypt
*
* @param encryptParameters The parameters to use in the encryption operation.
*
* @return A {@link Mono} containing a {@link EncryptResult} whose {@link EncryptResult
* contains the encrypted content.
*
* @throws NullPointerException If {@code algorithm} or {@code plaintext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for encryption.
* @throws UnsupportedOperationException If the encrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EncryptResult> encrypt(EncryptParameters encryptParameters) {
    Objects.requireNonNull(encryptParameters, "'encryptParameters' cannot be null.");
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(context -> encrypt(encryptParameters, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware encrypt: uses the local client when a valid key with ENCRYPT permission is
// cached, otherwise falls back to the Key Vault service.
Mono<EncryptResult> encrypt(EncryptParameters encryptParameters, Context context) {
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            return cryptographyServiceClient.encrypt(encryptParameters, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.ENCRYPT)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Encrypt operation is missing permission/not supported for key with id: %s", key.getId()))));
        }
        return localKeyCryptographyClient.encryptAsync(encryptParameters, context, key);
    });
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param algorithm The algorithm to be used for decryption.
* @param ciphertext The content to be decrypted.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code ciphertext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(EncryptionAlgorithm algorithm, byte[] ciphertext) {
    // Convenience overload: wraps the arguments and routes through the public
    // DecryptParameters overload, which performs validation and context propagation.
    return decrypt(new DecryptParameters(algorithm, ciphertext, null, null, null));
}
/**
* Decrypts a single block of encrypted data using the configured key and specified algorithm. Note that only a
* single block of data may be decrypted, the size of this block is dependent on the target key and the algorithm
* to be used. The decrypt operation is supported for both asymmetric and symmetric keys. This operation requires
* the {@code keys/decrypt} permission for non-local operations.
*
* <p>The {@link EncryptionAlgorithm encryption algorithm} indicates the type of algorithm to use for decrypting
* the specified encrypted content. Possible values for asymmetric keys include:
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Decrypts the encrypted content. Subscribes to the call asynchronously and prints out the decrypted content
* details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.decrypt
*
* @param decryptParameters The parameters to use in the decryption operation.
*
* @return A {@link Mono} containing the decrypted blob.
*
* @throws NullPointerException If {@code algorithm} or {@code ciphertext} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for decryption.
* @throws UnsupportedOperationException If the decrypt operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<DecryptResult> decrypt(DecryptParameters decryptParameters) {
    Objects.requireNonNull(decryptParameters, "'decryptParameters' cannot be null.");
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(context -> decrypt(decryptParameters, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware decrypt: uses the local client when a valid key with DECRYPT permission is
// cached, otherwise falls back to the Key Vault service.
Mono<DecryptResult> decrypt(DecryptParameters decryptParameters, Context context) {
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            return cryptographyServiceClient.decrypt(decryptParameters, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.DECRYPT)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Decrypt operation is not allowed for key with id: %s", key.getId()))));
        }
        return localKeyCryptographyClient.decryptAsync(decryptParameters, context, key);
    });
}
/**
* Creates a signature from a digest using the configured key. The sign operation supports both asymmetric and
* symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to create the
* signature from the digest. Possible values include:
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Signs the digest. Subscribes to the call asynchronously and prints out the signature details when a response
* has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.sign
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature is to be created.
*
* @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult
* the created signature.
*
* @throws NullPointerException If {@code algorithm} or {@code digest} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for signing.
* @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(context -> sign(algorithm, digest, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware sign-over-digest: local client when a valid key with SIGN permission is cached,
// Key Vault service otherwise.
Mono<SignResult> sign(SignatureAlgorithm algorithm, byte[] digest, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            return cryptographyServiceClient.sign(algorithm, digest, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign operation is not allowed for key with id: %s", key.getId()))));
        }
        return localKeyCryptographyClient.signAsync(algorithm, digest, context, key);
    });
}
/**
* Verifies a signature using the configured key. The verify operation supports both symmetric keys and asymmetric
* keys. In case of asymmetric keys public portion of the key is used to verify the signature. This operation
* requires the {@code keys/verify} permission for non-local operations.
*
* <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
* signature. Possible values include: {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
* {@link SignatureAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Verifies the signature against the specified digest. Subscribes to the call asynchronously and prints out the
* verification details when a response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verify
*
* @param algorithm The algorithm to use for signing.
* @param digest The content from which signature was created.
* @param signature The signature to be verified.
*
* @return A {@link Mono} containing a {@link VerifyResult}
* {@link VerifyResult
*
* @throws NullPointerException If {@code algorithm}, {@code digest} or {@code signature} is {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for verifying.
* @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(context -> verify(algorithm, digest, signature, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware verify-over-digest: local client when a valid key with VERIFY permission is
// cached, Key Vault service otherwise.
Mono<VerifyResult> verify(SignatureAlgorithm algorithm, byte[] digest, byte[] signature, Context context) {
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(digest, "Digest content cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            return cryptographyServiceClient.verify(algorithm, digest, signature, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify operation is not allowed for key with id: %s", key.getId()))));
        }
        return localKeyCryptographyClient.verifyAsync(algorithm, digest, signature, context, key);
    });
}
/**
* Wraps a symmetric key using the configured key. The wrap operation supports wrapping a symmetric key with both
* symmetric and asymmetric keys. This operation requires the {@code keys/wrapKey} permission for non-local
* operations.
*
* <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for wrapping the specified
* key content. Possible values include:
* {@link KeyWrapAlgorithm
* {@link KeyWrapAlgorithm
*
* Possible values for symmetric keys include: {@link EncryptionAlgorithm
* {@link EncryptionAlgorithm
*
* <p><strong>Code Samples</strong></p>
* <p>Wraps the key content. Subscribes to the call asynchronously and prints out the wrapped key details when a
* response has been received.</p>
*
* {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.wrapKey
*
* @param algorithm The encryption algorithm to use for wrapping the key.
* @param key The key content to be wrapped.
*
* @return A {@link Mono} containing a {@link WrapResult} whose {@link WrapResult
* contains the wrapped key result.
*
* @throws NullPointerException If {@code algorithm} or {@code key} are {@code null}.
* @throws ResourceNotFoundException If the key cannot be found for wrap operation.
* @throws UnsupportedOperationException If the wrap operation is not supported or configured on the key.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key) {
    // Delegate to the context-aware overload; synchronous failures become error Monos.
    try {
        return withContext(context -> wrapKey(algorithm, key, context));
    } catch (RuntimeException ex) {
        return monoError(logger, ex);
    }
}
// Context-aware wrap: local client when a valid key with WRAP_KEY permission is cached,
// Key Vault service otherwise. The parameter 'key' is the material to wrap; 'this.key' is
// the wrapping key.
Mono<WrapResult> wrapKey(KeyWrapAlgorithm algorithm, byte[] key, Context context) {
    Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
    Objects.requireNonNull(key, "Key content to be wrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            return cryptographyServiceClient.wrapKey(algorithm, key, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.WRAP_KEY)) {
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Wrap Key operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.wrapKeyAsync(algorithm, key, context, this.key);
    });
}
/**
 * Unwraps a symmetric key using the configured key that was initially used for wrapping that key. This operation
 * is the reverse of the wrap operation. The unwrap operation supports asymmetric and symmetric keys to unwrap. This
 * operation requires the {@code keys/unwrapKey} permission for non-local operations.
 *
 * <p>The {@link KeyWrapAlgorithm wrap algorithm} indicates the type of algorithm to use for unwrapping the
 * specified encrypted key content. Possible values for asymmetric keys include:
 * {@link KeyWrapAlgorithm#RSA1_5 RSA1_5}, {@link KeyWrapAlgorithm#RSA_OAEP RSA_OAEP} and
 * {@link KeyWrapAlgorithm#RSA_OAEP_256 RSA_OAEP_256}.
 *
 * Possible values for symmetric keys include: {@link KeyWrapAlgorithm#A128KW A128KW},
 * {@link KeyWrapAlgorithm#A192KW A192KW} and {@link KeyWrapAlgorithm#A256KW A256KW}.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Unwraps the key content. Subscribes to the call asynchronously and prints out the unwrapped key details when
 * a response has been received.</p>
 *
 * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.unwrapKey}
 *
 * @param algorithm The encryption algorithm to use for wrapping the key.
 * @param encryptedKey The encrypted key content to unwrap.
 *
 * @return A {@link Mono} containing an {@link UnwrapResult} whose {@link UnwrapResult#getKey() decrypted
 * key} contains the unwrapped key result.
 *
 * @throws NullPointerException If {@code algorithm} or {@code encryptedKey} are {@code null}.
 * @throws ResourceNotFoundException If the key cannot be found for wrap operation.
 * @throws UnsupportedOperationException If the unwrap operation is not supported or configured on the key.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey) {
    try {
        // Capture the caller's reactor context and delegate to the context-aware overload.
        return withContext(context -> unwrapKey(algorithm, encryptedKey, context));
    } catch (RuntimeException ex) {
        // Report synchronous failures through the Mono rather than throwing at subscribe time.
        return monoError(logger, ex);
    }
}
/**
 * Context-aware unwrap: performs the operation locally when valid key material is cached,
 * otherwise delegates to the Key Vault service.
 *
 * @param algorithm The algorithm originally used to wrap the key.
 * @param encryptedKey The encrypted key content to unwrap.
 * @param context Additional context propagated through the HTTP pipeline.
 * @return A {@link Mono} emitting the unwrap result, or an error if the key forbids unwrapping.
 */
Mono<UnwrapResult> unwrapKey(KeyWrapAlgorithm algorithm, byte[] encryptedKey, Context context) {
    // Fail fast on null arguments before any async work starts.
    Objects.requireNonNull(algorithm, "Key wrap algorithm cannot be null.");
    Objects.requireNonNull(encryptedKey, "Encrypted key content to be unwrapped cannot be null.");
    return ensureValidKeyAvailable().flatMap(localKeyUsable -> {
        if (localKeyUsable) {
            if (checkKeyPermissions(this.key.getKeyOps(), KeyOperation.UNWRAP_KEY)) {
                return localKeyCryptographyClient.unwrapKeyAsync(algorithm, encryptedKey, context, key);
            }
            // The cached key does not permit unwrapKey; surface that as an error signal.
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Unwrap Key operation is not allowed for key with id: %s", this.key.getId()))));
        }
        // No usable local key material; fall back to the service-side operation.
        return cryptographyServiceClient.unwrapKey(algorithm, encryptedKey, context);
    });
}
/**
 * Creates a signature from the raw data using the configured key. The sign data operation supports both asymmetric
 * and symmetric keys. This operation requires the {@code keys/sign} permission for non-local operations.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to sign the digest.
 * Possible values include:
 * {@link SignatureAlgorithm#ES256 ES256}, {@link SignatureAlgorithm#ES384 ES384},
 * {@link SignatureAlgorithm#ES512 ES512}, {@link SignatureAlgorithm#ES256K ES256K},
 * {@link SignatureAlgorithm#PS256 PS256}, {@link SignatureAlgorithm#PS384 PS384},
 * {@link SignatureAlgorithm#PS512 PS512}, {@link SignatureAlgorithm#RS256 RS256},
 * {@link SignatureAlgorithm#RS384 RS384} and {@link SignatureAlgorithm#RS512 RS512}.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Signs the raw data. Subscribes to the call asynchronously and prints out the signature details when a
 * response has been received.</p>
 *
 * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.signData}
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The content from which signature is to be created.
 *
 * @return A {@link Mono} containing a {@link SignResult} whose {@link SignResult#getSignature() signature}
 * contains the created signature.
 *
 * @throws NullPointerException If {@code algorithm} or {@code data} is {@code null}.
 * @throws ResourceNotFoundException If the key cannot be found for signing.
 * @throws UnsupportedOperationException If the sign operation is not supported or configured on the key.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data) {
    try {
        // Capture the caller's reactor context and delegate to the context-aware overload.
        return withContext(context -> signData(algorithm, data, context));
    } catch (RuntimeException ex) {
        // Report synchronous failures through the Mono rather than throwing at subscribe time.
        return monoError(logger, ex);
    }
}
/**
 * Context-aware sign: signs locally when valid key material is cached, otherwise via the service.
 *
 * @param algorithm The signature algorithm to use.
 * @param data The content to sign.
 * @param context Additional context propagated through the HTTP pipeline.
 * @return A {@link Mono} emitting the sign result, or an error if the key forbids signing.
 */
Mono<SignResult> signData(SignatureAlgorithm algorithm, byte[] data, Context context) {
    // Fail fast on null arguments before any async work starts.
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data to be signed cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            // No usable local key material; fall back to the service-side operation.
            return cryptographyServiceClient.signData(algorithm, data, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.SIGN)) {
            // The cached key does not permit sign; surface that as an error signal.
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Sign Operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.signDataAsync(algorithm, data, context, key);
    });
}
/**
 * Verifies a signature against the raw data using the configured key. The verify operation supports both symmetric
 * keys and asymmetric keys. In case of asymmetric keys public portion of the key is used to verify the signature.
 * This operation requires the {@code keys/verify} permission for non-local operations.
 *
 * <p>The {@link SignatureAlgorithm signature algorithm} indicates the type of algorithm to use to verify the
 * signature. Possible values include:
 * {@link SignatureAlgorithm#ES256 ES256}, {@link SignatureAlgorithm#ES384 ES384},
 * {@link SignatureAlgorithm#ES512 ES512}, {@link SignatureAlgorithm#ES256K ES256K},
 * {@link SignatureAlgorithm#PS256 PS256}, {@link SignatureAlgorithm#PS384 PS384},
 * {@link SignatureAlgorithm#PS512 PS512}, {@link SignatureAlgorithm#RS256 RS256},
 * {@link SignatureAlgorithm#RS384 RS384} and {@link SignatureAlgorithm#RS512 RS512}.
 *
 * <p><strong>Code Samples</strong></p>
 * <p>Verifies the signature against the raw data. Subscribes to the call asynchronously and prints out the
 * verification details when a response has been received.</p>
 *
 * {@codesnippet com.azure.security.keyvault.keys.cryptography.CryptographyAsyncClient.verifyData}
 *
 * @param algorithm The algorithm to use for signing.
 * @param data The raw content against which signature is to be verified.
 * @param signature The signature to be verified.
 *
 * @return A {@link Mono} containing a {@link VerifyResult} whose {@link VerifyResult#isValid() isValid}
 * flag indicates whether the signature matches the data.
 *
 * @throws NullPointerException If {@code algorithm}, {@code data} or {@code signature} is {@code null}.
 * @throws ResourceNotFoundException If the key cannot be found for verifying.
 * @throws UnsupportedOperationException If the verify operation is not supported or configured on the key.
 */
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature) {
    try {
        // Capture the caller's reactor context and delegate to the context-aware overload.
        return withContext(context -> verifyData(algorithm, data, signature, context));
    } catch (RuntimeException ex) {
        // Report synchronous failures through the Mono rather than throwing at subscribe time.
        return monoError(logger, ex);
    }
}
/**
 * Context-aware verify: verifies locally when valid key material is cached, otherwise via the service.
 *
 * @param algorithm The signature algorithm that was used to sign.
 * @param data The raw content the signature covers.
 * @param signature The signature to verify.
 * @param context Additional context propagated through the HTTP pipeline.
 * @return A {@link Mono} emitting the verification result, or an error if the key forbids verify.
 */
Mono<VerifyResult> verifyData(SignatureAlgorithm algorithm, byte[] data, byte[] signature, Context context) {
    // Fail fast on null arguments before any async work starts.
    Objects.requireNonNull(algorithm, "Signature algorithm cannot be null.");
    Objects.requireNonNull(data, "Data cannot be null.");
    Objects.requireNonNull(signature, "Signature to be verified cannot be null.");
    return ensureValidKeyAvailable().flatMap(available -> {
        if (!available) {
            // No usable local key material; fall back to the service-side operation.
            return cryptographyServiceClient.verifyData(algorithm, data, signature, context);
        }
        if (!checkKeyPermissions(this.key.getKeyOps(), KeyOperation.VERIFY)) {
            // The cached key does not permit verify; surface that as an error signal.
            return Mono.error(logger.logExceptionAsError(new UnsupportedOperationException(String.format(
                "Verify operation is not allowed for key with id: %s", this.key.getId()))));
        }
        return localKeyCryptographyClient.verifyDataAsync(algorithm, data, signature, context, key);
    });
}
/**
 * Returns whether the given key operation is permitted by the key's declared operations.
 *
 * @param operations The operations declared on the key; may be {@code null} when none are declared.
 * @param keyOperation The operation to check for.
 * @return {@code true} iff {@code operations} is non-null and contains {@code keyOperation}.
 */
private boolean checkKeyPermissions(List<KeyOperation> operations, KeyOperation keyOperation) {
    // Null-guard: JsonWebKey.getKeyOps() can be null for keys with no declared operations.
    // Treating that as "not permitted" lets callers raise their intended
    // UnsupportedOperationException instead of an opaque NullPointerException.
    return operations != null && operations.contains(keyOperation);
}
/**
 * Ensures local key material is fetched and usable, lazily retrieving it from the key or
 * secret collection on first use.
 *
 * @return A {@link Mono} emitting {@code true} when a valid local key is available (local crypto
 * clients initialized), {@code false} when the fetched key is not valid locally — callers then
 * fall back to the service-side operation.
 */
private Mono<Boolean> ensureValidKeyAvailable() {
    // Key has never been fetched for this client (an identifier-only client has keyCollection set).
    boolean keyNotAvailable = (key == null && keyCollection != null);
    // Key was fetched but is not usable for local cryptography.
    boolean keyNotValid = (key != null && !key.isValid());
    if (keyNotAvailable || keyNotValid) {
        // NOTE(review): keyCollection could be null here when keyNotValid is true — this would
        // NPE; presumably an invariant guarantees keyCollection is set whenever key is. Confirm.
        if (keyCollection.equals(SECRETS_COLLECTION)) {
            // Secret-backed key: materialize the JSON web key from the secret value.
            return getSecretKey().map(jsonWebKey -> {
                key = (jsonWebKey);
                if (key.isValid()) {
                    // Valid material: set up the local crypto clients so later calls stay local.
                    initializeCryptoClients();
                    return true;
                } else {
                    return false;
                }
            });
        } else {
            // Regular key: fetch the key vault key and cache its JSON web key.
            return getKey().map(keyVaultKey -> {
                key = (keyVaultKey.getKey());
                if (key.isValid()) {
                    // Valid material: set up the local crypto clients so later calls stay local.
                    initializeCryptoClients();
                    return true;
                } else {
                    return false;
                }
            });
        }
    } else {
        // Key already cached and valid (or this client has no collection to fetch from).
        return Mono.defer(() -> Mono.just(true));
    }
}
/** Returns the service-side cryptography client. Package-private — presumably for tests; confirm. */
CryptographyServiceClient getCryptographyServiceClient() {
    return cryptographyServiceClient;
}

/** Replaces the service-side cryptography client. Package-private — presumably for tests; confirm. */
void setCryptographyServiceClient(CryptographyServiceClient serviceClient) {
    this.cryptographyServiceClient = serviceClient;
}
} |
    // TODO: We should probably also do the other cleanup, like removing DNS entries?
    public LockedApplication storeWithUpdatedConfig(LockedApplication application, ApplicationPackage applicationPackage) {
        applicationPackageValidator.validate(application.get(), applicationPackage, clock.instant());

        application = application.with(applicationPackage.deploymentSpec());
        application = application.with(applicationPackage.validationOverrides());

        var existingInstances = application.get().instances().keySet();
        var declaredInstances = applicationPackage.deploymentSpec().instanceNames();
        for (var name : declaredInstances)
            if ( ! existingInstances.contains(name))
                application = withNewInstance(application, application.get().id().instance(name));

        for (InstanceName name : existingInstances) {
            application = withoutDeletedDeployments(application, name);
        }

        for (InstanceName instance : declaredInstances)
            if (applicationPackage.deploymentSpec().requireInstance(instance).concerns(Environment.prod))
                application = controller.routing().assignRotations(application, instance);

        controller.jobController().deploymentStatus(application.get());
        for (var name : existingInstances)
            if ( ! declaredInstances.contains(name))
                controller.notificationsDb().removeNotifications(NotificationSource.from(application.get().id().instance(name)));
        store(application);
        return application;
    }

class ApplicationController {
private static final Logger log = Logger.getLogger(ApplicationController.class.getName());

/** The controller owning this */
private final Controller controller;

/** For persistence */
private final CuratorDb curator;

// Stores build artifacts for system applications.
private final ArtifactRepository artifactRepository;
// Stores tenant application packages and their metadata.
private final ApplicationStore applicationStore;
// Authorizes creation/deletion of applications.
private final AccessControl accessControl;
// Client towards the config servers of all zones.
private final ConfigServer configServer;
// Injected clock, for testable time.
private final Clock clock;

private final DeploymentTrigger deploymentTrigger;
private final ApplicationPackageValidator applicationPackageValidator;
private final EndpointCertificates endpointCertificates;
// Feature flag overriding the docker image repository per zone/application.
private final StringFlag dockerImageRepoFlag;
private final BillingController billingController;
/**
 * Creates the application controller and schedules a one-time rewrite of all stored
 * applications, creating any instances declared in deployment.xml but missing in storage
 * — presumably a data-migration step; TODO confirm whether still needed.
 */
ApplicationController(Controller controller, CuratorDb curator, AccessControl accessControl, Clock clock,
                      FlagSource flagSource, BillingController billingController) {
    this.controller = controller;
    this.curator = curator;
    this.accessControl = accessControl;
    this.configServer = controller.serviceRegistry().configServer();
    this.clock = clock;
    this.artifactRepository = controller.serviceRegistry().artifactRepository();
    this.applicationStore = controller.serviceRegistry().applicationStore();
    this.dockerImageRepoFlag = PermanentFlags.DOCKER_IMAGE_REPO.bindTo(flagSource);
    this.billingController = billingController;
    deploymentTrigger = new DeploymentTrigger(controller, clock);
    applicationPackageValidator = new ApplicationPackageValidator(controller);
    endpointCertificates = new EndpointCertificates(controller,
                                                    controller.serviceRegistry().endpointCertificateProvider(),
                                                    controller.serviceRegistry().endpointCertificateValidator());
    // Run once, shortly after construction, so startup is not blocked by the full rewrite.
    Once.after(Duration.ofMinutes(1), () -> {
        Instant start = clock.instant();
        int count = 0;
        for (TenantAndApplicationId id : curator.readApplicationIds()) {
            lockApplicationIfPresent(id, application -> {
                // Create storage entries for instances declared but not yet present.
                for (InstanceName instance : application.get().deploymentSpec().instanceNames())
                    if (!application.get().instances().containsKey(instance))
                        application = withNewInstance(application, id.instance(instance));
                store(application);
            });
            count++;
        }
        log.log(Level.INFO, String.format("Wrote %d applications in %s", count,
                                          Duration.between(start, clock.instant())));
    });
}
/** Returns the application with the given id, or empty if it is not present */
public Optional<Application> getApplication(TenantAndApplicationId id) {
    return curator.readApplication(id);
}

/** Returns the instance with the given id, or empty if it is not present */
public Optional<Instance> getInstance(ApplicationId id) {
    return getApplication(TenantAndApplicationId.from(id)).flatMap(application -> application.get(id.instance()));
}
/**
 * Triggers reindexing for the given document types in the given clusters, for the given application.
 *
 * If no clusters are given, reindexing is triggered for the entire application; otherwise
 * if no documents types are given, reindexing is triggered for all given clusters; otherwise
 * reindexing is triggered for the cartesian product of the given clusters and document types.
 */
public void reindex(ApplicationId id, ZoneId zoneId, List<String> clusterNames, List<String> documentTypes, boolean indexedOnly) {
    configServer.reindex(new DeploymentId(id, zoneId), clusterNames, documentTypes, indexedOnly);
}

/** Returns the reindexing status for the given application in the given zone. */
public ApplicationReindexing applicationReindexing(ApplicationId id, ZoneId zoneId) {
    return configServer.getReindexing(new DeploymentId(id, zoneId));
}

/** Enables reindexing for the given application in the given zone. */
public void enableReindexing(ApplicationId id, ZoneId zoneId) {
    configServer.enableReindexing(new DeploymentId(id, zoneId));
}

/** Disables reindexing for the given application in the given zone. */
public void disableReindexing(ApplicationId id, ZoneId zoneId) {
    configServer.disableReindexing(new DeploymentId(id, zoneId));
}
/**
 * Returns the application with the given id.
 *
 * @throws IllegalArgumentException if it does not exist
 */
public Application requireApplication(TenantAndApplicationId id) {
    Optional<Application> application = getApplication(id);
    if (application.isEmpty())
        throw new IllegalArgumentException(id + " not found");
    return application.get();
}

/**
 * Returns the instance with the given id.
 *
 * @throws IllegalArgumentException if it does not exist
 */
public Instance requireInstance(ApplicationId id) {
    Optional<Instance> instance = getInstance(id);
    if (instance.isEmpty())
        throw new IllegalArgumentException(id + " not found");
    return instance.get();
}
/** Returns a snapshot of all applications */
public List<Application> asList() {
    return curator.readApplications(false);
}

/**
 * Returns a snapshot of all readable applications. Unlike {@link #asList()}, this ignores
 * applications that cannot currently be read (e.g. due to serialization issues) and may return an incomplete
 * snapshot.
 *
 * This should only be used in cases where acting on a subset of applications is better than none.
 */
public List<Application> readable() {
    return curator.readApplications(true);
}

/** Returns the ID of all known applications. */
public List<TenantAndApplicationId> idList() {
    return curator.readApplicationIds();
}

/** Returns a snapshot of all applications of a tenant */
public List<Application> asList(TenantName tenant) {
    return curator.readApplications(tenant);
}

/** Returns the artifact repository for system application packages. */
public ArtifactRepository artifacts() { return artifactRepository; }

/** Returns the store for tenant application packages. */
public ApplicationStore applicationStore() { return applicationStore; }
/** Returns all currently reachable content clusters among the given deployments. */
public Map<ZoneId, List<String>> reachableContentClustersByZone(Collection<DeploymentId> ids) {
    // TreeMap keyed on zone value, for a stable, sorted result.
    Map<ZoneId, List<String>> clusters = new TreeMap<>(Comparator.comparing(ZoneId::value));
    for (DeploymentId id : ids)
        if (isHealthy(id))
            clusters.put(id.zoneId(), List.copyOf(configServer.getContentClusters(id)));
    return Collections.unmodifiableMap(clusters);
}

/** Reads the oldest installed platform for the given application and zone from job history, or a node repo. */
private Optional<Version> oldestInstalledPlatform(JobStatus job) {
    // Walk runs newest-first, tracking the minimum target platform seen, and stop at the
    // most recent successful run — that run's install is the oldest still possibly present.
    Version oldest = null;
    for (Run run : job.runs().descendingMap().values()) {
        Version version = run.versions().targetPlatform();
        if (oldest == null || version.isBefore(oldest))
            oldest = version;
        if (run.status() == RunStatus.success)
            return Optional.of(oldest);
    }
    // No successful run in history: fall back to asking the zone's node repository.
    return oldestInstalledPlatform(job.id());
}

/** Reads the oldest installed platform for the given application and zone from the node repo of that zone. */
private Optional<Version> oldestInstalledPlatform(JobId job) {
    return configServer.nodeRepository().list(job.type().zone(controller.system()),
                                              job.application(),
                                              EnumSet.of(active, reserved))
                       .stream()
                       .map(Node::currentVersion)
                       .filter(version -> ! version.isEmpty())
                       .min(naturalOrder());
}

/** Returns the oldest Vespa version installed on any active or reserved production node for the given application. */
public Version oldestInstalledPlatform(TenantAndApplicationId id) {
    // Defaults to the current system version when no production job reports a version.
    return controller.jobController().deploymentStatus(requireApplication(id)).jobs()
                     .production().asList().stream()
                     .map(this::oldestInstalledPlatform)
                     .flatMap(Optional::stream)
                     .min(naturalOrder())
                     .orElse(controller.readSystemVersion());
}
/**
 * Creates a new application for an existing tenant.
 *
 * @throws IllegalArgumentException if the application already exists
 */
public Application createApplication(TenantAndApplicationId id, Credentials credentials) {
    try (Lock lock = lock(id)) {
        if (getApplication(id).isPresent())
            throw new IllegalArgumentException("Could not create '" + id + "': Application already exists");
        // Dashes and underscores collide in some contexts, so reject the twin spelling too.
        if (getApplication(dashToUnderscore(id)).isPresent())
            throw new IllegalArgumentException("Could not create '" + id + "': Application " + dashToUnderscore(id) + " already exists");

        com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId.validate(id.application().value());

        if (controller.tenants().get(id.tenant()).isEmpty())
            throw new IllegalArgumentException("Could not create '" + id + "': This tenant does not exist");
        accessControl.createApplication(id, credentials);

        LockedApplication locked = new LockedApplication(new Application(id, clock.instant()), lock);
        store(locked);
        log.info("Created " + locked);
        return locked.get();
    }
}

/**
 * Creates a new instance for an existing application.
 *
 * @throws IllegalArgumentException if the instance already exists, or has an invalid instance name.
 */
public void createInstance(ApplicationId id) {
    lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        store(withNewInstance(application, id));
    });
}

/** Fetches the requested application package from the artifact store(s). */
public ApplicationPackage getApplicationPackage(ApplicationId id, ApplicationVersion version) {
    return new ApplicationPackage(applicationStore.get(id.tenant(), id.application(), version));
}

/** Returns given application with a new instance */
public LockedApplication withNewInstance(LockedApplication application, ApplicationId instance) {
    if (instance.instance().isTester())
        throw new IllegalArgumentException("'" + instance + "' is a tester application!");
    InstanceId.validate(instance.instance().value());

    if (getInstance(instance).isPresent())
        throw new IllegalArgumentException("Could not create '" + instance + "': Instance already exists");
    // Dashes and underscores collide in some contexts, so reject the twin spelling too.
    if (getInstance(dashToUnderscore(instance)).isPresent())
        throw new IllegalArgumentException("Could not create '" + instance + "': Instance " + dashToUnderscore(instance) + " already exists");
    log.info("Created " + instance);
    return application.withNewInstance(instance.instance());
}
/**
 * Deploys an application package for an existing application instance.
 *
 * Resolves the versions to deploy from the currently running job, prepares and activates
 * the deployment in the zone, records package warnings as notifications, and finally
 * registers the new deployment on the instance.
 */
public ActivateResult deploy(JobId job, boolean deploySourceVersions) {
    if (job.application().instance().isTester())
        throw new IllegalArgumentException("'" + job.application() + "' is a tester application!");
    TenantAndApplicationId applicationId = TenantAndApplicationId.from(job.application());
    ZoneId zone = job.type().zone(controller.system());

    // Serialize deployments to the same zone for this application.
    try (Lock deploymentLock = lockForDeployment(job.application(), zone)) {
        Set<ContainerEndpoint> containerEndpoints;
        Optional<EndpointCertificateMetadata> endpointCertificateMetadata;
        Optional<TenantRoles> tenantRoles = Optional.empty();

        Run run = controller.jobController().last(job)
                            .orElseThrow(() -> new IllegalStateException("No known run of '" + job + "'"));
        if (run.hasEnded())
            throw new IllegalStateException("No deployment expected for " + job + " now, as no job is running");

        // Use source versions only when requested (e.g. for upgrade rollbacks during tests).
        Version platform = run.versions().sourcePlatform().filter(__ -> deploySourceVersions).orElse(run.versions().targetPlatform());
        ApplicationVersion revision = run.versions().sourceApplication().filter(__ -> deploySourceVersions).orElse(run.versions().targetApplication());
        ApplicationPackage applicationPackage = getApplicationPackage(job.application(), zone, revision);

        // Capture endpoint and certificate state under the application lock, then deploy outside it.
        try (Lock lock = lock(applicationId)) {
            LockedApplication application = new LockedApplication(requireApplication(applicationId), lock);
            Instance instance = application.get().require(job.application().instance());
            rejectOldChange(instance, platform, revision, job, zone);

            if ( ! applicationPackage.trustedCertificates().isEmpty()
                 && run.testerCertificate().isPresent())
                applicationPackage = applicationPackage.withTrustedCertificate(run.testerCertificate().get());

            endpointCertificateMetadata = endpointCertificates.getMetadata(instance, zone, applicationPackage.deploymentSpec().instance(instance.name()));

            containerEndpoints = controller.routing().containerEndpointsOf(application.get(), job.application().instance(), zone);
        }

        ActivateResult result = deploy(job.application(), applicationPackage, zone, platform, containerEndpoints, endpointCertificateMetadata, tenantRoles);

        var quotaUsage = deploymentQuotaUsage(zone, job.application());

        // Manual deployments get per-deployment notifications; others per-application.
        NotificationSource source = zone.environment().isManuallyDeployed() ?
                NotificationSource.from(new DeploymentId(job.application(), zone)) : NotificationSource.from(applicationId);
        // Collect distinct application-package warnings from the prepare log, if any.
        List<String> warnings = Optional.ofNullable(result.prepareResponse().log)
                .map(logs -> logs.stream()
                        .filter(log -> log.applicationPackage)
                        .filter(log -> LogLevel.parse(log.level).intValue() >= Level.WARNING.intValue())
                        .map(log -> log.message)
                        .sorted()
                        .distinct()
                        .collect(Collectors.toList()))
                .orElseGet(List::of);
        if (warnings.isEmpty()) controller.notificationsDb().removeNotification(source, Notification.Type.applicationPackage);
        else controller.notificationsDb().setNotification(source, Notification.Type.applicationPackage, Notification.Level.warning, warnings);

        // Record the new deployment on the instance under the application lock.
        lockApplicationOrThrow(applicationId, application ->
                store(application.with(job.application().instance(),
                                       instance -> instance.withNewDeployment(zone, revision, platform,
                                                                              clock.instant(), warningsFrom(result),
                                                                              quotaUsage))));
        return result;
    }
}
/** Deploys a system application to the given zone, via its application package or a node-type upgrade. */
public void deploy(SystemApplication application, ZoneId zone, Version version) {
    if (application.hasApplicationPackage()) {
        deploySystemApplicationPackage(application, zone, version);
    } else {
        // System applications without a package are upgraded directly through the node repository.
        configServer.nodeRepository().upgrade(zone, application.nodeType(), version);
    }
}

/** Deploys the application package of a system application to the given zone. */
public ActivateResult deploySystemApplicationPackage(SystemApplication application, ZoneId zone, Version version) {
    if (application.hasApplicationPackage()) {
        ApplicationPackage applicationPackage = new ApplicationPackage(
                artifactRepository.getSystemApplicationPackage(application.id(), zone, version)
        );
        return deploy(application.id(), applicationPackage, zone, version, Set.of(), /* No application cert */ Optional.empty(), Optional.empty());
    } else {
        // NOTE(review): raw RuntimeException; IllegalArgumentException would be more precise — confirm callers.
        throw new RuntimeException("This system application does not have an application package: " + application.id().toShortString());
    }
}

/** Deploys the given tester application to the given zone. */
public ActivateResult deployTester(TesterId tester, ApplicationPackage applicationPackage, ZoneId zone, Version platform) {
    return deploy(tester.id(), applicationPackage, zone, platform, Set.of(), /* No application cert for tester*/ Optional.empty(), Optional.empty());
}
/**
 * Prepares and activates the given application package in the given zone.
 *
 * Gathers zone/tenant-specific deployment data (docker repo override, Athenz domain, quota,
 * secret stores, operator certificates) and hands it to the config server; always refreshes
 * routing policies afterwards, whether or not the deployment succeeded.
 */
private ActivateResult deploy(ApplicationId application, ApplicationPackage applicationPackage,
                              ZoneId zone, Version platform, Set<ContainerEndpoint> endpoints,
                              Optional<EndpointCertificateMetadata> endpointCertificateMetadata,
                              Optional<TenantRoles> tenantRoles) {
    try {
        // Optional per-zone/per-application docker image repository override, from a feature flag.
        Optional<DockerImage> dockerImageRepo = Optional.ofNullable(
                dockerImageRepoFlag
                        .with(FetchVector.Dimension.ZONE_ID, zone.value())
                        .with(FetchVector.Dimension.APPLICATION_ID, application.serializedForm())
                        .value())
                .filter(s -> !s.isBlank())
                .map(DockerImage::fromString);

        // Athenz domain applies only to Athenz tenants.
        Optional<AthenzDomain> domain = controller.tenants().get(application.tenant())
                .filter(tenant-> tenant instanceof AthenzTenant)
                .map(tenant -> ((AthenzTenant)tenant).domain());

        // Manual deployments have no job run to store metadata for, so store it here.
        if (zone.environment().isManuallyDeployed())
            controller.applications().applicationStore().putMeta(new DeploymentId(application, zone),
                                                                 clock.instant(),
                                                                 applicationPackage.metaDataZip());

        Quota deploymentQuota = DeploymentQuotaCalculator.calculate(billingController.getQuota(application.tenant()),
                asList(application.tenant()), application, zone, applicationPackage.deploymentSpec());

        // Tenant secret stores apply only to cloud tenants.
        List<TenantSecretStore> tenantSecretStores = controller.tenants()
                .get(application.tenant())
                .filter(tenant-> tenant instanceof CloudTenant)
                .map(tenant -> ((CloudTenant) tenant).tenantSecretStores())
                .orElse(List.of());

        // Certificates of operators with an active support-access grant for this deployment.
        List<X509Certificate> operatorCertificates = controller.supportAccess().activeGrantsFor(new DeploymentId(application, zone)).stream()
                .map(SupportAccessGrant::certificate)
                .collect(toList());

        ConfigServer.PreparedApplication preparedApplication =
                configServer.deploy(new DeploymentData(application, zone, applicationPackage.zippedContent(), platform,
                                                       endpoints, endpointCertificateMetadata, dockerImageRepo, domain,
                                                       tenantRoles, deploymentQuota, tenantSecretStores, operatorCertificates));
        return new ActivateResult(new RevisionId(applicationPackage.hash()), preparedApplication.prepareResponse(),
                                  applicationPackage.zippedContent().length);
    } finally {
        // Keep routing policies in sync with the (attempted) deployment, even on failure.
        controller.routing().policies().refresh(application, applicationPackage.deploymentSpec(), zone);
    }
}
/**
 * Deactivates production deployments of the given instance which are no longer declared in
 * deployment.xml, requiring an explicit validation override before removing anything.
 * Also removes the instance itself when it is undeclared and all its deployments are removed.
 */
private LockedApplication withoutDeletedDeployments(LockedApplication application, InstanceName instance) {
    DeploymentSpec deploymentSpec = application.get().deploymentSpec();
    // Production zones currently deployed to, but no longer declared for this instance.
    List<ZoneId> deploymentsToRemove = application.get().require(instance).productionDeployments().values().stream()
                                                  .map(Deployment::zone)
                                                  .filter(zone -> deploymentSpec.instance(instance).isEmpty()
                                                                  || ! deploymentSpec.requireInstance(instance).deploysTo(zone.environment(),
                                                                                                                          zone.region()))
                                                  .collect(toList());

    if (deploymentsToRemove.isEmpty())
        return application;

    // Removing production deployments is destructive, so demand an explicit override.
    if ( ! application.get().validationOverrides().allows(ValidationId.deploymentRemoval, clock.instant()))
        throw new IllegalArgumentException(ValidationId.deploymentRemoval.value() + ": " + application.get().require(instance) +
                                           " is deployed in " +
                                           deploymentsToRemove.stream()
                                                              .map(zone -> zone.region().value())
                                                              .collect(joining(", ")) +
                                           ", but does not include " +
                                           (deploymentsToRemove.size() > 1 ? "these zones" : "this zone") +
                                           " in deployment.xml. " +
                                           ValidationOverrides.toAllowMessage(ValidationId.deploymentRemoval));

    // Drop the whole instance if it is undeclared and every deployment it has is going away.
    boolean removeInstance = ! deploymentSpec.instanceNames().contains(instance)
                             && application.get().require(instance).deployments().size() == deploymentsToRemove.size();

    for (ZoneId zone : deploymentsToRemove)
        application = deactivate(application, instance, zone);

    if (removeInstance)
        application = application.without(instance);

    return application;
}
/**
 * Deletes the the given application. All known instances of the applications will be deleted.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 */
public void deleteApplication(TenantAndApplicationId id, Credentials credentials) {
    lockApplicationOrThrow(id, application -> {
        // Refuse deletion while any instance still has active deployments.
        var deployments = application.get().instances().values().stream()
                                     .filter(instance -> ! instance.deployments().isEmpty())
                                     .collect(toMap(instance -> instance.name(),
                                                    instance -> instance.deployments().keySet().stream()
                                                                        .map(ZoneId::toString)
                                                                        .collect(joining(", "))));
        if ( ! deployments.isEmpty())
            throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments: " + deployments);

        // Clean up DNS for each instance before discarding it.
        for (Instance instance : application.get().instances().values()) {
            controller.routing().removeEndpointsInDns(application.get(), instance.name());
            application = application.without(instance.name());
        }

        applicationStore.removeAll(id.tenant(), id.application());
        applicationStore.removeAllTesters(id.tenant(), id.application());
        applicationStore.putMetaTombstone(id.tenant(), id.application(), clock.instant());

        accessControl.deleteApplication(id, credentials);
        curator.removeApplication(id);

        controller.jobController().collectGarbage();
        controller.notificationsDb().removeNotifications(NotificationSource.from(id));
        log.info("Deleted " + id);
    });
}

/**
 * Deletes the the given application instance.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 * @throws NotExistsException if the instance does not exist
 */
public void deleteInstance(ApplicationId instanceId) {
    if (getInstance(instanceId).isEmpty())
        throw new NotExistsException("Could not delete instance '" + instanceId + "': Instance not found");

    lockApplicationOrThrow(TenantAndApplicationId.from(instanceId), application -> {
        // Refuse deletion while this instance still has active deployments.
        if ( ! application.get().require(instanceId.instance()).deployments().isEmpty())
            throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments in: " +
                                               application.get().require(instanceId.instance()).deployments().keySet().stream().map(ZoneId::toString)
                                                          .sorted().collect(joining(", ")));

        // Declared instances must be removed from deployment.xml first, so config and state stay consistent.
        if ( ! application.get().deploymentSpec().equals(DeploymentSpec.empty)
             && application.get().deploymentSpec().instanceNames().contains(instanceId.instance()))
            throw new IllegalArgumentException("Can not delete '" + instanceId + "', which is specified in 'deployment.xml'; remove it there first");

        controller.routing().removeEndpointsInDns(application.get(), instanceId.instance());
        curator.writeApplication(application.without(instanceId.instance()).get());
        controller.jobController().collectGarbage();
        controller.notificationsDb().removeNotifications(NotificationSource.from(instanceId));
        log.info("Deleted " + instanceId);
    });
}
/**
 * Replace any previous version of this application by this instance
 *
 * @param application a locked application to store
 */
public void store(LockedApplication application) {
    curator.writeApplication(application.get());
}

/**
 * Acquire a locked application to modify and store, if there is an application with the given id.
 *
 * @param applicationId ID of the application to lock and get.
 * @param action Function which acts on the locked application.
 */
public void lockApplicationIfPresent(TenantAndApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        // No-op when the application does not exist; the action runs while the lock is held.
        getApplication(applicationId).map(application -> new LockedApplication(application, lock)).ifPresent(action);
    }
}

/**
 * Acquire a locked application to modify and store, or throw an exception if no application has the given id.
 *
 * @param applicationId ID of the application to lock and require.
 * @param action Function which acts on the locked application.
 * @throws IllegalArgumentException when application does not exist.
 */
public void lockApplicationOrThrow(TenantAndApplicationId applicationId, Consumer<LockedApplication> action) {
    try (Lock lock = lock(applicationId)) {
        action.accept(new LockedApplication(requireApplication(applicationId), lock));
    }
}
/**
* Tells config server to schedule a restart of all nodes in this deployment
*
* @param restartFilter Variables to filter which nodes to restart.
*/
public void restart(DeploymentId deploymentId, RestartFilter restartFilter) {
configServer.restart(deploymentId, restartFilter);
}
/**
 * Asks the config server whether this deployment is currently healthy, i.e., serving traffic as usual.
 * If this cannot be ascertained, we must assume it is not.
 */
public boolean isHealthy(DeploymentId deploymentId) {
    try {
        boolean suspended = isSuspended(deploymentId);
        return !suspended;
    }
    catch (RuntimeException e) {
        // A failed lookup is treated as unhealthy, but the cause is logged for diagnosis.
        log.log(Level.WARNING,
                "Failed getting suspension status of " + deploymentId + ": " + Exceptions.toMessageString(e));
        return false;
    }
}
/**
 * Asks the config server whether this deployment is currently <i>suspended</i>:
 * Not in a state where it should receive traffic.
 */
public boolean isSuspended(DeploymentId deploymentId) {
return configServer.isSuspended(deploymentId);
}
/** Sets suspension status of the given deployment in its zone. */
public void setSuspension(DeploymentId deploymentId, boolean suspend) {
configServer.setSuspension(deploymentId, suspend);
}
/** Deactivate application in the given zone; the resulting application is stored. */
public void deactivate(ApplicationId id, ZoneId zone) {
lockApplicationOrThrow(TenantAndApplicationId.from(id),
application -> store(deactivate(application, id.instance(), zone)));
}
/**
 * Deactivates a locked application without storing it
 *
 * @return the application with the deployment in the given zone removed
 */
private LockedApplication deactivate(LockedApplication application, InstanceName instanceName, ZoneId zone) {
DeploymentId id = new DeploymentId(application.get().id().instance(instanceName), zone);
try {
configServer.deactivate(id);
} finally {
// Routing, metadata and notification cleanup must run even when deactivation throws.
controller.routing().policies().refresh(application.get().id().instance(instanceName), application.get().deploymentSpec(), zone);
if (zone.environment().isManuallyDeployed())
applicationStore.putMetaTombstone(id, clock.instant());
if (!zone.environment().isTest())
controller.notificationsDb().removeNotifications(NotificationSource.from(id));
}
return application.with(instanceName, instance -> instance.withoutDeploymentIn(zone));
}
/** Returns the deployment trigger owned by this. */
public DeploymentTrigger deploymentTrigger() { return deploymentTrigger; }
/**
 * Returns a lock which provides exclusive rights to changing this application.
 * Any operation which stores an application needs to first acquire this lock, then read, modify
 * and store the application, and finally release (close) the lock.
 */
Lock lock(TenantAndApplicationId application) {
return curator.lock(application);
}
/**
 * Returns a lock which provides exclusive rights to deploying this application to the given zone.
 */
private Lock lockForDeployment(ApplicationId application, ZoneId zone) {
return curator.lockForDeployment(application, zone);
}
/**
 * Verifies that the application can be deployed to the tenant, following these rules:
 *
 * 1. Verify that the Athenz service can be launched by the config server
 * 2. If the principal is given, verify that the principal is tenant admin or admin of the tenant domain
 * 3. If the principal is not given, verify that the Athenz domain of the tenant equals Athenz domain given in deployment.xml
 *
 * @param tenantName tenant where application should be deployed
 * @param applicationPackage application package
 * @param deployer principal initiating the deployment, possibly empty
 * @throws IllegalArgumentException if any of the rules above are violated
 */
public void verifyApplicationIdentityConfiguration(TenantName tenantName, Optional<InstanceName> instanceName, Optional<ZoneId> zoneId, ApplicationPackage applicationPackage, Optional<Principal> deployer) {
Optional<AthenzDomain> identityDomain = applicationPackage.deploymentSpec().athenzDomain()
.map(domain -> new AthenzDomain(domain.value()));
// Nothing to verify when no Athenz domain is declared in deployment.xml.
if(identityDomain.isEmpty()) {
return;
}
if(! (accessControl instanceof AthenzFacade)) {
throw new IllegalArgumentException("Athenz domain and service specified in deployment.xml, but not supported by system.");
}
verifyAllowedLaunchAthenzService(applicationPackage.deploymentSpec());
Optional<AthenzUser> athenzUser = getUser(deployer);
if (athenzUser.isPresent()) {
var zone = zoneId.orElseThrow(() -> new IllegalArgumentException("Unable to evaluate access, no zone provided in deployment"));
// An instance-specific service takes precedence over the spec-wide default service.
var serviceToLaunch = instanceName
.flatMap(instance -> applicationPackage.deploymentSpec().instance(instance))
.flatMap(instanceSpec -> instanceSpec.athenzService(zone.environment(), zone.region()))
.or(() -> applicationPackage.deploymentSpec().athenzService())
.map(service -> new AthenzService(identityDomain.get(), service.value()));
if(serviceToLaunch.isPresent()) {
// The user must either be able to launch the service, or be a tenant domain admin.
if (
! ((AthenzFacade) accessControl).canLaunch(athenzUser.get(), serviceToLaunch.get()) &&
! ((AthenzFacade) accessControl).hasTenantAdminAccess(athenzUser.get(), identityDomain.get())
) {
throw new IllegalArgumentException("User " + athenzUser.get().getFullName() + " is not allowed to launch " +
"service " + serviceToLaunch.get().getFullName() + ". " +
"Please reach out to the domain admin.");
}
} else {
throw new IllegalArgumentException("Athenz domain configured, but no service defined for deployment to " + zone.value());
}
} else {
// No user principal: the tenant's Athenz domain must match the one declared in deployment.xml.
// NOTE(review): the cast assumes an Athenz system implies AthenzTenant here — confirm no other tenant type reaches this path.
Tenant tenant = controller.tenants().require(tenantName);
AthenzDomain tenantDomain = ((AthenzTenant) tenant).domain();
if ( ! Objects.equals(tenantDomain, identityDomain.get()))
throw new IllegalArgumentException("Athenz domain in deployment.xml: [" + identityDomain.get().getName() + "] " +
"must match tenant domain: [" + tenantDomain.getName() + "]");
}
}
/** Throws IllegalArgumentException if the requested platform or application version is older than what is deployed in the given production zone. */
private void rejectOldChange(Instance instance, Version platform, ApplicationVersion revision, JobId job, ZoneId zone) {
Deployment deployment = instance.deployments().get(zone);
if (deployment == null) return;
// Only production deployments are protected against downgrades.
if (!zone.environment().isProduction()) return;
// A pinned change is allowed to deploy an older platform.
boolean platformIsOlder = platform.compareTo(deployment.version()) < 0 && !instance.change().isPinned();
// Unknown revisions are tolerated in CD systems.
boolean revisionIsOlder = revision.compareTo(deployment.applicationVersion()) < 0 &&
!(revision.isUnknown() && controller.system().isCd());
if (platformIsOlder || revisionIsOlder)
throw new IllegalArgumentException(String.format("Rejecting deployment of application %s to %s, as the requested versions (platform: %s, application: %s)" +
" are older than the currently deployed (platform: %s, application: %s).",
job.application(), zone, platform, revision, deployment.version(), deployment.applicationVersion()));
}
/** Returns the given id with every dash in the application name replaced by an underscore, for near-collision checks. */
private TenantAndApplicationId dashToUnderscore(TenantAndApplicationId id) {
    // replace(char, char) is a literal substitution; replaceAll would needlessly compile a regex.
    return TenantAndApplicationId.from(id.tenant().value(), id.application().value().replace('-', '_'));
}
/** Returns the given instance id with dashes in the application name replaced by underscores, keeping the instance name. */
private ApplicationId dashToUnderscore(ApplicationId id) {
return dashToUnderscore(TenantAndApplicationId.from(id)).instance(id.instance());
}
/** Computes current quota usage for the given application, from the zone's node repository. */
private QuotaUsage deploymentQuotaUsage(ZoneId zoneId, ApplicationId applicationId) {
var application = configServer.nodeRepository().getApplication(zoneId, applicationId);
return DeploymentQuotaCalculator.calculateQuotaUsage(application);
}
/** Fetches the application package for the given revision: unknown revisions from the dev store, others from the regular store. */
private ApplicationPackage getApplicationPackage(ApplicationId application, ZoneId zone, ApplicationVersion revision) {
return new ApplicationPackage(revision.isUnknown() ? applicationStore.getDev(application, zone)
: applicationStore.get(application.tenant(), application.application(), revision));
}
/**
 * Extracts the AthenzUser from the given principal, or empty if the principal
 * is absent or does not represent an Athenz user.
 */
private Optional<AthenzUser> getUser(Optional<Principal> deployer) {
    if (deployer.isEmpty() || ! (deployer.get() instanceof AthenzPrincipal))
        return Optional.empty();
    AthenzIdentity identity = ((AthenzPrincipal) deployer.get()).getIdentity();
    if (identity instanceof AthenzUser)
        return Optional.of((AthenzUser) identity);
    return Optional.empty();
}
/*
 * Verifies that the configured athenz service (if any) can be launched.
 */
private void verifyAllowedLaunchAthenzService(DeploymentSpec deploymentSpec) {
deploymentSpec.athenzDomain().ifPresent(domain -> {
// Check every reachable zone, as each zone has its own config server identity.
controller.zoneRegistry().zones().reachable().ids().forEach(zone -> {
AthenzIdentity configServerAthenzIdentity = controller.zoneRegistry().getConfigServerHttpsIdentity(zone);
// Check the spec-wide default service ...
deploymentSpec.athenzService().ifPresent(service -> {
verifyAthenzServiceCanBeLaunchedBy(configServerAthenzIdentity, new AthenzService(domain.value(), service.value()));
});
// ... and any per-instance service for this zone.
deploymentSpec.instances().forEach(spec -> {
spec.athenzService(zone.environment(), zone.region()).ifPresent(service -> {
verifyAthenzServiceCanBeLaunchedBy(configServerAthenzIdentity, new AthenzService(domain.value(), service.value()));
});
});
});
});
}
/** Throws IllegalArgumentException unless the given config server identity may launch the given Athenz service. */
private void verifyAthenzServiceCanBeLaunchedBy(AthenzIdentity configServerAthenzIdentity, AthenzService athenzService) {
if ( ! ((AthenzFacade) accessControl).canLaunch(configServerAthenzIdentity, athenzService))
throw new IllegalArgumentException("Not allowed to launch Athenz service " + athenzService.getFullName());
}
/** Returns the latest known version within the given major, or empty if no such version exists. */
public Optional<Version> lastCompatibleVersion(int targetMajorVersion) {
    Optional<Version> latest = Optional.empty();
    for (VespaVersion vespaVersion : controller.readVersionStatus().versions()) {
        Version candidate = vespaVersion.versionNumber();
        if (candidate.getMajor() != targetMajorVersion) continue;
        if (latest.isEmpty() || candidate.compareTo(latest.get()) > 0)
            latest = Optional.of(candidate);
    }
    return latest;
}
/** Extracts the deployment-warnings metric from a deployment result: the number of warn-level log entries. */
private static Map<DeploymentMetrics.Warning, Integer> warningsFrom(ActivateResult result) {
    var logEntries = result.prepareResponse().log;
    if (logEntries == null) return Map.of();
    int warningCount = 0;
    for (Log entry : logEntries)
        if ("warn".equalsIgnoreCase(entry.level) || "warning".equalsIgnoreCase(entry.level))
            warningCount++;
    return warningCount == 0 ? Map.of() : Map.of(DeploymentMetrics.Warning.all, warningCount);
}
} | class ApplicationController {
private static final Logger log = Logger.getLogger(ApplicationController.class.getName());
/** The controller owning this */
private final Controller controller;
/** For persistence */
private final CuratorDb curator;
private final ArtifactRepository artifactRepository;
private final ApplicationStore applicationStore;
private final AccessControl accessControl;
private final ConfigServer configServer;
private final Clock clock;
private final DeploymentTrigger deploymentTrigger;
private final ApplicationPackageValidator applicationPackageValidator;
private final EndpointCertificates endpointCertificates;
// Feature flag selecting the docker image repo to deploy with, per zone/application.
private final StringFlag dockerImageRepoFlag;
private final BillingController billingController;
/** Creates this, wiring in services from the given controller, and schedules a one-time rewrite of all stored applications. */
ApplicationController(Controller controller, CuratorDb curator, AccessControl accessControl, Clock clock,
FlagSource flagSource, BillingController billingController) {
this.controller = controller;
this.curator = curator;
this.accessControl = accessControl;
this.configServer = controller.serviceRegistry().configServer();
this.clock = clock;
this.artifactRepository = controller.serviceRegistry().artifactRepository();
this.applicationStore = controller.serviceRegistry().applicationStore();
this.dockerImageRepoFlag = PermanentFlags.DOCKER_IMAGE_REPO.bindTo(flagSource);
this.billingController = billingController;
deploymentTrigger = new DeploymentTrigger(controller, clock);
applicationPackageValidator = new ApplicationPackageValidator(controller);
endpointCertificates = new EndpointCertificates(controller,
controller.serviceRegistry().endpointCertificateProvider(),
controller.serviceRegistry().endpointCertificateValidator());
// One minute after startup, rewrite every stored application, adding any instances
// declared in its deployment spec which are missing from the stored application.
Once.after(Duration.ofMinutes(1), () -> {
Instant start = clock.instant();
int count = 0;
for (TenantAndApplicationId id : curator.readApplicationIds()) {
lockApplicationIfPresent(id, application -> {
for (InstanceName instance : application.get().deploymentSpec().instanceNames())
if (!application.get().instances().containsKey(instance))
application = withNewInstance(application, id.instance(instance));
store(application);
});
count++;
}
log.log(Level.INFO, String.format("Wrote %d applications in %s", count,
Duration.between(start, clock.instant())));
});
}
/** Returns the application with the given id, or empty if it is not present */
public Optional<Application> getApplication(TenantAndApplicationId id) {
return curator.readApplication(id);
}
/** Returns the instance with the given id, or empty if it is not present */
public Optional<Instance> getInstance(ApplicationId id) {
// Instances only exist as part of their owning application.
return getApplication(TenantAndApplicationId.from(id)).flatMap(application -> application.get(id.instance()));
}
/**
 * Triggers reindexing for the given document types in the given clusters, for the given application.
 *
 * If no clusters are given, reindexing is triggered for the entire application; otherwise
 * if no document types are given, reindexing is triggered for all given clusters; otherwise
 * reindexing is triggered for the cartesian product of the given clusters and document types.
 */
public void reindex(ApplicationId id, ZoneId zoneId, List<String> clusterNames, List<String> documentTypes, boolean indexedOnly) {
configServer.reindex(new DeploymentId(id, zoneId), clusterNames, documentTypes, indexedOnly);
}
/** Returns the reindexing status for the given application in the given zone. */
public ApplicationReindexing applicationReindexing(ApplicationId id, ZoneId zoneId) {
return configServer.getReindexing(new DeploymentId(id, zoneId));
}
/** Enables reindexing for the given application in the given zone. */
public void enableReindexing(ApplicationId id, ZoneId zoneId) {
configServer.enableReindexing(new DeploymentId(id, zoneId));
}
/** Disables reindexing for the given application in the given zone. */
public void disableReindexing(ApplicationId id, ZoneId zoneId) {
configServer.disableReindexing(new DeploymentId(id, zoneId));
}
/**
 * Returns the application with the given id
 *
 * @throws IllegalArgumentException if it does not exist
 */
public Application requireApplication(TenantAndApplicationId id) {
return getApplication(id).orElseThrow(() -> new IllegalArgumentException(id + " not found"));
}
/**
 * Returns the instance with the given id
 *
 * @throws IllegalArgumentException if it does not exist
 */
public Instance requireInstance(ApplicationId id) {
return getInstance(id).orElseThrow(() -> new IllegalArgumentException(id + " not found"));
}
/** Returns a snapshot of all applications */
public List<Application> asList() {
return curator.readApplications(false);
}
/**
 * Returns a snapshot of all readable applications. Unlike {@link #asList()}, this omits
 * applications that cannot currently be read (e.g. due to serialization issues) and may return an incomplete
 * snapshot.
 *
 * This should only be used in cases where acting on a subset of applications is better than none.
 */
public List<Application> readable() {
return curator.readApplications(true);
}
/** Returns the ID of all known applications. */
public List<TenantAndApplicationId> idList() {
return curator.readApplicationIds();
}
/** Returns a snapshot of all applications of a tenant */
public List<Application> asList(TenantName tenant) {
return curator.readApplications(tenant);
}
/** Returns the artifact repository used by this. */
public ArtifactRepository artifacts() { return artifactRepository; }
/** Returns the application store used by this. */
public ApplicationStore applicationStore() { return applicationStore; }
/** Returns all currently reachable content clusters among the given deployments, keyed by zone and sorted by zone value. */
public Map<ZoneId, List<String>> reachableContentClustersByZone(Collection<DeploymentId> ids) {
    Map<ZoneId, List<String>> clustersByZone = new TreeMap<>(Comparator.comparing(ZoneId::value));
    ids.stream()
       .filter(this::isHealthy)
       .forEach(id -> clustersByZone.put(id.zoneId(), List.copyOf(configServer.getContentClusters(id))));
    return Collections.unmodifiableMap(clustersByZone);
}
/** Reads the oldest installed platform for the given application and zone from job history, or a node repo. */
private Optional<Version> oldestInstalledPlatform(JobStatus job) {
Version oldest = null;
// Walk runs from newest to oldest, tracking the oldest target platform seen so far,
// and stop at the first successful run, as earlier runs no longer affect what is installed.
for (Run run : job.runs().descendingMap().values()) {
Version version = run.versions().targetPlatform();
if (oldest == null || version.isBefore(oldest))
oldest = version;
if (run.status() == RunStatus.success)
return Optional.of(oldest);
}
// No successful run in history: ask the zone's node repository instead.
return oldestInstalledPlatform(job.id());
}
/** Reads the oldest installed platform for the given application and zone from the node repo of that zone. */
private Optional<Version> oldestInstalledPlatform(JobId job) {
return configServer.nodeRepository().list(job.type().zone(controller.system()),
job.application(),
EnumSet.of(active, reserved))
.stream()
.map(Node::currentVersion)
.filter(version -> ! version.isEmpty())
.min(naturalOrder());
}
/** Returns the oldest Vespa version installed on any active or reserved production node for the given application, or the system version if none. */
public Version oldestInstalledPlatform(TenantAndApplicationId id) {
return controller.jobController().deploymentStatus(requireApplication(id)).jobs()
.production().asList().stream()
.map(this::oldestInstalledPlatform)
.flatMap(Optional::stream)
.min(naturalOrder())
.orElse(controller.readSystemVersion());
}
/**
 * Creates a new application for an existing tenant.
 *
 * @throws IllegalArgumentException if the application already exists
 */
public Application createApplication(TenantAndApplicationId id, Credentials credentials) {
try (Lock lock = lock(id)) {
if (getApplication(id).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application already exists");
// Also reject a name which differs from an existing one only by dashes vs underscores.
if (getApplication(dashToUnderscore(id)).isPresent())
throw new IllegalArgumentException("Could not create '" + id + "': Application " + dashToUnderscore(id) + " already exists");
com.yahoo.vespa.hosted.controller.api.identifiers.ApplicationId.validate(id.application().value());
if (controller.tenants().get(id.tenant()).isEmpty())
throw new IllegalArgumentException("Could not create '" + id + "': This tenant does not exist");
accessControl.createApplication(id, credentials);
LockedApplication locked = new LockedApplication(new Application(id, clock.instant()), lock);
store(locked);
log.info("Created " + locked);
return locked.get();
}
}
/**
 * Creates a new instance for an existing application.
 *
 * @throws IllegalArgumentException if the instance already exists, or has an invalid instance name.
 */
public void createInstance(ApplicationId id) {
lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
store(withNewInstance(application, id));
});
}
/** Fetches the requested application package from the artifact store(s). */
public ApplicationPackage getApplicationPackage(ApplicationId id, ApplicationVersion version) {
return new ApplicationPackage(applicationStore.get(id.tenant(), id.application(), version));
}
/**
 * Returns given application with a new instance
 *
 * @throws IllegalArgumentException if the instance is a tester, has an invalid name, or already exists
 */
public LockedApplication withNewInstance(LockedApplication application, ApplicationId instance) {
if (instance.instance().isTester())
throw new IllegalArgumentException("'" + instance + "' is a tester application!");
InstanceId.validate(instance.instance().value());
if (getInstance(instance).isPresent())
throw new IllegalArgumentException("Could not create '" + instance + "': Instance already exists");
// Also reject a name which differs from an existing one only by dashes vs underscores.
if (getInstance(dashToUnderscore(instance)).isPresent())
throw new IllegalArgumentException("Could not create '" + instance + "': Instance " + dashToUnderscore(instance) + " already exists");
log.info("Created " + instance);
return application.withNewInstance(instance.instance());
}
/** Deploys an application package for an existing application instance. */
public ActivateResult deploy(JobId job, boolean deploySourceVersions) {
if (job.application().instance().isTester())
throw new IllegalArgumentException("'" + job.application() + "' is a tester application!");
TenantAndApplicationId applicationId = TenantAndApplicationId.from(job.application());
ZoneId zone = job.type().zone(controller.system());
// The deployment lock serializes deployments of this application to this zone.
try (Lock deploymentLock = lockForDeployment(job.application(), zone)) {
Set<ContainerEndpoint> containerEndpoints;
Optional<EndpointCertificateMetadata> endpointCertificateMetadata;
Optional<TenantRoles> tenantRoles = Optional.empty();
Run run = controller.jobController().last(job)
.orElseThrow(() -> new IllegalStateException("No known run of '" + job + "'"));
if (run.hasEnded())
throw new IllegalStateException("No deployment expected for " + job + " now, as no job is running");
// Source versions are used only when requested; otherwise the run's target versions.
Version platform = run.versions().sourcePlatform().filter(__ -> deploySourceVersions).orElse(run.versions().targetPlatform());
ApplicationVersion revision = run.versions().sourceApplication().filter(__ -> deploySourceVersions).orElse(run.versions().targetApplication());
ApplicationPackage applicationPackage = getApplicationPackage(job.application(), zone, revision);
// The application lock is held only while reading state needed for the deployment.
try (Lock lock = lock(applicationId)) {
LockedApplication application = new LockedApplication(requireApplication(applicationId), lock);
Instance instance = application.get().require(job.application().instance());
rejectOldChange(instance, platform, revision, job, zone);
if ( ! applicationPackage.trustedCertificates().isEmpty()
&& run.testerCertificate().isPresent())
applicationPackage = applicationPackage.withTrustedCertificate(run.testerCertificate().get());
endpointCertificateMetadata = endpointCertificates.getMetadata(instance, zone, applicationPackage.deploymentSpec().instance(instance.name()));
containerEndpoints = controller.routing().containerEndpointsOf(application.get(), job.application().instance(), zone);
}
ActivateResult result = deploy(job.application(), applicationPackage, zone, platform, containerEndpoints, endpointCertificateMetadata, tenantRoles);
var quotaUsage = deploymentQuotaUsage(zone, job.application());
// Application-package warnings are attached to the deployment for manual zones, to the application otherwise.
NotificationSource source = zone.environment().isManuallyDeployed() ?
NotificationSource.from(new DeploymentId(job.application(), zone)) : NotificationSource.from(applicationId);
List<String> warnings = Optional.ofNullable(result.prepareResponse().log)
.map(logs -> logs.stream()
.filter(log -> log.applicationPackage)
.filter(log -> LogLevel.parse(log.level).intValue() >= Level.WARNING.intValue())
.map(log -> log.message)
.sorted()
.distinct()
.collect(Collectors.toList()))
.orElseGet(List::of);
if (warnings.isEmpty()) controller.notificationsDb().removeNotification(source, Notification.Type.applicationPackage);
else controller.notificationsDb().setNotification(source, Notification.Type.applicationPackage, Notification.Level.warning, warnings);
// Record the new deployment on the instance, under the application lock.
lockApplicationOrThrow(applicationId, application ->
store(application.with(job.application().instance(),
instance -> instance.withNewDeployment(zone, revision, platform,
clock.instant(), warningsFrom(result),
quotaUsage))));
return result;
}
}
/** Deploy a system application to given zone */
public void deploy(SystemApplication application, ZoneId zone, Version version) {
if (application.hasApplicationPackage()) {
deploySystemApplicationPackage(application, zone, version);
} else {
// System applications without a package are upgraded through the node repository instead.
configServer.nodeRepository().upgrade(zone, application.nodeType(), version);
}
}
/**
 * Deploys the package of a system application to the given zone.
 *
 * @throws RuntimeException if the system application has no application package
 */
public ActivateResult deploySystemApplicationPackage(SystemApplication application, ZoneId zone, Version version) {
if (application.hasApplicationPackage()) {
ApplicationPackage applicationPackage = new ApplicationPackage(
artifactRepository.getSystemApplicationPackage(application.id(), zone, version)
);
return deploy(application.id(), applicationPackage, zone, version, Set.of(), /* No application cert */ Optional.empty(), Optional.empty());
} else {
throw new RuntimeException("This system application does not have an application package: " + application.id().toShortString());
}
}
/** Deploys the given tester application to the given zone. */
public ActivateResult deployTester(TesterId tester, ApplicationPackage applicationPackage, ZoneId zone, Version platform) {
return deploy(tester.id(), applicationPackage, zone, platform, Set.of(), /* No application cert for tester*/ Optional.empty(), Optional.empty());
}
/** Prepares and activates the given package in the given zone, refreshing routing policies regardless of outcome. */
private ActivateResult deploy(ApplicationId application, ApplicationPackage applicationPackage,
ZoneId zone, Version platform, Set<ContainerEndpoint> endpoints,
Optional<EndpointCertificateMetadata> endpointCertificateMetadata,
Optional<TenantRoles> tenantRoles) {
try {
// The docker image repo may be overridden per zone and application by feature flag.
Optional<DockerImage> dockerImageRepo = Optional.ofNullable(
dockerImageRepoFlag
.with(FetchVector.Dimension.ZONE_ID, zone.value())
.with(FetchVector.Dimension.APPLICATION_ID, application.serializedForm())
.value())
.filter(s -> !s.isBlank())
.map(DockerImage::fromString);
Optional<AthenzDomain> domain = controller.tenants().get(application.tenant())
.filter(tenant-> tenant instanceof AthenzTenant)
.map(tenant -> ((AthenzTenant)tenant).domain());
if (zone.environment().isManuallyDeployed())
controller.applications().applicationStore().putMeta(new DeploymentId(application, zone),
clock.instant(),
applicationPackage.metaDataZip());
Quota deploymentQuota = DeploymentQuotaCalculator.calculate(billingController.getQuota(application.tenant()),
asList(application.tenant()), application, zone, applicationPackage.deploymentSpec());
// Secret stores only apply to cloud tenants.
List<TenantSecretStore> tenantSecretStores = controller.tenants()
.get(application.tenant())
.filter(tenant-> tenant instanceof CloudTenant)
.map(tenant -> ((CloudTenant) tenant).tenantSecretStores())
.orElse(List.of());
List<X509Certificate> operatorCertificates = controller.supportAccess().activeGrantsFor(new DeploymentId(application, zone)).stream()
.map(SupportAccessGrant::certificate)
.collect(toList());
ConfigServer.PreparedApplication preparedApplication =
configServer.deploy(new DeploymentData(application, zone, applicationPackage.zippedContent(), platform,
endpoints, endpointCertificateMetadata, dockerImageRepo, domain,
tenantRoles, deploymentQuota, tenantSecretStores, operatorCertificates));
return new ActivateResult(new RevisionId(applicationPackage.hash()), preparedApplication.prepareResponse(),
applicationPackage.zippedContent().length);
} finally {
// Routing policies are refreshed even when deployment fails.
controller.routing().policies().refresh(application, applicationPackage.deploymentSpec(), zone);
}
}
/**
 * Removes production deployments no longer declared in the deployment spec, if allowed by a validation
 * override, and removes the instance itself when it is undeclared and thereby left without deployments.
 */
private LockedApplication withoutDeletedDeployments(LockedApplication application, InstanceName instance) {
DeploymentSpec deploymentSpec = application.get().deploymentSpec();
List<ZoneId> deploymentsToRemove = application.get().require(instance).productionDeployments().values().stream()
.map(Deployment::zone)
.filter(zone -> deploymentSpec.instance(instance).isEmpty()
|| ! deploymentSpec.requireInstance(instance).deploysTo(zone.environment(),
zone.region()))
.collect(toList());
if (deploymentsToRemove.isEmpty())
return application;
// Removing deployments is destructive, so an explicit validation override is required.
if ( ! application.get().validationOverrides().allows(ValidationId.deploymentRemoval, clock.instant()))
throw new IllegalArgumentException(ValidationId.deploymentRemoval.value() + ": " + application.get().require(instance) +
" is deployed in " +
deploymentsToRemove.stream()
.map(zone -> zone.region().value())
.collect(joining(", ")) +
", but does not include " +
(deploymentsToRemove.size() > 1 ? "these zones" : "this zone") +
" in deployment.xml. " +
ValidationOverrides.toAllowMessage(ValidationId.deploymentRemoval));
// Remove the instance itself when it is undeclared and all its deployments are going away.
boolean removeInstance = ! deploymentSpec.instanceNames().contains(instance)
&& application.get().require(instance).deployments().size() == deploymentsToRemove.size();
for (ZoneId zone : deploymentsToRemove)
application = deactivate(application, instance, zone);
if (removeInstance)
application = application.without(instance);
return application;
}
/**
 * Deletes the given application. All known instances of the applications will be deleted.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 */
public void deleteApplication(TenantAndApplicationId id, Credentials credentials) {
lockApplicationOrThrow(id, application -> {
var deployments = application.get().instances().values().stream()
.filter(instance -> ! instance.deployments().isEmpty())
.collect(toMap(instance -> instance.name(),
instance -> instance.deployments().keySet().stream()
.map(ZoneId::toString)
.collect(joining(", "))));
if ( ! deployments.isEmpty())
throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments: " + deployments);
for (Instance instance : application.get().instances().values()) {
controller.routing().removeEndpointsInDns(application.get(), instance.name());
application = application.without(instance.name());
}
applicationStore.removeAll(id.tenant(), id.application());
applicationStore.removeAllTesters(id.tenant(), id.application());
applicationStore.putMetaTombstone(id.tenant(), id.application(), clock.instant());
accessControl.deleteApplication(id, credentials);
curator.removeApplication(id);
controller.jobController().collectGarbage();
controller.notificationsDb().removeNotifications(NotificationSource.from(id));
log.info("Deleted " + id);
});
}
/**
 * Deletes the given application instance.
 *
 * @throws IllegalArgumentException if the application has deployments or the caller is not authorized
 * @throws NotExistsException if the instance does not exist
 */
public void deleteInstance(ApplicationId instanceId) {
if (getInstance(instanceId).isEmpty())
throw new NotExistsException("Could not delete instance '" + instanceId + "': Instance not found");
lockApplicationOrThrow(TenantAndApplicationId.from(instanceId), application -> {
if ( ! application.get().require(instanceId.instance()).deployments().isEmpty())
throw new IllegalArgumentException("Could not delete '" + application + "': It has active deployments in: " +
application.get().require(instanceId.instance()).deployments().keySet().stream().map(ZoneId::toString)
.sorted().collect(joining(", ")));
// Instances still declared in deployment.xml must be removed from the spec first.
if ( ! application.get().deploymentSpec().equals(DeploymentSpec.empty)
&& application.get().deploymentSpec().instanceNames().contains(instanceId.instance()))
throw new IllegalArgumentException("Can not delete '" + instanceId + "', which is specified in 'deployment.xml'; remove it there first");
controller.routing().removeEndpointsInDns(application.get(), instanceId.instance());
curator.writeApplication(application.without(instanceId.instance()).get());
controller.jobController().collectGarbage();
controller.notificationsDb().removeNotifications(NotificationSource.from(instanceId));
log.info("Deleted " + instanceId);
});
}
/**
 * Replace any previous version of this application by this instance
 *
 * @param application a locked application to store
 */
public void store(LockedApplication application) {
curator.writeApplication(application.get());
}
/**
 * Acquire a locked application to modify and store, if there is an application with the given id.
 *
 * @param applicationId ID of the application to lock and get.
 * @param action Function which acts on the locked application.
 */
public void lockApplicationIfPresent(TenantAndApplicationId applicationId, Consumer<LockedApplication> action) {
try (Lock lock = lock(applicationId)) {
// No-op when the application does not exist; the lock is still released by try-with-resources.
getApplication(applicationId).map(application -> new LockedApplication(application, lock)).ifPresent(action);
}
}
/**
 * Acquire a locked application to modify and store, or throw an exception if no application has the given id.
 *
 * @param applicationId ID of the application to lock and require.
 * @param action Function which acts on the locked application.
 * @throws IllegalArgumentException when application does not exist.
 */
public void lockApplicationOrThrow(TenantAndApplicationId applicationId, Consumer<LockedApplication> action) {
try (Lock lock = lock(applicationId)) {
action.accept(new LockedApplication(requireApplication(applicationId), lock));
}
}
/**
 * Tells config server to schedule a restart of all nodes in this deployment
 *
 * @param restartFilter Variables to filter which nodes to restart.
 */
public void restart(DeploymentId deploymentId, RestartFilter restartFilter) {
configServer.restart(deploymentId, restartFilter);
}
/**
 * Asks the config server whether this deployment is currently healthy, i.e., serving traffic as usual.
 * If this cannot be ascertained, we must assume it is not.
 */
public boolean isHealthy(DeploymentId deploymentId) {
try {
return ! isSuspended(deploymentId);
}
catch (RuntimeException e) {
// A failed lookup is treated as unhealthy, but the cause is logged for diagnosis.
log.log(Level.WARNING, "Failed getting suspension status of " + deploymentId + ": " + Exceptions.toMessageString(e));
return false;
}
}
/**
 * Asks the config server whether this deployment is currently <i>suspended</i>:
 * Not in a state where it should receive traffic.
 */
public boolean isSuspended(DeploymentId deploymentId) {
return configServer.isSuspended(deploymentId);
}
/** Sets suspension status of the given deployment in its zone. */
public void setSuspension(DeploymentId deploymentId, boolean suspend) {
configServer.setSuspension(deploymentId, suspend);
}
/** Deactivate application in the given zone */
public void deactivate(ApplicationId id, ZoneId zone) {
    TenantAndApplicationId owner = TenantAndApplicationId.from(id);
    // Deactivate under the application lock, and persist the resulting state.
    lockApplicationOrThrow(owner, application -> {
        LockedApplication deactivated = deactivate(application, id.instance(), zone);
        store(deactivated);
    });
}
/**
* Deactivates a locked application without storing it
*
* @return the application with the deployment in the given zone removed
*/
private LockedApplication deactivate(LockedApplication application, InstanceName instanceName, ZoneId zone) {
    DeploymentId id = new DeploymentId(application.get().id().instance(instanceName), zone);
    try {
        configServer.deactivate(id);
    } finally {
        // Clean-up must run whether or not deactivation succeeded, to keep controller-side state consistent:
        // routing policies are refreshed, manual deployments get a metadata tombstone, and
        // non-test zones have their notifications removed.
        controller.routing().policies().refresh(application.get().id().instance(instanceName), application.get().deploymentSpec(), zone);
        if (zone.environment().isManuallyDeployed())
            applicationStore.putMetaTombstone(id, clock.instant());
        if (!zone.environment().isTest())
            controller.notificationsDb().removeNotifications(NotificationSource.from(id));
    }
    // Return the application with the deployment removed; the caller is responsible for storing it.
    return application.with(instanceName, instance -> instance.withoutDeploymentIn(zone));
}
/** Returns the deployment trigger owned by this controller. */
public DeploymentTrigger deploymentTrigger() { return deploymentTrigger; }
/**
 * Returns a lock which provides exclusive rights to changing this application.
 * Any operation which stores an application need to first acquire this lock, then read, modify
 * and store the application, and finally release (close) the lock.
 */
Lock lock(TenantAndApplicationId application) {
    return curator.lock(application);
}
/**
 * Returns a lock which provides exclusive rights to deploying this application to the given zone.
 */
private Lock lockForDeployment(ApplicationId application, ZoneId zone) {
    return curator.lockForDeployment(application, zone);
}
/**
* Verifies that the application can be deployed to the tenant, following these rules:
*
* 1. Verify that the Athenz service can be launched by the config server
* 2. If the principal is given, verify that the principal is tenant admin or admin of the tenant domain
* 3. If the principal is not given, verify that the Athenz domain of the tenant equals Athenz domain given in deployment.xml
*
* @param tenantName tenant where application should be deployed
* @param applicationPackage application package
* @param deployer principal initiating the deployment, possibly empty
*/
public void verifyApplicationIdentityConfiguration(TenantName tenantName, Optional<InstanceName> instanceName, Optional<ZoneId> zoneId, ApplicationPackage applicationPackage, Optional<Principal> deployer) {
    Optional<AthenzDomain> identityDomain = applicationPackage.deploymentSpec().athenzDomain()
                                                              .map(domain -> new AthenzDomain(domain.value()));
    if(identityDomain.isEmpty()) {
        return; // No Athenz identity requested by the package; nothing to verify.
    }
    if(! (accessControl instanceof AthenzFacade)) {
        throw new IllegalArgumentException("Athenz domain and service specified in deployment.xml, but not supported by system.");
    }
    // Rule 1: the config server identities must be allowed to launch the configured service(s).
    verifyAllowedLaunchAthenzService(applicationPackage.deploymentSpec());
    Optional<AthenzUser> athenzUser = getUser(deployer);
    if (athenzUser.isPresent()) {
        // Rule 2: a known deploying user must be allowed to launch the service, or be tenant-domain admin.
        var zone = zoneId.orElseThrow(() -> new IllegalArgumentException("Unable to evaluate access, no zone provided in deployment"));
        // Service resolution: instance-and-zone-specific service first, falling back to the global one.
        var serviceToLaunch = instanceName
                .flatMap(instance -> applicationPackage.deploymentSpec().instance(instance))
                .flatMap(instanceSpec -> instanceSpec.athenzService(zone.environment(), zone.region()))
                .or(() -> applicationPackage.deploymentSpec().athenzService())
                .map(service -> new AthenzService(identityDomain.get(), service.value()));
        if(serviceToLaunch.isPresent()) {
            if (
                    ! ((AthenzFacade) accessControl).canLaunch(athenzUser.get(), serviceToLaunch.get()) &&
                    ! ((AthenzFacade) accessControl).hasTenantAdminAccess(athenzUser.get(), identityDomain.get())
            ) {
                throw new IllegalArgumentException("User " + athenzUser.get().getFullName() + " is not allowed to launch " +
                                                   "service " + serviceToLaunch.get().getFullName() + ". " +
                                                   "Please reach out to the domain admin.");
            }
        } else {
            throw new IllegalArgumentException("Athenz domain configured, but no service defined for deployment to " + zone.value());
        }
    } else {
        // Rule 3: no principal given — the tenant's Athenz domain must match the one in deployment.xml.
        Tenant tenant = controller.tenants().require(tenantName);
        AthenzDomain tenantDomain = ((AthenzTenant) tenant).domain();
        if ( ! Objects.equals(tenantDomain, identityDomain.get()))
            throw new IllegalArgumentException("Athenz domain in deployment.xml: [" + identityDomain.get().getName() + "] " +
                                               "must match tenant domain: [" + tenantDomain.getName() + "]");
    }
}
private void rejectOldChange(Instance instance, Version platform, ApplicationVersion revision, JobId job, ZoneId zone) {
    // Only production deployments are protected against downgrades.
    if ( ! zone.environment().isProduction()) return;
    Deployment deployment = instance.deployments().get(zone);
    if (deployment == null) return;
    // Pinned changes may lower the platform; unknown revisions are tolerated in CD systems.
    boolean olderPlatform = ! instance.change().isPinned() && platform.compareTo(deployment.version()) < 0;
    boolean olderRevision = ! (revision.isUnknown() && controller.system().isCd())
                            && revision.compareTo(deployment.applicationVersion()) < 0;
    if ( ! olderPlatform && ! olderRevision) return;
    throw new IllegalArgumentException(String.format("Rejecting deployment of application %s to %s, as the requested versions (platform: %s, application: %s)" +
                                                     " are older than the currently deployed (platform: %s, application: %s).",
                                                     job.application(), zone, platform, revision, deployment.version(), deployment.applicationVersion()));
}
/**
 * Returns the given id with each '-' in the application name replaced by '_'
 * (presumably to match names in systems which disallow dashes — confirm with callers).
 */
private TenantAndApplicationId dashToUnderscore(TenantAndApplicationId id) {
    // String.replace swaps literal characters; replaceAll would compile a regex for the same result.
    return TenantAndApplicationId.from(id.tenant().value(), id.application().value().replace('-', '_'));
}
/** Returns the given id with dashes replaced by underscores in the application name, keeping the instance. */
private ApplicationId dashToUnderscore(ApplicationId id) {
    return dashToUnderscore(TenantAndApplicationId.from(id)).instance(id.instance());
}
private QuotaUsage deploymentQuotaUsage(ZoneId zoneId, ApplicationId applicationId) {
    // Quota usage is derived from the node repository's view of the deployed application.
    var application = configServer.nodeRepository().getApplication(zoneId, applicationId);
    return DeploymentQuotaCalculator.calculateQuotaUsage(application);
}
private ApplicationPackage getApplicationPackage(ApplicationId application, ZoneId zone, ApplicationVersion revision) {
    // Unknown revisions have no registered build; fetch the package deployed directly to the zone instead.
    if (revision.isUnknown())
        return new ApplicationPackage(applicationStore.getDev(application, zone));
    return new ApplicationPackage(applicationStore.get(application.tenant(), application.application(), revision));
}
/*
* Get the AthenzUser from this principal or Optional.empty if this does not represent a user.
*/
/** Returns the AthenzUser behind this principal, or empty if it does not represent a user. */
private Optional<AthenzUser> getUser(Optional<Principal> deployer) {
    if (deployer.isEmpty() || ! (deployer.get() instanceof AthenzPrincipal))
        return Optional.empty();
    AthenzIdentity identity = ((AthenzPrincipal) deployer.get()).getIdentity();
    if (identity instanceof AthenzUser)
        return Optional.of((AthenzUser) identity);
    return Optional.empty();
}
/*
* Verifies that the configured athenz service (if any) can be launched.
*/
private void verifyAllowedLaunchAthenzService(DeploymentSpec deploymentSpec) {
    if (deploymentSpec.athenzDomain().isEmpty())
        return;
    var domain = deploymentSpec.athenzDomain().get();
    for (var zone : controller.zoneRegistry().zones().reachable().ids()) {
        AthenzIdentity configServerIdentity = controller.zoneRegistry().getConfigServerHttpsIdentity(zone);
        // Verify the globally declared service, if any ...
        deploymentSpec.athenzService().ifPresent(service ->
                verifyAthenzServiceCanBeLaunchedBy(configServerIdentity, new AthenzService(domain.value(), service.value())));
        // ... and each instance-level service override.
        for (var spec : deploymentSpec.instances())
            spec.athenzService(zone.environment(), zone.region()).ifPresent(service ->
                    verifyAthenzServiceCanBeLaunchedBy(configServerIdentity, new AthenzService(domain.value(), service.value())));
    }
}
private void verifyAthenzServiceCanBeLaunchedBy(AthenzIdentity configServerAthenzIdentity, AthenzService athenzService) {
    // The config server deploys on behalf of the tenant, so it must itself be authorized to launch the service.
    if ( ! ((AthenzFacade) accessControl).canLaunch(configServerAthenzIdentity, athenzService))
        throw new IllegalArgumentException("Not allowed to launch Athenz service " + athenzService.getFullName());
}
/** Returns the latest known version within the given major. */
public Optional<Version> lastCompatibleVersion(int targetMajorVersion) {
    Optional<Version> latest = Optional.empty();
    for (VespaVersion vespaVersion : controller.readVersionStatus().versions()) {
        Version version = vespaVersion.versionNumber();
        if (version.getMajor() != targetMajorVersion)
            continue;
        if (latest.isEmpty() || version.compareTo(latest.get()) > 0)
            latest = Optional.of(version);
    }
    return latest;
}
/** Extract deployment warnings metric from deployment result */
private static Map<DeploymentMetrics.Warning, Integer> warningsFrom(ActivateResult result) {
    if (result.prepareResponse().log == null) return Map.of();
    // Count log entries at warning level; all of them are attributed to the catch-all category.
    int warningCount = 0;
    for (Log entry : result.prepareResponse().log) {
        if ("warn".equalsIgnoreCase(entry.level) || "warning".equalsIgnoreCase(entry.level))
            warningCount++;
    }
    return warningCount == 0 ? Map.of() : Map.of(DeploymentMetrics.Warning.all, warningCount);
}
} |
Updated. I kept the first %s for now. | public ExpressionChecker addExpr(String expression, Object expectedValue) {
TypeName resultTypeName = JAVA_CLASS_TO_TYPENAME.get(expectedValue.getClass());
checkArgument(
resultTypeName != null,
String.format(
"The type of the expected object '%s' is unknown in 'addExpr(String %s, Object %s)'"
+ " . Please use 'addExpr(String expr, Object expected, "
+ "FieldType type)' instead and provide the type of the expected object",
expectedValue, expression, expectedValue));
addExpr(expression, expectedValue, FieldType.of(resultTypeName));
return this;
} | "The type of the expected object '%s' is unknown in 'addExpr(String %s, Object %s)'" | public ExpressionChecker addExpr(String expression, Object expectedValue) {
TypeName resultTypeName = JAVA_CLASS_TO_TYPENAME.get(expectedValue.getClass());
checkArgument(
resultTypeName != null,
String.format(
"The type of the expected value '%s' is unknown in 'addExpr(String expression, "
+ "Object expectedValue)'. Please use 'addExpr(String expr, Object expected, "
+ "FieldType type)' instead and provide the type of the expected object",
expectedValue));
addExpr(expression, expectedValue, FieldType.of(resultTypeName));
return this;
} | class ExpressionChecker {
private transient List<ExpressionTestCase> exps = new ArrayList<>();
public ExpressionChecker addExpr(
String expression, Object expectedValue, FieldType resultFieldType) {
exps.add(ExpressionTestCase.of(expression, expectedValue, resultFieldType));
return this;
}
public void buildRunAndCheck() {
buildRunAndCheck(getTestPCollection());
}
/** Build the corresponding SQL, compile to Beam Pipeline, run it, and check the result. */
public void buildRunAndCheck(PCollection<Row> inputCollection) {
for (ExpressionTestCase testCase : exps) {
String expression = testCase.sqlExpr();
Object expectedValue = testCase.expectedResult();
String sql = String.format("SELECT %s FROM PCOLLECTION", expression);
Schema schema = Schema.builder().addField(expression, testCase.resultFieldType()).build();
PCollection<Row> output =
inputCollection.apply(testCase.toString(), SqlTransform.query(sql));
if (expectedValue instanceof Double) {
PAssert.that(output).satisfies(matchesScalar((double) expectedValue, PRECISION_DOUBLE));
} else if (expectedValue instanceof Float) {
PAssert.that(output).satisfies(matchesScalar((float) expectedValue, PRECISION_FLOAT));
} else {
PAssert.that(output)
.containsInAnyOrder(
TestUtils.RowsBuilder.of(schema).addRows(expectedValue).getRows());
}
}
inputCollection.getPipeline().run();
}
} | class ExpressionChecker {
private transient List<ExpressionTestCase> exps = new ArrayList<>();
public ExpressionChecker addExpr(
String expression, Object expectedValue, FieldType resultFieldType) {
exps.add(ExpressionTestCase.of(expression, expectedValue, resultFieldType));
return this;
}
public void buildRunAndCheck() {
buildRunAndCheck(getTestPCollection());
}
/** Build the corresponding SQL, compile to Beam Pipeline, run it, and check the result. */
public void buildRunAndCheck(PCollection<Row> inputCollection) {
for (ExpressionTestCase testCase : exps) {
String expression = testCase.sqlExpr();
Object expectedValue = testCase.expectedResult();
String sql = String.format("SELECT %s FROM PCOLLECTION", expression);
Schema schema = Schema.builder().addField(expression, testCase.resultFieldType()).build();
PCollection<Row> output =
inputCollection.apply(testCase.toString(), SqlTransform.query(sql));
if (expectedValue instanceof Double) {
PAssert.that(output).satisfies(matchesScalar((double) expectedValue, PRECISION_DOUBLE));
} else if (expectedValue instanceof Float) {
PAssert.that(output).satisfies(matchesScalar((float) expectedValue, PRECISION_FLOAT));
} else {
PAssert.that(output)
.containsInAnyOrder(
TestUtils.RowsBuilder.of(schema).addRows(expectedValue).getRows());
}
}
inputCollection.getPipeline().run();
}
} |
Negative case handles at https://github.com/ballerina-platform/ballerina-lang/pull/19723/files#diff-89edaaa1baefeea805366138354f0258R225. If maxAttempts = 0, we need to retry infinitely. so check this condition here. | public static boolean reconnect(WebSocketConnectionInfo connectionInfo) {
ObjectValue webSocketClient = connectionInfo.getWebSocketEndpoint();
RetryContext retryConnectorConfig = (RetryContext) webSocketClient.getNativeData(WebSocketConstants.
RETRY_CONFIG);
int interval = retryConnectorConfig.getInterval();
int maxInterval = retryConnectorConfig.getMaxInterval();
int maxAttempts = retryConnectorConfig.getMaxAttempts();
int noOfReconnectAttempts = retryConnectorConfig.getReconnectAttempts();
float backOfFactor = retryConnectorConfig.getBackOfFactor();
WebSocketService wsService = connectionInfo.getService();
Date date = new Date();
SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
if (((noOfReconnectAttempts < maxAttempts) && maxAttempts > 0) || maxAttempts == 0) {
retryConnectorConfig.setReconnectAttempts(noOfReconnectAttempts + 1);
String time = formatter.format(date.getTime());
logger.debug(WebSocketConstants.LOG_MESSAGE, time, "reconnecting...");
setCountDownLatch(calculateWaitingTime(interval, maxInterval, backOfFactor, noOfReconnectAttempts));
establishWebSocketConnection(webSocketClient, wsService);
return true;
}
logger.debug(WebSocketConstants.LOG_MESSAGE, STATEMENT_FOR_RECONNECT , webSocketClient.
getStringValue(WebSocketConstants.CLIENT_URL_CONFIG));
return false;
} | if (((noOfReconnectAttempts < maxAttempts) && maxAttempts > 0) || maxAttempts == 0) { | public static boolean reconnect(WebSocketConnectionInfo connectionInfo) {
ObjectValue webSocketClient = connectionInfo.getWebSocketEndpoint();
RetryContext retryConnectorConfig = (RetryContext) webSocketClient.getNativeData(WebSocketConstants.
RETRY_CONFIG);
int interval = retryConnectorConfig.getInterval();
int maxInterval = retryConnectorConfig.getMaxInterval();
int maxAttempts = retryConnectorConfig.getMaxAttempts();
int noOfReconnectAttempts = retryConnectorConfig.getReconnectAttempts();
double backOfFactor = retryConnectorConfig.getBackOfFactor();
WebSocketService wsService = connectionInfo.getService();
Date date = new Date();
SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
if (noOfReconnectAttempts < maxAttempts || maxAttempts == 0) {
retryConnectorConfig.setReconnectAttempts(noOfReconnectAttempts + 1);
String time = formatter.format(date.getTime());
logger.debug(WebSocketConstants.LOG_MESSAGE, time, "reconnecting...");
createDelay(calculateWaitingTime(interval, maxInterval, backOfFactor, noOfReconnectAttempts));
establishWebSocketConnection(webSocketClient, wsService);
return true;
}
logger.debug(WebSocketConstants.LOG_MESSAGE, "Maximum retry attempts but couldn't connect to the server: ",
webSocketClient.getStringValue(WebSocketConstants.CLIENT_URL_CONFIG));
return false;
} | class WebSocketUtil {
private static final Logger logger = LoggerFactory.getLogger(WebSocketUtil.class);
private static final String STATEMENT_FOR_RECONNECT = "Maximum retry attempts but couldn't connect " +
"to the server: ";
private static final String CLIENT_ENDPOINT_CONFIG = "config";
public static ObjectValue createAndPopulateWebSocketCaller(WebSocketConnection webSocketConnection,
WebSocketServerService wsService,
WebSocketConnectionManager connectionManager) {
ObjectValue webSocketCaller = BallerinaValues.createObjectValue(HttpConstants.PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CALLER);
ObjectValue webSocketConnector = BallerinaValues.createObjectValue(
HttpConstants.PROTOCOL_HTTP_PKG_ID, WebSocketConstants.WEBSOCKET_CONNECTOR);
webSocketCaller.set(WebSocketConstants.LISTENER_CONNECTOR_FIELD, webSocketConnector);
populateWebSocketEndpoint(webSocketConnection, webSocketCaller);
WebSocketConnectionInfo connectionInfo =
new WebSocketConnectionInfo(wsService, webSocketConnection, webSocketCaller);
connectionManager.addConnection(webSocketConnection.getChannelId(), connectionInfo);
webSocketConnector.addNativeData(WebSocketConstants.NATIVE_DATA_WEBSOCKET_CONNECTION_INFO,
connectionInfo);
WebSocketObservabilityUtil.observeConnection(
connectionManager.getConnectionInfo(webSocketConnection.getChannelId()));
return webSocketCaller;
}
public static void populateWebSocketEndpoint(WebSocketConnection webSocketConnection, ObjectValue webSocketCaller) {
webSocketCaller.set(WebSocketConstants.LISTENER_ID_FIELD, webSocketConnection.getChannelId());
String negotiatedSubProtocol = webSocketConnection.getNegotiatedSubProtocol();
webSocketCaller.set(WebSocketConstants.LISTENER_NEGOTIATED_SUBPROTOCOLS_FIELD, negotiatedSubProtocol);
webSocketCaller.set(WebSocketConstants.LISTENER_IS_SECURE_FIELD, webSocketConnection.isSecure());
webSocketCaller.set(WebSocketConstants.LISTENER_IS_OPEN_FIELD, webSocketConnection.isOpen());
}
public static void handleWebSocketCallback(NonBlockingCallback callback,
ChannelFuture webSocketChannelFuture, Logger log) {
webSocketChannelFuture.addListener(future -> {
Throwable cause = future.cause();
if (!future.isSuccess() && cause != null) {
log.error("Error occurred ", cause);
callback.notifyFailure(WebSocketUtil.createErrorByType(cause));
} else {
callback.setReturnValues(null);
callback.notifySuccess();
}
});
}
public static void readFirstFrame(WebSocketConnection webSocketConnection, ObjectValue wsClient) {
webSocketConnection.readNextFrame();
wsClient.set(WebSocketConstants.CONNECTOR_IS_READY_FIELD, true);
}
/**
* Closes the connection with the unexpected failure status code.
*
* @param webSocketConnection the websocket connection to be closed.
*/
public static void closeDuringUnexpectedCondition(WebSocketConnection webSocketConnection) {
webSocketConnection.terminateConnection(1011, "Unexpected condition");
}
public static void setListenerOpenField(WebSocketConnectionInfo connectionInfo) throws IllegalAccessException {
connectionInfo.getWebSocketEndpoint().set(WebSocketConstants.LISTENER_IS_OPEN_FIELD,
connectionInfo.getWebSocketConnection().isOpen());
}
public static int findMaxFrameSize(MapValue<String, Object> configs) {
long size = configs.getIntValue(WebSocketConstants.ANNOTATION_ATTR_MAX_FRAME_SIZE);
if (size <= 0) {
return WebSocketConstants.DEFAULT_MAX_FRAME_SIZE;
}
try {
return Math.toIntExact(size);
} catch (ArithmeticException e) {
logger.warn("The value set for maxFrameSize needs to be less than " + Integer.MAX_VALUE +
". The maxFrameSize value is set to " + Integer.MAX_VALUE);
return Integer.MAX_VALUE;
}
}
public static int findTimeoutInSeconds(MapValue<String, Object> config, String key, int defaultValue) {
long timeout = config.getIntValue(key);
if (timeout < 0) {
return defaultValue;
}
try {
return Math.toIntExact(timeout);
} catch (ArithmeticException e) {
logger.warn("The value set for {} needs to be less than {} .The {} value is set to {} ", key,
Integer.MAX_VALUE, key, Integer.MAX_VALUE);
return Integer.MAX_VALUE;
}
}
public static String[] findNegotiableSubProtocols(MapValue<String, Object> configs) {
return configs.getArrayValue(WebSocketConstants.ANNOTATION_ATTR_SUB_PROTOCOLS).getStringArray();
}
static String getErrorMessage(Throwable err) {
if (err.getMessage() == null) {
return "Unexpected error occurred";
}
return err.getMessage();
}
/**
* Creates the appropriate ballerina errors using for the given throwable.
*
* @param throwable the throwable to be represented in Ballerina.
* @return the relevant WebSocketException with proper error code.
*/
public static WebSocketException createErrorByType(Throwable throwable) {
WebSocketConstants.ErrorCode errorCode = WebSocketConstants.ErrorCode.WsGenericError;
ErrorValue cause = null;
String message = getErrorMessage(throwable);
if (throwable instanceof CorruptedWebSocketFrameException) {
WebSocketCloseStatus status = ((CorruptedWebSocketFrameException) throwable).closeStatus();
if (status == WebSocketCloseStatus.MESSAGE_TOO_BIG) {
errorCode = WebSocketConstants.ErrorCode.WsPayloadTooBigError;
} else {
errorCode = WebSocketConstants.ErrorCode.WsProtocolError;
}
} else if (throwable instanceof SSLException) {
cause = createErrorCause(throwable.getMessage(), HttpErrorType.SSL_ERROR.getReason(),
HttpConstants.PROTOCOL_HTTP_PKG_ID);
message = "SSL/TLS Error";
} else if (throwable instanceof IllegalStateException) {
if (throwable.getMessage().contains("frame continuation")) {
errorCode = WebSocketConstants.ErrorCode.WsInvalidContinuationFrameError;
} else if (throwable.getMessage().toLowerCase(Locale.ENGLISH).contains("close frame")) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionClosureError;
}
} else if (throwable instanceof IllegalAccessException &&
throwable.getMessage().equals(WebSocketConstants.THE_WEBSOCKET_CONNECTION_HAS_NOT_BEEN_MADE)) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionError;
} else if (throwable instanceof TooLongFrameException) {
errorCode = WebSocketConstants.ErrorCode.WsPayloadTooBigError;
} else if (throwable instanceof CodecException) {
errorCode = WebSocketConstants.ErrorCode.WsProtocolError;
} else if (throwable instanceof WebSocketHandshakeException) {
errorCode = WebSocketConstants.ErrorCode.WsInvalidHandshakeError;
} else if (throwable instanceof IOException) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionError;
cause = createErrorCause(throwable.getMessage(), IOConstants.ErrorCode.GenericError.errorCode(),
IOConstants.IO_PACKAGE_ID);
message = "IO Error";
}
return new WebSocketException(errorCode, message, cause);
}
private static ErrorValue createErrorCause(String message, String reason, BPackage packageName) {
MapValue<String, Object> detailRecordType = BallerinaValues.createRecordValue(
packageName, WebSocketConstants.WEBSOCKET_ERROR_DETAILS);
MapValue<String, Object> detailRecord = BallerinaValues.createRecord(detailRecordType, message, null);
return BallerinaErrors.createError(reason, detailRecord);
}
/**
* Establish connection with the endpoint.
*
* @param webSocketClient - the WebSocket client.
* @param wsService - the WebSocket service.
*/
public static void establishWebSocketConnection(ObjectValue webSocketClient, WebSocketService wsService) {
WebSocketClientConnectorListener clientConnectorListener = (WebSocketClientConnectorListener) webSocketClient.
getNativeData(WebSocketConstants.CLIENT_LISTENER);
WebSocketClientConnector clientConnector = (WebSocketClientConnector) webSocketClient.
getNativeData(WebSocketConstants.CLIENT_CONNECTOR);
boolean readyOnConnect = webSocketClient.getMapValue(CLIENT_ENDPOINT_CONFIG).getBooleanValue(
WebSocketConstants.CLIENT_READY_ON_CONNECT);
ClientHandshakeFuture handshakeFuture = clientConnector.connect();
handshakeFuture.setWebSocketConnectorListener(clientConnectorListener);
CountDownLatch countDownLatch = new CountDownLatch(1);
handshakeFuture.setClientHandshakeListener(new WebSocketClientHandshakeListener(webSocketClient, wsService,
clientConnectorListener, readyOnConnect, countDownLatch));
waitForHandshake(webSocketClient, countDownLatch);
}
/**
* Check whether the client's config has the retryConfig property.
*
* @param webSocketClient - the WebSocket client.
* @return If the client's config has the retry config, then return true.
*/
public static boolean hasRetryConfig(ObjectValue webSocketClient) {
return webSocketClient.getMapValue(CLIENT_ENDPOINT_CONFIG).
getMapValue(WebSocketConstants.RETRY_CONFIG) != null;
}
private static void waitForHandshake(ObjectValue webSocketClient, CountDownLatch countDownLatch) {
long timeout = WebSocketUtil.findTimeoutInSeconds((MapValue<String, Object>) webSocketClient.getMapValue(
HttpConstants.CLIENT_ENDPOINT_CONFIG), "handShakeTimeoutInSeconds", 300);
try {
if (!countDownLatch.await(timeout, TimeUnit.SECONDS)) {
throw new WebSocketException(WebSocketConstants.ErrorCode.WsGenericError,
"Waiting for WebSocket handshake has not been successful", WebSocketUtil.createErrorCause(
"Connection timeout", IOConstants.ErrorCode.ConnectionTimedOut.errorCode(),
IOConstants.IO_PACKAGE_ID));
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new WebSocketException(WebSocketConstants.ERROR_MESSAGE + e.getMessage());
}
}
/**
* Reconnect when the WebSocket connection is lost.
*
* @param connectionInfo - information about the connection.
* @return If attempts reconnection, then return true.
*/
/**
* Set the time to wait before attempting to reconnect.
*
* @param interval - interval to wait before trying to reconnect.
*/
/**
 * Blocks the calling thread for the given interval before the next reconnect attempt.
 *
 * @param interval - time to wait, in milliseconds.
 */
private static void setCountDownLatch(int interval) {
    try {
        // The previous implementation awaited a CountDownLatch that was never counted down —
        // an obscured sleep. TimeUnit.sleep expresses the same behavior directly.
        TimeUnit.MILLISECONDS.sleep(interval);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new WebSocketException(WebSocketConstants.ERROR_MESSAGE + e.getMessage());
    }
}
/**
* Calculate the waiting time.
*
* @param interval- interval to wait before trying to reconnect.
* @param maxInterval - maximum interval to wait before trying to reconnect.
* @param backOfFactor - The rate of increase of the reconnect delay
* @param reconnectAttempts - the number of reconnecting attempts
* @return the time to wait before attempting to reconnect
*/
/**
 * Calculates the waiting time before the next reconnect attempt: the base interval grown
 * exponentially by backOfFactor per attempt, capped at maxInterval.
 *
 * @param interval - interval to wait before trying to reconnect.
 * @param maxInterval - maximum interval to wait before trying to reconnect.
 * @param backOfFactor - the rate of increase of the reconnect delay.
 * @param reconnectAttempts - the number of reconnecting attempts made so far.
 * @return the time to wait before attempting to reconnect.
 */
private static int calculateWaitingTime(int interval, int maxInterval, float backOfFactor, int reconnectAttempts) {
    int waitingTime = (int) (interval * Math.pow(backOfFactor, reconnectAttempts));
    return Math.min(waitingTime, maxInterval);
}
private WebSocketUtil() {
}
} | class WebSocketUtil {
private static final Logger logger = LoggerFactory.getLogger(WebSocketUtil.class);
private static final String CLIENT_ENDPOINT_CONFIG = "config";
public static ObjectValue createAndPopulateWebSocketCaller(WebSocketConnection webSocketConnection,
WebSocketServerService wsService,
WebSocketConnectionManager connectionManager) {
ObjectValue webSocketCaller = BallerinaValues.createObjectValue(HttpConstants.PROTOCOL_HTTP_PKG_ID,
WebSocketConstants.WEBSOCKET_CALLER);
ObjectValue webSocketConnector = BallerinaValues.createObjectValue(
HttpConstants.PROTOCOL_HTTP_PKG_ID, WebSocketConstants.WEBSOCKET_CONNECTOR);
webSocketCaller.set(WebSocketConstants.LISTENER_CONNECTOR_FIELD, webSocketConnector);
populateWebSocketEndpoint(webSocketConnection, webSocketCaller);
WebSocketConnectionInfo connectionInfo =
new WebSocketConnectionInfo(wsService, webSocketConnection, webSocketCaller);
connectionManager.addConnection(webSocketConnection.getChannelId(), connectionInfo);
webSocketConnector.addNativeData(WebSocketConstants.NATIVE_DATA_WEBSOCKET_CONNECTION_INFO,
connectionInfo);
WebSocketObservabilityUtil.observeConnection(
connectionManager.getConnectionInfo(webSocketConnection.getChannelId()));
return webSocketCaller;
}
public static void populateWebSocketEndpoint(WebSocketConnection webSocketConnection, ObjectValue webSocketCaller) {
webSocketCaller.set(WebSocketConstants.LISTENER_ID_FIELD, webSocketConnection.getChannelId());
String negotiatedSubProtocol = webSocketConnection.getNegotiatedSubProtocol();
webSocketCaller.set(WebSocketConstants.LISTENER_NEGOTIATED_SUBPROTOCOLS_FIELD, negotiatedSubProtocol);
webSocketCaller.set(WebSocketConstants.LISTENER_IS_SECURE_FIELD, webSocketConnection.isSecure());
webSocketCaller.set(WebSocketConstants.LISTENER_IS_OPEN_FIELD, webSocketConnection.isOpen());
}
public static void handleWebSocketCallback(NonBlockingCallback callback,
ChannelFuture webSocketChannelFuture, Logger log) {
webSocketChannelFuture.addListener(future -> {
Throwable cause = future.cause();
if (!future.isSuccess() && cause != null) {
log.error("Error occurred ", cause);
callback.notifyFailure(WebSocketUtil.createErrorByType(cause));
} else {
callback.setReturnValues(null);
callback.notifySuccess();
}
});
}
public static void readFirstFrame(WebSocketConnection webSocketConnection, ObjectValue wsClient) {
webSocketConnection.readNextFrame();
wsClient.set(WebSocketConstants.CONNECTOR_IS_READY_FIELD, true);
}
/**
* Closes the connection with the unexpected failure status code.
*
* @param webSocketConnection - the webSocket connection to be closed.
*/
public static void closeDuringUnexpectedCondition(WebSocketConnection webSocketConnection) {
webSocketConnection.terminateConnection(1011, "Unexpected condition");
}
public static void setListenerOpenField(WebSocketConnectionInfo connectionInfo) throws IllegalAccessException {
connectionInfo.getWebSocketEndpoint().set(WebSocketConstants.LISTENER_IS_OPEN_FIELD,
connectionInfo.getWebSocketConnection().isOpen());
}
public static int findMaxFrameSize(MapValue<String, Object> configs) {
long size = configs.getIntValue(WebSocketConstants.ANNOTATION_ATTR_MAX_FRAME_SIZE);
if (size <= 0) {
return WebSocketConstants.DEFAULT_MAX_FRAME_SIZE;
}
try {
return Math.toIntExact(size);
} catch (ArithmeticException e) {
logger.warn("The value set for maxFrameSize needs to be less than " + Integer.MAX_VALUE +
". The maxFrameSize value is set to " + Integer.MAX_VALUE);
return Integer.MAX_VALUE;
}
}
public static int findTimeoutInSeconds(MapValue<String, Object> config, String key, int defaultValue) {
long timeout = config.getIntValue(key);
if (timeout < 0) {
return defaultValue;
}
try {
return Math.toIntExact(timeout);
} catch (ArithmeticException e) {
logger.warn("The value set for {} needs to be less than {} .The {} value is set to {} ", key,
Integer.MAX_VALUE, key, Integer.MAX_VALUE);
return Integer.MAX_VALUE;
}
}
public static String[] findNegotiableSubProtocols(MapValue<String, Object> configs) {
return configs.getArrayValue(WebSocketConstants.ANNOTATION_ATTR_SUB_PROTOCOLS).getStringArray();
}
static String getErrorMessage(Throwable err) {
if (err.getMessage() == null) {
return "Unexpected error occurred";
}
return err.getMessage();
}
/**
* Creates the appropriate ballerina errors using for the given throwable.
*
* @param throwable - the throwable to be represented in Ballerina.
* @return the relevant WebSocketException with proper error code.
*/
public static WebSocketException createErrorByType(Throwable throwable) {
WebSocketConstants.ErrorCode errorCode = WebSocketConstants.ErrorCode.WsGenericError;
ErrorValue cause = null;
String message = getErrorMessage(throwable);
if (throwable instanceof CorruptedWebSocketFrameException) {
WebSocketCloseStatus status = ((CorruptedWebSocketFrameException) throwable).closeStatus();
if (status == WebSocketCloseStatus.MESSAGE_TOO_BIG) {
errorCode = WebSocketConstants.ErrorCode.WsPayloadTooBigError;
} else {
errorCode = WebSocketConstants.ErrorCode.WsProtocolError;
}
} else if (throwable instanceof SSLException) {
cause = createErrorCause(throwable.getMessage(), HttpErrorType.SSL_ERROR.getReason(),
HttpConstants.PROTOCOL_HTTP_PKG_ID);
message = "SSL/TLS Error";
} else if (throwable instanceof IllegalStateException) {
if (throwable.getMessage().contains("frame continuation")) {
errorCode = WebSocketConstants.ErrorCode.WsInvalidContinuationFrameError;
} else if (throwable.getMessage().toLowerCase(Locale.ENGLISH).contains("close frame")) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionClosureError;
}
} else if (throwable instanceof IllegalAccessException &&
throwable.getMessage().equals(WebSocketConstants.THE_WEBSOCKET_CONNECTION_HAS_NOT_BEEN_MADE)) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionError;
} else if (throwable instanceof TooLongFrameException) {
errorCode = WebSocketConstants.ErrorCode.WsPayloadTooBigError;
} else if (throwable instanceof CodecException) {
errorCode = WebSocketConstants.ErrorCode.WsProtocolError;
} else if (throwable instanceof WebSocketHandshakeException) {
errorCode = WebSocketConstants.ErrorCode.WsInvalidHandshakeError;
} else if (throwable instanceof IOException) {
errorCode = WebSocketConstants.ErrorCode.WsConnectionError;
cause = createErrorCause(throwable.getMessage(), IOConstants.ErrorCode.GenericError.errorCode(),
IOConstants.IO_PACKAGE_ID);
message = "IO Error";
}
return new WebSocketException(errorCode, message, cause);
}
/**
 * Builds the websocket error-detail record and wraps it in a Ballerina error value.
 *
 * @param message     detail message stored in the error record (may be null)
 * @param reason      error reason string for the Ballerina error
 * @param packageName package in which the detail record type is declared
 * @return the constructed error value
 */
private static ErrorValue createErrorCause(String message, String reason, BPackage packageName) {
    MapValue<String, Object> detailType =
            BallerinaValues.createRecordValue(packageName, WebSocketConstants.WEBSOCKET_ERROR_DETAILS);
    MapValue<String, Object> details = BallerinaValues.createRecord(detailType, message, null);
    return BallerinaErrors.createError(reason, details);
}
/**
 * Establish connection with the endpoint.
 *
 * <p>Registers either the retry-aware or the plain handshake listener depending on
 * whether the client was configured with a retry config, then blocks until the
 * handshake completes (or times out) via {@link #waitForHandshake}.
 *
 * @param webSocketClient - the WebSocket client.
 * @param wsService - the WebSocket service.
 */
public static void establishWebSocketConnection(ObjectValue webSocketClient, WebSocketService wsService) {
    WebSocketClientConnectorListener listener = (WebSocketClientConnectorListener) webSocketClient
            .getNativeData(WebSocketConstants.CLIENT_LISTENER);
    WebSocketClientConnector connector = (WebSocketClientConnector) webSocketClient
            .getNativeData(WebSocketConstants.CLIENT_CONNECTOR);
    boolean readyOnConnect = webSocketClient.getMapValue(CLIENT_ENDPOINT_CONFIG)
            .getBooleanValue(WebSocketConstants.CLIENT_READY_ON_CONNECT);
    CountDownLatch handshakeLatch = new CountDownLatch(1);
    ClientHandshakeFuture handshakeFuture = connector.connect();
    handshakeFuture.setWebSocketConnectorListener(listener);
    if (WebSocketUtil.hasRetryConfig(webSocketClient)) {
        handshakeFuture.setClientHandshakeListener(new WebSocketClientHandshakeListenerForRetry(
                webSocketClient, wsService, listener, readyOnConnect, handshakeLatch,
                (RetryContext) webSocketClient.getNativeData(WebSocketConstants.RETRY_CONFIG)));
    } else {
        handshakeFuture.setClientHandshakeListener(new WebSocketClientHandshakeListener(
                webSocketClient, wsService, listener, readyOnConnect, handshakeLatch));
    }
    waitForHandshake(webSocketClient, handshakeLatch);
}
/**
 * Check whether the client's config has the retryConfig property.
 *
 * @param webSocketClient - the WebSocket client.
 * @return If the client's config has the retry config, then return true.
 */
public static boolean hasRetryConfig(ObjectValue webSocketClient) {
    // Retry behaviour is enabled only when the endpoint config carries a retryConfig entry.
    Object retryConfig = webSocketClient.getMapValue(CLIENT_ENDPOINT_CONFIG)
            .getMapValue(WebSocketConstants.RETRY_CONFIG);
    return retryConfig != null;
}
/**
 * Blocks until the handshake latch is released or the configured
 * {@code handShakeTimeoutInSeconds} (default 300s) elapses.
 *
 * @param webSocketClient the client whose endpoint config supplies the timeout
 * @param countDownLatch  latch released by the handshake listener
 * @throws WebSocketException on timeout or interruption
 */
private static void waitForHandshake(ObjectValue webSocketClient, CountDownLatch countDownLatch) {
    @SuppressWarnings(WebSocketConstants.UNCHECKED)
    MapValue<String, Object> clientConfig =
            (MapValue<String, Object>) webSocketClient.getMapValue(CLIENT_ENDPOINT_CONFIG);
    long timeout = WebSocketUtil.findTimeoutInSeconds(clientConfig, "handShakeTimeoutInSeconds", 300);
    try {
        boolean handshakeCompleted = countDownLatch.await(timeout, TimeUnit.SECONDS);
        if (!handshakeCompleted) {
            throw new WebSocketException(WebSocketConstants.ErrorCode.WsGenericError,
                    "Waiting for WebSocket handshake has not been successful",
                    WebSocketUtil.createErrorCause("Connection timeout",
                            IOConstants.ErrorCode.ConnectionTimedOut.errorCode(),
                            IOConstants.IO_PACKAGE_ID));
        }
    } catch (InterruptedException e) {
        // Preserve the interrupt status before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new WebSocketException(WebSocketConstants.ERROR_MESSAGE + e.getMessage());
    }
}
/**
 * Set the time to wait before attempting to reconnect.
 *
 * @param interval - interval (in milliseconds) to wait before trying to reconnect.
 * @throws WebSocketException if the waiting thread is interrupted
 */
private static void createDelay(int interval) {
    // Previously implemented with a CountDownLatch that was never counted down,
    // i.e. a disguised sleep; TimeUnit.sleep expresses the intent directly with
    // identical blocking and interruption behaviour.
    try {
        TimeUnit.MILLISECONDS.sleep(interval);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new WebSocketException(WebSocketConstants.ERROR_MESSAGE + e.getMessage());
    }
}
/**
 * Calculate the waiting time before the next reconnect attempt by growing the
 * base interval exponentially with the number of attempts made so far.
 *
 * @param interval - base interval (in milliseconds) to wait before trying to reconnect.
 * @param maxInterval - maximum interval to wait before trying to reconnect.
 * @param backOffFactor - the rate of increase of the reconnect delay.
 * @param reconnectAttempts - the number of reconnecting attempts made so far.
 * @return The time to wait before attempting to reconnect, capped at {@code maxInterval}.
 */
private static int calculateWaitingTime(int interval, int maxInterval, double backOffFactor,
                                        int reconnectAttempts) {
    // The (int) cast of a huge double saturates at Integer.MAX_VALUE, so the
    // Math.min cap below stays safe even when pow() exceeds the int range.
    int scaled = (int) (interval * Math.pow(backOffFactor, reconnectAttempts));
    return Math.min(scaled, maxInterval);
}
/**
 * Bundles a live connection with its service and client, exposes the bundle through
 * the connector's native data, and publishes the connector on the client object.
 *
 * @param webSocketConnection the established transport connection
 * @param webSocketConnector  connector object that will carry the connection info
 * @param webSocketClient     client object to receive the connector field
 * @param wsService           service bound to this connection
 * @return the connection-info bundle that was attached to the connector
 */
public static WebSocketConnectionInfo getWebSocketOpenConnectionInfo(WebSocketConnection webSocketConnection,
                                                                     ObjectValue webSocketConnector,
                                                                     ObjectValue webSocketClient,
                                                                     WebSocketService wsService) {
    WebSocketConnectionInfo info =
            new WebSocketConnectionInfo(wsService, webSocketConnection, webSocketClient);
    webSocketConnector.addNativeData(WebSocketConstants.NATIVE_DATA_WEBSOCKET_CONNECTION_INFO, info);
    webSocketClient.set(WebSocketConstants.CLIENT_CONNECTOR_FIELD, webSocketConnector);
    return info;
}
// Utility class; private constructor prevents instantiation.
private WebSocketUtil() {
}
} |
Can you elaborate why we mix jre and jdk here? My understanding of this article is that java.specification.version adheres to current jre specification. https://docs.oracle.com/javase/7/docs/technotes/guides/versioning/spec/versioning2.html | public static DataflowRunner fromOptions(PipelineOptions options) {
DataflowPipelineOptions dataflowOptions =
PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
ArrayList<String> missing = new ArrayList<>();
if (dataflowOptions.getAppName() == null) {
missing.add("appName");
}
if (missing.size() > 0) {
throw new IllegalArgumentException(
"Missing required values: " + Joiner.on(',').join(missing));
}
PathValidator validator = dataflowOptions.getPathValidator();
String gcpTempLocation;
try {
gcpTempLocation = dataflowOptions.getGcpTempLocation();
} catch (Exception e) {
throw new IllegalArgumentException(
"DataflowRunner requires gcpTempLocation, "
+ "but failed to retrieve a value from PipelineOptions",
e);
}
validator.validateOutputFilePrefixSupported(gcpTempLocation);
String stagingLocation;
try {
stagingLocation = dataflowOptions.getStagingLocation();
} catch (Exception e) {
throw new IllegalArgumentException(
"DataflowRunner requires stagingLocation, "
+ "but failed to retrieve a value from PipelineOptions",
e);
}
validator.validateOutputFilePrefixSupported(stagingLocation);
if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
}
if (dataflowOptions.getFilesToStage() == null) {
dataflowOptions.setFilesToStage(
detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader()));
if (dataflowOptions.getFilesToStage().isEmpty()) {
throw new IllegalArgumentException("No files to stage has been found.");
} else {
LOG.info(
"PipelineOptions.filesToStage was not specified. "
+ "Defaulting to files from the classpath: will stage {} files. "
+ "Enable logging at DEBUG level to see which files will be staged.",
dataflowOptions.getFilesToStage().size());
LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
}
}
String jobName = dataflowOptions.getJobName().toLowerCase();
checkArgument(
jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
"JobName invalid; the name must consist of only the characters "
+ "[-a-z0-9], starting with a letter and ending with a letter "
+ "or number");
if (!jobName.equals(dataflowOptions.getJobName())) {
LOG.info(
"PipelineOptions.jobName did not match the service requirements. "
+ "Using {} instead of {}.",
jobName,
dataflowOptions.getJobName());
}
dataflowOptions.setJobName(jobName);
String project = dataflowOptions.getProject();
if (project.matches("[0-9]*")) {
throw new IllegalArgumentException(
"Project ID '"
+ project
+ "' invalid. Please make sure you specified the Project ID, not project number.");
} else if (!project.matches(PROJECT_ID_REGEXP)) {
throw new IllegalArgumentException(
"Project ID '"
+ project
+ "' invalid. Please make sure you specified the Project ID, not project description.");
}
DataflowPipelineDebugOptions debugOptions =
dataflowOptions.as(DataflowPipelineDebugOptions.class);
if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
throw new IllegalArgumentException(
"Number of worker harness threads '"
+ debugOptions.getNumberOfWorkerHarnessThreads()
+ "' invalid. Please make sure the value is non-negative.");
}
if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
}
DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
String javaVersion =
Float.parseFloat(System.getProperty("java.specification.version")) >= 9
? "(JDK 11 environment)"
: "(JRE 8 environment)";
String userAgent =
String.format(
"%s %s/%s",
dataflowRunnerInfo.getName(), javaVersion, dataflowRunnerInfo.getVersion())
.replace(" ", "_");
dataflowOptions.setUserAgent(userAgent);
return new DataflowRunner(dataflowOptions);
} | String userAgent = | public static DataflowRunner fromOptions(PipelineOptions options) {
DataflowPipelineOptions dataflowOptions =
PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options);
ArrayList<String> missing = new ArrayList<>();
if (dataflowOptions.getAppName() == null) {
missing.add("appName");
}
if (missing.size() > 0) {
throw new IllegalArgumentException(
"Missing required values: " + Joiner.on(',').join(missing));
}
PathValidator validator = dataflowOptions.getPathValidator();
String gcpTempLocation;
try {
gcpTempLocation = dataflowOptions.getGcpTempLocation();
} catch (Exception e) {
throw new IllegalArgumentException(
"DataflowRunner requires gcpTempLocation, "
+ "but failed to retrieve a value from PipelineOptions",
e);
}
validator.validateOutputFilePrefixSupported(gcpTempLocation);
String stagingLocation;
try {
stagingLocation = dataflowOptions.getStagingLocation();
} catch (Exception e) {
throw new IllegalArgumentException(
"DataflowRunner requires stagingLocation, "
+ "but failed to retrieve a value from PipelineOptions",
e);
}
validator.validateOutputFilePrefixSupported(stagingLocation);
if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) {
validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs());
}
if (dataflowOptions.getFilesToStage() == null) {
dataflowOptions.setFilesToStage(
detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader()));
if (dataflowOptions.getFilesToStage().isEmpty()) {
throw new IllegalArgumentException("No files to stage has been found.");
} else {
LOG.info(
"PipelineOptions.filesToStage was not specified. "
+ "Defaulting to files from the classpath: will stage {} files. "
+ "Enable logging at DEBUG level to see which files will be staged.",
dataflowOptions.getFilesToStage().size());
LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage());
}
}
String jobName = dataflowOptions.getJobName().toLowerCase();
checkArgument(
jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"),
"JobName invalid; the name must consist of only the characters "
+ "[-a-z0-9], starting with a letter and ending with a letter "
+ "or number");
if (!jobName.equals(dataflowOptions.getJobName())) {
LOG.info(
"PipelineOptions.jobName did not match the service requirements. "
+ "Using {} instead of {}.",
jobName,
dataflowOptions.getJobName());
}
dataflowOptions.setJobName(jobName);
String project = dataflowOptions.getProject();
if (project.matches("[0-9]*")) {
throw new IllegalArgumentException(
"Project ID '"
+ project
+ "' invalid. Please make sure you specified the Project ID, not project number.");
} else if (!project.matches(PROJECT_ID_REGEXP)) {
throw new IllegalArgumentException(
"Project ID '"
+ project
+ "' invalid. Please make sure you specified the Project ID, not project description.");
}
DataflowPipelineDebugOptions debugOptions =
dataflowOptions.as(DataflowPipelineDebugOptions.class);
if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) {
throw new IllegalArgumentException(
"Number of worker harness threads '"
+ debugOptions.getNumberOfWorkerHarnessThreads()
+ "' invalid. Please make sure the value is non-negative.");
}
if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) {
dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT);
}
String javaVersion =
Float.parseFloat(System.getProperty("java.specification.version")) >= 9
? "(JDK 11 environment)"
: "(JRE 8 environment)";
DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo();
String userAgent =
String.format(
"%s/%s%s",
dataflowRunnerInfo.getName(), dataflowRunnerInfo.getVersion(), javaVersion)
.replace(" ", "_");
dataflowOptions.setUserAgent(userAgent);
return new DataflowRunner(dataflowOptions);
} | class path allowing for
* user specified configuration injection into the ObjectMapper. This supports user custom types
* on {@link PipelineOptions} | class path allowing for
* user specified configuration injection into the ObjectMapper. This supports user custom types
* on {@link PipelineOptions} |
this should just be set to the default value now. | FirestoreStub getFirestoreStub(PipelineOptions options) {
try {
FirestoreSettings.Builder builder =
FirestoreSettings.newBuilder()
.setHeaderProvider(
new FixedHeaderProvider() {
@Override
public Map<@NonNull String, @NonNull String> getHeaders() {
return ImmutableMap.of("User-Agent", options.getUserAgent());
}
});
RetrySettings retrySettings = RetrySettings.newBuilder().setMaxAttempts(1).build();
builder.applyToAllUnaryMethods(
b -> {
b.setRetrySettings(retrySettings);
return null;
});
FirestoreOptions firestoreOptions = options.as(FirestoreOptions.class);
String emulatorHostPort = firestoreOptions.getEmulatorHost();
if (emulatorHostPort == null) {
emulatorHostPort = System.getenv(FIRESTORE_EMULATOR_HOST_ENV_VARIABLE);
}
if (emulatorHostPort != null) {
builder
.setCredentialsProvider(FixedCredentialsProvider.create(new EmulatorCredentials()))
.setEndpoint(emulatorHostPort)
.setTransportChannelProvider(
InstantiatingGrpcChannelProvider.newBuilder()
.setEndpoint(emulatorHostPort)
.setChannelConfigurator(c -> c.usePlaintext())
.build());
} else {
GcpOptions gcpOptions = options.as(GcpOptions.class);
String host = firestoreOptions.getHost();
if (host == null) {
host = System.getenv().getOrDefault(FIRESTORE_HOST_ENV_VARIABLE, DEFAULT_FIRESTORE_HOST);
}
builder
.setCredentialsProvider(FixedCredentialsProvider.create(gcpOptions.getGcpCredential()))
.setEndpoint(host);
}
ClientContext clientContext = ClientContext.create(builder.build());
return GrpcFirestoreStub.create(clientContext);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | host = System.getenv().getOrDefault(FIRESTORE_HOST_ENV_VARIABLE, DEFAULT_FIRESTORE_HOST); | FirestoreStub getFirestoreStub(PipelineOptions options) {
try {
FirestoreSettings.Builder builder =
FirestoreSettings.newBuilder()
.setHeaderProvider(
new FixedHeaderProvider() {
@Override
public Map<@NonNull String, @NonNull String> getHeaders() {
return ImmutableMap.of("User-Agent", options.getUserAgent());
}
});
RetrySettings retrySettings = RetrySettings.newBuilder().setMaxAttempts(1).build();
builder.applyToAllUnaryMethods(
b -> {
b.setRetrySettings(retrySettings);
return null;
});
FirestoreOptions firestoreOptions = options.as(FirestoreOptions.class);
String emulatorHostPort = firestoreOptions.getEmulatorHost();
if (emulatorHostPort != null) {
builder
.setCredentialsProvider(FixedCredentialsProvider.create(new EmulatorCredentials()))
.setEndpoint(emulatorHostPort)
.setTransportChannelProvider(
InstantiatingGrpcChannelProvider.newBuilder()
.setEndpoint(emulatorHostPort)
.setChannelConfigurator(c -> c.usePlaintext())
.build());
} else {
GcpOptions gcpOptions = options.as(GcpOptions.class);
builder
.setCredentialsProvider(FixedCredentialsProvider.create(gcpOptions.getGcpCredential()))
.setEndpoint(firestoreOptions.getHost());
}
ClientContext clientContext = ClientContext.create(builder.build());
return GrpcFirestoreStub.create(clientContext);
} catch (Exception e) {
throw new RuntimeException(e);
}
} | class FirestoreStatefulComponentFactory implements Serializable {
private static final String DEFAULT_FIRESTORE_HOST = "batch-firestore.googleapis.com:443";
private static final String FIRESTORE_HOST_ENV_VARIABLE = "FIRESTORE_HOST";
private static final String FIRESTORE_EMULATOR_HOST_ENV_VARIABLE = "FIRESTORE_EMULATOR_HOST";
static final FirestoreStatefulComponentFactory INSTANCE = new FirestoreStatefulComponentFactory();
private FirestoreStatefulComponentFactory() {}
/**
* Given a {@link PipelineOptions}, return a pre-configured {@link FirestoreStub} with values set
* based on those options.
*
* <p>The provided {@link PipelineOptions} is expected to provide {@link FirestoreOptions} and
* {@link org.apache.beam.sdk.extensions.gcp.options.GcpOptions GcpOptions} for access to {@link
* GcpOptions
*
* <p>The instance returned by this method is expected to bind to the lifecycle of a bundle.
*
* @param options The instance of options to read from
* @return a new {@link FirestoreStub} pre-configured with values from the provided options
*/
/**
* Given a {@link RpcQosOptions}, return a new instance of {@link RpcQos}
*
* <p>The instance returned by this method is expected to bind to the lifecycle of a worker, and
* specifically live longer than a single bundle.
*
* @param options The instance of options to read from
* @return a new {@link RpcQos} based on the provided options
*/
RpcQos getRpcQos(RpcQosOptions options) {
// Fresh SecureRandom per call; sleeper/counter/distribution use the library
// defaults. Per the javadoc above, the returned RpcQos is worker-scoped and
// outlives a single bundle.
return new RpcQosImpl(
options,
new SecureRandom(),
Sleeper.DEFAULT,
CounterFactory.DEFAULT,
DistributionFactory.DEFAULT);
}
} | class FirestoreStatefulComponentFactory implements Serializable {
static final FirestoreStatefulComponentFactory INSTANCE = new FirestoreStatefulComponentFactory();
private FirestoreStatefulComponentFactory() {}
/**
* Given a {@link PipelineOptions}, return a pre-configured {@link FirestoreStub} with values set
* based on those options.
*
* <p>The provided {@link PipelineOptions} is expected to provide {@link FirestoreOptions} and
* {@link org.apache.beam.sdk.extensions.gcp.options.GcpOptions GcpOptions} for access to {@link
* GcpOptions
*
* <p>The instance returned by this method is expected to bind to the lifecycle of a bundle.
*
* @param options The instance of options to read from
* @return a new {@link FirestoreStub} pre-configured with values from the provided options
*/
/**
* Given a {@link RpcQosOptions}, return a new instance of {@link RpcQos}
*
* <p>The instance returned by this method is expected to bind to the lifecycle of a worker, and
* specifically live longer than a single bundle.
*
* @param options The instance of options to read from
* @return a new {@link RpcQos} based on the provided options
*/
RpcQos getRpcQos(RpcQosOptions options) {
return new RpcQosImpl(
options,
new SecureRandom(),
Sleeper.DEFAULT,
CounterFactory.DEFAULT,
DistributionFactory.DEFAULT);
}
} |
This still needs to be addressed. | private static Long getExpiresJwtClaim(String accessToken) {
String[] parts = accessToken.split("\\.");
if (parts.length == 3) {
try {
JsonObject claims = new JsonObject(new String(Base64.getUrlDecoder().decode(parts[1]), StandardCharsets.UTF_8));
return claims.getLong(Claims.exp.name());
} catch (IllegalArgumentException ex) {
return null;
}
}
return null;
} | try { | private static Long getExpiresJwtClaim(String accessToken) {
JsonObject claims = decodeJwtToken(accessToken);
if (claims != null) {
try {
return claims.getLong(Claims.exp.name());
} catch (IllegalArgumentException ex) {
LOG.debug("JWT expiry claim can not be converted to Long");
}
}
return null;
} | class OidcClientImpl implements OidcClient {
private static final Logger LOG = Logger.getLogger(OidcClientImpl.class);
private static final String ACCESS_TOKEN = "access_token";
private static final String REFRESH_TOKEN = "refresh_token";
private static final String EXPIRES_AT = "expires_at";
private static final String CLIENT_ASSERTION = "client_assertion";
private static final String CLIENT_ASSERTION_TYPE = "client_assertion_type";
private static final String JWT_BEARER_CLIENT_ASSERTION_TYPE = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer";
private static final String AUTHORIZATION_HEADER = String.valueOf(HttpHeaders.AUTHORIZATION);
private WebClient client;
private String tokenRequestUri;
private MultiMap tokenGrantParams;
private MultiMap commonRefreshGrantParams;
private String grantType;
private String clientSecretBasicAuthScheme;
private Key clientJwtKey;
private OidcClientConfig oidcConfig;
public OidcClientImpl() {
}
public OidcClientImpl(WebClient client, String tokenRequestUri,
MultiMap tokenGrantParams, MultiMap commonRefreshGrantParams, OidcClientConfig oidcClientConfig) {
this.client = client;
this.tokenRequestUri = tokenRequestUri;
this.tokenGrantParams = tokenGrantParams;
this.commonRefreshGrantParams = commonRefreshGrantParams;
this.grantType = oidcClientConfig.grant.type == Grant.Type.CLIENT ? "client_credentials" : "password";
this.oidcConfig = oidcClientConfig;
initClientSecretBasicAuth();
}
// Pre-computes the Basic Authorization header value when client_secret_basic
// authentication is required, otherwise loads the client JWT signing key when
// JWT-based client authentication is configured. Exactly one of
// clientSecretBasicAuthScheme / clientJwtKey is set (or neither).
private void initClientSecretBasicAuth() {
if (OidcCommonUtils.isClientSecretBasicAuthRequired(oidcConfig.credentials)) {
// "Basic " + base64(clientId + ":" + clientSecret), per RFC 7617.
clientSecretBasicAuthScheme = "Basic "
+ Base64.getEncoder().encodeToString(
(oidcConfig.getClientId().get() + ":" + OidcCommonUtils.clientSecret(oidcConfig.credentials))
.getBytes(StandardCharsets.UTF_8));
} else if (OidcCommonUtils.isClientJwtAuthRequired(oidcConfig.credentials)) {
clientJwtKey = OidcCommonUtils.clientJwtKey(oidcConfig.credentials);
}
}
@Override
public Uni<GrantTokens> getTokens() {
return getJsonResponse(tokenGrantParams, false);
}
@Override
public Uni<GrantTokens> refreshTokens(String refreshToken) {
if (refreshToken == null) {
throw new OidcClientException("Refresh token is null");
}
MultiMap refreshGrantParams = copyMultiMap(commonRefreshGrantParams);
refreshGrantParams.add(REFRESH_TOKEN, refreshToken);
return getJsonResponse(refreshGrantParams, true);
}
private Uni<GrantTokens> getJsonResponse(MultiMap body, boolean refresh) {
HttpRequest<Buffer> request = client.post(tokenRequestUri);
if (clientSecretBasicAuthScheme != null) {
request.putHeader(AUTHORIZATION_HEADER, clientSecretBasicAuthScheme);
} else if (clientJwtKey != null) {
body = !refresh ? copyMultiMap(body) : body;
body.add(CLIENT_ASSERTION_TYPE, JWT_BEARER_CLIENT_ASSERTION_TYPE);
body.add(CLIENT_ASSERTION, OidcCommonUtils.signJwtWithKey(oidcConfig, clientJwtKey));
}
return request.sendForm(body).onItem()
.transformToUni(resp -> emitGrantTokens(resp, refresh));
}
// Converts the raw token-endpoint HTTP response into a GrantTokens emission.
// On HTTP 200 the access/refresh tokens are read from the JSON body; when the
// provider supplies no absolute "expires_at" value, the expiry is derived from
// the access token's JWT "exp" claim instead. Any other status logs the body
// and fails the Uni with an OidcClientException.
private Uni<GrantTokens> emitGrantTokens(HttpResponse<Buffer> resp, boolean refresh) {
return Uni.createFrom().emitter(new Consumer<UniEmitter<? super GrantTokens>>() {
@Override
public void accept(UniEmitter<? super GrantTokens> emitter) {
if (resp.statusCode() == 200) {
LOG.debugf("Tokens have been %s", refresh ? "refreshed" : "acquired");
JsonObject json = resp.bodyAsJsonObject();
final String accessToken = json.getString(ACCESS_TOKEN);
final String refreshToken = json.getString(REFRESH_TOKEN);
Long accessTokenExpiresAt = json.getLong(EXPIRES_AT);
if (accessTokenExpiresAt == null) {
// Fall back to the JWT 'exp' claim when no explicit expiry was returned.
accessTokenExpiresAt = getExpiresJwtClaim(accessToken);
}
emitter.complete(new GrantTokens(accessToken, accessTokenExpiresAt, refreshToken));
} else {
LOG.errorf("%s token grant request has failed: %s", (refresh ? "refresh" : grantType), resp.bodyAsString());
emitter.fail(new OidcClientException());
}
}
});
}
// Defensive copy so callers can append request-specific parameters (e.g. the
// refresh token or client assertion) without mutating the shared grant map.
private static MultiMap copyMultiMap(MultiMap oldMap) {
MultiMap newMap = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
newMap.addAll(oldMap);
return newMap;
}
@Override
public void close() throws IOException {
client.close();
}
} | class OidcClientImpl implements OidcClient {
private static final Logger LOG = Logger.getLogger(OidcClientImpl.class);
private static final String ACCESS_TOKEN = "access_token";
private static final String REFRESH_TOKEN = "refresh_token";
private static final String EXPIRES_AT = "expires_at";
private static final String AUTHORIZATION_HEADER = String.valueOf(HttpHeaders.AUTHORIZATION);
private final WebClient client;
private final String tokenRequestUri;
private final MultiMap tokenGrantParams;
private final MultiMap commonRefreshGrantParams;
private final String grantType;
private final String clientSecretBasicAuthScheme;
private final Key clientJwtKey;
private final OidcClientConfig oidcConfig;
public OidcClientImpl(WebClient client, String tokenRequestUri, String grantType,
MultiMap tokenGrantParams, MultiMap commonRefreshGrantParams, OidcClientConfig oidcClientConfig) {
this.client = client;
this.tokenRequestUri = tokenRequestUri;
this.tokenGrantParams = tokenGrantParams;
this.commonRefreshGrantParams = commonRefreshGrantParams;
this.grantType = grantType;
this.oidcConfig = oidcClientConfig;
this.clientSecretBasicAuthScheme = initClientSecretBasicAuth(oidcClientConfig);
this.clientJwtKey = initClientJwtKey(oidcClientConfig);
}
private static String initClientSecretBasicAuth(OidcClientConfig oidcClientConfig) {
if (OidcCommonUtils.isClientSecretBasicAuthRequired(oidcClientConfig.credentials)) {
return "Basic "
+ Base64.getEncoder().encodeToString(
(oidcClientConfig.getClientId().get() + ":"
+ OidcCommonUtils.clientSecret(oidcClientConfig.credentials))
.getBytes(StandardCharsets.UTF_8));
}
return null;
}
private static Key initClientJwtKey(OidcClientConfig oidcClientConfig) {
if (OidcCommonUtils.isClientJwtAuthRequired(oidcClientConfig.credentials)) {
return OidcCommonUtils.clientJwtKey(oidcClientConfig.credentials);
}
return null;
}
@Override
public Uni<Tokens> getTokens() {
return getJsonResponse(tokenGrantParams, false);
}
@Override
public Uni<Tokens> refreshTokens(String refreshToken) {
if (refreshToken == null) {
throw new OidcClientException("Refresh token is null");
}
MultiMap refreshGrantParams = copyMultiMap(commonRefreshGrantParams);
refreshGrantParams.add(REFRESH_TOKEN, refreshToken);
return getJsonResponse(refreshGrantParams, true);
}
private Uni<Tokens> getJsonResponse(MultiMap reqBody, boolean refresh) {
return Uni.createFrom().deferred(new Supplier<Uni<? extends Tokens>>() {
@Override
public Uni<Tokens> get() {
MultiMap body = reqBody;
HttpRequest<Buffer> request = client.post(tokenRequestUri);
if (clientSecretBasicAuthScheme != null) {
request.putHeader(AUTHORIZATION_HEADER, clientSecretBasicAuthScheme);
} else if (clientJwtKey != null) {
body = !refresh ? copyMultiMap(body) : body;
body.add(OidcConstants.CLIENT_ASSERTION_TYPE, OidcConstants.JWT_BEARER_CLIENT_ASSERTION_TYPE);
body.add(OidcConstants.CLIENT_ASSERTION, OidcCommonUtils.signJwtWithKey(oidcConfig, clientJwtKey));
}
return request.sendForm(body).onItem()
.transform(resp -> emitGrantTokens(resp, refresh));
}
});
}
private Tokens emitGrantTokens(HttpResponse<Buffer> resp, boolean refresh) {
if (resp.statusCode() == 200) {
LOG.debugf("%s OidcClient has %s the tokens", oidcConfig.getId().get(), (refresh ? "refreshed" : "acquired"));
JsonObject json = resp.bodyAsJsonObject();
final String accessToken = json.getString(ACCESS_TOKEN);
final String refreshToken = json.getString(REFRESH_TOKEN);
Long accessTokenExpiresAt = json.getLong(EXPIRES_AT);
if (accessTokenExpiresAt == null) {
accessTokenExpiresAt = getExpiresJwtClaim(accessToken);
}
return new Tokens(accessToken, accessTokenExpiresAt, refreshToken);
} else {
LOG.debugf("%s OidcClient has failed to complete the %s grant request: %s", oidcConfig.getId().get(),
(refresh ? OidcConstants.REFRESH_TOKEN_GRANT : grantType), resp.bodyAsString());
throw new OidcClientException();
}
}
private static JsonObject decodeJwtToken(String accessToken) {
String[] parts = accessToken.split("\\.");
if (parts.length == 3) {
try {
return new JsonObject(new String(Base64.getUrlDecoder().decode(parts[1]), StandardCharsets.UTF_8));
} catch (IllegalArgumentException ex) {
LOG.debug("JWT token can not be decoded using the Base64Url encoding scheme");
}
} else {
LOG.debug("Access token is not formatted as the encoded JWT token");
}
return null;
}
private static MultiMap copyMultiMap(MultiMap oldMap) {
MultiMap newMap = new MultiMap(io.vertx.core.MultiMap.caseInsensitiveMultiMap());
newMap.addAll(oldMap);
return newMap;
}
@Override
public void close() throws IOException {
client.close();
}
} |
Yes, initializers are not added to attached function. | private void defineFunction(DataInputStream dataInStream) throws IOException {
skipPosition(dataInStream);
String funcName = getStringCPEntryValue(dataInStream);
String workerName = getStringCPEntryValue(dataInStream);
int flags = dataInStream.readInt();
BInvokableType funcType = (BInvokableType) readBType(dataInStream);
BInvokableSymbol invokableSymbol = Symbols.createFunctionSymbol(flags, names.fromString(funcName),
this.env.pkgSymbol.pkgID, funcType, this.env.pkgSymbol, Symbols.isFlagOn(flags, Flags.NATIVE));
invokableSymbol.retType = funcType.retType;
Scope scopeToDefine = this.env.pkgSymbol.scope;
if (this.currentStructure != null) {
BType attachedType = this.currentStructure.type;
invokableSymbol.owner = attachedType.tsymbol;
invokableSymbol.name =
names.fromString(Symbols.getAttachedFuncSymbolName(attachedType.tsymbol.name.value, funcName));
if (attachedType.tag == TypeTags.OBJECT || attachedType.tag == TypeTags.RECORD) {
if (attachedType.tag == TypeTags.OBJECT) {
scopeToDefine = ((BObjectTypeSymbol) attachedType.tsymbol).methodScope;
} else {
scopeToDefine = attachedType.tsymbol.scope;
}
BAttachedFunction attachedFunc =
new BAttachedFunction(names.fromString(funcName), invokableSymbol, funcType);
BStructureTypeSymbol structureTypeSymbol = (BStructureTypeSymbol) attachedType.tsymbol;
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcName)
|| funcName.equals(Names.INIT_FUNCTION_SUFFIX.value)) {
structureTypeSymbol.initializerFunc = attachedFunc;
} else if (funcName.equals(Names.GENERATED_INIT_SUFFIX.value)) {
((BObjectTypeSymbol) structureTypeSymbol).generatedInitializerFunc = attachedFunc;
} else {
structureTypeSymbol.attachedFuncs.add(attachedFunc);
}
}
}
dataInStream.skip(dataInStream.readLong());
setParamSymbols(invokableSymbol, dataInStream);
readTaintTable(invokableSymbol, dataInStream);
defineMarkDownDocAttachment(invokableSymbol, readDocBytes(dataInStream));
dataInStream.skip(dataInStream.readLong());
scopeToDefine.define(invokableSymbol.name, invokableSymbol);
} | structureTypeSymbol.attachedFuncs.add(attachedFunc); | private void defineFunction(DataInputStream dataInStream) throws IOException {
skipPosition(dataInStream);
String funcName = getStringCPEntryValue(dataInStream);
String workerName = getStringCPEntryValue(dataInStream);
int flags = dataInStream.readInt();
BInvokableType funcType = (BInvokableType) readBType(dataInStream);
BInvokableSymbol invokableSymbol = Symbols.createFunctionSymbol(flags, names.fromString(funcName),
this.env.pkgSymbol.pkgID, funcType, this.env.pkgSymbol, Symbols.isFlagOn(flags, Flags.NATIVE));
invokableSymbol.retType = funcType.retType;
Scope scopeToDefine = this.env.pkgSymbol.scope;
if (this.currentStructure != null) {
BType attachedType = this.currentStructure.type;
invokableSymbol.owner = attachedType.tsymbol;
invokableSymbol.name =
names.fromString(Symbols.getAttachedFuncSymbolName(attachedType.tsymbol.name.value, funcName));
if (attachedType.tag == TypeTags.OBJECT || attachedType.tag == TypeTags.RECORD) {
if (attachedType.tag == TypeTags.OBJECT) {
scopeToDefine = ((BObjectTypeSymbol) attachedType.tsymbol).methodScope;
} else {
scopeToDefine = attachedType.tsymbol.scope;
}
BAttachedFunction attachedFunc =
new BAttachedFunction(names.fromString(funcName), invokableSymbol, funcType);
BStructureTypeSymbol structureTypeSymbol = (BStructureTypeSymbol) attachedType.tsymbol;
if (Names.USER_DEFINED_INIT_SUFFIX.value.equals(funcName)
|| funcName.equals(Names.INIT_FUNCTION_SUFFIX.value)) {
structureTypeSymbol.initializerFunc = attachedFunc;
} else if (funcName.equals(Names.GENERATED_INIT_SUFFIX.value)) {
((BObjectTypeSymbol) structureTypeSymbol).generatedInitializerFunc = attachedFunc;
} else {
structureTypeSymbol.attachedFuncs.add(attachedFunc);
}
}
}
dataInStream.skip(dataInStream.readLong());
setParamSymbols(invokableSymbol, dataInStream);
readTaintTable(invokableSymbol, dataInStream);
defineMarkDownDocAttachment(invokableSymbol, readDocBytes(dataInStream));
dataInStream.skip(dataInStream.readLong());
scopeToDefine.define(invokableSymbol.name, invokableSymbol);
} | class BIRPackageSymbolEnter {
    private final PackageLoader packageLoader;
    private final SymbolResolver symbolResolver;
    private final SymbolTable symTable;
    private final Names names;
    private final TypeParamAnalyzer typeParamAnalyzer;
    private final BLangDiagnosticLog dlog;
    // Reader for the package currently being parsed; reset to null once reading completes.
    private BIRTypeReader typeReader;
    // Per-package read state; saved and restored when packages are read recursively.
    private BIRPackageSymbolEnv env;
    // Record/object type symbols collected while reading type defs; their attached-function
    // bodies are read afterwards (see readTypeDefBodies).
    private List<BStructureTypeSymbol> structureTypes;
    // Structure whose attached functions are currently being read; null at package level.
    private BStructureTypeSymbol currentStructure = null;
    // Composite types under construction: pushed before members are read, popped (and asserted)
    // after, so self-referential shapes resolve to the in-progress instance.
    private LinkedList<Object> compositeStack = new LinkedList<>();
    private static final CompilerContext.Key<BIRPackageSymbolEnter> COMPILED_PACKAGE_SYMBOL_ENTER_KEY =
            new CompilerContext.Key<>();
public static BIRPackageSymbolEnter getInstance(CompilerContext context) {
BIRPackageSymbolEnter packageReader = context.get(COMPILED_PACKAGE_SYMBOL_ENTER_KEY);
if (packageReader == null) {
packageReader = new BIRPackageSymbolEnter(context);
}
return packageReader;
}
    private BIRPackageSymbolEnter(CompilerContext context) {
        // Register this instance first so it behaves as a context-scoped singleton (see getInstance).
        context.put(COMPILED_PACKAGE_SYMBOL_ENTER_KEY, this);
        // Resolve collaborating compiler services from the same context.
        this.packageLoader = PackageLoader.getInstance(context);
        this.symbolResolver = SymbolResolver.getInstance(context);
        this.symTable = SymbolTable.getInstance(context);
        this.names = Names.getInstance(context);
        this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
        this.dlog = BLangDiagnosticLog.getInstance(context);
    }
    /**
     * Defines a package symbol from in-memory BIR binary content and registers its symbol
     * environment in the symbol table.
     *
     * @param packageId                  identity of the package being defined
     * @param packageRepositoryHierarchy repository hierarchy used to resolve its imports
     * @param packageBinaryContent       full BIR file bytes, including the 8-byte magic+version header
     * @return the populated package symbol
     */
    public BPackageSymbol definePackage(PackageID packageId,
                                        RepoHierarchy packageRepositoryHierarchy,
                                        byte[] packageBinaryContent) {
        BPackageSymbol pkgSymbol = definePackage(packageId, packageRepositoryHierarchy,
                new ByteArrayInputStream(packageBinaryContent));

        // Strip the first 8 bytes (4-byte BIR magic + 4-byte version, validated in
        // definePackage(DataInputStream)) before retaining the binary on the symbol.
        byte[] modifiedPkgBinaryContent = Arrays.copyOfRange(
                packageBinaryContent, 8, packageBinaryContent.length);
        pkgSymbol.birPackageFile = new CompiledBinaryFile.BIRPackageFile(modifiedPkgBinaryContent);

        // Create the package environment parented to the lang.annotation builtin environment.
        SymbolEnv builtinEnv = this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol);
        SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(null, pkgSymbol.scope, builtinEnv);
        this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);
        return pkgSymbol;
    }
private BPackageSymbol definePackage(PackageID packageId,
RepoHierarchy packageRepositoryHierarchy,
InputStream programFileInStream) {
try (DataInputStream dataInStream = new DataInputStream(programFileInStream)) {
BIRPackageSymbolEnv prevEnv = this.env;
this.env = new BIRPackageSymbolEnv();
this.env.requestedPackageId = packageId;
this.env.repoHierarchy = packageRepositoryHierarchy;
BPackageSymbol pkgSymbol = definePackage(dataInStream);
this.env = prevEnv;
return pkgSymbol;
} catch (IOException e) {
throw new BLangCompilerException(e.getMessage(), e);
} catch (Throwable e) {
throw new BLangCompilerException(e.getMessage(), e);
}
}
private BPackageSymbol definePackage(DataInputStream dataInStream) throws IOException {
byte[] magic = new byte[4];
dataInStream.read(magic, 0, 4);
if (!Arrays.equals(magic, BIRPackageFile.BIR_MAGIC)) {
throw new BLangCompilerException("invalid magic number " + Arrays.toString(magic));
}
int version = dataInStream.readInt();
if (version != BIRPackageFile.BIR_VERSION) {
throw new BLangCompilerException("unsupported program file version " + version);
}
this.env.constantPool = readConstantPool(dataInStream);
int pkgCPIndex = dataInStream.readInt();
return definePackage(dataInStream, pkgCPIndex);
}
    /**
     * Reads the package body section by section. The section order is fixed by the BIR writer:
     * imports, constants, type definitions, package-level variables, type-def bodies, functions,
     * annotations — each call below must stay in this order to keep the stream aligned.
     */
    private BPackageSymbol definePackage(DataInputStream dataInStream, int pkgCpIndex) throws IOException {
        PackageCPEntry pkgCpEntry = (PackageCPEntry) this.env.constantPool[pkgCpIndex];
        String orgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.orgNameCPIndex]).value;
        String pkgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.pkgNameCPIndex]).value;
        String pkgVersion = ((StringCPEntry) this.env.constantPool[pkgCpEntry.versionCPIndex]).value;
        PackageID pkgId = createPackageID(orgName, pkgName, pkgVersion);
        this.env.pkgSymbol = Symbols.createPackageSymbol(pkgId, this.symTable);
        defineSymbols(dataInStream, rethrow(this::defineImportPackage));
        defineSymbols(dataInStream, rethrow(this::defineConstant));
        // Collect structure (record/object) symbols first; their bodies follow later in the stream.
        this.structureTypes = new ArrayList<>();
        defineSymbols(dataInStream, rethrow(this::defineTypeDef));
        defineSymbols(dataInStream, rethrow(this::definePackageLevelVariables));
        readTypeDefBodies(dataInStream);
        defineSymbols(dataInStream, rethrow(this::defineFunction));
        defineSymbols(dataInStream, rethrow(this::defineAnnotations));
        // Release the per-package reader; the package is fully read.
        this.typeReader = null;
        return this.env.pkgSymbol;
    }
private void readTypeDefBodies(DataInputStream dataInStream) throws IOException {
for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) {
this.currentStructure = structureTypeSymbol;
defineSymbols(dataInStream, rethrow(this::defineFunction));
defineSymbols(dataInStream, rethrow(this::readBType));
}
this.currentStructure = null;
}
private CPEntry[] readConstantPool(DataInputStream dataInStream) throws IOException {
int constantPoolSize = dataInStream.readInt();
CPEntry[] constantPool = new CPEntry[constantPoolSize];
this.env.constantPool = constantPool;
for (int i = 0; i < constantPoolSize; i++) {
byte cpTag = dataInStream.readByte();
CPEntry.Type cpEntryType = CPEntry.Type.values()[cpTag - 1];
constantPool[i] = readCPEntry(dataInStream, constantPool, cpEntryType, i);
}
return constantPool;
}
private CPEntry readCPEntry(DataInputStream dataInStream,
CPEntry[] constantPool,
CPEntry.Type cpEntryType, int i) throws IOException {
switch (cpEntryType) {
case CP_ENTRY_INTEGER:
return new CPEntry.IntegerCPEntry(dataInStream.readLong());
case CP_ENTRY_FLOAT:
return new CPEntry.FloatCPEntry(dataInStream.readDouble());
case CP_ENTRY_BOOLEAN:
return new CPEntry.BooleanCPEntry(dataInStream.readBoolean());
case CP_ENTRY_STRING:
int length = dataInStream.readInt();
String strValue = null;
if (length >= 0) {
byte[] bytes = new byte[length];
dataInStream.read(bytes, 0, length);
strValue = new String(bytes);
}
return new CPEntry.StringCPEntry(strValue);
case CP_ENTRY_PACKAGE:
return new CPEntry.PackageCPEntry(dataInStream.readInt(),
dataInStream.readInt(), dataInStream.readInt());
case CP_ENTRY_SHAPE:
env.unparsedBTypeCPs.put(i, readByteArray(dataInStream));
return null;
case CP_ENTRY_BYTE:
return new CPEntry.ByteCPEntry(dataInStream.readInt());
default:
throw new IllegalStateException("unsupported constant pool entry type: " +
cpEntryType.name());
}
}
private byte[] readByteArray(DataInputStream dataInStream) throws IOException {
int length = dataInStream.readInt();
byte[] bytes = new byte[length];
dataInStream.readFully(bytes);
return bytes;
}
private void defineSymbols(DataInputStream dataInStream,
Consumer<DataInputStream> symbolDefineFunc) throws IOException {
int symbolCount = dataInStream.readInt();
for (int i = 0; i < symbolCount; i++) {
symbolDefineFunc.accept(dataInStream);
}
}
    /**
     * Reads one import entry (org/name/version), loads the imported package's symbol via the
     * package loader, and records it both in this package's scope and its import list.
     */
    private void defineImportPackage(DataInputStream dataInStream) throws IOException {
        String orgName = getStringCPEntryValue(dataInStream);
        String pkgName = getStringCPEntryValue(dataInStream);
        String pkgVersion = getStringCPEntryValue(dataInStream);
        PackageID importPkgID = createPackageID(orgName, pkgName, pkgVersion);
        // May recursively read the imported package's BIR (env is saved/restored by the caller chain).
        BPackageSymbol importPackageSymbol = packageLoader.loadPackageSymbol(importPkgID, this.env.pkgSymbol.pkgID,
                this.env.repoHierarchy);
        this.env.pkgSymbol.scope.define(importPkgID.name, importPackageSymbol);
        this.env.pkgSymbol.imports.add(importPackageSymbol);
    }
private void skipPosition(DataInputStream dataInStream) throws IOException {
dataInStream.readInt();
dataInStream.readInt();
dataInStream.readInt();
dataInStream.readInt();
dataInStream.readInt();
}
    /**
     * Reads one type definition, builds (or labels) its type symbol, and defines it in the
     * package scope. Record/object symbols are queued for deferred body reading; error types
     * additionally get their implicit constructor defined.
     */
    private void defineTypeDef(DataInputStream dataInStream) throws IOException {
        skipPosition(dataInStream);
        String typeDefName = getStringCPEntryValue(dataInStream);

        int flags = dataInStream.readInt();
        boolean isLabel = dataInStream.readByte() == 1;

        byte[] docBytes = readDocBytes(dataInStream);

        BType type = readBType(dataInStream);

        // Function-typed defs need their param/return symbols materialized on the type symbol.
        if (type.tag == TypeTags.INVOKABLE) {
            setInvokableTypeSymbol((BInvokableType) type);
        }

        // Merge flags recorded on the shape's own symbol into the definition's flags.
        flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.ABSTRACT) ? flags | Flags.ABSTRACT : flags;
        flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLIENT) ? flags | Flags.CLIENT : flags;

        BTypeSymbol symbol;
        if (isLabel) {
            // A label aliases an existing type; give it its own symbol rather than mutating the shape's.
            symbol = type.tsymbol.createLabelSymbol();
        } else {
            symbol = type.tsymbol;
        }

        defineMarkDownDocAttachment(symbol, docBytes);

        symbol.name = names.fromString(typeDefName);
        symbol.type = type;
        symbol.pkgID = this.env.pkgSymbol.pkgID;
        symbol.flags = flags;

        if (type.tag == TypeTags.RECORD || type.tag == TypeTags.OBJECT) {
            // Attached functions for these are read later by readTypeDefBodies.
            this.structureTypes.add((BStructureTypeSymbol) symbol);
        }

        this.env.pkgSymbol.scope.define(symbol.name, symbol);
        if (type.tag == TypeTags.ERROR) {
            defineErrorConstructor(this.env.pkgSymbol.scope, symbol);
        }
    }
private void setInvokableTypeSymbol(BInvokableType invokableType) {
BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol;
List<BVarSymbol> params = new ArrayList<>();
for (BType paramType : invokableType.paramTypes) {
BVarSymbol varSymbol = new BVarSymbol(paramType.flags, Names.EMPTY,
this.env.pkgSymbol.pkgID,
paramType, null);
params.add(varSymbol);
}
tsymbol.params = params;
if (invokableType.restType != null) {
tsymbol.restParam = new BVarSymbol(0, Names.EMPTY, this.env.pkgSymbol.pkgID, invokableType.restType, null);
}
tsymbol.returnType = invokableType.retType;
}
private void defineMarkDownDocAttachment(BSymbol symbol, byte[] docBytes) throws IOException {
DataInputStream dataInStream = new DataInputStream(new ByteArrayInputStream(docBytes));
boolean docPresent = dataInStream.readBoolean();
if (!docPresent) {
return;
}
MarkdownDocAttachment markdownDocAttachment = new MarkdownDocAttachment();
int descCPIndex = dataInStream.readInt();
int retDescCPIndex = dataInStream.readInt();
markdownDocAttachment.description = descCPIndex >= 0 ? getStringCPEntryValue(descCPIndex) : null;
markdownDocAttachment.returnValueDescription
= retDescCPIndex >= 0 ? getStringCPEntryValue(retDescCPIndex) : null;
int paramLength = dataInStream.readInt();
for (int i = 0; i < paramLength; i++) {
int nameCPIndex = dataInStream.readInt();
int paramDescCPIndex = dataInStream.readInt();
String name = nameCPIndex >= 0 ? getStringCPEntryValue(nameCPIndex) : null;
String description = paramDescCPIndex >= 0 ? getStringCPEntryValue(paramDescCPIndex) : null;
MarkdownDocAttachment.Parameter parameter = new MarkdownDocAttachment.Parameter(name, description);
markdownDocAttachment.parameters.add(parameter);
}
symbol.markdownDocumentation = markdownDocAttachment;
}
private void defineErrorConstructor(Scope scope, BTypeSymbol typeDefSymbol) {
BConstructorSymbol symbol = new BConstructorSymbol(SymTag.CONSTRUCTOR,
typeDefSymbol.flags, typeDefSymbol.name, typeDefSymbol.pkgID, typeDefSymbol.type, typeDefSymbol.owner);
symbol.kind = SymbolKind.ERROR_CONSTRUCTOR;
symbol.scope = new Scope(symbol);
symbol.retType = typeDefSymbol.type;
scope.define(symbol.name, symbol);
((BErrorTypeSymbol) typeDefSymbol).ctorSymbol = symbol;
}
    /**
     * Resolves a type reference (a CP index read from the stream). Shapes are parsed lazily from
     * the raw bytes stashed by readCPEntry and cached back into the pool. Invokable types are
     * never returned directly from the cache: each use site receives a clone with its own type
     * symbol (see createClonedInvokableTypeWithTsymbol).
     */
    private BType readBType(DataInputStream dataInStream) throws IOException {
        int typeCpIndex = dataInStream.readInt();
        CPEntry cpEntry = this.env.constantPool[typeCpIndex];
        BType type = null;
        if (cpEntry != null) {
            type = ((CPEntry.ShapeCPEntry) cpEntry).shape;
            // Cached non-invokable shapes are safe to share as-is.
            if (type.tag != TypeTags.INVOKABLE) {
                return type;
            }
        }
        if (type == null) {
            // First use: parse the deferred shape bytes and cache the result in the pool.
            byte[] e = env.unparsedBTypeCPs.get(typeCpIndex);
            type = new BIRTypeReader(new DataInputStream(new ByteArrayInputStream(e))).readType(typeCpIndex);
            addShapeCP(type, typeCpIndex);
        }

        if (type.tag == TypeTags.INVOKABLE) {
            return createClonedInvokableTypeWithTsymbol((BInvokableType) type);
        }

        return type;
    }
private BInvokableType createClonedInvokableTypeWithTsymbol(BInvokableType bInvokableType) {
BInvokableType clonedType = new BInvokableType(bInvokableType.paramTypes, bInvokableType.restType,
bInvokableType.retType, null);
clonedType.tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE,
bInvokableType.flags, env.pkgSymbol.pkgID, null,
env.pkgSymbol.owner);
return clonedType;
}
private void addShapeCP(BType bType, int typeCpIndex) {
this.env.constantPool[typeCpIndex] = new CPEntry.ShapeCPEntry(bType);
}
private void defineAnnotations(DataInputStream dataInStream) throws IOException {
String name = getStringCPEntryValue(dataInStream);
int flags = dataInStream.readInt();
int attachPointCount = dataInStream.readInt();
Set<AttachPoint> attachPoints = new HashSet<>(attachPointCount);
for (int i = 0; i < attachPointCount; i++) {
attachPoints.add(AttachPoint.getAttachmentPoint(getStringCPEntryValue(dataInStream),
dataInStream.readBoolean()));
}
BType annotationType = readBType(dataInStream);
BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(flags, attachPoints, names.fromString(name),
this.env.pkgSymbol.pkgID, null, this.env.pkgSymbol);
annotationSymbol.type = new BAnnotationType(annotationSymbol);
this.env.pkgSymbol.scope.define(annotationSymbol.name, annotationSymbol);
if (annotationType != symTable.noType) {
annotationSymbol.attachedType = annotationType.tsymbol;
}
}
    /**
     * Reads one constant declaration and defines its symbol, including its compile-time value.
     */
    private void defineConstant(DataInputStream dataInStream) throws IOException {
        String constantName = getStringCPEntryValue(dataInStream);
        int flags = dataInStream.readInt();

        byte[] docBytes = readDocBytes(dataInStream);

        BType type = readBType(dataInStream);
        Scope enclScope = this.env.pkgSymbol.scope;

        // Create the symbol and set the type.
        BConstantSymbol constantSymbol = new BConstantSymbol(flags, names.fromString(constantName),
                this.env.pkgSymbol.pkgID, null, type, enclScope.owner);

        defineMarkDownDocAttachment(constantSymbol, docBytes);

        // A long is read and discarded here — presumably the byte length of the serialized
        // value section; TODO confirm against the BIR writer.
        dataInStream.readLong();

        constantSymbol.value = readConstLiteralValue(dataInStream);
        constantSymbol.literalType = constantSymbol.value.type;

        // Define constant.
        enclScope.define(constantSymbol.name, constantSymbol);
    }
    /**
     * Reads one serialized constant value, recursing for map constants. FLOAT and DECIMAL values
     * are stored as strings in the CP and kept as strings here.
     *
     * @throws RuntimeException for a value type this reader does not support
     */
    private BLangConstantValue readConstLiteralValue(DataInputStream dataInStream) throws IOException {
        BType valueType = readBType(dataInStream);
        switch (valueType.tag) {
            case TypeTags.INT:
                return new BLangConstantValue(getIntCPEntryValue(dataInStream), symTable.intType);
            case TypeTags.BYTE:
                return new BLangConstantValue(getByteCPEntryValue(dataInStream), symTable.byteType);
            case TypeTags.FLOAT:
                return new BLangConstantValue(getFloatCPEntryValue(dataInStream), symTable.floatType);
            case TypeTags.STRING:
                return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.stringType);
            case TypeTags.DECIMAL:
                return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.decimalType);
            case TypeTags.BOOLEAN:
                return new BLangConstantValue(dataInStream.readByte() == 1, symTable.booleanType);
            case TypeTags.NIL:
                // Nil carries no payload.
                return new BLangConstantValue(null, symTable.nilType);
            case TypeTags.MAP:
                // Maps are serialized as a count followed by (key string, nested value) pairs;
                // LinkedHashMap preserves the serialized order.
                int size = dataInStream.readInt();
                Map<String, BLangConstantValue> keyValuePairs = new LinkedHashMap<>();
                for (int i = 0; i < size; i++) {
                    String key = getStringCPEntryValue(dataInStream);
                    BLangConstantValue value = readConstLiteralValue(dataInStream);
                    keyValuePairs.put(key, value);
                }
                return new BLangConstantValue(keyValuePairs, valueType);
            default:
                throw new RuntimeException("unexpected type: " + valueType);
        }
    }
    /**
     * Reads one package-level variable and defines its symbol in the package scope.
     * Function-typed variables get an invokable symbol; client-typed variables are tagged as
     * endpoints.
     */
    private void definePackageLevelVariables(DataInputStream dataInStream) throws IOException {
        // A leading byte is consumed and discarded here — presumably a variable-kind tag;
        // TODO confirm against the BIR writer.
        dataInStream.readByte();
        String varName = getStringCPEntryValue(dataInStream);
        int flags = dataInStream.readInt();

        byte[] docBytes = readDocBytes(dataInStream);

        // Create variable symbol.
        BType varType = readBType(dataInStream);
        Scope enclScope = this.env.pkgSymbol.scope;
        BVarSymbol varSymbol;

        if (varType.tag == TypeTags.INVOKABLE) {
            // Function pointers and lambdas are defined as invokable symbols.
            varSymbol = new BInvokableSymbol(SymTag.VARIABLE, flags, names.fromString(varName),
                    this.env.pkgSymbol.pkgID, varType, enclScope.owner);
        } else {
            varSymbol = new BVarSymbol(flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType,
                    enclScope.owner);
            if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) {
                // Client-typed variables act as endpoints.
                varSymbol.tag = SymTag.ENDPOINT;
            }
        }

        defineMarkDownDocAttachment(varSymbol, docBytes);

        enclScope.define(varSymbol.name, varSymbol);
    }
    /**
     * Reads the parameter section of a function and populates named parameter (and rest-param)
     * symbols on the invokable symbol, mirroring them onto its type symbol.
     */
    private void setParamSymbols(BInvokableSymbol invokableSymbol, DataInputStream dataInStream)
            throws IOException {

        int requiredParamCount = dataInStream.readInt();

        BInvokableType invokableType = (BInvokableType) invokableSymbol.type;
        for (int i = 0; i < requiredParamCount; i++) {
            String paramName = getStringCPEntryValue(dataInStream);
            int flags = dataInStream.readInt();
            BVarSymbol varSymbol = new BVarSymbol(flags, names.fromString(paramName), this.env.pkgSymbol.pkgID,
                    invokableType.paramTypes.get(i), invokableSymbol);
            // The OPTIONAL flag marks a defaultable parameter.
            varSymbol.defaultableParam = ((flags & Flags.OPTIONAL) == Flags.OPTIONAL);
            invokableSymbol.params.add(varSymbol);
        }

        if (dataInStream.readBoolean()) { // if rest param exist
            String paramName = getStringCPEntryValue(dataInStream);
            invokableSymbol.restParam = new BVarSymbol(0, names.fromString(paramName), this.env.pkgSymbol.pkgID,
                    invokableType.restType, invokableSymbol);
        }

        // Mirror the resolved parameter info onto the function's type symbol.
        BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol;
        tsymbol.flags = invokableSymbol.flags;
        tsymbol.params = invokableSymbol.params;
        tsymbol.restParam = invokableSymbol.restParam;
        tsymbol.returnType = invokableSymbol.retType;

        boolean hasReceiver = dataInStream.readBoolean();
        if (hasReceiver) {
            // Receiver info (kind byte, type, name) is consumed but not used here.
            dataInStream.readByte();
            readBType(dataInStream);
            getStringCPEntryValue(dataInStream);
        }
    }
    /**
     * Set taint table to the invokable symbol.
     *
     * @param invokableSymbol Invokable symbol
     * @param dataInStream    Input stream
     * @throws IOException if reading the serialized taint table fails
     */
    private void readTaintTable(BInvokableSymbol invokableSymbol, DataInputStream dataInStream)
            throws IOException {
        // The section is length-prefixed; a non-positive length means no taint table was recorded.
        long length = dataInStream.readLong();
        if (length <= 0) {
            return;
        }
        int rowCount = dataInStream.readShort();
        int columnCount = dataInStream.readShort();
        invokableSymbol.taintTable = new HashMap<>();
        for (int rowIndex = 0; rowIndex < rowCount; rowIndex++) {
            int paramIndex = dataInStream.readShort();
            // The first status byte of a row is the return-value status; the remaining
            // columnCount - 1 bytes are per-parameter statuses.
            TaintRecord.TaintedStatus returnTaintedStatus =
                    convertByteToTaintedStatus(dataInStream.readByte());
            List<TaintRecord.TaintedStatus> parameterTaintedStatusList = new ArrayList<>();
            for (int columnIndex = 1; columnIndex < columnCount; columnIndex++) {
                parameterTaintedStatusList.add(convertByteToTaintedStatus(dataInStream.readByte()));
            }
            TaintRecord taintRecord = new TaintRecord(returnTaintedStatus, parameterTaintedStatusList);
            invokableSymbol.taintTable.put(paramIndex, taintRecord);
        }
    }
private TaintRecord.TaintedStatus convertByteToTaintedStatus(byte readByte) {
return EnumSet.allOf(TaintRecord.TaintedStatus.class).stream()
.filter(taintedStatus -> readByte == taintedStatus.getByteValue()).findFirst().get();
}
private String getStringCPEntryValue(DataInputStream dataInStream) throws IOException {
int pkgNameCPIndex = dataInStream.readInt();
StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[pkgNameCPIndex];
return stringCPEntry.value;
}
private String getStringCPEntryValue(int cpIndex) throws IOException {
StringCPEntry stringCPEntry = (StringCPEntry) this.env.constantPool[cpIndex];
return stringCPEntry.value;
}
private long getIntCPEntryValue(DataInputStream dataInStream) throws IOException {
int pkgNameCPIndex = dataInStream.readInt();
IntegerCPEntry intCPEntry = (IntegerCPEntry) this.env.constantPool[pkgNameCPIndex];
return intCPEntry.value;
}
private int getByteCPEntryValue(DataInputStream dataInStream) throws IOException {
int byteCpIndex = dataInStream.readInt();
ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex];
return byteCPEntry.value;
}
private String getFloatCPEntryValue(DataInputStream dataInStream) throws IOException {
int floatCpIndex = dataInStream.readInt();
FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex];
return Double.toString(floatCPEntry.value);
}
private PackageID createPackageID(String orgName, String pkgName, String pkgVersion) {
if (orgName == null || orgName.isEmpty()) {
throw new BLangCompilerException("invalid module name '" + pkgName + "' in compiled package file");
}
return new PackageID(names.fromString(orgName),
names.fromString(pkgName),
names.fromString(pkgVersion));
}
    /**
     * This class holds compiled package specific information during the symbol enter phase of the compiled package.
     *
     * @since 0.970.0
     */
    private static class BIRPackageSymbolEnv {
        PackageID requestedPackageId;
        RepoHierarchy repoHierarchy;
        // Raw shape bytes per constant-pool index, parsed lazily by readBType.
        Map<Integer, byte[]> unparsedBTypeCPs = new HashMap<>();
        BPackageSymbol pkgSymbol;
        CPEntry[] constantPool;
        List<UnresolvedType> unresolvedTypes;

        BIRPackageSymbolEnv() {
            this.unresolvedTypes = new ArrayList<>();
        }
    }
    // Pairs a type signature with a callback to run once the type is resolved.
    // NOTE(review): no consumer of unresolvedTypes is visible in this part of the file —
    // confirm whether this mechanism is still in use.
    private static class UnresolvedType {
        String typeSig;
        Consumer<BType> completer;

        UnresolvedType(String typeSig, Consumer<BType> completer) {
            this.typeSig = typeSig;
            this.completer = completer;
        }
    }
    /**
     * Parses serialized BType shapes from the raw bytes of a shape constant-pool entry.
     * Composite shapes (records, errors, objects) are cached in the pool (addShapeCP) and pushed
     * onto {@code compositeStack} before their members are read, so self references resolve to the
     * in-progress instance.
     */
    private class BIRTypeReader {
        public static final int SERVICE_TYPE_TAG = 51;
        private DataInputStream inputStream;

        public BIRTypeReader(DataInputStream inputStream) {
            this.inputStream = inputStream;
        }

        // Reads a nested type reference (a CP index) via the outer readBType.
        private BType readTypeFromCp() throws IOException {
            return readBType(inputStream);
        }

        /**
         * Reads one shape. {@code cpI} is the pool slot of the shape being read; composites are
         * cached into it before their members are parsed.
         */
        public BType readType(int cpI) throws IOException {
            byte tag = inputStream.readByte();
            Name name = names.fromString(getStringCPEntryValue(inputStream));
            int flags = inputStream.readInt();
            // An int is consumed and discarded here — NOTE(review): confirm its meaning
            // against the BIR type writer.
            inputStream.readInt();
            switch (tag) {
                case TypeTags.INT:
                    return typeParamAnalyzer.getNominalType(symTable.intType, name, flags);
                case TypeTags.BYTE:
                    return typeParamAnalyzer.getNominalType(symTable.byteType, name, flags);
                case TypeTags.FLOAT:
                    return typeParamAnalyzer.getNominalType(symTable.floatType, name, flags);
                case TypeTags.DECIMAL:
                    return typeParamAnalyzer.getNominalType(symTable.decimalType, name, flags);
                case TypeTags.STRING:
                    return typeParamAnalyzer.getNominalType(symTable.stringType, name, flags);
                case TypeTags.BOOLEAN:
                    return typeParamAnalyzer.getNominalType(symTable.booleanType, name, flags);
                // All the above types are values type
                case TypeTags.JSON:
                    return symTable.jsonType;
                case TypeTags.XML:
                    return symTable.xmlType;
                case TypeTags.TABLE:
                    BTableType bTableType = new BTableType(TypeTags.TABLE, null, symTable.tableType.tsymbol);
                    bTableType.constraint = readTypeFromCp();
                    return bTableType;
                case TypeTags.NIL:
                    return symTable.nilType;
                case TypeTags.ANYDATA:
                    return typeParamAnalyzer.getNominalType(symTable.anydataType, name, flags);
                case TypeTags.RECORD:
                    // Layout: pkg ref, name, sealed flag, rest-field type, fields, optional init func.
                    int pkgCpIndex = inputStream.readInt();
                    PackageID pkgId = getPackageId(pkgCpIndex);

                    String recordName = getStringCPEntryValue(inputStream);
                    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                            names.fromString(recordName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
                    recordSymbol.scope = new Scope(recordSymbol);
                    BRecordType recordType = new BRecordType(recordSymbol);
                    recordSymbol.type = recordType;

                    // Cache and push before reading members so self references resolve here.
                    compositeStack.push(recordType);
                    addShapeCP(recordType, cpI);

                    recordType.sealed = inputStream.readBoolean();
                    recordType.restFieldType = readTypeFromCp();

                    int recordFields = inputStream.readInt();
                    for (int i = 0; i < recordFields; i++) {
                        String fieldName = getStringCPEntryValue(inputStream);
                        int fieldFlags = inputStream.readInt();

                        byte[] docBytes = readDocBytes(inputStream);

                        BType fieldType = readTypeFromCp();

                        BVarSymbol varSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName),
                                recordSymbol.pkgID, fieldType, recordSymbol.scope.owner);

                        defineMarkDownDocAttachment(varSymbol, docBytes);

                        BField structField = new BField(varSymbol.name, null, varSymbol);
                        recordType.fields.add(structField);
                        recordSymbol.scope.define(varSymbol.name, varSymbol);
                    }

                    boolean isInitAvailable = inputStream.readByte() == 1;
                    if (isInitAvailable) {
                        // read record init function
                        String recordInitFuncName = getStringCPEntryValue(inputStream);
                        int recordInitFuncFlags = inputStream.readInt();
                        BInvokableType recordInitFuncType = (BInvokableType) readTypeFromCp();
                        Name initFuncName = names.fromString(recordInitFuncName);
                        boolean isNative = Symbols.isFlagOn(recordInitFuncFlags, Flags.NATIVE);
                        BInvokableSymbol recordInitFuncSymbol =
                                Symbols.createFunctionSymbol(recordInitFuncFlags,
                                        initFuncName, env.pkgSymbol.pkgID, recordInitFuncType,
                                        env.pkgSymbol, isNative);
                        recordInitFuncSymbol.retType = recordInitFuncType.retType;
                        recordSymbol.initializerFunc = new BAttachedFunction(initFuncName, recordInitFuncSymbol,
                                recordInitFuncType);
                        recordSymbol.scope.define(initFuncName, recordInitFuncSymbol);
                    }

                    Object poppedRecordType = compositeStack.pop();
                    assert poppedRecordType == recordType;

                    if (pkgId.equals(env.pkgSymbol.pkgID)) {
                        return recordType;
                    }

                    // Shape belongs to another package: return that package's defined type instead.
                    BPackageSymbol pkgSymbol = packageLoader.loadPackageSymbol(pkgId, null, null);
                    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgSymbol);
                    return symbolResolver.lookupSymbol(pkgEnv, names.fromString(recordName), SymTag.TYPE).type;
                case TypeTags.TYPEDESC:
                    BTypedescType typedescType = new BTypedescType(null, symTable.typeDesc.tsymbol);
                    typedescType.constraint = readTypeFromCp();
                    return typedescType;
                case TypeTags.STREAM:
                    BStreamType bStreamType = new BStreamType(TypeTags.STREAM, null, symTable.streamType.tsymbol);
                    bStreamType.constraint = readTypeFromCp();
                    return bStreamType;
                case TypeTags.MAP:
                    BMapType bMapType = new BMapType(TypeTags.MAP, null, symTable.mapType.tsymbol);
                    bMapType.constraint = readTypeFromCp();
                    return bMapType;
                case TypeTags.INVOKABLE:
                    BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
                    bInvokableType.flags = flags;
                    int paramCount = inputStream.readInt();
                    List<BType> paramTypes = new ArrayList<>();
                    for (int i = 0; i < paramCount; i++) {
                        paramTypes.add(readTypeFromCp());
                    }
                    bInvokableType.paramTypes = paramTypes;
                    if (inputStream.readBoolean()) { //if rest param exist
                        bInvokableType.restType = readTypeFromCp();
                    }
                    bInvokableType.retType = readTypeFromCp();
                    return bInvokableType;
                // All the above types are branded types
                case TypeTags.ANY:
                    return typeParamAnalyzer.getNominalType(symTable.anyType, name, flags);
                case TypeTags.HANDLE:
                    return symTable.handleType;
                case TypeTags.ENDPOINT:
                    // TODO fix
                    break;
                case TypeTags.ARRAY:
                    byte state = inputStream.readByte();
                    int size = inputStream.readInt();
                    BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.asMask(EnumSet
                            .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
                    BArrayType bArrayType = new BArrayType(null, arrayTypeSymbol, size, BArrayState.valueOf(state));
                    bArrayType.eType = readTypeFromCp();
                    return bArrayType;
                case TypeTags.UNION:
                    BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet
                            .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
                    BUnionType unionType = BUnionType.create(unionTypeSymbol,
                            new LinkedHashSet<>()); //TODO improve(useless second arg?)
                    int unionMemberCount = inputStream.readInt();
                    for (int i = 0; i < unionMemberCount; i++) {
                        unionType.add(readTypeFromCp());
                    }
                    return unionType;
                case TypeTags.PACKAGE:
                    // TODO fix
                    break;
                case TypeTags.NONE:
                    return symTable.noType;
                case TypeTags.VOID:
                    // TODO fix
                    break;
                case TypeTags.XMLNS:
                    // TODO fix
                    break;
                case TypeTags.ANNOTATION:
                    // TODO fix
                    break;
                case TypeTags.SEMANTIC_ERROR:
                    // TODO fix
                    break;
                case TypeTags.ERROR:
                    BTypeSymbol errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY,
                            env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
                    BErrorType errorType = new BErrorType(errorSymbol);
                    // Cache and push before reading reason/detail so self references resolve here.
                    addShapeCP(errorType, cpI);
                    compositeStack.push(errorType);
                    pkgCpIndex = inputStream.readInt();
                    pkgId = getPackageId(pkgCpIndex);
                    String errorName = getStringCPEntryValue(inputStream);
                    BType reasonType = readTypeFromCp();
                    BType detailsType = readTypeFromCp();
                    errorType.reasonType = reasonType;
                    errorType.detailType = detailsType;
                    errorSymbol.type = errorType;
                    errorSymbol.pkgID = pkgId;
                    errorSymbol.name = names.fromString(errorName);
                    Object poppedErrorType = compositeStack.pop();
                    assert poppedErrorType == errorType;
                    if (!env.pkgSymbol.pkgID.equals(PackageID.ANNOTATIONS)
                            && Symbols.isFlagOn(flags, Flags.NATIVE)) {
                        // Built-in native errors collapse to the generic error type.
                        return symTable.errorType;
                    }
                    return errorType;
                case TypeTags.ITERATOR:
                    // TODO fix
                    break;
                case TypeTags.TUPLE:
                    BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet
                            .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
                    BTupleType bTupleType = new BTupleType(tupleTypeSymbol, null);
                    int tupleMemberCount = inputStream.readInt();
                    List<BType> tupleMemberTypes = new ArrayList<>();
                    for (int i = 0; i < tupleMemberCount; i++) {
                        tupleMemberTypes.add(readTypeFromCp());
                    }
                    bTupleType.tupleTypes = tupleMemberTypes;
                    return bTupleType;
                case TypeTags.FUTURE:
                    BFutureType bFutureType = new BFutureType(TypeTags.FUTURE, null, symTable.futureType.tsymbol);
                    bFutureType.constraint = readTypeFromCp();
                    return bFutureType;
                case TypeTags.INTERMEDIATE_COLLECTION:
                    // TODO fix
                    break;
                case TypeTags.FINITE:
                    String finiteTypeName = getStringCPEntryValue(inputStream);
                    int finiteTypeFlags = inputStream.readInt();
                    BTypeSymbol symbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteTypeFlags,
                            names.fromString(finiteTypeName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
                    symbol.scope = new Scope(symbol);
                    BFiniteType finiteType = new BFiniteType(symbol);
                    symbol.type = finiteType;
                    int valueSpaceSize = inputStream.readInt();
                    for (int i = 0; i < valueSpaceSize; i++) {
                        defineValueSpace(inputStream, finiteType, this);
                    }
                    return finiteType;
                case TypeTags.OBJECT:
                    // Layout: service flag, pkg ref, name, abstract/client flags, fields,
                    // optional generated+user constructors, attached function stubs.
                    boolean service = inputStream.readByte() == 1;

                    pkgCpIndex = inputStream.readInt();
                    pkgId = getPackageId(pkgCpIndex);

                    String objName = getStringCPEntryValue(inputStream);
                    int objFlags = (inputStream.readBoolean() ? Flags.ABSTRACT : 0) | Flags.PUBLIC;
                    objFlags = inputStream.readBoolean() ? objFlags | Flags.CLIENT : objFlags;
                    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) Symbols.createObjectSymbol(objFlags,
                            names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
                    objectSymbol.scope = new Scope(objectSymbol);
                    objectSymbol.methodScope = new Scope(objectSymbol);
                    BObjectType objectType;
                    // Below is a temporary fix, need to fix this properly by using the type tag
                    if (service) {
                        objectType = new BServiceType(objectSymbol);
                    } else {
                        objectType = new BObjectType(objectSymbol);
                    }
                    objectSymbol.type = objectType;
                    addShapeCP(objectType, cpI);
                    compositeStack.push(objectType);

                    int fieldCount = inputStream.readInt();
                    for (int i = 0; i < fieldCount; i++) {
                        String fieldName = getStringCPEntryValue(inputStream);
                        int fieldFlags = inputStream.readInt();

                        byte[] docBytes = readDocBytes(inputStream);

                        BType fieldType = readTypeFromCp();
                        BVarSymbol objectVarSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName),
                                objectSymbol.pkgID, fieldType, objectSymbol.scope.owner);

                        defineMarkDownDocAttachment(objectVarSymbol, docBytes);

                        BField structField = new BField(objectVarSymbol.name, null, objectVarSymbol);
                        objectType.fields.add(structField);
                        objectSymbol.scope.define(objectVarSymbol.name, objectVarSymbol);
                    }
                    boolean generatedConstructorPresent = inputStream.readBoolean();
                    if (generatedConstructorPresent) {
                        ignoreAttachedFunc();
                    }
                    boolean constructorPresent = inputStream.readBoolean();
                    if (constructorPresent) {
                        ignoreAttachedFunc();
                    }
                    // Attached function stubs are skipped here; the real definitions are read
                    // later by readTypeDefBodies/defineFunction.
                    int funcCount = inputStream.readInt();
                    for (int i = 0; i < funcCount; i++) {
                        ignoreAttachedFunc();
                    }
                    Object poppedObjType = compositeStack.pop();
                    assert poppedObjType == objectType;

                    if (pkgId.equals(env.pkgSymbol.pkgID)) {
                        return objectType;
                    }

                    pkgSymbol = packageLoader.loadPackageSymbol(pkgId, null, null);
                    pkgEnv = symTable.pkgEnvMap.get(pkgSymbol);
                    return symbolResolver.lookupSymbol(pkgEnv, names.fromString(objName), SymTag.TYPE).type;
                case TypeTags.BYTE_ARRAY:
                    // TODO fix
                    break;
                case TypeTags.FUNCTION_POINTER:
                    // TODO fix
                    break;
                case SERVICE_TYPE_TAG:
                    return symTable.anyServiceType;
            }
            return null;
        }

        // Consumes one attached-function stub (name, flags, type) without defining anything.
        private void ignoreAttachedFunc() throws IOException {
            getStringCPEntryValue(inputStream);
            inputStream.readInt();
            readTypeFromCp();
        }
    }
private byte[] readDocBytes(DataInputStream inputStream) throws IOException {
int docLength = inputStream.readInt();
byte[] docBytes = new byte[docLength];
int noOfBytesRead = inputStream.read(docBytes);
if (docLength != noOfBytesRead) {
throw new RuntimeException("Failed to read Markdown Documenation");
}
return docBytes;
}
private PackageID getPackageId(int pkgCPIndex) {
PackageCPEntry pkgCpEntry = (PackageCPEntry) env.constantPool[pkgCPIndex];
String orgName = ((StringCPEntry) env.constantPool[pkgCpEntry.orgNameCPIndex]).value;
String pkgName = ((StringCPEntry) env.constantPool[pkgCpEntry.pkgNameCPIndex]).value;
String version = ((StringCPEntry) env.constantPool[pkgCpEntry.versionCPIndex]).value;
return new PackageID(names.fromString(orgName),
names.fromString(pkgName), names.fromString(version));
}
/**
 * Reads one member of a finite type's value space from the stream and adds it
 * to {@code finiteType.valueSpace} as a literal expression.
 *
 * @param dataInStream stream positioned at the value's type CP index
 * @param finiteType   finite type being populated
 * @param typeReader   reader used to resolve the value's declared type
 * @throws IOException on stream failure
 * @throws UnsupportedOperationException for types a finite value cannot have
 */
private void defineValueSpace(DataInputStream dataInStream, BFiniteType finiteType, BIRTypeReader typeReader)
        throws IOException {
    BType valueType = typeReader.readTypeFromCp();
    BLangLiteral litExpr = createLiteralBasedOnType(valueType);
    switch (valueType.tag) {
        case TypeTags.INT:
            int integerCpIndex = dataInStream.readInt();
            IntegerCPEntry integerCPEntry = (IntegerCPEntry) this.env.constantPool[integerCpIndex];
            litExpr.value = integerCPEntry.value;
            break;
        case TypeTags.BYTE:
            int byteCpIndex = dataInStream.readInt();
            ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex];
            litExpr.value = byteCPEntry.value;
            break;
        case TypeTags.FLOAT:
            int floatCpIndex = dataInStream.readInt();
            FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex];
            // the literal node carries floats in their string form
            litExpr.value = Double.toString(floatCPEntry.value);
            break;
        case TypeTags.STRING:
        case TypeTags.DECIMAL:
            // decimals share the string constant-pool representation
            litExpr.value = getStringCPEntryValue(dataInStream);
            break;
        case TypeTags.BOOLEAN:
            litExpr.value = dataInStream.readByte() == 1;
            break;
        case TypeTags.NIL:
            // nil carries no payload
            break;
        default:
            throw new UnsupportedOperationException("finite type value is not supported for type: " + valueType);
    }
    litExpr.type = valueType;
    finiteType.valueSpace.add(litExpr);
}
// Numeric tags (tag <= DECIMAL) get a numeric-literal node; every other type
// gets a plain literal node.
private BLangLiteral createLiteralBasedOnType(BType valueType) {
    if (valueType.tag <= TypeTags.DECIMAL) {
        return (BLangLiteral) TreeBuilder.createNumericLiteralExpression();
    }
    return (BLangLiteral) TreeBuilder.createLiteralExpression();
}
} | class BIRPackageSymbolEnter {
private final PackageLoader packageLoader;
private final SymbolResolver symbolResolver;
private final SymbolTable symTable;
private final Names names;
private final TypeParamAnalyzer typeParamAnalyzer;
private final BLangDiagnosticLog dlog;
private BIRTypeReader typeReader;
// per-package read state; swapped in/out around each definePackage call
private BIRPackageSymbolEnv env;
// record/object symbols collected by defineTypeDef; their bodies are read
// afterwards by readTypeDefBodies, in this same order
private List<BStructureTypeSymbol> structureTypes;
// the structure whose deferred body is currently being read
private BStructureTypeSymbol currentStructure = null;
// stack of composite types (record/object/error) currently being decoded
private LinkedList<Object> compositeStack = new LinkedList<>();
private static final CompilerContext.Key<BIRPackageSymbolEnter> COMPILED_PACKAGE_SYMBOL_ENTER_KEY =
        new CompilerContext.Key<>();
// Returns the per-context singleton; the constructor registers a new instance
// in the context, so creating one is enough on a cache miss.
public static BIRPackageSymbolEnter getInstance(CompilerContext context) {
    BIRPackageSymbolEnter reader = context.get(COMPILED_PACKAGE_SYMBOL_ENTER_KEY);
    return reader != null ? reader : new BIRPackageSymbolEnter(context);
}
// Wires the compiler-context singletons this reader depends on, and registers
// this instance in the context so subsequent getInstance() calls reuse it.
private BIRPackageSymbolEnter(CompilerContext context) {
    context.put(COMPILED_PACKAGE_SYMBOL_ENTER_KEY, this);
    this.packageLoader = PackageLoader.getInstance(context);
    this.symbolResolver = SymbolResolver.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.names = Names.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
}
/**
 * Defines a package symbol from compiled BIR content and registers a package
 * environment for it.
 *
 * @param packageId                  identity of the module being loaded
 * @param packageRepositoryHierarchy repo hierarchy used to resolve imports
 * @param packageBinaryContent       full BIR file bytes, including the 8-byte
 *                                   magic + version header
 * @return the populated package symbol
 */
public BPackageSymbol definePackage(PackageID packageId,
                                    RepoHierarchy packageRepositoryHierarchy,
                                    byte[] packageBinaryContent) {
    BPackageSymbol pkgSymbol = definePackage(packageId, packageRepositoryHierarchy,
            new ByteArrayInputStream(packageBinaryContent));
    // Strip the 4-byte magic number and 4-byte version word; the remaining BIR
    // body is kept on the symbol.
    byte[] modifiedPkgBinaryContent = Arrays.copyOfRange(
            packageBinaryContent, 8, packageBinaryContent.length);
    pkgSymbol.birPackageFile = new CompiledBinaryFile.BIRPackageFile(modifiedPkgBinaryContent);
    SymbolEnv builtinEnv = this.symTable.pkgEnvMap.get(symTable.langAnnotationModuleSymbol);
    SymbolEnv pkgEnv = SymbolEnv.createPkgEnv(null, pkgSymbol.scope, builtinEnv);
    this.symTable.pkgEnvMap.put(pkgSymbol, pkgEnv);
    return pkgSymbol;
}
/**
 * Reads a package from the given stream inside a fresh {@code BIRPackageSymbolEnv},
 * restoring the previous environment afterwards.
 *
 * @param packageId                  identity of the module being loaded
 * @param packageRepositoryHierarchy repo hierarchy used to resolve imports
 * @param programFileInStream        stream over the full BIR content; closed here
 * @return the populated package symbol
 * @throws BLangCompilerException wrapping any failure while reading
 */
private BPackageSymbol definePackage(PackageID packageId,
                                     RepoHierarchy packageRepositoryHierarchy,
                                     InputStream programFileInStream) {
    try (DataInputStream dataInStream = new DataInputStream(programFileInStream)) {
        BIRPackageSymbolEnv prevEnv = this.env;
        this.env = new BIRPackageSymbolEnv();
        this.env.requestedPackageId = packageId;
        this.env.repoHierarchy = packageRepositoryHierarchy;
        BPackageSymbol pkgSymbol = definePackage(dataInStream);
        this.env = prevEnv;
        return pkgSymbol;
    } catch (Throwable e) {
        // The previous separate catch (IOException) had an identical body, so a
        // single catch preserves behavior while removing the duplication.
        throw new BLangCompilerException(e.getMessage(), e);
    }
}
/**
 * Validates the BIR file header (magic number and version), reads the constant
 * pool, and delegates to the per-package reader.
 *
 * @param dataInStream stream positioned at the start of the BIR file
 * @return the populated package symbol
 * @throws IOException             on stream failure or truncated header
 * @throws BLangCompilerException  on a bad magic number or unsupported version
 */
private BPackageSymbol definePackage(DataInputStream dataInStream) throws IOException {
    byte[] magic = new byte[4];
    // Fix: the previous read(magic, 0, 4) ignored the return value and could
    // leave the header partially read; readFully guarantees all 4 bytes.
    dataInStream.readFully(magic);
    if (!Arrays.equals(magic, BIRPackageFile.BIR_MAGIC)) {
        throw new BLangCompilerException("invalid magic number " + Arrays.toString(magic));
    }
    int version = dataInStream.readInt();
    if (version != BIRPackageFile.BIR_VERSION) {
        throw new BLangCompilerException("unsupported program file version " + version);
    }
    this.env.constantPool = readConstantPool(dataInStream);
    int pkgCPIndex = dataInStream.readInt();
    return definePackage(dataInStream, pkgCPIndex);
}
// Reads the package-level sections in their fixed serialization order:
// imports, constants, type-def declarations, globals, type-def bodies,
// functions, then annotations. The call order must match the writer's layout.
private BPackageSymbol definePackage(DataInputStream dataInStream, int pkgCpIndex) throws IOException {
    PackageCPEntry pkgCpEntry = (PackageCPEntry) this.env.constantPool[pkgCpIndex];
    String orgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.orgNameCPIndex]).value;
    String pkgName = ((StringCPEntry) this.env.constantPool[pkgCpEntry.pkgNameCPIndex]).value;
    String pkgVersion = ((StringCPEntry) this.env.constantPool[pkgCpEntry.versionCPIndex]).value;
    PackageID pkgId = createPackageID(orgName, pkgName, pkgVersion);
    this.env.pkgSymbol = Symbols.createPackageSymbol(pkgId, this.symTable);
    defineSymbols(dataInStream, rethrow(this::defineImportPackage));
    defineSymbols(dataInStream, rethrow(this::defineConstant));
    // type defs are only declared here; their bodies are read after globals
    this.structureTypes = new ArrayList<>();
    defineSymbols(dataInStream, rethrow(this::defineTypeDef));
    defineSymbols(dataInStream, rethrow(this::definePackageLevelVariables));
    readTypeDefBodies(dataInStream);
    defineSymbols(dataInStream, rethrow(this::defineFunction));
    defineSymbols(dataInStream, rethrow(this::defineAnnotations));
    this.typeReader = null;
    return this.env.pkgSymbol;
}
// For each structure type collected in defineTypeDef (in definition order),
// reads its attached functions and then its referenced types.
// currentStructure is presumably consumed by defineFunction to know which
// type the functions attach to — defineFunction is not visible here; confirm.
private void readTypeDefBodies(DataInputStream dataInStream) throws IOException {
    for (BStructureTypeSymbol structureTypeSymbol : this.structureTypes) {
        this.currentStructure = structureTypeSymbol;
        defineSymbols(dataInStream, rethrow(this::defineFunction));
        defineSymbols(dataInStream, rethrow(this::readBType));
    }
    this.currentStructure = null;
}
// Reads the whole constant pool. The (still-empty) array is installed on
// env.constantPool before the entries are parsed so that parsing of one entry
// can already reference slots of the same pool.
private CPEntry[] readConstantPool(DataInputStream dataInStream) throws IOException {
    int constantPoolSize = dataInStream.readInt();
    CPEntry[] constantPool = new CPEntry[constantPoolSize];
    this.env.constantPool = constantPool;
    for (int i = 0; i < constantPoolSize; i++) {
        byte cpTag = dataInStream.readByte();
        // tags are serialized 1-based; the enum's ordinals are 0-based
        CPEntry.Type cpEntryType = CPEntry.Type.values()[cpTag - 1];
        constantPool[i] = readCPEntry(dataInStream, constantPool, cpEntryType, i);
    }
    return constantPool;
}
/**
 * Decodes a single constant-pool entry. SHAPE entries are not parsed here:
 * their raw bytes are stashed in {@code env.unparsedBTypeCPs} for lazy decoding
 * by {@code readBType}, and the pool slot is left {@code null}.
 *
 * @param dataInStream stream positioned just after the entry's tag byte
 * @param constantPool pool being populated (slot {@code i} belongs to this entry)
 * @param cpEntryType  decoded entry tag
 * @param i            index of this entry in the pool
 * @return the decoded entry, or {@code null} for SHAPE entries
 * @throws IOException on stream failure
 */
private CPEntry readCPEntry(DataInputStream dataInStream,
                            CPEntry[] constantPool,
                            CPEntry.Type cpEntryType, int i) throws IOException {
    switch (cpEntryType) {
        case CP_ENTRY_INTEGER:
            return new CPEntry.IntegerCPEntry(dataInStream.readLong());
        case CP_ENTRY_FLOAT:
            return new CPEntry.FloatCPEntry(dataInStream.readDouble());
        case CP_ENTRY_BOOLEAN:
            return new CPEntry.BooleanCPEntry(dataInStream.readBoolean());
        case CP_ENTRY_STRING:
            int length = dataInStream.readInt();
            String strValue = null;
            // a negative length encodes a null string
            if (length >= 0) {
                byte[] bytes = new byte[length];
                // Fix: a single read() may return fewer bytes than requested
                // even mid-stream; readFully loops until the buffer is filled.
                dataInStream.readFully(bytes);
                // NOTE(review): decodes with the platform default charset;
                // presumably the writer emits UTF-8 — confirm and pass
                // StandardCharsets.UTF_8 explicitly.
                strValue = new String(bytes);
            }
            return new CPEntry.StringCPEntry(strValue);
        case CP_ENTRY_PACKAGE:
            return new CPEntry.PackageCPEntry(dataInStream.readInt(),
                    dataInStream.readInt(), dataInStream.readInt());
        case CP_ENTRY_SHAPE:
            env.unparsedBTypeCPs.put(i, readByteArray(dataInStream));
            return null;
        case CP_ENTRY_BYTE:
            return new CPEntry.ByteCPEntry(dataInStream.readInt());
        default:
            throw new IllegalStateException("unsupported constant pool entry type: " +
                    cpEntryType.name());
    }
}
// Reads a length-prefixed byte array; readFully fails on truncated input.
private byte[] readByteArray(DataInputStream dataInStream) throws IOException {
    int size = dataInStream.readInt();
    byte[] buffer = new byte[size];
    dataInStream.readFully(buffer);
    return buffer;
}
// Reads a count prefix, then applies the given definition callback that many
// times against the stream.
private void defineSymbols(DataInputStream dataInStream,
                           Consumer<DataInputStream> symbolDefineFunc) throws IOException {
    int remaining = dataInStream.readInt();
    while (remaining-- > 0) {
        symbolDefineFunc.accept(dataInStream);
    }
}
// Reads one import declaration (org/name/version), loads the imported module's
// symbol, and registers it both in the scope and the import list of the
// package currently being defined.
private void defineImportPackage(DataInputStream dataInStream) throws IOException {
    String orgName = getStringCPEntryValue(dataInStream);
    String pkgName = getStringCPEntryValue(dataInStream);
    String pkgVersion = getStringCPEntryValue(dataInStream);
    PackageID importPkgID = createPackageID(orgName, pkgName, pkgVersion);
    BPackageSymbol importPackageSymbol = packageLoader.loadPackageSymbol(importPkgID, this.env.pkgSymbol.pkgID,
            this.env.repoHierarchy);
    this.env.pkgSymbol.scope.define(importPkgID.name, importPackageSymbol);
    this.env.pkgSymbol.imports.add(importPackageSymbol);
}
// Discards a serialized source position, which occupies five int32 fields.
private void skipPosition(DataInputStream dataInStream) throws IOException {
    for (int i = 0; i < 5; i++) {
        dataInStream.readInt();
    }
}
/**
 * Reads one type definition: position (skipped), name, flags, label marker,
 * documentation, and the defined type. Record/object definitions are queued in
 * {@code structureTypes} so their bodies can be read later.
 */
private void defineTypeDef(DataInputStream dataInStream) throws IOException {
    skipPosition(dataInStream);
    String typeDefName = getStringCPEntryValue(dataInStream);
    int flags = dataInStream.readInt();
    boolean isLabel = dataInStream.readByte() == 1;
    byte[] docBytes = readDocBytes(dataInStream);
    BType type = readBType(dataInStream);
    if (type.tag == TypeTags.INVOKABLE) {
        setInvokableTypeSymbol((BInvokableType) type);
    }
    // propagate ABSTRACT/CLIENT from the type's symbol onto the definition flags
    flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.ABSTRACT) ? flags | Flags.ABSTRACT : flags;
    flags = Symbols.isFlagOn(type.tsymbol.flags, Flags.CLIENT) ? flags | Flags.CLIENT : flags;
    BTypeSymbol symbol;
    if (isLabel) {
        // a label definition gets its own symbol distinct from the labelled type's
        symbol = type.tsymbol.createLabelSymbol();
    } else {
        symbol = type.tsymbol;
    }
    defineMarkDownDocAttachment(symbol, docBytes);
    symbol.name = names.fromString(typeDefName);
    symbol.type = type;
    symbol.pkgID = this.env.pkgSymbol.pkgID;
    symbol.flags = flags;
    if (type.tag == TypeTags.RECORD || type.tag == TypeTags.OBJECT) {
        // body (attached functions, referenced types) is read by readTypeDefBodies
        this.structureTypes.add((BStructureTypeSymbol) symbol);
    }
    this.env.pkgSymbol.scope.define(symbol.name, symbol);
    if (type.tag == TypeTags.ERROR) {
        // error type definitions get an implicit constructor symbol
        defineErrorConstructor(this.env.pkgSymbol.scope, symbol);
    }
}
// Populates the invokable type's symbol with parameter, rest-parameter and
// return-type metadata derived from the type itself. Parameter names are not
// known at this point, hence Names.EMPTY.
private void setInvokableTypeSymbol(BInvokableType invokableType) {
    BInvokableTypeSymbol typeSymbol = (BInvokableTypeSymbol) invokableType.tsymbol;
    List<BVarSymbol> paramSymbols = new ArrayList<>();
    for (BType paramType : invokableType.paramTypes) {
        paramSymbols.add(new BVarSymbol(paramType.flags, Names.EMPTY, this.env.pkgSymbol.pkgID,
                paramType, null));
    }
    typeSymbol.params = paramSymbols;
    if (invokableType.restType != null) {
        typeSymbol.restParam =
                new BVarSymbol(0, Names.EMPTY, this.env.pkgSymbol.pkgID, invokableType.restType, null);
    }
    typeSymbol.returnType = invokableType.retType;
}
/**
 * Decodes the markdown-documentation blob produced by {@code readDocBytes} and
 * attaches it to the given symbol. A leading boolean marks presence; negative
 * CP indices encode an absent description.
 */
private void defineMarkDownDocAttachment(BSymbol symbol, byte[] docBytes) throws IOException {
    DataInputStream dataInStream = new DataInputStream(new ByteArrayInputStream(docBytes));
    boolean docPresent = dataInStream.readBoolean();
    if (!docPresent) {
        return;
    }
    MarkdownDocAttachment markdownDocAttachment = new MarkdownDocAttachment();
    int descCPIndex = dataInStream.readInt();
    int retDescCPIndex = dataInStream.readInt();
    markdownDocAttachment.description = descCPIndex >= 0 ? getStringCPEntryValue(descCPIndex) : null;
    markdownDocAttachment.returnValueDescription
            = retDescCPIndex >= 0 ? getStringCPEntryValue(retDescCPIndex) : null;
    int paramLength = dataInStream.readInt();
    for (int i = 0; i < paramLength; i++) {
        int nameCPIndex = dataInStream.readInt();
        int paramDescCPIndex = dataInStream.readInt();
        String name = nameCPIndex >= 0 ? getStringCPEntryValue(nameCPIndex) : null;
        String description = paramDescCPIndex >= 0 ? getStringCPEntryValue(paramDescCPIndex) : null;
        MarkdownDocAttachment.Parameter parameter = new MarkdownDocAttachment.Parameter(name, description);
        markdownDocAttachment.parameters.add(parameter);
    }
    symbol.markdownDocumentation = markdownDocAttachment;
}
// Creates the implicit constructor symbol for an error type definition,
// registers it in the given scope, and links it back onto the error symbol.
private void defineErrorConstructor(Scope scope, BTypeSymbol typeDefSymbol) {
    BConstructorSymbol ctorSymbol = new BConstructorSymbol(SymTag.CONSTRUCTOR, typeDefSymbol.flags,
            typeDefSymbol.name, typeDefSymbol.pkgID, typeDefSymbol.type, typeDefSymbol.owner);
    ctorSymbol.kind = SymbolKind.ERROR_CONSTRUCTOR;
    ctorSymbol.scope = new Scope(ctorSymbol);
    ctorSymbol.retType = typeDefSymbol.type;
    ((BErrorTypeSymbol) typeDefSymbol).ctorSymbol = ctorSymbol;
    scope.define(ctorSymbol.name, ctorSymbol);
}
/**
 * Resolves a type CP index to a {@code BType}. Already-parsed shapes are served
 * from the constant pool; unparsed entries are lazily decoded from the raw
 * bytes captured in {@code readCPEntry} and cached via {@code addShapeCP}.
 * Invokable types are always re-wrapped with a fresh type symbol so the cached
 * shared instance's symbol is never handed out.
 */
private BType readBType(DataInputStream dataInStream) throws IOException {
    int typeCpIndex = dataInStream.readInt();
    CPEntry cpEntry = this.env.constantPool[typeCpIndex];
    BType type = null;
    if (cpEntry != null) {
        type = ((CPEntry.ShapeCPEntry) cpEntry).shape;
        if (type.tag != TypeTags.INVOKABLE) {
            return type;
        }
    }
    if (type == null) {
        // not parsed yet: decode from the stashed raw shape bytes and cache
        byte[] e = env.unparsedBTypeCPs.get(typeCpIndex);
        type = new BIRTypeReader(new DataInputStream(new ByteArrayInputStream(e))).readType(typeCpIndex);
        addShapeCP(type, typeCpIndex);
    }
    if (type.tag == TypeTags.INVOKABLE) {
        return createClonedInvokableTypeWithTsymbol((BInvokableType) type);
    }
    return type;
}
// Copies an invokable type (same param/rest/return types) and attaches a
// freshly created invokable type symbol to the copy.
private BInvokableType createClonedInvokableTypeWithTsymbol(BInvokableType bInvokableType) {
    BInvokableType cloned = new BInvokableType(bInvokableType.paramTypes, bInvokableType.restType,
            bInvokableType.retType, null);
    cloned.tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, bInvokableType.flags,
            env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
    return cloned;
}
// Caches a parsed type shape back into the constant-pool slot it came from.
private void addShapeCP(BType bType, int typeCpIndex) {
    env.constantPool[typeCpIndex] = new CPEntry.ShapeCPEntry(bType);
}
// Reads one annotation declaration: name, flags, attach points, and the
// optional attached (constraint) type, then defines the annotation symbol in
// the package scope.
private void defineAnnotations(DataInputStream dataInStream) throws IOException {
    String name = getStringCPEntryValue(dataInStream);
    int flags = dataInStream.readInt();
    int attachPointCount = dataInStream.readInt();
    Set<AttachPoint> attachPoints = new HashSet<>(attachPointCount);
    for (int i = 0; i < attachPointCount; i++) {
        attachPoints.add(AttachPoint.getAttachmentPoint(getStringCPEntryValue(dataInStream),
                dataInStream.readBoolean()));
    }
    BType annotationType = readBType(dataInStream);
    BAnnotationSymbol annotationSymbol = Symbols.createAnnotationSymbol(flags, attachPoints, names.fromString(name),
            this.env.pkgSymbol.pkgID, null, this.env.pkgSymbol);
    annotationSymbol.type = new BAnnotationType(annotationSymbol);
    this.env.pkgSymbol.scope.define(annotationSymbol.name, annotationSymbol);
    if (annotationType != symTable.noType) {
        // noType marks an annotation without an attached type
        annotationSymbol.attachedType = annotationType.tsymbol;
    }
}
// Reads a constant definition: name, flags, documentation, declared type, and
// the literal value.
private void defineConstant(DataInputStream dataInStream) throws IOException {
    String constantName = getStringCPEntryValue(dataInStream);
    int flags = dataInStream.readInt();
    byte[] docBytes = readDocBytes(dataInStream);
    BType type = readBType(dataInStream);
    Scope enclScope = this.env.pkgSymbol.scope;
    BConstantSymbol constantSymbol = new BConstantSymbol(flags, names.fromString(constantName),
            this.env.pkgSymbol.pkgID, null, type, enclScope.owner);
    defineMarkDownDocAttachment(constantSymbol, docBytes);
    // 8-byte field skipped before the value — presumably the serialized value
    // length; TODO confirm against the BIR writer
    dataInStream.readLong();
    constantSymbol.value = readConstLiteralValue(dataInStream);
    constantSymbol.literalType = constantSymbol.value.type;
    enclScope.define(constantSymbol.name, constantSymbol);
}
/**
 * Recursively reads a serialized constant value. The value's type is read
 * first; MAP values recurse once per key/value pair.
 *
 * @throws RuntimeException for value types a constant cannot hold
 */
private BLangConstantValue readConstLiteralValue(DataInputStream dataInStream) throws IOException {
    BType valueType = readBType(dataInStream);
    switch (valueType.tag) {
        case TypeTags.INT:
            return new BLangConstantValue(getIntCPEntryValue(dataInStream), symTable.intType);
        case TypeTags.BYTE:
            return new BLangConstantValue(getByteCPEntryValue(dataInStream), symTable.byteType);
        case TypeTags.FLOAT:
            return new BLangConstantValue(getFloatCPEntryValue(dataInStream), symTable.floatType);
        case TypeTags.STRING:
            return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.stringType);
        case TypeTags.DECIMAL:
            // decimals are serialized via the string constant pool
            return new BLangConstantValue(getStringCPEntryValue(dataInStream), symTable.decimalType);
        case TypeTags.BOOLEAN:
            return new BLangConstantValue(dataInStream.readByte() == 1, symTable.booleanType);
        case TypeTags.NIL:
            return new BLangConstantValue(null, symTable.nilType);
        case TypeTags.MAP:
            int size = dataInStream.readInt();
            // LinkedHashMap preserves the serialized key order
            Map<String, BLangConstantValue> keyValuePairs = new LinkedHashMap<>();
            for (int i = 0; i < size; i++) {
                String key = getStringCPEntryValue(dataInStream);
                BLangConstantValue value = readConstLiteralValue(dataInStream);
                keyValuePairs.put(key, value);
            }
            return new BLangConstantValue(keyValuePairs, valueType);
        default:
            throw new RuntimeException("unexpected type: " + valueType);
    }
}
// Reads one module-level variable: a leading byte (skipped — presumably a
// variable-kind tag; TODO confirm against the BIR writer), name, flags,
// documentation, and type. Function-typed globals become invokable symbols;
// client-typed globals are tagged as endpoints.
private void definePackageLevelVariables(DataInputStream dataInStream) throws IOException {
    dataInStream.readByte(); // skipped; see note above
    String varName = getStringCPEntryValue(dataInStream);
    int flags = dataInStream.readInt();
    byte[] docBytes = readDocBytes(dataInStream);
    BType varType = readBType(dataInStream);
    Scope enclScope = this.env.pkgSymbol.scope;
    BVarSymbol varSymbol;
    if (varType.tag == TypeTags.INVOKABLE) {
        varSymbol = new BInvokableSymbol(SymTag.VARIABLE, flags, names.fromString(varName),
                this.env.pkgSymbol.pkgID, varType, enclScope.owner);
    } else {
        varSymbol = new BVarSymbol(flags, names.fromString(varName), this.env.pkgSymbol.pkgID, varType,
                enclScope.owner);
        if (varType.tsymbol != null && Symbols.isFlagOn(varType.tsymbol.flags, Flags.CLIENT)) {
            varSymbol.tag = SymTag.ENDPOINT;
        }
    }
    defineMarkDownDocAttachment(varSymbol, docBytes);
    enclScope.define(varSymbol.name, varSymbol);
}
/**
 * Reads the parameter section of a serialized function: required parameters,
 * an optional rest parameter, and optional receiver metadata (discarded).
 * The invokable type symbol is kept in sync with the invokable symbol.
 */
private void setParamSymbols(BInvokableSymbol invokableSymbol, DataInputStream dataInStream)
        throws IOException {
    int requiredParamCount = dataInStream.readInt();
    BInvokableType invokableType = (BInvokableType) invokableSymbol.type;
    for (int i = 0; i < requiredParamCount; i++) {
        String paramName = getStringCPEntryValue(dataInStream);
        int flags = dataInStream.readInt();
        BVarSymbol varSymbol = new BVarSymbol(flags, names.fromString(paramName), this.env.pkgSymbol.pkgID,
                invokableType.paramTypes.get(i), invokableSymbol);
        // OPTIONAL flag marks a defaultable parameter
        varSymbol.defaultableParam = ((flags & Flags.OPTIONAL) == Flags.OPTIONAL);
        invokableSymbol.params.add(varSymbol);
    }
    if (dataInStream.readBoolean()) { // rest parameter present
        String paramName = getStringCPEntryValue(dataInStream);
        invokableSymbol.restParam = new BVarSymbol(0, names.fromString(paramName), this.env.pkgSymbol.pkgID,
                invokableType.restType, invokableSymbol);
    }
    // mirror the symbol's parameter metadata onto the type symbol
    BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) invokableType.tsymbol;
    tsymbol.flags = invokableSymbol.flags;
    tsymbol.params = invokableSymbol.params;
    tsymbol.restParam = invokableSymbol.restParam;
    tsymbol.returnType = invokableSymbol.retType;
    boolean hasReceiver = dataInStream.readBoolean();
    if (hasReceiver) {
        // receiver kind byte, type, and name are read and discarded
        dataInStream.readByte();
        readBType(dataInStream);
        getStringCPEntryValue(dataInStream);
    }
}
/**
 * Sets the taint table of the invokable symbol.
 * Layout: an 8-byte total length ({@code <= 0} means no table), then the row
 * and column counts, then per row: a parameter index, the return
 * tainted-status byte, and one tainted-status byte per remaining column.
 *
 * @param invokableSymbol Invokable symbol
 * @param dataInStream Input stream
 * @throws IOException if reading from the stream fails
 */
private void readTaintTable(BInvokableSymbol invokableSymbol, DataInputStream dataInStream)
        throws IOException {
    long length = dataInStream.readLong();
    if (length <= 0) {
        return;
    }
    int rowCount = dataInStream.readShort();
    int columnCount = dataInStream.readShort();
    invokableSymbol.taintTable = new HashMap<>();
    for (int rowIndex = 0; rowIndex < rowCount; rowIndex++) {
        int paramIndex = dataInStream.readShort();
        // column 0 is the return status; remaining columns are per-parameter
        TaintRecord.TaintedStatus returnTaintedStatus =
                convertByteToTaintedStatus(dataInStream.readByte());
        List<TaintRecord.TaintedStatus> parameterTaintedStatusList = new ArrayList<>();
        for (int columnIndex = 1; columnIndex < columnCount; columnIndex++) {
            parameterTaintedStatusList.add(convertByteToTaintedStatus(dataInStream.readByte()));
        }
        TaintRecord taintRecord = new TaintRecord(returnTaintedStatus, parameterTaintedStatusList);
        invokableSymbol.taintTable.put(paramIndex, taintRecord);
    }
}
// Decodes a serialized taint flag byte into its TaintedStatus enum constant;
// an unknown byte fails fast (NoSuchElementException from the empty Optional).
private TaintRecord.TaintedStatus convertByteToTaintedStatus(byte readByte) {
    return Arrays.stream(TaintRecord.TaintedStatus.values())
            .filter(status -> status.getByteValue() == readByte)
            .findFirst()
            .get();
}
// Reads a CP index from the stream and resolves it to its string constant.
private String getStringCPEntryValue(DataInputStream dataInStream) throws IOException {
    int cpIndex = dataInStream.readInt();
    return ((StringCPEntry) this.env.constantPool[cpIndex]).value;
}
// Resolves an already-known CP index to its string constant.
private String getStringCPEntryValue(int cpIndex) throws IOException {
    return ((StringCPEntry) this.env.constantPool[cpIndex]).value;
}
// Reads a CP index from the stream and resolves it to its integer constant.
private long getIntCPEntryValue(DataInputStream dataInStream) throws IOException {
    int cpIndex = dataInStream.readInt();
    return ((IntegerCPEntry) this.env.constantPool[cpIndex]).value;
}
// Reads a CP index from the stream and resolves it to its byte constant.
private int getByteCPEntryValue(DataInputStream dataInStream) throws IOException {
    int cpIndex = dataInStream.readInt();
    return ((ByteCPEntry) this.env.constantPool[cpIndex]).value;
}
// Reads a CP index from the stream and resolves it to the string form of its
// float constant.
private String getFloatCPEntryValue(DataInputStream dataInStream) throws IOException {
    int cpIndex = dataInStream.readInt();
    return Double.toString(((FloatCPEntry) this.env.constantPool[cpIndex]).value);
}
// Builds a PackageID from the decoded strings, rejecting entries whose org
// name is missing.
private PackageID createPackageID(String orgName, String pkgName, String pkgVersion) {
    if (orgName == null || orgName.isEmpty()) {
        throw new BLangCompilerException("invalid module name '" + pkgName + "' in compiled package file");
    }
    return new PackageID(names.fromString(orgName), names.fromString(pkgName), names.fromString(pkgVersion));
}
/**
 * This class holds compiled package specific information during the symbol enter phase of the compiled package.
 *
 * @since 0.970.0
 */
private static class BIRPackageSymbolEnv {
    // identity of the module being loaded
    PackageID requestedPackageId;
    // repo hierarchy used to resolve this module's imports
    RepoHierarchy repoHierarchy;
    // raw shape-CP bytes, decoded lazily by readBType
    Map<Integer, byte[]> unparsedBTypeCPs = new HashMap<>();
    // package symbol under construction
    BPackageSymbol pkgSymbol;
    // decoded constant pool for this package
    CPEntry[] constantPool;
    List<UnresolvedType> unresolvedTypes;

    BIRPackageSymbolEnv() {
        this.unresolvedTypes = new ArrayList<>();
    }
}
// A type reference that could not be resolved at read time: its signature plus
// a completer callback — presumably invoked once the type becomes available
// (no call site is visible in this part of the file; confirm).
private static class UnresolvedType {
    String typeSig;
    Consumer<BType> completer;

    UnresolvedType(String typeSig, Consumer<BType> completer) {
        this.typeSig = typeSig;
        this.completer = completer;
    }
}
private class BIRTypeReader {
// Tag used for service types in the serialized form; it has no TypeTags entry.
public static final int SERVICE_TYPE_TAG = 51;
// stream over the raw shape bytes of a single CP entry
private DataInputStream inputStream;

public BIRTypeReader(DataInputStream inputStream) {
    this.inputStream = inputStream;
}

// Reads the next type CP index from this reader's stream and resolves it.
private BType readTypeFromCp() throws IOException {
    return readBType(inputStream);
}
/**
 * Decodes one serialized type shape. {@code cpI} is the constant-pool slot the
 * shape came from; composite types (record/object/error) are cached into that
 * slot <em>before</em> their members are read so that recursive references
 * resolve. Returns {@code null} for tags that have no materialisable type.
 */
public BType readType(int cpI) throws IOException {
    byte tag = inputStream.readByte();
    Name name = names.fromString(getStringCPEntryValue(inputStream));
    int flags = inputStream.readInt();
    // an extra int is read and discarded here — TODO confirm its meaning
    // against the BIR type writer
    inputStream.readInt();
    switch (tag) {
        case TypeTags.INT:
            return typeParamAnalyzer.getNominalType(symTable.intType, name, flags);
        case TypeTags.BYTE:
            return typeParamAnalyzer.getNominalType(symTable.byteType, name, flags);
        case TypeTags.FLOAT:
            return typeParamAnalyzer.getNominalType(symTable.floatType, name, flags);
        case TypeTags.DECIMAL:
            return typeParamAnalyzer.getNominalType(symTable.decimalType, name, flags);
        case TypeTags.STRING:
            return typeParamAnalyzer.getNominalType(symTable.stringType, name, flags);
        case TypeTags.BOOLEAN:
            return typeParamAnalyzer.getNominalType(symTable.booleanType, name, flags);
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.XML:
            return symTable.xmlType;
        case TypeTags.TABLE:
            BTableType bTableType = new BTableType(TypeTags.TABLE, null, symTable.tableType.tsymbol);
            bTableType.constraint = readTypeFromCp();
            return bTableType;
        case TypeTags.NIL:
            return symTable.nilType;
        case TypeTags.ANYDATA:
            return typeParamAnalyzer.getNominalType(symTable.anydataType, name, flags);
        case TypeTags.RECORD:
            // owning package, record name, then the record body
            int pkgCpIndex = inputStream.readInt();
            PackageID pkgId = getPackageId(pkgCpIndex);
            String recordName = getStringCPEntryValue(inputStream);
            BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                    names.fromString(recordName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
            recordSymbol.scope = new Scope(recordSymbol);
            BRecordType recordType = new BRecordType(recordSymbol);
            recordSymbol.type = recordType;
            // cache before reading fields so self-referencing fields resolve
            compositeStack.push(recordType);
            addShapeCP(recordType, cpI);
            recordType.sealed = inputStream.readBoolean();
            recordType.restFieldType = readTypeFromCp();
            int recordFields = inputStream.readInt();
            for (int i = 0; i < recordFields; i++) {
                String fieldName = getStringCPEntryValue(inputStream);
                int fieldFlags = inputStream.readInt();
                byte[] docBytes = readDocBytes(inputStream);
                BType fieldType = readTypeFromCp();
                BVarSymbol varSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName),
                        recordSymbol.pkgID, fieldType, recordSymbol.scope.owner);
                defineMarkDownDocAttachment(varSymbol, docBytes);
                BField structField = new BField(varSymbol.name, null, varSymbol);
                recordType.fields.add(structField);
                recordSymbol.scope.define(varSymbol.name, varSymbol);
            }
            boolean isInitAvailable = inputStream.readByte() == 1;
            if (isInitAvailable) {
                // read the record's initializer function and attach it
                String recordInitFuncName = getStringCPEntryValue(inputStream);
                int recordInitFuncFlags = inputStream.readInt();
                BInvokableType recordInitFuncType = (BInvokableType) readTypeFromCp();
                Name initFuncName = names.fromString(recordInitFuncName);
                boolean isNative = Symbols.isFlagOn(recordInitFuncFlags, Flags.NATIVE);
                BInvokableSymbol recordInitFuncSymbol =
                        Symbols.createFunctionSymbol(recordInitFuncFlags,
                                initFuncName, env.pkgSymbol.pkgID, recordInitFuncType,
                                env.pkgSymbol, isNative);
                recordInitFuncSymbol.retType = recordInitFuncType.retType;
                recordSymbol.initializerFunc = new BAttachedFunction(initFuncName, recordInitFuncSymbol,
                        recordInitFuncType);
                recordSymbol.scope.define(initFuncName, recordInitFuncSymbol);
            }
            Object poppedRecordType = compositeStack.pop();
            assert poppedRecordType == recordType;
            // a record owned by another module is resolved from that module's
            // environment rather than the locally reconstructed shape
            if (pkgId.equals(env.pkgSymbol.pkgID)) {
                return recordType;
            }
            BPackageSymbol pkgSymbol = packageLoader.loadPackageSymbol(pkgId, null, null);
            SymbolEnv pkgEnv = symTable.pkgEnvMap.get(pkgSymbol);
            return symbolResolver.lookupSymbol(pkgEnv, names.fromString(recordName), SymTag.TYPE).type;
        case TypeTags.TYPEDESC:
            BTypedescType typedescType = new BTypedescType(null, symTable.typeDesc.tsymbol);
            typedescType.constraint = readTypeFromCp();
            return typedescType;
        case TypeTags.STREAM:
            BStreamType bStreamType = new BStreamType(TypeTags.STREAM, null, symTable.streamType.tsymbol);
            bStreamType.constraint = readTypeFromCp();
            return bStreamType;
        case TypeTags.MAP:
            BMapType bMapType = new BMapType(TypeTags.MAP, null, symTable.mapType.tsymbol);
            bMapType.constraint = readTypeFromCp();
            return bMapType;
        case TypeTags.INVOKABLE:
            BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
            bInvokableType.flags = flags;
            int paramCount = inputStream.readInt();
            List<BType> paramTypes = new ArrayList<>();
            for (int i = 0; i < paramCount; i++) {
                paramTypes.add(readTypeFromCp());
            }
            bInvokableType.paramTypes = paramTypes;
            if (inputStream.readBoolean()) { // rest type present
                bInvokableType.restType = readTypeFromCp();
            }
            bInvokableType.retType = readTypeFromCp();
            return bInvokableType;
        case TypeTags.ANY:
            return typeParamAnalyzer.getNominalType(symTable.anyType, name, flags);
        case TypeTags.HANDLE:
            return symTable.handleType;
        case TypeTags.ENDPOINT:
            // unsupported tag: falls through to return null
            break;
        case TypeTags.ARRAY:
            byte state = inputStream.readByte();
            int size = inputStream.readInt();
            BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.asMask(EnumSet
                    .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
            BArrayType bArrayType = new BArrayType(null, arrayTypeSymbol, size, BArrayState.valueOf(state));
            bArrayType.eType = readTypeFromCp();
            return bArrayType;
        case TypeTags.UNION:
            BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet
                    .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
            // linked set preserves the serialized member order
            BUnionType unionType = BUnionType.create(unionTypeSymbol,
                    new LinkedHashSet<>());
            int unionMemberCount = inputStream.readInt();
            for (int i = 0; i < unionMemberCount; i++) {
                unionType.add(readTypeFromCp());
            }
            return unionType;
        case TypeTags.PACKAGE:
            break;
        case TypeTags.NONE:
            return symTable.noType;
        case TypeTags.VOID:
            break;
        case TypeTags.XMLNS:
            break;
        case TypeTags.ANNOTATION:
            break;
        case TypeTags.SEMANTIC_ERROR:
            break;
        case TypeTags.ERROR:
            BTypeSymbol errorSymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.EMPTY,
                    env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
            BErrorType errorType = new BErrorType(errorSymbol);
            // cache before reading reason/detail so self-references resolve
            addShapeCP(errorType, cpI);
            compositeStack.push(errorType);
            pkgCpIndex = inputStream.readInt();
            pkgId = getPackageId(pkgCpIndex);
            String errorName = getStringCPEntryValue(inputStream);
            BType reasonType = readTypeFromCp();
            BType detailsType = readTypeFromCp();
            errorType.reasonType = reasonType;
            errorType.detailType = detailsType;
            errorSymbol.type = errorType;
            errorSymbol.pkgID = pkgId;
            errorSymbol.name = names.fromString(errorName);
            Object poppedErrorType = compositeStack.pop();
            assert poppedErrorType == errorType;
            // native errors outside the annotations module map to the generic
            // built-in error type
            if (!env.pkgSymbol.pkgID.equals(PackageID.ANNOTATIONS)
                    && Symbols.isFlagOn(flags, Flags.NATIVE)) {
                return symTable.errorType;
            }
            return errorType;
        case TypeTags.ITERATOR:
            break;
        case TypeTags.TUPLE:
            BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet
                    .of(Flag.PUBLIC)), Names.EMPTY, env.pkgSymbol.pkgID, null, env.pkgSymbol.owner);
            BTupleType bTupleType = new BTupleType(tupleTypeSymbol, null);
            int tupleMemberCount = inputStream.readInt();
            List<BType> tupleMemberTypes = new ArrayList<>();
            for (int i = 0; i < tupleMemberCount; i++) {
                tupleMemberTypes.add(readTypeFromCp());
            }
            bTupleType.tupleTypes = tupleMemberTypes;
            return bTupleType;
        case TypeTags.FUTURE:
            BFutureType bFutureType = new BFutureType(TypeTags.FUTURE, null, symTable.futureType.tsymbol);
            bFutureType.constraint = readTypeFromCp();
            return bFutureType;
        case TypeTags.INTERMEDIATE_COLLECTION:
            break;
        case TypeTags.FINITE:
            String finiteTypeName = getStringCPEntryValue(inputStream);
            int finiteTypeFlags = inputStream.readInt();
            BTypeSymbol symbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteTypeFlags,
                    names.fromString(finiteTypeName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
            symbol.scope = new Scope(symbol);
            BFiniteType finiteType = new BFiniteType(symbol);
            symbol.type = finiteType;
            int valueSpaceSize = inputStream.readInt();
            for (int i = 0; i < valueSpaceSize; i++) {
                defineValueSpace(inputStream, finiteType, this);
            }
            return finiteType;
        case TypeTags.OBJECT:
            boolean service = inputStream.readByte() == 1;
            pkgCpIndex = inputStream.readInt();
            pkgId = getPackageId(pkgCpIndex);
            String objName = getStringCPEntryValue(inputStream);
            int objFlags = (inputStream.readBoolean() ? Flags.ABSTRACT : 0) | Flags.PUBLIC;
            objFlags = inputStream.readBoolean() ? objFlags | Flags.CLIENT : objFlags;
            BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) Symbols.createObjectSymbol(objFlags,
                    names.fromString(objName), env.pkgSymbol.pkgID, null, env.pkgSymbol);
            objectSymbol.scope = new Scope(objectSymbol);
            objectSymbol.methodScope = new Scope(objectSymbol);
            BObjectType objectType;
            // service objects use the dedicated BServiceType subclass
            if (service) {
                objectType = new BServiceType(objectSymbol);
            } else {
                objectType = new BObjectType(objectSymbol);
            }
            objectSymbol.type = objectType;
            // cache before reading members so self-references resolve
            addShapeCP(objectType, cpI);
            compositeStack.push(objectType);
            int fieldCount = inputStream.readInt();
            for (int i = 0; i < fieldCount; i++) {
                String fieldName = getStringCPEntryValue(inputStream);
                int fieldFlags = inputStream.readInt();
                byte[] docBytes = readDocBytes(inputStream);
                BType fieldType = readTypeFromCp();
                BVarSymbol objectVarSymbol = new BVarSymbol(fieldFlags, names.fromString(fieldName),
                        objectSymbol.pkgID, fieldType, objectSymbol.scope.owner);
                defineMarkDownDocAttachment(objectVarSymbol, docBytes);
                BField structField = new BField(objectVarSymbol.name, null, objectVarSymbol);
                objectType.fields.add(structField);
                objectSymbol.scope.define(objectVarSymbol.name, objectVarSymbol);
            }
            // constructors and attached functions are skipped here —
            // presumably attached later when type-def bodies are read; confirm
            boolean generatedConstructorPresent = inputStream.readBoolean();
            if (generatedConstructorPresent) {
                ignoreAttachedFunc();
            }
            boolean constructorPresent = inputStream.readBoolean();
            if (constructorPresent) {
                ignoreAttachedFunc();
            }
            int funcCount = inputStream.readInt();
            for (int i = 0; i < funcCount; i++) {
                ignoreAttachedFunc();
            }
            Object poppedObjType = compositeStack.pop();
            assert poppedObjType == objectType;
            if (pkgId.equals(env.pkgSymbol.pkgID)) {
                return objectType;
            }
            // object owned by another module: resolve the canonical symbol there
            pkgSymbol = packageLoader.loadPackageSymbol(pkgId, null, null);
            pkgEnv = symTable.pkgEnvMap.get(pkgSymbol);
            return symbolResolver.lookupSymbol(pkgEnv, names.fromString(objName), SymTag.TYPE).type;
        case TypeTags.BYTE_ARRAY:
            break;
        case TypeTags.FUNCTION_POINTER:
            break;
        case SERVICE_TYPE_TAG:
            return symTable.anyServiceType;
    }
    return null;
}
// Skips one serialized attached-function entry in the BIR stream without
// materializing it. The three reads below must mirror the writer's layout
// exactly: a string CP entry (presumably the function name -- TODO confirm
// against the BIR writer), an int (presumably flags), and the function type.
private void ignoreAttachedFunc() throws IOException {
    getStringCPEntryValue(inputStream); // string CP entry, discarded
    inputStream.readInt();              // int field, discarded
    readTypeFromCp();                   // function type, discarded
}
}
/**
 * Reads a length-prefixed markdown-documentation blob from the given stream.
 *
 * @param inputStream stream positioned at the int length prefix of the doc bytes
 * @return the raw documentation bytes (possibly empty)
 * @throws IOException if the stream ends before all {@code docLength} bytes arrive
 */
private byte[] readDocBytes(DataInputStream inputStream) throws IOException {
    int docLength = inputStream.readInt();
    byte[] docBytes = new byte[docLength];
    // Bug fix: the previous read(byte[]) may legally return fewer bytes than
    // requested even when more data is pending (e.g. at a buffer boundary),
    // which made the old length check throw spuriously. readFully() blocks
    // until the buffer is filled and throws EOFException (an IOException,
    // already declared) on a genuinely truncated stream.
    inputStream.readFully(docBytes);
    return docBytes;
}
/**
 * Resolves the {@code PackageCPEntry} at the given constant-pool index into a
 * {@code PackageID} (org name / package name / version).
 */
private PackageID getPackageId(int pkgCPIndex) {
    PackageCPEntry entry = (PackageCPEntry) env.constantPool[pkgCPIndex];
    // Each component of the package id is itself an indirect string CP entry.
    String org = ((StringCPEntry) env.constantPool[entry.orgNameCPIndex]).value;
    String pkg = ((StringCPEntry) env.constantPool[entry.pkgNameCPIndex]).value;
    String version = ((StringCPEntry) env.constantPool[entry.versionCPIndex]).value;
    return new PackageID(names.fromString(org), names.fromString(pkg), names.fromString(version));
}
/**
 * Reads one member of a finite type's value space from the BIR stream and adds
 * it to {@code finiteType.valueSpace} as a literal expression.
 *
 * The member's type is read from the constant pool first; how the value itself
 * is encoded then depends on that type's tag, so the reads below must mirror
 * the BIR writer's layout exactly.
 */
private void defineValueSpace(DataInputStream dataInStream, BFiniteType finiteType, BIRTypeReader typeReader)
        throws IOException {
    BType valueType = typeReader.readTypeFromCp();
    BLangLiteral litExpr = createLiteralBasedOnType(valueType);
    switch (valueType.tag) {
        case TypeTags.INT:
            int integerCpIndex = dataInStream.readInt();
            IntegerCPEntry integerCPEntry = (IntegerCPEntry) this.env.constantPool[integerCpIndex];
            litExpr.value = integerCPEntry.value;
            break;
        case TypeTags.BYTE:
            int byteCpIndex = dataInStream.readInt();
            ByteCPEntry byteCPEntry = (ByteCPEntry) this.env.constantPool[byteCpIndex];
            litExpr.value = byteCPEntry.value;
            break;
        case TypeTags.FLOAT:
            int floatCpIndex = dataInStream.readInt();
            FloatCPEntry floatCPEntry = (FloatCPEntry) this.env.constantPool[floatCpIndex];
            // The literal keeps the float in its string form.
            litExpr.value = Double.toString(floatCPEntry.value);
            break;
        case TypeTags.STRING:
        case TypeTags.DECIMAL:
            // Strings and decimals are both serialized as string CP entries.
            litExpr.value = getStringCPEntryValue(dataInStream);
            break;
        case TypeTags.BOOLEAN:
            litExpr.value = dataInStream.readByte() == 1;
            break;
        case TypeTags.NIL:
            // Nil carries no payload; the literal's value stays unset.
            break;
        default:
            throw new UnsupportedOperationException("finite type value is not supported for type: " + valueType);
    }
    litExpr.type = valueType;
    finiteType.valueSpace.add(litExpr);
}
/**
 * Creates an empty literal node appropriate for the given type: a numeric
 * literal for types whose tag is at or below DECIMAL, a plain literal otherwise.
 */
private BLangLiteral createLiteralBasedOnType(BType valueType) {
    if (valueType.tag <= TypeTags.DECIMAL) {
        return (BLangLiteral) TreeBuilder.createNumericLiteralExpression();
    }
    return (BLangLiteral) TreeBuilder.createLiteralExpression();
}
} |
Do we need this `exprs`? It can directly be `arrayLiteral.exprs` | public void visit(BLangFunction funcNode) {
    // NOTE(review): the text before the '|' above is a stray artifact fused onto
    // this line; it refers to an unrelated method and is left untouched.
    // Desugars a function: rewrites params, workers, return type and body; if the
    // function carries a transactions participant annotation, wraps it afterwards.
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    if (!funcNode.interfaceFunction) {
        addReturnIfNotPresent(funcNode);
    }
    // Keep the original symbol; rewriting proceeds on a duplicate.
    funcNode.originalFuncSymbol = funcNode.symbol;
    funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
    funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv);
    funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
    funcNode.workers = rewrite(funcNode.workers, funcEnv);
    if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
        funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
    }
    // Collect participant annotations before the body is rewritten.
    List<BLangAnnotationAttachment> participantAnnotation
            = funcNode.annAttachments.stream()
            .filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value,
                    a.annotationName.value))
            .collect(Collectors.toList());
    funcNode.body = rewrite(funcNode.body, funcEnv);
    funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    if (funcNode.returnTypeNode != null) {
        funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    if (participantAnnotation.isEmpty()) {
        // Not a transaction participant: done.
        result = funcNode;
        return;
    }
    result = desugarParticipantFunction(funcNode, participantAnnotation);
}
/**
 * Wraps a transaction-participant function: the original body is moved into a
 * lambda that is executed through the transaction library's participant-begin
 * function, together with onCommit/onAbort handler lambdas taken from the
 * participant annotation (defaulting to no-op bodies returning nil).
 *
 * @param funcNode              the annotated participant function (mutated in place)
 * @param participantAnnotation matched participant annotations; only the first is used
 * @return the rewritten function
 */
private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
                                                 List<BLangAnnotationAttachment> participantAnnotation) {
    BLangAnnotationAttachment annotation = participantAnnotation.get(0);
    BLangBlockFunctionBody onCommitBody = null;
    BLangBlockFunctionBody onAbortBody = null;
    // Params (and receiver) become closure variables of the generated lambdas.
    funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
    if (funcNode.receiver != null) {
        funcNode.receiver.symbol.closure = true;
    }
    BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
            ASTBuilderUtil.createTypeNode(symTable.nilType));
    BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
            ASTBuilderUtil.createTypeNode(symTable.nilType));
    // Each handler lambda takes the transaction id (a string) as its only parameter.
    BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                    new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                            symTable.stringType, commitFunc.function.symbol));
    BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                    new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                            symTable.stringType, abortFunc.function.symbol));
    BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
    BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
    // Pull the user-supplied oncommit/onabort functions out of the annotation record.
    for (Map.Entry<String, BLangExpression> entry :
            getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) {
        switch (entry.getKey()) {
            case Transactions.TRX_ONCOMMIT_FUNC:
                BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onCommit = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
                                Collections.emptyList(), symResolver);
                BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
                onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt));
                break;
            case Transactions.TRX_ONABORT_FUNC:
                BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onAbort = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
                                Collections.emptyList(), symResolver);
                BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
                onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt));
                break;
        }
    }
    // No user handler supplied: default to a body returning nil.
    if (onCommitBody == null) {
        onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    if (onAbortBody == null) {
        onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    // Finish wiring the handler lambdas: body, parameter, and function type.
    commitFunc.function.body = onCommitBody;
    commitFunc.function.requiredParams.add(onCommitTrxVar);
    commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
            commitFunc.function.symbol.type.getReturnType(), null);
    commitFunc.function.symbol.type = commitFunc.type;
    commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
    abortFunc.function.body = onAbortBody;
    abortFunc.function.requiredParams.add(onAbortTrxVar);
    abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
            abortFunc.function.symbol.type.getReturnType(), null);
    abortFunc.function.symbol.type = abortFunc.type;
    abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
    // Locate the imported transactions module to resolve the participant-begin function.
    BSymbol trxModSym = env.enclPkg.imports
            .stream()
            .filter(importPackage -> importPackage.symbol.
                    pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
                            + Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym),
                    getParticipantFunctionName(funcNode));
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
            getTransactionBlockId());
    // Wrapper lambda holds the original function body; the main lambda invokes it
    // and adapts the result to error|any.
    BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
            Collections.emptyList(),
            funcNode.returnTypeNode,
            funcNode.body);
    for (BLangSimpleVariable var : funcNode.requiredParams) {
        trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos));
    }
    BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
    BLangLambdaFunction trxMainFunc
            = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
                    trxReturnNode, trxMainBody);
    trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv;
    commitFunc.capturedClosureEnv = env.createClone();
    abortFunc.capturedClosureEnv = env.createClone();
    BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
            trxMainWrapperFunc.type, trxMainFunc.function.symbol);
    BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
            trxMainWrapperFunc.type, trxMainWrapperFunc,
            wrapperSym);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
    variableDef.var = wrapperFuncVar;
    BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
            wrapperFuncVar.symbol), env);
    BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
            wrapperFuncVar.symbol,
            trxMainWrapperFunc.function.symbol.retType);
    BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (wrapperInvocation, trxReturnNode.type));
    trxMainWrapperFunc.function.receiver = funcNode.receiver;
    trxMainFunc.function.receiver = funcNode.receiver;
    trxMainBody.stmts.add(wrapperReturn);
    rewrite(trxMainFunc.function, env);
    // Replace the participant's body with a single statement:
    //   return <T> beginFn(blockId, mainLambda, commitLambda, abortLambda);
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
    BLangInvocation participantInvocation
            = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
                    Collections.emptyList(), symResolver);
    participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
    BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (participantInvocation, funcNode.symbol.retType));
    funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env)));
    return funcNode;
}
/**
 * Selects the transaction-library begin function for a participant: remote
 * (resource) participants and local participants are begun differently.
 */
private Name getParticipantFunctionName(BLangFunction function) {
    boolean isResource = Symbols.isFlagOn(function.symbol.flags, Flags.RESOURCE);
    return isResource ? TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION : TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
@Override
public void visit(BLangResource resourceNode) {
    // Intentional no-op: no desugaring is performed on resource nodes here.
}
// Desugars only the annotation's own attachments; the annotation node itself
// is left as-is (note: result is not set here).
public void visit(BLangAnnotation annotationNode) {
    annotationNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
}
// Desugars the attachment's value expression in place.
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env);
    result = annAttachmentNode;
}
@Override
public void visit(BLangSimpleVariable varNode) {
    // Variable not owned by a function or let scope (i.e. module level): drop the
    // initializer here -- presumably it is moved into init elsewhere; TODO confirm.
    if (((varNode.symbol.owner.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE)
            && (varNode.symbol.owner.tag & SymTag.LET) != SymTag.LET) {
        varNode.expr = null;
        result = varNode;
        return;
    }
    if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
        varNode.typeNode = rewrite(varNode.typeNode, env);
    }
    BLangExpression bLangExpression = rewriteExpr(varNode.expr);
    if (bLangExpression != null) {
        // Insert a conversion when the initializer's type differs from the var's.
        bLangExpression = addConversionExprIfRequired(bLangExpression, varNode.type);
    }
    varNode.expr = bLangExpression;
    varNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = varNode;
}
@Override
public void visit(BLangLetExpression letExpression) {
    // Desugars `let v = ... in expr` into a statement-expression: a block holding
    // all the let-variable definitions plus a temp variable for `expr`, whose
    // reference becomes the result expression. Evaluated in the let's own env.
    SymbolEnv prevEnv = this.env;
    this.env = letExpression.env;
    BLangExpression expr = letExpression.expr;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
        // Destructuring definitions desugar to whole blocks; splice their stmts in.
        if (node.getKind() == NodeKind.BLOCK) {
            blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
        } else {
            blockStmt.addStatement((BLangSimpleVariableDef) node);
        }
    }
    BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
            expr.type, expr, expr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.type = expr.type;
    result = rewrite(stmtExpr, env);
    this.env = prevEnv; // restore the enclosing environment
}
@Override
public void visit(BLangTupleVariable varNode) {
    // Pass-through: tuple variables are desugared at their definition site
    // (see visit(BLangTupleVariableDef)).
    result = varNode;
}
@Override
public void visit(BLangRecordVariable varNode) {
    // Pass-through: record variables are desugared at their definition site
    // (see visit(BLangRecordVariableDef)).
    result = varNode;
}
@Override
public void visit(BLangErrorVariable varNode) {
    // Pass-through: error variables are desugared at their definition site
    // (see visit(BLangErrorVariableDef)).
    result = varNode;
}
@Override
public void visit(BLangBlockStmt block) {
    // Rewrites every statement of the block in a fresh block-level environment.
    SymbolEnv blockEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, blockEnv);
    result = block;
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    // Simple var defs only need their variable (and initializer) rewritten.
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    // Desugars `(a, b, ...rest) = rhs` into a block: a temp any[] "tuple" variable
    // bound to the RHS, one var-def per member built from index accesses on the
    // temp, and a loop copying the remaining elements into the rest variable.
    varDefNode.var = rewrite(varDefNode.var, env);
    BLangTupleVariable tupleVariable = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    tuple.expr = tupleVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = tuple;
    createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
    createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}
/**
 * Emits statements that populate a tuple binding pattern's {@code ...rest}
 * variable: an empty array literal is defined for it, then a foreach over the
 * index range [memberCount, length(rhs)) appends each remaining element.
 * No-op when the pattern has no rest variable.
 */
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        // rest = []
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        // Rest elements start after the explicitly bound members.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        // foreach i in start ... end { rest[rest.length()] = <elem>; }
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Indexing at rest.length() appends to the array.
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    // Desugars `{f1: a, f2: b} = rhs` into a block: a temp map<any> "$map$0"
    // bound to the RHS, then per-field var-defs built from key accesses on it.
    BLangRecordVariable varNode = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
            null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                    runTimeType, this.env.scope.owner));
    mapVariable.expr = varDefNode.var.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    // Desugars `error(reason, ...detail) = rhs` into a block: a temp "$error$"
    // variable bound to the RHS, then reason/detail var-defs built from it.
    BLangErrorVariable errorVariable = varDefNode.errorVariable;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
            symTable.errorType, null, errorVarSymbol);
    error.expr = errorVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = error;
    createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * This method iterates through each member of the tupleVar and creates the relevant var def statements. This
 * method does the check for node kind of each member and calls the related var def creation method.
 *
 * Example:
 * ((string, float), int) ((a, b), c) = tuple
 *
 * (a, b) is again a tuple, so it is a recursive var def creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
    for (int index = 0; index < memberVars.size(); index++) {
        BLangVariable variable = memberVars.get(index);
        // Members are addressed positionally on the temp array.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
        if (NodeKind.VARIABLE == variable.getKind()) {
            createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                    parentIndexAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
            // Nested tuple pattern: recurse with an index access as the new parent.
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
            // Nested record pattern: recurse with a map-typed index access.
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            // Nested error pattern: element type comes from the array's element
            // type when available, defaulting to error.
            BType accessedElemType = symTable.errorType;
            if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                accessedElemType = arrayType.eType;
            }
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
        }
    }
}
/**
 * Overloaded method to handle record variables.
 * This method iterates through each member of the recordVar and creates the relevant var def statements. This
 * method does the check for node kind of each member and calls the related var def creation method.
 *
 * Example:
 * type Foo record {
 *     string name;
 *     (int, string) age;
 *     Address address;
 * };
 *
 * Foo {name: a, age: (b, c), address: d} = {record literal}
 *
 * a is a simple var, so a simple var def will be created.
 *
 * (b, c) is a tuple, so it is a recursive var def creation.
 *
 * d is a record, so it is a recursive var def creation.
 *
 */
private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
    for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
        BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
        // Fields are addressed by key (a string index) on the temp map.
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            // Nested tuple pattern: recurse with an array-typed key access.
            BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            // Nested record pattern: recurse with a map-typed key access.
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
            continue;
        }
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            // Nested error pattern: recurse with an access typed as the pattern.
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
        }
    }
    if (parentRecordVariable.restParam != null) {
        // `...rest`: bind the source map minus the explicitly matched keys.
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested case: materialize the accessed sub-map into "$map$1" first.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                    parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                            this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // Top level: the first statement of the block defines the temp map.
            variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
        }
        List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
        BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                parentBlockStmt);
        restParamVarDef.var = restParam;
        restParamVarDef.var.type = restParamType;
        restParam.expr = varRef;
    }
}
/**
 * This method will create the relevant var def statements for reason and details of the error variable.
 * The var def statements are created by creating the reason() and detail() builtin methods.
 *
 * @param parentErrorVariable    the error binding pattern being destructured
 * @param parentBlockStmt        block the generated statements are appended to
 * @param errorVariableSymbol    symbol holding the matched error value
 * @param parentIndexBasedAccess non-null when the error was reached through a
 *                               container access (nested pattern)
 */
private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
    BVarSymbol convertedErrorVarSymbol;
    if (parentIndexBasedAccess != null) {
        // The error came out of an `any`-typed container access; cast it to
        // error into a fresh "$error$N" temp first.
        BType prevType = parentIndexBasedAccess.type;
        parentIndexBasedAccess.type = symTable.anyType;
        BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                parentErrorVariable.pos);
        parentIndexBasedAccess.type = prevType;
        parentBlockStmt.addStatement(errorVarDef);
        convertedErrorVarSymbol = errorVarDef.var.symbol;
    } else {
        convertedErrorVarSymbol = errorVariableSymbol;
    }
    // reason binding: `reason = err.reason()` unless the pattern uses `_`.
    parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
    if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
        parentErrorVariable.reason = null;
    } else {
        BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
        reasonVariableDef.var = parentErrorVariable.reason;
    }
    if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
        return;
    }
    // NOTE: a `detailMapType` local previously computed here was never read;
    // removed as dead code.
    // detail binding: materialize `err.detail()` into a "$error$detail" temp.
    parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos,
            convertedErrorVarSymbol, null);
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
    detailTempVarDef.type = parentErrorVariable.detailExpr.type;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    // One var-def per explicitly bound detail entry.
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
        BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
        createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
    }
    // `...rest` detail binding: the detail map minus the matched keys.
    if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
        DiagnosticPos pos = parentErrorVariable.restDetail.pos;
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
        List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                parentErrorVariable.restDetail.type, parentBlockStmt);
        BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value,
                filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
        parentBlockStmt.addStatement(assignmentStmt);
    }
    rewrite(parentBlockStmt, env);
}
/**
 * Defines a fresh "$cast$temp$" variable of {@code targetType} initialized from
 * the given error symbol, inserting a conversion expression for non-record
 * targets (record targets take the reference as-is).
 */
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                     BType targetType) {
    BLangSimpleVarRef sourceRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    BLangExpression initExpr = targetType.tag == TypeTags.RECORD
            ? sourceRef
            : addConversionExprIfRequired(sourceRef, targetType);
    BVarSymbol castTempSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVariable castTempVar = ASTBuilderUtil.createVariable(pos, castTempSym.name.value, targetType,
            initExpr, castTempSym);
    return ASTBuilderUtil.createVariableDef(pos, castTempVar);
}
/**
 * Builds the statement chain that produces a rest-binding map: takes the source
 * map, gets its entries(), filters out the explicitly bound keys, maps the
 * surviving entries back to values, and constructs a map of {@code targetType}.
 * Each intermediate result is pinned in a named temp appended to the block;
 * the final temp variable is returned.
 */
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                               List<String> keysToRemove, BType targetType,
                                               BLangBlockStmt parentBlockStmt) {
    BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
    // Unique suffix so nested rest-filters don't collide.
    int restNum = annonVarCount++;
    String name = "$map$ref$" + restNum;
    BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
    BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
    String entriesVarName = "$map$ref$entries$" + restNum;
    // entries() yields map<(key, value)> pairs.
    BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
    BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
            addConversionExprIfRequired(entriesInvocation, entriesType),
            entriesVarName);
    // filter(): drop entries whose key is explicitly bound elsewhere.
    BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
    BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
    String filteredEntriesName = "$filtered$detail$entries" + restNum;
    BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
            filteredEntriesName);
    String filteredVarName = "$detail$filtered" + restNum;
    // map(): strip the keys again, leaving just the values.
    BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
    BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
    BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
            mapInvocation,
            filteredVarName);
    String filteredRestVarName = "$restVar$" + restNum;
    BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
    return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType),
            filteredRestVarName);
}
/**
 * Builds a lang-lib {@code entries()} invocation on the given expression.
 */
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
    BLangInvocation entriesCall = createInvocationNode("entries", new ArrayList<>(), type);
    entriesCall.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
    entriesCall.expr = expr;
    entriesCall.requiredArgs = Lists.of(expr);
    entriesCall.type = entriesCall.symbol.type.getReturnType();
    entriesCall.langLibInvocation = true;
    return entriesCall;
}
/**
 * Builds {@code filteredVar.map(backToMapLambda)} as a lang-lib invocation.
 */
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    BLangInvocation mapCall = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    mapCall.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    mapCall.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    mapCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    mapCall.type = mapCall.symbol.type.getReturnType();
    // The projection lambda is passed as the second required argument.
    mapCall.requiredArgs.add(backToMapLambda);
    return mapCall;
}
/**
 * Creates the lambda {@code function ([string, any] kv) returns any { any val = kv[1]; return val; }}
 * used to project filtered [key, value] entries back to their values. The
 * generated function is rewritten in the current environment and added to the
 * enclosing package before being wrapped in a lambda node.
 */
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
// Single [string, any] tuple parameter.
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
null, keyValSymbol);
function.requiredParams.add(inputParameter);
// Declared return type: any.
BLangValueType anyType = new BLangValueType();
anyType.typeKind = TypeKind.ANY;
anyType.type = symTable.anyType;
function.returnTypeNode = anyType;
BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
function.body = functionBlock;
// val = kv[1]; return val;
BLangIndexBasedAccess indexBasesAccessExpr =
ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
ASTBuilderUtil
.createLiteral(pos, symTable.intType, (long) 1));
BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupSecondElem);
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
// Invokable symbol with signature ([string, any]) -> any.
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
symTable.anyType, null);
function.symbol = functionSymbol;
rewrite(function, env);
env.enclPkg.addFunction(function);
return createLambdaFunction(function, functionSymbol);
}
/**
 * Builds {@code entriesVar.filter(filter)} as a lang-lib invocation.
 */
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
                                                    BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    BLangInvocation filterCall = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
    filterCall.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
    filterCall.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    filterCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
    filterCall.type = filterCall.symbol.type.getReturnType();
    // The predicate lambda is passed as the second required argument.
    filterCall.requiredArgs.add(filter);
    return filterCall;
}
/**
 * Defines a local variable with the given initializer: adds its definition
 * statement to the parent block and registers the symbol in the current scope.
 */
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
                                        BLangExpression expression, String name) {
    Name varName = names.fromString(name);
    BVarSymbol varSymbol = new BVarSymbol(Flags.PUBLIC, varName, env.enclPkg.packageID, varType, env.scope.owner);
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, name, varType, expression, varSymbol);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(pos, variable);
    variableDef.type = varType;
    parentBlockStmt.addStatement(variableDef);
    env.scope.define(varName, variable.symbol);
    return variable;
}
/**
 * Wraps the extracted detail value in the variable-definition statement that
 * matches the binding-pattern kind and appends it to the parent block.
 */
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                          BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                          BLangExpression detailEntryVar) {
    switch (detailEntry.valueBindingPattern.getKind()) {
        case VARIABLE:
            BLangSimpleVariableDef simpleVarDef = createVarDef(
                    ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                    detailEntry.valueBindingPattern.type,
                    detailEntryVar,
                    detailEntry.valueBindingPattern.pos);
            parentBlockStmt.addStatement(simpleVarDef);
            break;
        case RECORD_VARIABLE:
            BLangRecordVariableDef recordVarDef = ASTBuilderUtil.createRecordVariableDef(
                    detailEntry.valueBindingPattern.pos,
                    (BLangRecordVariable) detailEntry.valueBindingPattern);
            recordVarDef.var.expr = detailEntryVar;
            recordVarDef.type = symTable.recordType;
            parentBlockStmt.addStatement(recordVarDef);
            break;
        case TUPLE_VARIABLE:
            // NOTE(review): unlike the record case, no expr is attached here —
            // presumably bound elsewhere; confirm before changing.
            parentBlockStmt.addStatement(ASTBuilderUtil.createTupleVariableDef(
                    detailEntry.valueBindingPattern.pos,
                    (BLangTupleVariable) detailEntry.valueBindingPattern));
            break;
        default:
            break;
    }
}
/**
 * Builds {@code detail[key]} on the temporary detail map for one entry of an
 * error binding pattern.
 */
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression entryAccess = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    // For a plain index access, record the pure type as the original type of
    // the member access.
    if (entryAccess.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) entryAccess).originalType = symTable.pureType;
    }
    return entryAccess;
}
/**
 * Folds the parts of a string template left-to-right into a chain of ADD
 * binary expressions, converting non-string/non-xml parts via toString first.
 * Returns null for an empty part list.
 */
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression result = null;
    for (BLangExpression part : exprs) {
        // Anything that is neither string nor xml must go through toString.
        BLangExpression operand =
                (part.type.tag == TypeTags.STRING || part.type.tag == TypeTags.XML)
                        ? part
                        : getToStringInvocationOnExpr(part);
        if (result == null) {
            result = operand;
        } else {
            // If either side is xml the concatenation is xml; otherwise string.
            boolean xmlConcat = TypeTags.isXMLTypeTag(result.type.tag)
                    || TypeTags.isXMLTypeTag(operand.type.tag);
            BType concatType = xmlConcat ? symTable.xmlType : symTable.stringType;
            result = ASTBuilderUtil.createBinaryExpr(result.pos, result, operand, concatType,
                    OperatorKind.ADD, null);
        }
    }
    return result;
}
/**
 * Builds an invocation of lang.value's toString on the given expression,
 * converting the argument to the function's parameter type if required.
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    // Plain ArrayList instead of double-brace initialization: the anonymous
    // subclass form creates a hidden inner class that pins the enclosing
    // instance (and the captured expression) for the lifetime of the list.
    List<BLangExpression> requiredArgs = new ArrayList<>(1);
    requiredArgs.add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}
/**
 * Builds a detail() lang-lib invocation, either on the parent access
 * expression (when nested) or directly on the error variable.
 */
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression receiver;
    if (parentIndexBasedAccess != null) {
        receiver = parentIndexBasedAccess;
    } else {
        receiver = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    }
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, receiver, new ArrayList<>(), null, pos);
}
/**
 * Builds a reason() lang-lib invocation, either on the parent access
 * expression (when nested) or directly on the error variable.
 */
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    BLangExpression receiver;
    if (parentIndexBasedAccess != null) {
        receiver = parentIndexBasedAccess;
    } else {
        receiver = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    }
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, receiver, new ArrayList<>(), reasonType, pos);
}
/**
 * Builds {@code targetType.constructFrom(source)}: a typedesc receiver plus
 * the source value, typed as {@code targetType|error}.
 */
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
                                                        BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    // The typedesc expression serves both as receiver and first argument.
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    BLangInvocation constructFrom = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
    constructFrom.expr = typedescExpr;
    constructFrom.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
    constructFrom.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
    constructFrom.type = BUnionType.create(null, targetType, symTable.errorType);
    return constructFrom;
}
/**
 * Creates the predicate lambda {@code function ([string, any] kv) returns boolean}
 * that returns false when kv[0] equals any name in {@code toRemoveList} and
 * true otherwise; used by generateRestFilter to drop already-bound keys.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
getStringAnyTupleType(), this.env.scope.owner);
BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
// key = kv[0];
BLangIndexBasedAccess indexBasesAccessExpr =
ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil
.createLiteral(pos, symTable.intType, (long) 0));
BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
indexBasesAccessExpr, pos);
functionBlock.addStatement(tupFirstElem);
// One `if (key == <name>) { return false; }` per name to remove.
for (String toRemoveItem : toRemoveList) {
createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
}
// Trailing `return true;` plus the function's invokable symbol.
BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
return createLambdaFunction(function, functionSymbol);
}
/**
 * Collects the field names bound by the record variable and delegates to the
 * name-list overload to build the filter lambda.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
    List<String> boundFieldNames = recordVariable.variableList.stream()
            .map(keyValue -> keyValue.getKey().getValue())
            .collect(Collectors.toList());
    return createFuncToFilterOutRestParam(boundFieldNames, pos);
}
/**
 * Appends {@code if ((<param> == key)) { return false; }} to the given
 * function body, comparing the string-converted first tuple element against
 * one key that must be filtered out.
 */
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt,
String key) {
BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
// Then-branch: return false;
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
ifStmt.body = ifBlock;
// Condition: parenthesized (converted == "key") equality.
BLangGroupExpr groupExpr = new BLangGroupExpr();
groupExpr.type = symTable.booleanType;
BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
symTable.booleanType, OperatorKind.EQUAL, null);
binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
groupExpr.expression = binaryExpr;
ifStmt.expr = groupExpr;
}
/**
 * Wraps an already-built function and its symbol into a lambda expression
 * node, taking the lambda's type from the symbol.
 */
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambda.function = function;
    lambda.type = functionSymbol.type;
    return lambda;
}
/**
 * Appends the trailing {@code return true;} to the filter function body,
 * builds the function's invokable symbol with signature
 * ([string, any]) -> boolean, rewrites the function, and registers it on the
 * enclosing package. Returns the created symbol.
 */
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
BLangBlockFunctionBody functionBlock) {
BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
// Invokable symbol for the anonymous filter function.
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
new Name(function.name.value),
env.enclPkg.packageID, function.type,
env.enclEnv.enclVarSym, true);
functionSymbol.retType = function.returnTypeNode.type;
functionSymbol.params = function.requiredParams.stream()
.map(param -> param.symbol)
.collect(Collectors.toList());
functionSymbol.scope = env.scope;
functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
getRestType(functionSymbol), symTable.booleanType, null);
function.symbol = functionSymbol;
rewrite(function, env);
env.enclPkg.addFunction(function);
return functionSymbol;
}
/**
 * Attaches the [string, any] tuple parameter to the function, declares a
 * boolean return type, and installs an empty block body for callers to fill.
 */
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                            BVarSymbol keyValSymbol) {
    BLangSimpleVariable param = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
    function.requiredParams.add(param);
    BLangValueType returnType = new BLangValueType();
    returnType.typeKind = TypeKind.BOOLEAN;
    returnType.type = symTable.booleanType;
    function.returnTypeNode = returnType;
    BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = body;
    return body;
}
/**
 * Returns a fresh [string, any] tuple type, used for map key/value entry
 * pairs in the rest-filter desugaring.
 */
private BTupleType getStringAnyTupleType() {
    // Plain list construction instead of double-brace initialization, which
    // creates an anonymous ArrayList subclass holding a reference to the
    // enclosing instance for as long as the type is alive.
    List<BType> memberTypes = new ArrayList<>(2);
    memberTypes.add(symTable.stringType);
    memberTypes.add(symTable.anyType);
    return new BTupleType(memberTypes);
}
/**
 * This method creates a simple variable def and assigns an array expression based on the given indexExpr.
 *
 * case 1: when there is no parent array access expression, but with the indexExpr : 1
 * string s = x[1];
 *
 * case 2: when there is a parent array expression : x[2] and indexExpr : 3
 * string s = x[2][3];
 *
 * case 3: when there is no parent array access expression, but with the indexExpr : name
 * string s = x[name];
 *
 * case 4: when there is a parent map expression : x[name] and indexExpr : fName
 * string s = x[name][fName];
 *
 * case 5: when there is a parent map expression : x[name] and indexExpr : 1
 * string s = x[name][1];
 */
/**
 * Defines the simple variable in the parent block with an index-based access
 * initializer; does nothing for the ignored binding "_".
 */
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    // The `_` binding binds nothing; skip it entirely.
    if (names.fromIdNode(simpleVariable.name) == Names.IGNORE) {
        return;
    }
    final BLangSimpleVariableDef varDefStmt =
            ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos, parentBlockStmt);
    varDefStmt.var = simpleVariable;
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
@Override
public void visit(BLangAssignment assignNode) {
    // Safe navigation on the LHS requires rewriting the whole assignment into
    // a guarded form; delegate and then rewrite the produced statement.
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    // Rewrite the RHS exactly once. The previous code first assigned
    // rewriteExpr(assignNode.expr) and then called rewriteExpr on the already
    // rewritten tree a second time, desugaring the expression twice.
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
// Desugars `(a, b, ...rest) = expr` into a block:
//   any[] tuple = expr; one assignment per member ref; rest handling.
final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
BType runTimeType = new BArrayType(symTable.anyType);
String name = "tuple";
final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
this.env.scope.owner));
tuple.expr = tupleDestructure.expr;
final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos,
blockStmt);
variableDef.var = tuple;
createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
// Copies any remaining members into the ...rest array ref, if present.
createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
result = rewrite(blockStmt, env);
}
/**
 * When the tuple ref carries a ...rest binding, assigns a fresh empty array
 * to it and appends a foreach over the remaining source indices
 * [expressions.size() .. length) that pushes each member onto the rest array.
 * No-op when there is no rest binding.
 */
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
BVarSymbol tupleVarSymbol) {
BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
DiagnosticPos pos = blockStmt.pos;
if (tupleVarRef.restParam != null) {
BLangExpression tupleExpr = tupleDestructure.expr;
// rest = []; start from an empty array literal of the rest param's type.
BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
BArrayType restParamType = (BArrayType) restParam.type;
BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
arrayExpr.type = restParamType;
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = arrayExpr;
// Iterate indices from the first unbound member up to the source length.
BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
startIndexLiteral.value = (long) tupleVarRef.expressions.size();
startIndexLiteral.type = symTable.intType;
BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
getModifiedIntRangeEndExpr(lengthInvocation));
BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
foreach.pos = pos;
foreach.collection = intRangeInvocation;
types.setForeachTypedBindingPatternType(foreach);
// Loop variable $foreach$i over the index range.
final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
"$foreach$i", foreach.varType);
foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
foreach.isDeclaredWithVar = true;
BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
// rest[rest.length()] = tuple[$foreach$i]; appends to the rest array.
BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
createLengthInvocation(pos, restParam));
indexAccessExpr.type = restParamType.eType;
createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
foreach.body = foreachBody;
blockStmt.addStatement(foreach);
}
}
/**
 * Builds {@code collection.length()} via the lang-lib length function.
 */
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    BInvokableSymbol lengthSymbol = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation lengthCall = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSymbol,
            Lists.of(collection), symResolver);
    lengthCall.argExprs = lengthCall.requiredArgs;
    lengthCall.type = lengthSymbol.type.getReturnType();
    return lengthCall;
}
/**
 * This method iterates through each member of the tupleVarRef and creates the relevant var ref assignment
 * statements. It checks the node kind of each member and calls the related var ref creation method.
 *
 * Example:
 * ((a, b), c)) = (tuple)
 *
 * (a, b) is again a tuple, so it is a recursive var ref creation.
 *
 * c is a simple var, so a simple var def will be created.
 *
 */
private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangExpression> expressions = parentTupleVariable.expressions;
for (int index = 0; index < expressions.size(); index++) {
BLangExpression expression = expressions.get(index);
// Leaf references (simple var, field access, index access, xml attribute)
// get a direct `ref = tuple[index]` assignment.
if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
tupleVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested tuple ref: recurse with tuple[index] as the new parent access.
if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
continue;
}
// Nested record ref: recurse, and also register the record type definition
// so it is available at code generation.
if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol,
TypeDefBuilderHelper.createRecordTypeNode(
(BRecordType) recordVarRef.type,
env.enclPkg.packageID, symTable, recordVarRef.pos),
env);
continue;
}
// Nested error ref: recurse with tuple[index] as the new parent access.
if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
(long) index);
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
arrayAccessExpr);
}
}
}
/**
 * This method creates an assignment statement and assigns an array expression based on the given indexExpr.
 *
 */
/**
 * Appends {@code ref = container[indexExpr]} (with a conversion to the ref's
 * type when required) to the parent block; skips the ignored binding "_".
 */
private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
                                              BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                              BLangIndexBasedAccess parentArrayAccessExpr) {
    // `_` on the LHS binds nothing; emit no assignment for it.
    if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName) == Names.IGNORE) {
        return;
    }
    BLangExpression rhs = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
    rhs = addConversionExprIfRequired(rhs, simpleVarRef.type);
    final BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
            parentBlockStmt);
    assignment.varRef = simpleVarRef;
    assignment.expr = rhs;
}
/**
 * Builds {@code container[indexExpr]} typed as any, remembering the declared
 * type as the access's original type; value types additionally get a cast
 * from any back to the declared type.
 */
private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                   BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
    BLangIndexBasedAccess memberAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
            symTable.anyType, tupleVarSymbol, indexExpr);
    memberAccess.originalType = varType;
    if (parentExpr != null) {
        memberAccess.expr = parentExpr;
    }
    if (!types.isValueType(varType)) {
        return memberAccess;
    }
    // Value types need an explicit conversion node from any.
    BLangTypeConversionExpr cast = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    cast.expr = memberAccess;
    cast.type = varType;
    return cast;
}
@Override
public void visit(BLangRecordDestructure recordDestructure) {
    // Desugar `{a, b} = expr` into:
    //   map<any> $map$0 = expr;  followed by one assignment per field ref.
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
    BType mapAnyType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    String mapName = "$map$0";
    BVarSymbol mapSymbol = new BVarSymbol(0, names.fromString(mapName), this.env.scope.owner.pkgID,
            mapAnyType, this.env.scope.owner);
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, mapName,
            mapAnyType, null, mapSymbol);
    mapVariable.expr = recordDestructure.expr;
    final BLangSimpleVariableDef mapVarDef = ASTBuilderUtil.createVariableDefStmt(recordDestructure.pos,
            blockStmt);
    mapVarDef.var = mapVariable;
    createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorDestructure errorDestructure) {
    // Desugar error destructuring by first capturing the RHS in a temporary
    // `$error$` variable, then assigning each bound reference from it.
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
    String tempName = "$error$";
    BVarSymbol errorSymbol = new BVarSymbol(0, names.fromString(tempName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, tempName,
            symTable.errorType, null, errorSymbol);
    errorVar.expr = errorDestructure.expr;
    final BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos,
            blockStmt);
    errorVarDef.var = errorVar;
    createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
 * Creates one assignment per field ref of the record var ref (recursing for
 * nested record/tuple/error refs), then handles the ...rest binding by
 * filtering the already-bound keys out of the source map.
 */
private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
BLangExpression variableReference = varRefKeyValue.variableReference;
// Fields are accessed by name: map["fieldName"].
BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
varRefKeyValue.variableName.getValue());
// Leaf references get a direct assignment.
if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
indexExpr, recordVarSymbol, parentIndexAccessExpr);
continue;
}
// Nested record ref: recurse with map["field"] as the new parent access.
if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested tuple ref: recurse likewise.
if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
symTable.tupleType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
continue;
}
// Nested error ref: recurse likewise.
if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
symTable.errorType, recordVarSymbol, indexExpr);
if (parentIndexAccessExpr != null) {
arrayAccessExpr.expr = parentIndexAccessExpr;
}
createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
arrayAccessExpr);
}
}
// ...rest binding: filter the bound field names out of the source map and
// assign the remainder to the rest param.
if (parentRecordVarRef.restParam != null) {
DiagnosticPos pos = parentBlockStmt.pos;
BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
BLangSimpleVarRef variableReference;
if (parentIndexAccessExpr != null) {
// Nested case: materialize the parent access in a temp $map$1 first.
BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
restParamType, this.env.scope.owner));
mapVariable.expr = parentIndexAccessExpr;
BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
variableDef.var = mapVariable;
variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
} else {
// Top-level case: reuse the block's first statement, the source map def.
variableReference = ASTBuilderUtil.createVariableRef(pos,
((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
}
BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
.map(field -> field.variableName.value)
.collect(Collectors.toList());
BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
keysToRemove, restParamType, parentBlockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
restParamAssignment.varRef = restParam;
restParamAssignment.varRef.type = restParamType;
restParamAssignment.expr = varRef;
}
}
/**
 * Creates the assignments for an error var ref destructuring: the reason via
 * reason(), each named detail entry via detail()["key"], and the ...rest
 * binding via the rest filter over the extracted detail map.
 */
private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
// Assign the reason unless it is bound to the ignored `_` ref.
if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
BLangAssignment reasonAssignment = ASTBuilderUtil
.createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
reasonAssignment.varRef = parentErrorVarRef.reason;
}
// Nothing more to do when there are no detail bindings and no rest binding.
if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
return;
}
// Capture detail() once in a temporary so each entry reads from it.
BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
errorVarySymbol,
parentIndexAccessExpr);
BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
symTable.detailType, errorDetailBuiltinFunction,
parentErrorVarRef.pos);
detailTempVarDef.type = symTable.detailType;
parentBlockStmt.addStatement(detailTempVarDef);
this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
// Assign each named detail entry: ref = detail["name"]. Keys are collected
// so the rest filter can exclude them afterwards.
List<String> extractedKeys = new ArrayList<>();
for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
extractedKeys.add(detail.name.value);
BLangVariableReference ref = (BLangVariableReference) detail.expr;
BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
createStringLiteral(detail.name.pos, detail.name.value),
detailTempVarDef.var.symbol, null);
if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
bLangIndexBasedAccess.originalType = symTable.pureType;
}
BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
detailAssignment.varRef = ref;
detailAssignment.expr = detailEntryVar;
}
// ...rest binding: the detail map minus the keys extracted above.
if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
detailTempVarDef.var.symbol);
BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
extractedKeys,
parentErrorVarRef.restVar.type, parentBlockStmt);
BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
parentBlockStmt);
restAssignment.varRef = parentErrorVarRef.restVar;
restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
filteredDetail.symbol);
}
// Record detail types need an initializer function registered in their
// symbol scope.
BErrorType errorType = (BErrorType) parentErrorVarRef.type;
if (errorType.detailType.getKind() == TypeKind.RECORD) {
BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
tsymbol.initializerFunc = createRecordInitFunc();
tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
}
}
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
    // No rest binding at all counts as ignored.
    if (parentErrorVarRef.restVar == null) {
        return true;
    }
    // Only a simple variable reference can be the explicit `_` ignore binding.
    if (parentErrorVarRef.restVar.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
        return false;
    }
    return ((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value);
}
@Override
public void visit(BLangAbort abortNode) {
    // Inside a desugared transaction function, `abort` becomes `return -1`
    // (-1 is the "abort" status code; see the transaction desugaring comment).
    result = rewrite(ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L), env);
}
@Override
public void visit(BLangRetry retryNode) {
    // Inside a desugared transaction function, `retry` becomes `return 1`
    // (1 is the "retry" status code; see the transaction desugaring comment).
    result = rewrite(ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L), env);
}
@Override
public void visit(BLangContinue nextNode) {
    // `continue` needs no desugaring; pass the node through unchanged.
    result = nextNode;
}
@Override
public void visit(BLangBreak breakNode) {
    // `break` needs no desugaring; pass the node through unchanged.
    result = breakNode;
}
@Override
public void visit(BLangReturn returnNode) {
    // Desugar the returned expression if present; a bare `return` has none.
    BLangExpression returnedExpr = returnNode.expr;
    if (returnedExpr != null) {
        returnNode.expr = rewriteExpr(returnedExpr);
    }
    result = returnNode;
}
@Override
public void visit(BLangPanic panicNode) {
    // Desugar the panicked expression; the panic node itself is kept.
    panicNode.expr = rewriteExpr(panicNode.expr);
    result = panicNode;
}
@Override
public void visit(BLangXMLNSStatement xmlnsStmtNode) {
    // Desugar the wrapped XMLNS declaration in place.
    xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
    result = xmlnsStmtNode;
}
@Override
public void visit(BLangXMLNS xmlnsNode) {
    // Split XMLNS declarations into local vs. package-level variants based on
    // the owning symbol: function/service scope yields a local declaration.
    xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
    BSymbol owner = xmlnsNode.symbol.owner;
    boolean localScope = (owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE
            || (owner.tag & SymTag.SERVICE) == SymTag.SERVICE;
    BLangXMLNS desugaredXmlns = localScope ? new BLangLocalXMLNS() : new BLangPackageXMLNS();
    desugaredXmlns.namespaceURI = xmlnsNode.namespaceURI;
    desugaredXmlns.prefix = xmlnsNode.prefix;
    desugaredXmlns.symbol = xmlnsNode.symbol;
    result = desugaredXmlns;
}
// Desugars `lhs op= rhs` into `lhs = lhs op rhs`. For index-based LHS
// (possibly nested, e.g. a[i][j] += x), each index expression is hoisted into
// a temp variable first so it is evaluated exactly once, then the access
// chain is rebuilt over the temps.
public void visit(BLangCompoundAssignment compoundAssignment) {
    BLangVariableReference varRef = compoundAssignment.varRef;
    // Simple (non-indexed) LHS: emit a plain assignment of the pre-computed
    // `modifiedExpr` (lhs op rhs).
    if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
            varRef.lhsVar = true;
        }
        result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                rewriteExpr(compoundAssignment.modifiedExpr));
        return;
    }
    List<BLangStatement> statements = new ArrayList<>();
    List<BLangSimpleVarRef> varRefs = new ArrayList<>();
    List<BType> types = new ArrayList<>();
    // Walk the index-access chain outermost-first, hoisting each index
    // expression into `$tempN$`. Inserting at position 0 preserves
    // innermost-to-outermost order in the collected lists.
    do {
        BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
                ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
                compoundAssignment.pos);
        BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                tempIndexVarDef.var.symbol);
        statements.add(0, tempIndexVarDef);
        varRefs.add(0, tempVarRef);
        types.add(0, varRef.type);
        varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
    } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
    // Rebuild the access chain from the root reference using the temp refs.
    BLangVariableReference var = varRef;
    for (int ref = 0; ref < varRefs.size(); ref++) {
        var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
        var.type = types.get(ref);
    }
    var.type = compoundAssignment.varRef.type;
    // `lhs op rhs` as the RHS of the generated assignment.
    BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
            compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
    rhsExpression.type = compoundAssignment.modifiedExpr.type;
    BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
            rhsExpression);
    statements.add(assignStmt);
    BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
    result = rewrite(bLangBlockStmt, env);
}
@Override
public void visit(BLangExpressionStmt exprStmtNode) {
    // Desugar the wrapped expression; the statement node itself is kept.
    exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
    result = exprStmtNode;
}
@Override
public void visit(BLangIf ifNode) {
    // Desugar condition, then-body and (possibly null) else branch in place.
    ifNode.expr = rewriteExpr(ifNode.expr);
    ifNode.body = rewrite(ifNode.body, env);
    ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
    result = ifNode;
}
@Override
public void visit(BLangMatch matchStmt) {
    // Desugars a match statement into:
    //   { <tmp> = <match-expr>; <if/else chain over the patterns> }
    BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
    matchBlockStmt.pos = matchStmt.pos;
    // Temp variable so the matched expression is evaluated exactly once.
    String matchExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
            matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0,
            names.fromString(matchExprVarName),
            this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
    BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
    matchBlockStmt.stmts.add(matchExprVarDef);
    // The pattern clauses become a nested if/else chain testing the temp.
    matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
    rewrite(matchBlockStmt, this.env);
    result = matchBlockStmt;
}
@Override
public void visit(BLangForeach foreach) {
    // Desugars `foreach` into a while loop driven by the collection's
    // iterator. `$data$` holds the collection so it is evaluated only once.
    BLangBlockStmt blockNode;
    BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
            foreach.collection.type, this.env.scope.owner);
    BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
            foreach.collection.type, foreach.collection, dataSymbol);
    BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
    BVarSymbol collectionSymbol = dataVariable.symbol;
    switch (foreach.collection.type.tag) {
        // Built-in iterable types use the lang-lib iterator function.
        case TypeTags.STRING:
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
        case TypeTags.XML:
        case TypeTags.MAP:
        case TypeTags.STREAM:
        case TypeTags.RECORD:
            BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, true);
            break;
        // Iterable objects supply their own iterator() method.
        // (Note: reuses the `iteratorSymbol` declared in the case group above.)
        case TypeTags.OBJECT:
            iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
            blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, false);
            break;
        default:
            // Non-iterable collection type: keep only the $data$ definition.
            blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
            blockNode.stmts.add(0, dataVarDef);
            break;
    }
    rewrite(blockNode, this.env);
    result = blockNode;
}
// Builds the foreach desugaring with an explicit iterator:
// prepends the `$data$` definition to the while-loop block produced from the
// iterator variable definition.
private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                     BLangSimpleVariableDef dataVariableDefinition,
                                                     BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVariableDef iteratorDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
            iteratorInvokableSymbol, isIteratorFuncFromLangLib);
    BLangBlockStmt whileBlock = desugarForeachToWhile(foreach, iteratorDef);
    whileBlock.stmts.add(0, dataVariableDefinition);
    return whileBlock;
}
// Locates the user-defined iterator function on an iterable object type.
// NOTE: matches the original behavior of dereferencing without a null check —
// an NPE results if the object has no such attached function.
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
    BAttachedFunction iteratorFunc = null;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            iteratorFunc = attachedFunc;
            break;
        }
    }
    return iteratorFunc.symbol;
}
// Resolves the lang-lib `iterator` function for the collection's type.
BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
    return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
}
// Turns a foreach into:
//   <varDef>                               // $iterator$
//   $result$ = $iterator$.next();
//   while ($result$ is <result record type>) {
//       <user var> = $result$.value;       // inserted at index 0
//       $result$ = $iterator$.next();      // inserted at index 1
//       <original foreach body>
//   }
private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
    BVarSymbol iteratorSymbol = varDef.var.symbol;
    BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
            foreach.nillableResultType, this.env.scope.owner);
    // `$result$ = $iterator$.next()` seeded before the loop.
    BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
            foreach.nillableResultType, iteratorSymbol, resultSymbol);
    // Loop condition: `$result$ is <non-nil result type>` (nil terminates).
    BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
    BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
            .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
    BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
    whileNode.pos = foreach.pos;
    whileNode.expr = typeTestExpr;
    whileNode.body = foreach.body;
    BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
    VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
    // Bind the user's loop variable to `$result$.value` (nil-stripped first).
    BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
    valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
            types.getSafeType(valueAccessExpr.expr.type, true, false));
    variableDefinitionNode.getVariable()
            .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
    whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
    whileNode.body.stmts.add(1, resultAssignment);
    BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
    blockNode.addStatement(varDef);
    blockNode.addStatement(resultVariableDefinition);
    blockNode.addStatement(whileNode);
    return blockNode;
}
// Wraps an already-resolved BType in a user-defined-type AST node whose
// package-alias and type-name identifiers are left empty.
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType typeNode = new BLangUserDefinedType(
            ASTBuilderUtil.createIdentifier(null, ""),
            ASTBuilderUtil.createIdentifier(null, ""));
    typeNode.type = type;
    return typeNode;
}
@Override
public void visit(BLangWhile whileNode) {
    // Desugar condition and body in place.
    whileNode.expr = rewriteExpr(whileNode.expr);
    whileNode.body = rewrite(whileNode.body, env);
    result = whileNode;
}
@Override
public void visit(BLangLock lockNode) {
    // Desugars `lock { body }` into:
    //   lock;
    //   $errorResult = trap { body; () };   // capture any panic as an error
    //   unlock;                             // always release before rethrow
    //   if $errorResult is error { panic $errorResult; }
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    enclLocks.push(lockStmt);
    // Wrap the body in a statement-expression yielding nil so it can be trapped.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.type = symTable.nilType;
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.type = nillableError;
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
            this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
            nillableError, trapExpr, nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Unlock happens unconditionally, before any rethrow of a trapped panic.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    blockStmt.addStatement(unLockStmt);
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    // Re-panic the trapped error so lock failures still propagate.
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
}
@Override
public void visit(BLangLockStmt lockStmt) {
    // Low-level lock statement produced by the lock desugaring; pass through.
    result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    // Low-level unlock statement produced by the lock desugaring; pass through.
    result = unLockStmt;
}
@Override
public void visit(BLangTransaction transactionNode) {
    // Desugars a transaction block into four lambdas (main / onRetry /
    // committed / aborted) handed to the transaction package's initiator
    // function, invoked as an expression statement.
    DiagnosticPos pos = transactionNode.pos;
    BType trxReturnType = symTable.intType;
    BType otherReturnType = symTable.nilType;
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
    DiagnosticPos invPos = transactionNode.pos;
    /* transaction block code will be desugar to function which returns int. Return value determines the status of
    the transaction code.
    ex.
        0 = successful
        1 = retry
        -1 = abort
    Since transaction block code doesn't return anything, we need to add return statement at end of the
    block unless we have abort or retry statement.
    */
    DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
            invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
    BLangStatement statement = null;
    if (!transactionNode.transactionBody.stmts.isEmpty()) {
        statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
    }
    // FIX: the original condition tested NodeKind.ABORT twice, so a body
    // ending in `retry` still got an unreachable `return 0` appended. Per the
    // comment above, both abort and retry suppress the implicit return.
    if (statement == null || (statement.getKind() != NodeKind.ABORT
            && statement.getKind() != NodeKind.RETRY)) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
        transactionNode.transactionBody.addStatement(returnStmt);
    }
    // Default the optional clauses to empty blocks, and retry count to 3.
    if (transactionNode.abortedBody == null) {
        transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.committedBody == null) {
        transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.onRetryBody == null) {
        transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
    }
    if (transactionNode.retryCount == null) {
        transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
    }
    // Wrap each clause in a lambda capturing the enclosing environment.
    BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(),
            trxReturnNode, transactionNode.transactionBody.stmts,
            env, transactionNode.transactionBody.scope);
    BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$", Collections.emptyList(),
            otherReturnNode, transactionNode.onRetryBody.stmts,
            env, transactionNode.onRetryBody.scope);
    BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
            Collections.emptyList(), otherReturnNode,
            transactionNode.committedBody.stmts, env,
            transactionNode.committedBody.scope);
    BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$", Collections.emptyList(),
            otherReturnNode, transactionNode.abortedBody.stmts,
            env, transactionNode.abortedBody.scope);
    trxMainFunc.capturedClosureEnv = env.createClone();
    trxOnRetryFunc.capturedClosureEnv = env.createClone();
    trxCommittedFunc.capturedClosureEnv = env.createClone();
    trxAbortedFunc.capturedClosureEnv = env.createClone();
    // Resolve ballerina/transaction's initiator-begin function and call it
    // with (blockId, retryCount, main, onRetry, committed, aborted).
    PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
    BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
                    TRX_INITIATOR_BEGIN_FUNCTION);
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
            getTransactionBlockId());
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
            trxOnRetryFunc,
            trxCommittedFunc, trxAbortedFunc);
    BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
            requiredArgs,
            Collections.emptyList(),
            symResolver);
    BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
    stmt.expr = trxInvocation;
    result = rewrite(stmt, env);
}
// Produces a unique per-package transaction-block id:
// <org>$<package>$<running index>.
private String getTransactionBlockId() {
    PackageID packageId = env.enclPkg.packageID;
    String blockId = packageId.orgName + "$" + packageId.name + "$" + transactionIndex;
    transactionIndex++;
    return blockId;
}
// Creates a lambda wrapping a freshly defined function with the given
// parameters, return type and body, and registers it in the enclosing package.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    defineFunction(func, env.enclPkg);
    // Re-read the params after defineFunction so the parameter symbols used
    // below are the ones attached during symbol definition.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    // Mark not-yet-desugared so the function body gets rewritten later.
    func.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
            null);
    return lambdaFunction;
}
// Builds a block function body in the given scope, desugars its statements,
// then delegates to the body-based createLambdaFunction overload.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 List<BLangSimpleVariable> lambdaFunctionVariable,
                                                 TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                 SymbolEnv env, Scope trxScope) {
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    funcBody.scope = trxScope;
    SymbolEnv funcBodyEnv = SymbolEnv.createFuncBodyEnv(funcBody, env);
    funcBody.stmts = rewriteStmt(fnBodyStmts, funcBodyEnv);
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, funcBody);
}
// Creates a parameterless, bodiless lambda skeleton; callers attach the body
// and type afterwards.
private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction function = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
    lambda.function = function;
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    defineFunction(function, env.enclPkg);
    // Not yet desugared; the body will be rewritten later.
    function.desugared = false;
    lambda.pos = pos;
    return lambda;
}
// Registers a synthetic function in the target package's symbol environment
// and appends it to the package's function and top-level-node lists.
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, pkgEnv);
    pkgEnv.enclPkg.functions.add(funcNode);
    pkgEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // No desugaring performed here; fork/join passes through unchanged.
    result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
    // Byte-array literals (base16/base64 blobs) are expanded into array
    // literals of byte literals; every other literal passes through.
    boolean isByteArrayLiteral = literalExpr.type.tag == TypeTags.ARRAY
            && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE;
    if (isByteArrayLiteral) {
        result = rewriteBlobLiteral(literalExpr);
    } else {
        result = literalExpr;
    }
}
// Decodes a base16/base64 blob literal into raw bytes and replaces it with an
// array literal holding one byte literal per decoded byte.
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] parts = getBlobTextValue((String) literalExpr.value);
    byte[] decoded = BASE_64.equals(parts[0])
            ? Base64.getDecoder().decode(parts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(parts[1]);
    BLangArrayLiteral byteArrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    byteArrayLiteral.type = literalExpr.type;
    byteArrayLiteral.pos = literalExpr.pos;
    byteArrayLiteral.exprs = new ArrayList<>();
    for (byte value : decoded) {
        byteArrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return byteArrayLiteral;
}
// Splits a blob literal such as "base64 `SGVsbG8=`" into
// {format, backtick-delimited payload}, with all spaces removed.
// Uses String.replace (literal) instead of replaceAll (regex) — no pattern is
// needed to strip spaces — and computes the first backtick index once.
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String nodeText = blobLiteralNodeText.replace(" ", "");
    int payloadStart = nodeText.indexOf('`');
    String[] result = new String[2];
    result[0] = nodeText.substring(0, payloadStart);
    result[1] = nodeText.substring(payloadStart + 1, nodeText.lastIndexOf('`'));
    return result;
}
// Decodes a hex string (e.g. "0aff") into its byte values, two hex digits per
// byte. As in the original, an odd-length input throws
// StringIndexOutOfBoundsException on the final unpaired digit.
private static byte[] hexStringToByteArray(String str) {
    int length = str.length();
    byte[] bytes = new byte[length / 2];
    for (int pos = 0; pos < length; pos += 2) {
        int high = Character.digit(str.charAt(pos), 16);
        int low = Character.digit(str.charAt(pos + 1), 16);
        bytes[pos / 2] = (byte) ((high << 4) + low);
    }
    return bytes;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    // Dispatches a list constructor to the concrete literal node matching its
    // inferred type: tuple, JSON array, typedesc reference, or plain array.
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BType listType = listConstructor.type;
    if (listType.tag == TypeTags.TUPLE) {
        result = rewriteExpr(new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listType));
        return;
    }
    if (listType.tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listType)));
        return;
    }
    if (getElementType(listType).tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, listType));
        return;
    }
    if (listType.tag == TypeTags.TYPEDESC) {
        // A typedesc-typed constructor is really a reference to the type.
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    result = rewriteExpr(new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listType));
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    // JSON-typed array literals (or arrays of JSON elements) get their own
    // specialized node; everything else passes through.
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType arrayType = arrayLiteral.type;
    if (arrayType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayType));
    } else if (getElementType(arrayType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayType);
    } else {
        result = arrayLiteral;
    }
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    // A tuple literal used as a typedesc is replaced by a typedesc expression.
    if (tupleLiteral.isTypedescExpr) {
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    // Box each member to `any`, using its implicit-conversion type if one was
    // already attached during type checking.
    for (BLangExpression member : tupleLiteral.exprs) {
        BType memberType = member.impConversionExpr == null ? member.type : member.impConversionExpr.type;
        types.setImplicitCastExpr(member, memberType, symTable.anyType);
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // Ordinary parenthesized expressions desugar to their inner expression;
    // a parenthesized typedesc becomes a typedesc expression.
    if (!groupExpr.isTypedescExpr) {
        result = rewriteExpr(groupExpr.expression);
        return;
    }
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = groupExpr.typedescType;
    typedescExpr.type = symTable.typeDesc;
    result = rewriteExpr(typedescExpr);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Stable-sort fields so computed-key fields come last (false < true),
    // then desugar the mapping constructor.
    List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
    fields.sort((first, second) -> Boolean.compare(isComputedKey(first), isComputedKey(second)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    // Replaces a generic simple var-ref with the specific reference node for
    // the symbol's kind and owner: XML qname, function pointer, type load,
    // local/field/package var ref, or an inlined constant literal.
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // A reference through an XML namespace symbol becomes a qualified name.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.type = symTable.stringType;
        result = qnameExpr;
        return;
    }
    // Unresolved symbol: nothing to desugar.
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol over a derived one when available
    // (presumably set during type narrowing — TODO confirm).
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
        // Function referenced as a value => function pointer ref.
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        // Type referenced as a value (but not a constant) => type load.
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Owned by a function or let clause => local variable.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Simple-literal constants are inlined as literal values.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                        constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
    }
    genVarRefExpr.type = varRefExpr.type;
    genVarRefExpr.pos = varRefExpr.pos;
    // LHS refs (and the `_` ignore variable) keep the declared symbol type
    // and get no conversion expression.
    if ((varRefExpr.lhsVar)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.lhsVar = varRefExpr.lhsVar;
        genVarRefExpr.type = varRefExpr.symbol.type;
        result = genVarRefExpr;
        return;
    }
    genVarRefExpr.lhsVar = varRefExpr.lhsVar;
    // RHS refs may need a conversion from the declared type to the use-site
    // type; if one was added, surface the conversion expression as the result.
    BType targetType = genVarRefExpr.type;
    genVarRefExpr.type = genVarRefExpr.symbol.type;
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Desugars `expr.field` into the concrete access node for the static type
    // of `expr`: struct/record field or attached-method access, lax
    // (JSON/XML) access, map access, or XML attribute/element access.
    if (safeNavigate(fieldAccessExpr)) {
        // Safe-navigation chains are rewritten into explicit checks first.
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = fieldAccessExpr.expr.type;
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
    int varRefTypeTag = varRefType.tag;
    // For a union, only the first member's tag decides (as in the original).
    boolean firstUnionMemberIsStructLike = false;
    if (varRefTypeTag == TypeTags.UNION) {
        int memberTag = ((BUnionType) varRefType).getMemberTypes().iterator().next().tag;
        firstUnionMemberIsStructLike = memberTag == TypeTags.OBJECT || memberTag == TypeTags.RECORD;
    }
    if (varRefTypeTag == TypeTags.OBJECT || varRefTypeTag == TypeTags.RECORD || firstUnionMemberIsStructLike) {
        // FIX(consistency): the original had separate OBJECT and RECORD
        // branches with byte-identical bodies; they are merged here.
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached function, e.g. `obj.method`.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false);
        }
    } else if (types.isLax(varRefType)) {
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                // Lax access on map<xml> needs an error-producing rewrite.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
    targetVarRef.type = fieldAccessExpr.type;
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
// Rewrites lax field access on a map (e.g. map<xml>) into a statement
// expression:
//   $mapAccess = map[key];
//   if $mapAccess is () { $mapAccessResult$ = error("{map}InvalidKey", key=...); }
//   else                { $mapAccessResult$ = $mapAccess; }
//   => $mapAccessResult$   // typed <field type>|error
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType);
    DiagnosticPos pos = fieldAccessExpr.pos;
    // Holds the final <value>|error result of the whole expression.
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.type = fieldAccessType;
    statementExpression.type = fieldAccessType;
    // Raw map access yields <value>|() — nil when the key is absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType);
    mapAccessExpr.type = xmlOrNil;
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    // Missing key: build error("{ballerina/lang.map}InvalidKey", key=<field>).
    BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue("error");
    errorInvocation.name = name;
    errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    errorInvocation.symbol = symTable.errorConstructor;
    errorInvocation.type = symTable.errorType;
    ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>();
    errorInvocation.requiredArgs = errorCtorArgs;
    errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    errorCtorArgs.add(message);
    BLangSimpleVariableDef errorDef =
            createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    // Present key: forward the accessed value into the result variable.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}
// Rewrites lax field access on XML into lang-lib calls: `x._` becomes an
// element-name lookup, any other field an attribute lookup (namespace-
// prefixed fields are expanded to `{uri}local` form first).
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // `x._` accesses the element name rather than an attribute.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    // Second arg tells the lang-lib helper whether this was optional access.
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
// Materializes this access's optional-access flag (`x?.y`) as a desugared
// boolean literal expression.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression optionalFlagLiteral =
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess());
    return rewrite(optionalFlagLiteral, env);
}
// Builds an expanded XML qualified name of the form `{namespaceURI}localName`.
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder expandedName = new StringBuilder();
    expandedName.append('{').append(nsURI).append('}').append(localName);
    return expandedName.toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Index access inside a safe-navigation chain (`a?.b[i]`) gets its own rewrite.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangVariableReference targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    // Remember the container's static type; the rewrite below may change it,
    // in which case a conversion back is inserted.
    BType varRefType = indexAccessExpr.expr.type;
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    // Replace the generic index access with a container-specific access node.
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Records and other mapping subtypes use struct-field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    // Carry over lvalue-ness and the result type to the replacement node.
    targetVarRef.lhsVar = indexAccessExpr.lhsVar;
    targetVarRef.type = indexAccessExpr.type;
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    BLangInvocation genIExpr = iExpr;
    // Error constructor calls get their args normalized (string reason +
    // detail record) before the generic invocation handling below.
    if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
        result = rewriteErrorConstructor(iExpr);
    }
    // Reorder named/defaultable args into positional form, then desugar them.
    reorderArguments(iExpr);
    iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
    fixNonRestArgTypeCastInTypeParamInvocation(iExpr);
    iExpr.restArgs = rewriteExprs(iExpr.restArgs);
    annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID,
            iExpr.symbol.owner, env);
    if (iExpr.functionPointerInvocation) {
        visitFunctionPointerInvocation(iExpr);
        return;
    }
    iExpr.expr = rewriteExpr(iExpr.expr);
    result = genIExpr;
    if (iExpr.expr == null) {
        fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
        if (iExpr.exprSymbol == null) {
            return;
        }
        // Attached call without an explicit receiver expression: synthesize it.
        iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
        iExpr.expr = rewriteExpr(iExpr.expr);
    }
    switch (iExpr.expr.type.tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!iExpr.langLibInvocation) {
                // Method call on an object/record: the receiver becomes the first argument.
                List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
                argExprs.add(0, iExpr.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
                                iExpr.type, iExpr.expr, iExpr.async);
                attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
                attachedFunctionInvocation.name = iExpr.name;
                attachedFunctionInvocation.annAttachments = iExpr.annAttachments;
                result = genIExpr = attachedFunctionInvocation;
            }
            break;
    }
    fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
// For langlib invocations, casts each non-rest argument to its declared
// parameter type. The loop starts at index 1 because requiredArgs.get(0) is
// the receiver the langlib method is invoked on, which needs no cast.
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    for (int i = 1; i < requiredArgs.size(); i++) {
        requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), params.get(i).type));
    }
}
// When the invoked function's return type contains a type parameter (or the
// call is a langlib call), the call site's expected type may differ from the
// declared return type. Reset the invocation to the declared return type and
// wrap it in a cast back to the originally expected type.
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
        BType originalInvType = genIExpr.type;
        genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
        BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
        // If the helper decided a conversion node is needed, use it directly.
        if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
            this.result = expr;
            return;
        }
        // Otherwise still force an explicit conversion node so the runtime
        // observes the originally expected type.
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.expr = genIExpr;
        conversionExpr.targetType = originalInvType;
        conversionExpr.type = originalInvType;
        conversionExpr.pos = genIExpr.pos;
        this.result = conversionExpr;
    }
}
// Normalizes an `error(reason, ...namedArgs)` constructor call so that its
// required args end up as (reason-as-string, readonly detail record): named
// args are folded into the detail record literal and removed from the call.
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
    BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
    // Drop an implicit conversion that would take the reason away from string.
    if (reasonExpr.impConversionExpr != null &&
            reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
        reasonExpr.impConversionExpr = null;
    }
    reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
    reasonExpr = rewriteExpr(reasonExpr);
    // Move the (now string-typed) reason behind the remaining args.
    iExpr.requiredArgs.remove(0);
    iExpr.requiredArgs.add(reasonExpr);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
            ((BErrorType) iExpr.symbol.type).detailType);
    List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
            .filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .collect(Collectors.toList());
    if (namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type);
    } else {
        // Fold each named arg into a key-value field of the detail record and
        // remove it from the invocation's arg list (safe: iterating the copy).
        for (BLangExpression arg : namedArgs) {
            BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (recordLiteral.type.tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
            }
            recordLiteral.fields.add(member);
            iExpr.requiredArgs.remove(arg);
        }
        // The detail record is readonly-cloned so the error value is immutable.
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType);
    }
    iExpr.requiredArgs.add(errorDetail);
    return iExpr;
}
// `new` expressions: streams get their own constructor-call desugaring,
// anything else is an object initialization.
public void visit(BLangTypeInit typeInitExpr) {
    boolean isStreamInit = typeInitExpr.type.tag == TypeTags.STREAM;
    if (isStreamInit) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        return;
    }
    result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
// Desugars `new T(args)` for object types into a statement expression:
//
//   T $obj$ = <new object>;
//   var $temp$ = $obj$.init(args);   // only when init can return error
//   T|error $result$;
//   if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
//   => $result$
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    // For a `T|error` union target, pick out the object member type.
    BType objType = getObjectType(typeInitExpr.type);
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    // If the initializer cannot fail (returns nil), the result is simply $obj$.
    if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.type = objVarRef.symbol.type;
        return stmtExpr;
    }
    // The initializer may return an error: capture its result in $temp$.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
            typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.type = symTable.booleanType;
    // then branch: $result$ = $temp$ (the error).
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    // else branch: $result$ = $obj$.
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = resultVarRef.symbol.type;
    return stmtExpr;
}
// Desugars `new stream<T, E>(iteratorObj)` into a call to the lang.internal
// stream constructor: constructStream(typedesc<T>, iteratorObj).
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    BType targetType = ((BStreamType) typeInitExpr.type).constraint;
    BType errorType = ((BStreamType) typeInitExpr.type).error;
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    // First argument: a typedesc value for the stream's constraint type.
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    // Second argument: the iterator object supplied to `new`.
    BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)),
            symResolver);
    streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null);
    return streamConstructInvocation;
}
// Creates a variable definition of the given type initialized with `expr`,
// reusing a symbol with the plain `name` if one is already in scope, else
// creating a fresh one.
// NOTE(review): the symbol lookup uses `name` as-is, while the variable node
// is named "$" + name + "$" — callers already pass names like "$obj$", which
// yields "$$obj$$"; confirm the doubled-sigil naming is intentional.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr,
            (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(pos);
    objVarDef.var = objVar;
    objVarDef.type = objVar.type;
    return objVarDef;
}
/**
 * Extracts the object type from the given type. For a union (e.g. the
 * {@code T|error} produced by a failable initializer) the first object member
 * is returned, or {@code symTable.noType} if the union contains none.
 *
 * @param type the type appearing in an object init context
 * @return the object type, or {@code symTable.noType} for an object-free union
 * @throws IllegalStateException if the type is neither an object nor a union
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    } else if (type.tag == TypeTags.UNION) {
        return ((BUnionType) type).getMemberTypes().stream()
                .filter(t -> t.tag == TypeTags.OBJECT)
                .findFirst()
                .orElse(symTable.noType);
    }
    // Message grammar fixed: "None object type" -> "Non-object type".
    throw new IllegalStateException("Non-object type '" + type.toString() + "' found in object init context");
}
// Builds a fresh AST type node denoting the built-in `error` type.
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.type = symTable.errorType;
    return errorType;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * Desugar `cond ? thenExpr : elseExpr` into an if-else wrapped in a
     * statement expression:
     *
     * T $ternary_result$;
     * if (cond) {
     *     $ternary_result$ = thenExpr;
     * } else {
     *     $ternary_result$ = elseExpr;
     * }
     * => $ternary_result$
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = ternaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        // `wait a | b | c` arrives as a binary tree; flatten it into a list.
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
// Flattens a wait expression's binary tree into `collected`, left subtree first.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr waitTree, List<BLangExpression> collected) {
    visitBinaryExprOfWait(waitTree.lhsExpr, collected);
    visitBinaryExprOfWait(waitTree.rhsExpr, collected);
    return collected;
}
// Recurses into nested binary nodes of a wait expression; a leaf expression is
// desugared and appended to the collected list.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Desugar whichever side of each key-value pair is present, then replace
    // the whole expression with a wait-literal node.
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    });
    BLangExpression waitLiteral = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type);
    result = rewriteExpr(waitLiteral);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    boolean trappedIsNil = trapExpr.expr.type.tag == TypeTags.NIL;
    if (!trappedIsNil) {
        // A non-nil trapped expression may need a cast up to the trap's
        // (error-including) result type.
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.type);
    }
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Range operators (`...`, `..<`) become an int-range construction; the
    // half-open form first adjusts its end bound so both share one shape.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    // Logical AND/OR need short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // Widen byte operands to int for arithmetic/bitwise ops whose result is int.
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
    // Equality between byte and int operands: cast the byte side up to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    // Same types on both sides: no coercion needed.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        return;
    }
    // string + x: cast x to string for concatenation, unless x is xml — then
    // the string side becomes an xml text literal instead.
    if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    // Numeric promotion: decimal first, then float; cast the other operand up.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
    }
}
// Builds a call to the lang.internal int-range constructor for `lhs ... rhs`.
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
    BInvokableSymbol rangeCtorSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    ArrayList<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(pos, rangeCtorSymbol, rangeArgs, symResolver);
    rangeInvocation.type = symTable.intRangeType;
    return rangeInvocation;
}
// When an int-resulting binary expression has a byte operand, inserts the
// byte-to-int widening conversion on that side. No-op if the expression has
// no typed parent or no byte operand.
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.parent == null || binaryExpr.parent.type == null) {
        return;
    }
    int lhsTag = binaryExpr.lhsExpr.type.tag;
    int rhsTag = binaryExpr.rhsExpr.type.tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.type.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression uses one of the bitwise shift
 * operators ({@code <<}, {@code >>} or {@code >>>}). When it does, both
 * operands are expected to be widened to 'int', e.g.
 * <p>
 * byte a = 12; byte b = 34; int i = 234; int j = -4;
 * <p>
 * int i1 = a &gt;&gt; b; int i2 = a &lt;&lt; i; int i3 = i &gt;&gt; j; ...
 *
 * @param binaryExpr the binary expression to inspect
 * @return true if the operator is a shift operator
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
// `lhs ?: rhs` desugars to a match expression over lhs with a nil-pattern
// clause that substitutes the (already desugared) rhs.
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    BLangExpression desugaredRhs = rewriteExpr(elvisExpr.rhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos, desugaredRhs));
    matchExpr.pos = elvisExpr.pos;
    matchExpr.type = elvisExpr.type;
    result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        // `~x` has no direct runtime operator; desugar it to an XOR expression.
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Desugars a bitwise complement (~) unary expression into an equivalent
 * bitwise XOR binary expression: {@code ~a} becomes {@code a ^ -1} for int
 * operands and {@code a ^ 0xff} for byte operands.
 * Example: ~11110011 -> 00001100, since 11110011 ^ 11111111 -> 00001100.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final DiagnosticPos pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    boolean isByteOperand = TypeTags.BYTE == unaryExpr.type.tag;
    if (isByteOperand) {
        // Byte operand: XOR with the all-ones byte mask 0xff.
        xorExpr.type = symTable.byteType;
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // Int operand: XOR with -1 (all bits set).
        xorExpr.type = symTable.intType;
        xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(xorExpr);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion carrying only annotation attachments (no target type node)
    // is a transparent wrapper; desugar just the inner expression.
    boolean annotationOnlyWrapper = conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty();
    if (annotationOnlyWrapper) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda at the package level so later phases emit it as a
    // top-level function; the node itself needs no further desugaring here.
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    result = bLangLambdaFunction;
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // Desugars an arrow function `(p, ...) => expr` into an ordinary lambda
    // whose body returns the arrow expression, then registers that lambda as
    // a package-level function.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // The return type is taken from the arrow body expression's type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.type = bLangArrowFunction.body.expr.type;
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.type = bLangArrowFunction.funcType;
    // Create and define a function symbol for the synthesized function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
            new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
            env.enclEnv.enclVarSym, true);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each required parameter's symbol inside the function's scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.type;
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null);
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // The lambda may capture variables from the enclosing environment.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
// Attaches the given symbol to the invokable node and gives it a fresh scope,
// which also becomes the scope of the invokable's environment.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // Qualified names need no desugaring; pass the node through unchanged.
    this.result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Desugar both sides of the attribute: its qualified name and its value.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    this.result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Lift `xmlns` declaration attributes into namespace declaration nodes
    // attached to the element as inline namespaces.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-owned scopes get package-level namespace nodes; everything
        // else gets local ones.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concatenation expression.
    BLangExpression textConcat = constructStringTemplateConcatExpression(xmlTextLiteral.textFragments);
    xmlTextLiteral.concatExpr = rewriteExpr(textConcat);
    this.result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into one concatenation expression.
    BLangExpression commentConcat = constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments);
    xmlCommentLiteral.concatExpr = rewriteExpr(commentConcat);
    this.result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Desugar the PI target, then fold its data fragments into one
    // concatenation expression.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    BLangExpression dataConcat = constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments);
    xmlProcInsLiteral.dataConcatExpr = rewriteExpr(dataConcat);
    this.result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into one concatenation expression.
    BLangExpression quotedConcat = constructStringTemplateConcatExpression(xmlQuotedString.textFragments);
    xmlQuotedString.concatExpr = rewriteExpr(quotedConcat);
    this.result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is sugar for concatenating its constituent parts.
    BLangExpression templateConcat = constructStringTemplateConcatExpression(stringTemplateLiteral.exprs);
    result = rewriteExpr(templateConcat);
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Clone the sent value so the receiving worker observes an independent copy.
    BLangExpression desugaredSendExpr = rewriteExpr(workerSendNode.expr);
    workerSendNode.expr = visitCloneInvocation(desugaredSendExpr, workerSendNode.expr.type);
    if (workerSendNode.keyExpr != null) {
        workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
    }
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Clone the sent value so the receiving worker observes an independent copy.
    BLangExpression desugaredSendExpr = rewriteExpr(syncSendExpr.expr);
    syncSendExpr.expr = visitCloneInvocation(desugaredSendExpr, syncSendExpr.expr.type);
    this.result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Only the optional key expression needs desugaring.
    BLangExpression keyExpr = workerReceiveNode.keyExpr;
    if (keyExpr != null) {
        workerReceiveNode.keyExpr = rewriteExpr(keyExpr);
    }
    this.result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Collect the distinct worker names this flush targets from its cached
    // worker-send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts.stream()
            .map(send -> send.workerIdentifier)
            .distinct()
            .collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    // Mark as desugared so the re-rewrite below does not loop back in here.
    xmlAttributeAccessExpr.desugared = true;
    // lvalue accesses and indexed accesses are kept as-is; a bare attribute
    // load is pushed through the rewrite pipeline once more.
    if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
@Override
public void visit(BLangLocalVarRef localVarRef) {
    // Already in its final desugared form; pass through.
    this.result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    // Already in its final desugared form; pass through.
    this.result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    // Already in its final desugared form; pass through.
    this.result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    // Already in its final desugared form; pass through.
    this.result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    // Already in its final desugared form; pass through.
    this.result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    // Already in its final desugared form; pass through.
    this.result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    // Already in its final desugared form; pass through.
    this.result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    // Already in its final desugared form; pass through.
    this.result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    // Already in its final desugared form; pass through.
    this.result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    // Already in its final desugared form; pass through.
    this.result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    // Already in its final desugared form; pass through.
    this.result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    // Already in its final desugared form; pass through.
    this.result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // `x.<elem>` desugars into a langlib getElements(...) call whose filter
    // names are expanded to `{ns}name` form.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> expandedFilters = expandFilters(xmlElementAccess.filters);
    BLangInvocation getElementsCall = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), expandedFilters);
    result = rewriteExpr(getElementsCall);
}
// Expands each XML element filter into a string literal of the form
// `{namespaceURI}localName`. A filter whose prefix cannot be resolved falls
// back to the default namespace when one is declared (except the `*` wildcard,
// which is passed through untouched).
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // No declared prefix: use the default namespace unless wildcard.
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
// Builds a lang.xml langlib invocation `invokeOnExpr.functionName(args...)`.
// Per langlib calling convention, the receiver expression is also passed as
// the first required argument.
private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));
    // Receiver goes first, then the explicit arguments.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Desugars XML navigation access (descendants {@code x/**/<f>}, children {@code x/*},
 * and filtered children {@code x/<f>} with an optional index) into the corresponding
 * internal langlib invocations.
 */
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    // Expand each element filter into its fully-qualified-name string form.
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        // x/* takes no filters; all children are selected.
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        BLangExpression childIndexExpr;
        // A missing index is encoded as -1, meaning "all matching children".
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
/**
 * Desugars an is-assignable expression by rewriting only its left-hand operand;
 * the node itself flows through unchanged.
 */
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    BLangExpression desugaredLhs = rewriteExpr(assignableExpr.lhsExpr);
    assignableExpr.lhsExpr = desugaredLhs;
    result = assignableExpr;
}
/**
 * A function-pointer invocation is already in its final desugared form; pass it through.
 */
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    result = fpInvocation;
}
/**
 * Desugars a typedesc expression by rewriting its type node.
 */
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
/**
 * Desugars an integer range expression. Exclusive bounds are first normalized to
 * inclusive form (start+1 / end-1) before the bound expressions are rewritten.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    if (!intRangeExpression.includeStart) {
        // Exclusive start: shift the lower bound up by one.
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        // Exclusive end: shift the upper bound down by one.
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}
/**
 * Desugars a rest-args (spread) expression to its underlying expression;
 * the wrapper node itself is dropped.
 */
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
/**
 * Desugars a named argument to its value expression; the name wrapper is dropped
 * because argument reordering has already placed it positionally.
 */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
/**
 * Desugars a match expression into a statement expression of the form:
 * <pre>
 *   { T $temp_result$; match expr { pattern_i => $temp_result$ = pattern_i.expr; } } $temp_result$
 * </pre>
 * i.e. a temp-result variable, a match statement assigning into it from each clause,
 * and the temp variable reference as the resulting value.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Ensure there is a fall-through clause so the temp result is always assigned.
    addMatchExprDefaultCase(bLangMatchExpression);
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            matchTempResultVarName, bLangMatchExpression.type, null,
            new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
                    bLangMatchExpression.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;
    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        // Clause value may need converting to the overall match expression type.
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
            patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.type = bLangMatchExpression.type;
    result = rewriteExpr(statementExpr);
}
/**
 * Desugars a {@code check} expression (errors may be returned to the caller).
 */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
/**
 * Desugars a {@code checkpanic} expression (errors always panic).
 */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Shared desugaring for {@code check} / {@code checkpanic}: rewrites the expression into
 * <pre>
 *   { T $var$; match expr { T v => $var$ = v; error e => return e; / panic e; } } $var$
 * </pre>
 * The error clause returns when {@code check} is used and the enclosing function can
 * return the error type; otherwise it panics.
 *
 * @param checkedExpr  the check/checkpanic expression being desugared
 * @param isCheckPanic true for {@code checkpanic}, forcing the error clause to panic
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
            checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
                    names.fromString(checkedExprVarName),
                    this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;
    // Success clause: assign the non-error value into the temp variable.
    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                    checkedExprVar.symbol, null);
    // Error clause: return or panic depending on context.
    BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
            this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
            new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                add(patternSuccessCase);
                add(patternErrorCase);
            }});
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
            new ArrayList<BLangStatement>() {{
                add(checkedExprVarDef);
                add(matchStmt);
            }});
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.type = checkedExpr.type;
    result = rewriteExpr(statementExpr);
}
/**
 * Desugars a service constructor expression into an object instantiation of the
 * service's generated type definition; the service's annotation attachments are
 * rewritten as part of the process.
 */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
/**
 * Desugars a type-test expression ({@code expr is T}). Value-typed operands are
 * boxed to {@code any} before the test so the runtime check sees a reference value.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // Bug fix: the conversion expression was previously created but its result
        // discarded, so the boxing never took effect. Capture it here.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
/**
 * Desugars an annotation access expression ({@code expr.@annot}) into a synthetic
 * binary expression with the ANNOT_ACCESS operator, whose RHS is the annotation's
 * runtime alias as a string literal.
 */
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.type = annotAccessExpr.type;
    // Synthetic operator symbol; this operator has no user-visible declaration.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                                              new BInvokableType(Lists.of(binaryExpr.lhsExpr.type,
                                                                          binaryExpr.rhsExpr.type),
                                                                 annotAccessExpr.type, null), null);
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars an is-like expression by rewriting its operand; the node is kept as-is.
 */
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    BLangExpression rewrittenOperand = rewriteExpr(isLikeExpr.expr);
    isLikeExpr.expr = rewrittenOperand;
    result = isLikeExpr;
}
/**
 * Desugars a statement expression: the result expression is rewritten first,
 * then the statement part.
 */
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
/**
 * Desugars a query expression by delegating to the query desugar phase and
 * rewriting the resulting statement expression.
 */
@Override
public void visit(BLangQueryExpr queryExpr) {
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env);
    result = rewrite(stmtExpr, env);
}
/**
 * Desugars a query action by delegating to the query desugar phase and
 * rewriting the resulting statement expression.
 */
@Override
public void visit(BLangQueryAction queryAction) {
    BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env);
    result = rewrite(stmtExpr, env);
}
/**
 * Desugars a JSON array literal by rewriting each member expression in place.
 */
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    List<BLangExpression> rewrittenMembers = rewriteExprs(jsonArrayLiteral.exprs);
    jsonArrayLiteral.exprs = rewrittenMembers;
    result = jsonArrayLiteral;
}
/**
 * Desugars a constant declaration. Simple-typed constants (tags up to BOOLEAN, plus NIL —
 * relies on TypeTags ordering) are replaced with a literal carrying the resolved value;
 * other constants keep their original expression, rewritten.
 */
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // Only nil constants may legitimately have a null resolved value.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
/**
 * An ignore expression is a placeholder; it needs no further desugaring.
 */
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    result = ignoreExpr;
}
/**
 * Desugars a constant reference into a literal holding its resolved value.
 */
@Override
public void visit(BLangConstRef constantRef) {
    BLangLiteral resolvedLiteral =
            ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
    result = resolvedLiteral;
}
/**
 * Builds {@code $iterator$ = collection.iterator()} as a variable definition, for use
 * by foreach/query desugaring.
 *
 * @param pos                       position for the generated nodes
 * @param collectionSymbol          symbol of the collection being iterated
 * @param iteratorInvokableSymbol   symbol of the iterator() function to invoke
 * @param isIteratorFuncFromLangLib whether iterator() comes from a langlib module
 * @return the variable definition for the generated iterator variable
 */
BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.type = iteratorInvokableSymbol.retType;
    // The collection itself is the sole (receiver) argument.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds {@code $result$ = $iterator$.next()} as a variable definition.
 *
 * @param pos                position for the generated nodes
 * @param nillableResultType the nillable record type returned by next()
 * @param iteratorSymbol     symbol of the iterator variable
 * @param resultSymbol       symbol for the generated result variable
 * @return the variable definition holding the next() result
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Builds {@code $result$ = $iterator$.next()} as an assignment (for loop continuation,
 * after the result variable already exists).
 *
 * @param pos            position for the generated nodes
 * @param iteratorSymbol symbol of the iterator variable
 * @param resultSymbol   symbol of the existing result variable
 * @return the assignment statement
 */
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Strip nil from the receiver type; by this point the iterator is known to be non-nil.
    nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds the {@code $iterator$.next()} invocation node, resolving the attached
 * {@code next} function from the iterator's object type.
 *
 * @param pos            position for the generated nodes
 * @param iteratorSymbol symbol of the iterator variable (must be of object type)
 * @return the next() invocation node
 */
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The receiver also appears as the sole required argument.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.type = nextFuncSymbol.retType;
    return nextInvocation;
}
/**
 * Finds the attached {@code next} function on the given iterator object type.
 *
 * @param iteratorType object type of the iterator
 * @return the attached next function, or {@code null} if the type has none
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals("next")) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Builds the field access {@code $result$.value}, typed as the loop variable's type.
 *
 * @param pos          position for the generated nodes
 * @param varType      type of the extracted value
 * @param resultSymbol symbol of the next() result variable
 * @return the field-based access expression
 */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueField = ASTBuilderUtil.createIdentifier(pos, "value");
    BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(resultRef, valueField);
    valueAccess.pos = pos;
    valueAccess.type = varType;
    valueAccess.originalType = valueAccess.type;
    return valueAccess;
}
/**
 * Wraps an arrow function's expression body into a block body with a single
 * {@code return <expr>;} statement.
 *
 * @param bLangArrowFunction the arrow function whose body is being converted
 * @return the generated block function body
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode body = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bLangArrowFunction.body.expr.pos;
    returnStmt.setExpression(bLangArrowFunction.body.expr);
    body.addStatement(returnStmt);
    return body;
}
/**
 * Builds an invocation of a function resolved from the root (built-in) scope.
 *
 * @param functionName name of the function to invoke
 * @param args         required arguments
 * @param retType      type to stamp on the invocation node
 * @return the assembled invocation node
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier funcIdentifier = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    funcIdentifier.setLiteral(false);
    funcIdentifier.setValue(functionName);
    invocation.name = funcIdentifier;
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Built-in functions live in the root scope, not the current environment.
    invocation.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocation.type = retType;
    invocation.requiredArgs = args;
    return invocation;
}
/**
 * Builds a langlib method invocation {@code onExpr.functionName(args...)}, resolving the
 * function from the langlib module matching the receiver's type. The receiver is also
 * prepended as the first required argument per the langlib calling convention.
 *
 * @param functionName langlib function name
 * @param onExpr       receiver expression
 * @param args         additional required arguments
 * @param retType      type to stamp on the invocation, or null to use the function's return type
 * @param pos          position for the generated nodes
 * @return the assembled langlib invocation node
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    DiagnosticPos pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName));
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.type = retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType;
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates an empty array literal node typed as {@code any[]}.
 *
 * @return the fresh, empty array literal
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.exprs = new ArrayList<>();
    arrayLiteral.type = new BArrayType(symTable.anyType);
    return arrayLiteral;
}
/**
 * Desugars an invocation through a function pointer. Builds a variable reference
 * (a plain var-ref when there is no receiver, a field access otherwise) to the
 * function value, rewrites it, and wraps the invocation in a
 * {@link BFunctionPointerInvocation}.
 *
 * @param iExpr the invocation to desugar
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangVariableReference expr;
    if (iExpr.expr == null) {
        expr = new BLangSimpleVarRef();
    } else {
        // Receiver present: the function value is accessed as a field on it.
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.type = iExpr.symbol.type;
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps the given expression in a {@code clone()} langlib call, converting the result
 * to the LHS type when required. Value types and errors are returned untouched since
 * they need no cloning.
 *
 * @param expr    expression to clone
 * @param lhsType type the cloned value is assigned to
 * @return the clone invocation (possibly wrapped in a conversion), or the original expression
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall =
            createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps the given expression in a {@code cloneReadOnly()} langlib call, converting the
 * result to the LHS type when required. Value types and errors are returned untouched
 * since they need no cloning.
 *
 * @param expr    expression to clone as read-only
 * @param lhsType type the cloned value is assigned to
 * @return the cloneReadOnly invocation (possibly wrapped in a conversion), or the original expression
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall =
            createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Core rewriting entry point: dispatches the node to this visitor under the given
 * environment and returns the (possibly replaced) desugared node. Already-desugared
 * nodes are returned as-is to keep the pass idempotent. The previous environment is
 * restored afterwards so nested rewrites do not leak scope.
 *
 * @param node the node to desugar (may be null)
 * @param env  environment to desugar under
 * @return the desugared replacement node, or null if {@code node} was null
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    // The visitor communicates its replacement through this.result.
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Rewrites an expression under the current environment. If the expression carries an
 * implicit conversion, the conversion expression is desugared in its place (and the
 * link is cleared to avoid re-application). Already-desugared nodes pass through.
 *
 * @param node expression to desugar (may be null)
 * @return the desugared replacement expression, or null if {@code node} was null
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        // Desugar the implicit conversion wrapper instead of the bare expression.
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Rewrites a statement, maintaining the statement-link chain used elsewhere in
 * desugaring (e.g. for statement replacement). A new link is pushed for the duration
 * of the rewrite and popped afterwards.
 *
 * @param statement statement to desugar (may be null)
 * @param env       environment to desugar under
 * @return the desugared replacement statement, or null if {@code statement} was null
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    // Wire the resulting statement into the link chain, then pop the link.
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
/**
 * Desugars every statement in the list in place and returns the same list.
 *
 * @param nodeList statements to desugar
 * @param env      environment to desugar under
 * @return the same list, with each element replaced by its desugared form
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Desugars every node in the list in place and returns the same list.
 *
 * @param nodeList nodes to desugar
 * @param env      environment to desugar under
 * @return the same list, with each element replaced by its desugared form
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Desugars every expression in the list in place and returns the same list.
 *
 * @param nodeList expressions to desugar
 * @return the same list, with each element replaced by its desugared form
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Creates a string literal node with the given value and position.
 *
 * @param pos   position to attach
 * @param value literal value
 * @return the string literal node
 */
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Creates an int literal node with the given value (no position attached).
 *
 * @param value literal value
 * @return the int literal node
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.value = value;
    intLiteral.type = symTable.intType;
    return intLiteral;
}
/**
 * Creates a byte literal node; byte values are stored as their unsigned int form.
 *
 * @param pos   position to attach
 * @param value byte value
 * @return the byte literal node
 */
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
/**
 * Wraps an expression in a type-conversion (cast) node to the target type.
 *
 * @param expr       expression to cast
 * @param targetType type to cast to
 * @return the conversion expression node
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    BLangTypeConversionExpr castNode = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castNode.pos = expr.pos;
    castNode.expr = expr;
    castNode.type = targetType;
    castNode.targetType = targetType;
    return castNode;
}
/**
 * Unwraps nested array types down to the ultimate (non-array) element type.
 *
 * @param type possibly-nested array type
 * @return the innermost element type, or {@code type} itself if it is not an array
 */
private BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Appends an implicit {@code return ();} to a block-bodied invokable whose return type
 * is nillable and whose last statement is not already a return. Native functions,
 * non-block bodies, and functions with workers are left untouched.
 *
 * @param invokableNode the function/method to patch
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        DiagnosticPos invPos = invokableNode.pos;
        // Position the synthetic return at the closing line of the function.
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol,
                invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature,
 * desugaring named arguments into positional form and normalizing rest/vararg
 * arguments into the shapes the backend expects:
 * <ul>
 *   <li>individual rest args are packed into an array literal of the rest param type;</li>
 *   <li>a trailing spread ({@code ...x}) supplying required params is hoisted into a temp
 *       variable, its leading members consumed positionally and the remainder sliced into
 *       the rest arg;</li>
 *   <li>rest args followed by a spread are packed into an array and the spread pushed
 *       onto it.</li>
 * </ul>
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Temp var to hold a trailing vararg that also supplies required parameters,
    // so the spread expression is evaluated exactly once.
    BLangExpression varargRef = null;
    BLangBlockStmt blockStmt = null;
    if (!iExpr.restArgs.isEmpty() &&
            restArgs.get(restArgs.size() - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            iExpr.requiredArgs.size() < invokableSymbol.params.size()) {
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr;
        DiagnosticPos varargExpPos = expr.pos;
        BType varargVarType = expr.type;
        String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType,
                ((BLangRestArgsExpression) restArgs.get(restArgs.size() - 1)).expr,
                varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.type = varargVarType;
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-order the named args and fill any gaps from the vararg temp.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no trailing spread — pack any individual rest args into an array literal.
    if (restArgs.isEmpty() || restArgs.get(restArgs.size() - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.type = arrayType;
        iExpr.restArgs = new ArrayList<>();
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: a single trailing spread.
    if (restArgs.size() == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        // If no required params were filled from the vararg, nothing to rearrange.
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            return;
        }
        // Attach the vararg temp-var definition to the first argument via a statement
        // expression, so the vararg is evaluated before any of its members are read.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.type = firstNonRestArg.type;
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            return;
        }
        // Slice off the members already consumed as required args; the remainder
        // becomes the actual rest argument.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation sliceInvocation =
                createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef,
                        new ArrayList<BLangExpression>() {{
                            add(startIndex);
                        }},
                        varargRef.type, varargRef.pos);
        restArgs.remove(0);
        restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type));
        return;
    }
    // Case 3: individual rest args followed by a trailing spread — pack the individual
    // args into an array and push the spread members onto it.
    BArrayType type = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = type;
    BType elemType = type.eType;
    DiagnosticPos pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgs.size() - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgs.size() - 1);
    String name = DESUGARED_VARARG_KEY + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
            this.env.scope.owner);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, type, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, type, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.type = type;
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rewrites the required-arg list into strict positional order: positional args stay,
 * named args are moved to their parameter's slot, missing defaultable params get an
 * ignore-expression placeholder, and — when a vararg temp exists — remaining slots are
 * filled by indexing into the vararg.
 *
 * @param iExpr           invocation whose required args are being reordered
 * @param invokableSymbol the callee's symbol (source of the parameter order)
 * @param varargRef       reference to the hoisted vararg temp, or null if none
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new HashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    // Tracks which member of the vararg fills the next unfilled parameter.
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.type;
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument already in the right slot.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            args.add(namedArgs.get(param.name.value));
        } else if (varargRef == null) {
            // Defaultable parameter with no argument: placeholder lets the backend
            // substitute the default value.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.type = param.type;
            args.add(expr);
        } else {
            // Fill the slot from the vararg: varargRef[varargIndex].
            BLangIndexBasedAccess memberAccessExpr =
                    (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            memberAccessExpr.pos = varargRef.pos;
            memberAccessExpr.expr = varargRef;
            memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex));
            memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) :
                    ((BArrayType) varargType).eType;
            varargIndex++;
            args.add(addConversionExprIfRequired(memberAccessExpr, param.type));
        }
    }
    iExpr.requiredArgs = args;
}
/**
 * Builds the error clause for check/checkpanic desugaring: binds the error to a temp
 * variable and either returns it (plain {@code check} where the enclosing function's
 * return type covers every possible error) or panics with it.
 *
 * @param pos                  position for the generated nodes
 * @param invokableSymbol      symbol of the enclosing invokable (for its return type)
 * @param equivalentErrorTypes error types the checked expression may produce
 * @param isCheckPanicExpr     true for {@code checkpanic}, forcing a panic
 * @return the match clause handling the error case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
        DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};
    // Returning is only valid if every possible error is assignable to some return type member.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
            patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                    names.fromString(patternFailureCaseVarName),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
    BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternFailureCaseVar.symbol);
    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = pos;
    if (!isCheckPanicExpr && returnOnError) {
        // return the captured error
        BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
        returnStmt.pos = pos;
        returnStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(returnStmt);
    } else {
        // panic with the captured error
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }
    return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
}
/**
 * Builds the success clause for check/checkpanic desugaring: binds the non-error value
 * to a temp variable and assigns it to the target — either a fresh variable reference
 * ({@code isVarDef}) or the provided LHS expression.
 *
 * @param pos       position for the generated nodes
 * @param lhsType   type of the non-error value
 * @param isVarDef  true when assigning into a newly defined variable
 * @param varSymbol symbol of the target variable (used when {@code isVarDef})
 * @param lhsExpr   target expression (used when not {@code isVarDef})
 * @return the match clause handling the success case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
        boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable patternSuccessCaseVar = ASTBuilderUtil.createVariable(pos,
            patternSuccessCaseVarName, lhsType, null, new BVarSymbol(0,
                    names.fromString(patternSuccessCaseVarName),
                    this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
    BLangExpression varRefExpr;
    if (isVarDef) {
        varRefExpr = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    } else {
        varRefExpr = lhsExpr;
    }
    BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternSuccessCaseVar.symbol);
    BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(pos,
            varRefExpr, patternSuccessCaseVarRef, false);
    BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(pos,
            new ArrayList<BLangStatement>() {{
                add(assignmentStmtSuccessCase);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(pos,
            patternSuccessCaseVar, patternBlockSuccessCase);
}
/**
 * Converts a whole match statement into a chained if/else-if/else: each pattern clause
 * becomes one if-branch; a final is-last pattern becomes the trailing else block.
 *
 * @param matchStmt    the match statement being desugared
 * @param matchExprVar temp variable holding the matched expression's value
 * @return the root if statement of the generated chain
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
    BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
    BLangIf currentIfNode = parentIfNode;
    for (int i = 1; i < patterns.size(); i++) {
        BLangMatchBindingPatternClause patternClause = patterns.get(i);
        if (i == patterns.size() - 1 && patternClause.isLastPattern) {
            // A guaranteed-match final clause needs no condition: plain else.
            currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
        } else {
            currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
            currentIfNode = (BLangIf) currentIfNode.elseStmt;
        }
    }
    return parentIfNode;
}
/**
 * Generate an if-else statement from the given match binding pattern clause.
 * <p>
 * Typed patterns produce {@code if (<type check>) { <body with var bound> }}.
 * Structured patterns additionally prepend the binding-pattern variable definitions
 * to the body, or — when a type guard is present — fold those definitions and the
 * guard into the if condition via a statement expression.
 *
 * @param pattern match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }
    // The original code tested MATCH_STRUCTURED_PATTERN_CLAUSE twice in a row (once to
    // compute expectedType, once to do the lowering); the identical checks are merged here
    // and expectedType is scoped to the only branch that uses it.
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        // Type implied by the binding pattern itself; the matched value is cast to it.
        BType expectedType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        if (structuredPattern.typeGuardExpr != null) {
            // Guard present: definitions must be in scope while the guard evaluates,
            // so wrap them in a statement expression and AND it into the condition.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                    structuredPattern.typeGuardExpr);
            stmtExpr.type = symTable.booleanType;
            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                            (BOperatorSymbol) symResolver
                                    .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                            symTable.booleanType));
        } else {
            // No guard: simply prepend the definitions to the pattern body.
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }
    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
// Returns the body of a typed match pattern, prepending a definition that binds
// the matched value to the pattern variable. The ignore pattern ('_') binds
// nothing, so its body is returned unchanged.
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedClause.variable.name.value.equals(Names.IGNORE.value)) {
        return typedClause.body;
    }
    // Prepend: `<T> var = <conversion>(matchExprVar);` so the pattern variable is in scope.
    BLangSimpleVarRef matchedValueRef = ASTBuilderUtil.createVariableRef(typedClause.pos, matchExprVar.symbol);
    BLangExpression initExpr = addConversionExprIfRequired(matchedValueRef, typedClause.variable.type);
    BLangSimpleVariable boundVar = ASTBuilderUtil.createVariable(typedClause.pos, "",
            typedClause.variable.type, initExpr, typedClause.variable.symbol);
    typedClause.body.stmts.add(0, ASTBuilderUtil.createVariableDef(boundVar.pos, boundVar));
    return typedClause.body;
}
// Returns the body to use for the trailing else of a lowered match. For
// structured patterns, binds the matched value to the binding pattern and
// prepends the resulting variable definition to the body.
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE != pattern.getKind()) {
        return pattern.body;
    }
    BLangMatchStructuredBindingPatternClause structuredClause =
            (BLangMatchStructuredBindingPatternClause) pattern;
    structuredClause.bindingPatternVariable.expr =
            ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
    // Pick the definition node matching the binding pattern's shape.
    BLangStatement bindingVarDef;
    switch (structuredClause.bindingPatternVariable.getKind()) {
        case TUPLE_VARIABLE:
            bindingVarDef = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredClause.bindingPatternVariable);
            break;
        case RECORD_VARIABLE:
            bindingVarDef = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredClause.bindingPatternVariable);
            break;
        case ERROR_VARIABLE:
            bindingVarDef = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredClause.bindingPatternVariable);
            break;
        default:
            bindingVarDef = ASTBuilderUtil.createVariableDef(pattern.pos,
                    (BLangSimpleVariable) structuredClause.bindingPatternVariable);
            break;
    }
    structuredClause.body.stmts.add(0, bindingVarDef);
    return structuredClause.body;
}
// Wraps `expr` in a conversion node targeting `lhsType` when one is needed.
// A sequence of guards returns `expr` unchanged in the cases where no
// conversion node is required; guard order is significant.
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    // No concrete target type to convert to.
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.type;
    // Identical types: nothing to do.
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // Let the type system attach an implicit cast; if it did, we are done.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    // nil is a valid json value; no conversion node needed.
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    // A nullable rhs already covers a nil target.
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    // Tuple values flow into array targets without an explicit conversion node.
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Otherwise synthesize a conversion node. checkTypes=false disables the
    // conversion's own type checking — presumably because the types were
    // validated earlier in analysis; confirm before relying on it.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.type = lhsType;
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    return conversionExpr;
}
// Builds the boolean condition that decides whether the value in `varSymbol`
// matches the given pattern clause. Union pattern types expand into an
// OR-chain of per-member-type checks.
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                 BVarSymbol varSymbol) {
    // Determine the type the pattern matches against, by pattern kind.
    BType patternType;
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            // Static pattern: the literal's type.
            BLangMatchStaticBindingPatternClause staticPattern =
                    (BLangMatchStaticBindingPatternClause) patternClause;
            patternType = staticPattern.literal.type;
            break;
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            // Structured pattern: type derived from the binding pattern shape.
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) patternClause;
            patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
            break;
        default:
            // Typed pattern: the declared variable type.
            BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
            patternType = simplePattern.variable.type;
            break;
    }
    BLangExpression binaryExpr;
    BType[] memberTypes;
    if (patternType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) patternType;
        memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    } else {
        memberTypes = new BType[1];
        memberTypes[0] = patternType;
    }
    if (memberTypes.length == 1) {
        binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
    } else {
        // Fold the per-member checks into a right-leaning OR chain:
        // check(m[n-1]) || (... || (check(m[1]) || check(m[0]))).
        BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                        lhsExpr.type, rhsExpr.type));
        for (int i = 2; i < memberTypes.length; i++) {
            lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
            rhsExpr = binaryExpr;
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
        }
    }
    return binaryExpr;
}
// Computes the type a structured binding pattern matches against.
// - tuple patterns  -> a BTupleType over the member patterns (+ rest type)
// - record patterns -> a fresh anonymous BRecordType, registered as a type def
// - error patterns  -> a fresh anonymous BErrorType with a synthesized detail record
// - otherwise       -> the variable's own declared type
// Note: for record/error patterns this method has the side effect of adding
// type definitions to the current environment.
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        // Tuple pattern: tuple of recursively derived member types.
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest pattern's derived type is an array; its element type
            // becomes the tuple's rest type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        // Record pattern: synthesize an anonymous record type, one required
        // field per key in the pattern.
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        List<BField> fields = new ArrayList<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            // Field types are derived recursively from the value binding patterns.
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                    env.enclPkg.symbol.pkgID, fieldType, recordSymbol);
            fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // Open record: rest field type is the rest pattern's map constraint,
        // or anydata when there is no rest pattern.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType,
                bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        // Error pattern: synthesize an anonymous error type with a detail record.
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest-detail pattern: use the generic detail type.
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol,
                ((BErrorType) errorVariable.type).reasonType,
                detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
        return errorType;
    }
    // Simple variable pattern: its declared type.
    return bindingPatternVariable.type;
}
// Builds the AST record-type node for an error pattern's detail record. Each
// detail entry becomes one field; entries without a bound symbol get a fresh
// public symbol of the pure type.
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // No symbol bound to the pattern yet: synthesize one.
            fieldSymbol = new BVarSymbol(
                    Flags.PUBLIC,
                    names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID,
                    symTable.pureType,
                    null);
        }
        BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.type,
                entry.valueBindingPattern.expr,
                fieldSymbol);
        fieldList.add(fieldVar);
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
// Synthesizes the record type for an error binding pattern's detail mapping.
// One public field is added per named detail entry. The record is sealed only
// when there is no rest-detail pattern (i.e. no extra fields may appear).
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString("$anonErrorType$" + errorNo + "$detailType"),
            env.enclPkg.symbol.pkgID, null, null);
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    // Register the synthesized init function in the record's scope.
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "." +
                    detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);
    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;
    if (restDetail == null) {
        // No rest pattern: the detail mapping must contain exactly these fields.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        // Field type derived recursively from the entry's value binding pattern.
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(
                Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
        detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
        detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
    }
    return detailRecordType;
}
// Synthesizes a no-arg, nil-returning init function for generated record types.
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType);
}
// Wraps the given semantic error type in a fresh AST error-type node.
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType typeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    typeNode.type = errorType;
    return typeNode;
}
// Builds the runtime check for a single (non-union) pattern type:
// - static patterns     -> equality check against the pattern literal
// - structured patterns -> `islike` check against the derived pattern type
// - nil typed patterns  -> `== ()` equality check
// - other typed patterns -> `isassignable` check
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    DiagnosticPos pos = patternClause.pos;
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    if (NodeKind.MATCH_STATIC_PATTERN_CLAUSE == patternClause.getKind()) {
        BLangMatchStaticBindingPatternClause pattern = (BLangMatchStaticBindingPatternClause) patternClause;
        return createBinaryExpression(pos, varRef, pattern.literal);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == patternClause.getKind()) {
        return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
    }
    if (patternType == symTable.nilType) {
        // nil needs an equality check rather than an assignability check.
        BLangLiteral bLangLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, varRef, bLangLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    } else {
        return createIsAssignableExpression(pos, varSymbol, patternType);
    }
}
// Builds the condition for a static match pattern, comparing `varRef` against
// `expression`. Composite patterns are handled recursively:
// - group expressions unwrap to their inner expression
// - binary (alternative) patterns become an OR of the two sub-conditions
// - the '_' reference matches anything and becomes an `is any` type test
// - everything else becomes an `==` check, falling back to the anydata
//   equality operator when no direct operator exists for the operand types
private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        // Alternative pattern `p1 | p2`: OR the two recursively built conditions.
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // '_' matches any value: lower to `varRef is any`.
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.type = symTable.anyType;
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct equality operator: fall back to anydata equality.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                            binaryExpr);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
// Builds a boolean `isassignable` test of the given variable against patternType.
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    BLangSimpleVarRef valueRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    return ASTBuilderUtil.createIsAssignableExpr(pos, valueRef, patternType, symTable.booleanType, names);
}
// Builds a boolean `<expr> islike <type>` test expression.
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
// Converts a variable's initializer into the assignment `<variable> = <variable.expr>;`
// reusing the variable's own name, symbol, and type for the target reference.
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef targetRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    targetRef.pos = variable.pos;
    targetRef.variableName = variable.name;
    targetRef.symbol = variable.symbol;
    targetRef.type = variable.type;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(targetRef);
    return assignment;
}
// Builds and desugars `<symbol>.<field> = <field initializer>` in the given
// function's environment. NOTE(review): `symbol` appears to be the receiver
// (`self`) symbol of the enclosing struct — confirm against the call sites.
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol symbol) {
    BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, variable.name);
    fieldAccess.symbol = variable.symbol;
    fieldAccess.type = variable.type;
    BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignmentStmt.expr = variable.expr;
    assignmentStmt.pos = variable.pos;
    assignmentStmt.setVariable(fieldAccess);
    // Rewrite inside the function's own environment so symbols resolve there.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignmentStmt, initFuncEnv);
}
// Appends an implicit final pattern clause to a match expression, covering every
// member type of the matched expression that no explicit clause accepts, so that
// the lowered match is exhaustive. The default clause yields the value unchanged.
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes;
    List<BType> unmatchedTypes = new ArrayList<>();
    if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
        exprTypes = new ArrayList<>(unionType.getMemberTypes());
    } else {
        // NOTE(review): this branch reads `bLangMatchExpression.type` while the union
        // branch reads `bLangMatchExpression.expr.type` — confirm the asymmetry is intended.
        exprTypes = Lists.of(bLangMatchExpression.type);
    }
    // Collect the types not assignable to any pattern variable's type.
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            if (this.types.isAssignable(type, pattern.variable.type)) {
                assignable = true;
                break;
            }
        }
        if (!assignable) {
            unmatchedTypes.add(type);
        }
    }
    if (unmatchedTypes.isEmpty()) {
        return;
    }
    // The default clause's variable is typed as the union of all uncovered types.
    BType defaultPatternType;
    if (unmatchedTypes.size() == 1) {
        defaultPatternType = unmatchedTypes.get(0);
    } else {
        defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
    }
    String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
    BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
                    this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));
    // `default => default` — the clause simply forwards the matched value.
    BLangMatchExprPatternClause defaultPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    defaultPattern.variable = patternMatchCaseVar;
    defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
    defaultPattern.pos = bLangMatchExpression.pos;
    bLangMatchExpression.patternClauses.add(defaultPattern);
}
// True when the access expression (or any access deeper in its receiver chain)
// uses nil/error safe navigation. LHS accesses and chain roots never qualify.
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.lhsVar || accessExpr.expr == null) {
        return false;
    }
    // Explicit safe-navigation marker on this node.
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise look deeper into the receiver chain.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean receiverIsAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return receiverIsAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
// Lowers a safe-navigation access chain into a statement expression:
// `{ T temp_result; match ... { ... } } -> temp_result`, where the match
// statements are built by handleSafeNavigation() and the outermost one sits
// at the bottom of matchStmtStack.
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.type;
    // Temp variable every match arm assigns its result into.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
            accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
                    this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);
    // firstElement(): the outermost (root) match statement of the lowered chain.
    BLangMatch matcEXpr = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.type = originalExprType;
    // Reset the per-rewrite state used by the safe-navigation lowering.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successPattern = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
// Recursively lowers one link of a safe-navigation chain into a match statement
// whose arms handle nil and/or error receivers, each assigning into
// `tempResultVar`. Recurses into the innermost receiver first, then builds
// matches outward, nesting each new match inside the previous success pattern.
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        return;
    }
    // Handle the receiver chain first (innermost receiver lowered first).
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
            kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }
    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Plain access link: restore the original type (XML access can still
        // produce errors, so union with error for XML), and route this expr
        // through the pending success-arm assignment if one exists.
        BType originalType = accessExpr.originalType;
        if (TypeTags.isXMLTypeTag(originalType.tag)) {
            accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
        } else {
            accessExpr.type = originalType;
        }
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
        }
        return;
    }
    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but { <--- parent match expr
     * error e => e,
     * T t => t.y but { <--- current expr
     * error e => e,
     * R r => r.z
     * }
     * }
     */
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
    if (accessExpr.nilSafeNavigation) {
        matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
    }
    if (accessExpr.errorSafeNavigation) {
        matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
        matchStmt.pos = accessExpr.pos;
    }
    BLangMatchTypedBindingPatternClause successPattern =
            getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
    matchStmt.patternClauses.add(successPattern);
    this.matchStmtStack.push(matchStmt);
    // Nest this match inside the previous success pattern's body, if any.
    if (this.successPattern != null) {
        this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
// Builds the error arm of a safe-navigation match: `error <v> => tempResultVar = <v>;`
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString(errorVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(expr.pos, errorVarName, symTable.errorType, null, errorVarSymbol);

    // Arm body: forward the bound error into the temp result variable.
    BLangSimpleVarRef errorVarRef = ASTBuilderUtil.createVariableRef(expr.pos, errorVar.symbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignment =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultVarRef, errorVarRef, false);
    BLangBlockStmt armBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errorVar, armBody);
}
// Builds a match-expression clause `_ => expr` where `_` is typed as nil.
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    String ignoreVarName = IGNORE.toString();
    BVarSymbol ignoreSymbol = new BVarSymbol(0, names.fromString(ignoreVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilVar =
            ASTBuilderUtil.createVariable(pos, ignoreVarName, symTable.nilType, null, ignoreSymbol);

    BLangMatchExprPatternClause nilPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nilPattern.variable = nilVar;
    nilPattern.expr = expr;
    nilPattern.pos = pos;
    return nilPattern;
}
// Builds the nil arm of a safe-navigation match: `() <v> => tempResultVar = <v>;`
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nilVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nilVarSymbol = new BVarSymbol(0, names.fromString(nilVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilVar =
            ASTBuilderUtil.createVariable(expr.pos, nilVarName, symTable.nilType, null, nilVarSymbol);

    // Arm body: forward the bound nil value into the temp result variable.
    BLangSimpleVarRef nilVarRef = ASTBuilderUtil.createVariableRef(expr.pos, nilVar.symbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignment =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultVarRef, nilVarRef, false);
    BLangBlockStmt armBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nilVar, armBody);
}
// Builds the success arm of a safe-navigation match: binds the non-nil (and,
// when liftError, non-error) value to a fresh variable, re-points the access
// expression at that variable, and assigns the (converted) access result into
// `tempResultVar`. Side effects: mutates `accessExpr` in place and records the
// assignment in `safeNavigationAssignment` for the next chain link.
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar, boolean liftError) {
    // Receiver type with nil (and optionally error) members lifted out.
    BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    // Invokable values need an invokable symbol so the bound variable stays callable.
    BVarSymbol successPatternSymbol;
    if (type.tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    // The access now reads from the matched (safe) variable; navigation flags are
    // cleared because the match arms already handled nil/error.
    accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // XML access may still produce error/nil results, so union them back in.
    if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) {
        accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType);
    } else {
        accessExpr.type = accessExpr.originalType;
    }
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
// True when any receiver along the LHS access chain is nullable (and therefore
// needs safe-navigation handling before assignment).
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    // Short-circuits exactly like the original explicit if + recursion.
    return receiver.type.isNullable() || safeNavigateLHS(receiver);
}
// Lowers an assignment whose LHS uses safe navigation: emits nil-guard
// statements for each link of the receiver chain, then the actual assignment
// to a cloned (guard-free) LHS, all wrapped in one block.
// NOTE(review): the `safeAssignment` parameter is never read in this method.
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    createLHSSafeNavigation(stmts, accessExpr.expr);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
            cloneExpression(accessExpr), rhsExpr);
    stmts.add(assignment);
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
// Emits guard statements for one link of a safe-navigated assignment LHS and
// pushes the (possibly rewritten) expression for the parent link onto
// accessExprStack. For each nullable link it generates either
//   if (<link> is ()) { <link> = {}; }        // defaultable mapping, non-root
// or
//   if (<link> is ()) { panic <nil-ref error>; }
// Invocation receivers are first captured into an intermediate variable so
// they are evaluated exactly once.
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
    NodeKind kind = expr.getKind();
    boolean root = false;
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        // Guard the receiver chain first, then adopt its rewritten form.
        BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
        createLHSSafeNavigation(stmts, accessExpr.expr);
        accessExpr.expr = accessExprStack.pop();
    } else {
        // No receiver: this expression is the root of the access chain.
        root = true;
    }
    if (expr.getKind() == NodeKind.INVOCATION) {
        // Capture the invocation result once into an intermediate variable.
        BLangInvocation invocation = (BLangInvocation) expr;
        BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
                + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
        BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
        BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
                intermediateVariable);
        stmts.add(intermediateVariableDefinition);
        expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
    }
    if (expr.type.isNullable()) {
        BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
        isNillTest.type = symTable.booleanType;
        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
        // Work on a clone typed without nil so later links see the narrowed type.
        expr = cloneExpression(expr);
        expr.type = types.getSafeType(expr.type, true, false);
        if (isDefaultableMappingType(expr.type) && !root) {
            // Defaultable mapping (non-root): auto-fill with an empty mapping literal.
            BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            jsonLiteral.type = expr.type;
            jsonLiteral.pos = expr.pos;
            BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                    expr, jsonLiteral);
            thenStmt.addStatement(assignment);
        } else {
            // Otherwise panic with a nil-reference error.
            BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
            literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
            literal.type = symTable.stringType;
            BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
            errorCtorInvocation.pos = expr.pos;
            errorCtorInvocation.argExprs.add(literal);
            errorCtorInvocation.requiredArgs.add(literal);
            errorCtorInvocation.type = symTable.errorType;
            errorCtorInvocation.symbol = symTable.errorConstructor;
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.expr = errorCtorInvocation;
            panicNode.pos = expr.pos;
            thenStmt.addStatement(panicNode);
        }
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
        stmts.add(ifelse);
    }
    // Parent link picks this up as its (possibly rewritten) receiver.
    accessExprStack.push(expr);
}
// Builds a fresh AST node representing the nil (`()`) type.
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.type = symTable.nilType;
    return nilTypeNode;
}
/**
 * Clones a variable-reference expression. Simple variable references are recreated
 * from their symbol; field/index access expressions and invocations are cloned via
 * {@link #cloneAccessExpr}.
 *
 * @param expr the expression to clone; must be a simple var ref or an access expression
 * @return the cloned variable reference
 * @throws IllegalStateException for any other node kind
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || kind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Recursively clones a field- or index-based access expression, resetting its
 * safe-navigation flags and restoring its original (pre-desugar) type.
 *
 * Fix: the INVOCATION case previously assigned {@code accessExpr = null} and then
 * unconditionally dereferenced it below, guaranteeing a NullPointerException. It now
 * fails fast with an IllegalStateException, consistent with the default branch.
 *
 * @param originalAccessExpr the access expression to clone
 * @return the cloned access expression (or the original if it has no nested expr)
 * @throws IllegalStateException if the expression kind cannot be cloned
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    // Clone the nested expression first; access-like kinds recurse, anything else is
    // handled by cloneExpression.
    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);

    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // Previously: accessExpr = null; followed by a guaranteed NPE below.
            throw new IllegalStateException("cloning of invocation expressions is not supported");
        default:
            throw new IllegalStateException();
    }
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    // Safe navigation has already been desugared at this point; clear the flags on the clone.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // Restore the type this expression had before safe-navigation desugaring widened it.
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Builds the expression {@code expr + 1}, used to shift an int-range start bound.
 *
 * @param expr the start-bound expression
 * @return a binary ADD expression adding the constant 1 to {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.ADD, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOpSymbol);
}
/**
 * Builds the expression {@code expr - 1}, used to shift an int-range end bound.
 *
 * @param expr the end-bound expression
 * @return a binary SUB expression subtracting the constant 1 from {@code expr}
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOpSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.SUB, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOpSymbol);
}
/**
 * Builds a boolean literal node carrying the given value.
 *
 * @param value the literal's boolean value
 * @return a {@link BLangLiteral} typed as boolean
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.type = symTable.booleanType;
    boolLiteral.value = value;
    return boolLiteral;
}
/**
 * Checks whether the (nil-lifted) type is a mapping type with an obvious default
 * value: json, map, or record.
 *
 * @param type the type to check (nil is stripped before checking)
 * @return true for json/map/record types, false otherwise
 */
private boolean isDefaultableMappingType(BType type) {
    int typeTag = types.getSafeType(type, true, false).tag;
    return typeTag == TypeTags.JSON || typeTag == TypeTags.MAP || typeTag == TypeTags.RECORD;
}
/**
 * Creates the generated {@code $init$} function for an object type, registers it on
 * the object's type symbol, and desugars it.
 *
 * @param structureTypeNode the object type node to create the init function for
 * @param env the enclosing symbol environment
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) {
    BLangFunction generatedInitFunc = TypeDefBuilderHelper.createInitFunctionForStructureType(
            structureTypeNode, env, Names.GENERATED_INIT_SUFFIX, names, symTable);
    // Attach the generated initializer to the object's type symbol so callers can find it.
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) structureTypeNode.type.tsymbol;
    objectTypeSymbol.generatedInitializerFunc = new BAttachedFunction(
            Names.GENERATED_INIT_SUFFIX, generatedInitFunc.symbol, (BInvokableType) generatedInitFunc.type);
    structureTypeNode.generatedInitFunction = generatedInitFunc;
    generatedInitFunc.returnTypeNode.type = symTable.nilType;
    return rewrite(generatedInitFunc, env);
}
/**
 * Desugars a short-circuiting logical binary expression into an if-else wrapped in a
 * statement expression, so the right-hand side is evaluated only when required.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = rhsExpr;
     * } else {
     *    $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = true;
     * } else {
     *    $result$ = rhsExpr;
     * }
     */
    DiagnosticPos pos = binaryExpr.pos;
    boolean isLogicalAnd = binaryExpr.opKind == OperatorKind.AND;
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, pos);

    // then branch: for AND the lhs was true so the result is the rhs; for OR it is `true`.
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVarDef.var.symbol);
    BLangExpression thenResult = isLogicalAnd ? binaryExpr.rhsExpr : getBooleanLiteral(true);
    thenBody.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, thenResultVarRef, thenResult));

    // else branch: for AND the result is `false`; for OR the lhs was false so it is the rhs.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVarDef.var.symbol);
    BLangExpression elseResult = isLogicalAnd ? getBooleanLiteral(false) : binaryExpr.rhsExpr;
    elseBody.addStatement(ASTBuilderUtil.createAssignmentStmt(pos, elseResultVarRef, elseResult));

    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = binaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Split package init function into several smaller functions.
 *
 * @param packageNode package node
 * @param env symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
    // Splitting is only performed for the JVM backend and only when the body is large enough.
    if (funcBody.stmts.size() < methodSize || !isJvmTarget) {
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;
    List<BLangFunction> generatedFunctions = new ArrayList<>();
    // Take a snapshot of the statements and empty the original body; statements are then
    // redistributed across the original function and newly generated intermediate functions.
    List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
    funcBody.stmts.clear();
    BLangFunction newFunc = initFunction;
    BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;
    // Phase 1: copy statements up to (but not including) the first variable definition,
    // starting a new intermediate function every `methodSize` statements.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    // Phase 2: accumulate statements into chunks; a chunk is flushed (possibly into a new
    // intermediate function) when a service-constructor assignment would overflow the
    // current function. The loop stops early at the first listener assignment.
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
            // NOTE(review): this inner check repeats the size condition already verified in
            // the enclosing `if`, so it is always true here and looks removable — confirm.
            if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) {
                generatedFunctions.add(newFunc);
                newFunc = createIntermediateInitFunction(packageNode, env);
                newFuncBody = (BLangBlockFunctionBody) newFunc.body;
                symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            }
            newFuncBody.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            // Stop chunk-based splitting at the first listener assignment; the remaining
            // statements are handled by phase 3 below.
            break;
        }
    }
    newFuncBody.stmts.addAll(chunkStmts);
    // Phase 3: copy any remaining statements (from where phase 2 stopped), again starting
    // a new intermediate function every `methodSize` statements.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);
    // Chain the generated functions: each function's body is extended with a checked
    // invocation of its successor, so running the first runs them all in order.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);
        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                        createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                new ArrayList<>(), symTable.errorOrNilType),
                        symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
        BLangExpressionStmt expressionStmt = ASTBuilderUtil
                .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;
        if (j > 0) {
            // j == 0 is the original init function, which is already registered in the
            // package; only the new intermediate functions are rewritten and added here.
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }
    if (generatedFunctions.size() > 1) {
        // Register the last generated function as well (it has no successor to chain to).
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }
    return generatedFunctions.get(0);
}
/**
 * Create an intermediate package init function.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 * @return the newly created intermediate init function with its symbol registered
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    String pkgAlias = pkgNode.symbol.pkgID.toString();
    // Each generated function gets a unique suffix from the running counter.
    Name funcName = new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++);
    BLangFunction intermediateInitFunc = ASTBuilderUtil
            .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, pkgAlias, funcName, symTable);
    createInvokableSymbol(intermediateInitFunc, env);
    return intermediateInitFunc;
}
/**
 * Returns the rest-parameter type of the given invokable symbol.
 *
 * @param invokableSymbol the symbol to inspect; may be null
 * @return the rest parameter's type, or null when there is no rest parameter
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    return (invokableSymbol == null || invokableSymbol.restParam == null)
            ? null
            : invokableSymbol.restParam.type;
}
/**
 * Returns the rest-parameter type of the given function node.
 *
 * @param function the function to inspect; may be null
 * @return the rest parameter's type, or null when there is no rest parameter
 */
private BType getRestType(BLangFunction function) {
    return (function == null || function.restParam == null) ? null : function.restParam.type;
}
/**
 * Returns the rest-parameter symbol of the given function node.
 *
 * @param function the function to inspect; may be null
 * @return the rest parameter's symbol, or null when there is no rest parameter
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    return (function == null || function.restParam == null) ? null : function.restParam.symbol;
}
/**
 * Checks whether a record field is a key-value field with a computed key
 * (i.e. {@code [expr]: value}).
 *
 * @param field the record field to inspect
 * @return true only for key-value fields whose key is computed
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Desugars a mapping constructor into a statement expression: an empty map/struct
 * literal bound to a temporary variable, followed by one member-store statement per
 * field (spread-operator fields become a foreach over the spread mapping's entries),
 * with the temporary as the resulting expression.
 *
 * Fix: removed a stray empty statement (double semicolon) after the index-expression
 * initializer.
 *
 * @param mappingConstructorExpr the mapping constructor to desugar
 * @return a statement expression that evaluates to the constructed mapping
 */
private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.type;
    DiagnosticPos pos = mappingConstructorExpr.pos;
    // Records get a struct literal; every other mapping type gets a map literal.
    BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type) :
            new BLangMapLiteral(pos, type);
    String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
            this.env.scope.owner);
    BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;
    BLangBlockStmt blockStmt = createBlockStmt(pos);
    blockStmt.stmts.add(varDef);
    BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression keyExpr = key.expr;
            // Computed keys are used as-is; identifier keys become string literals;
            // otherwise the key is expected to already be a literal.
            BLangExpression indexExpr = key.computedKey ? keyExpr :
                    keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF ?
                            createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value) :
                            ((BLangLiteral) keyExpr);
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // `{ foo }` shorthand: key is the variable name, value is the var ref itself.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef,
                    createStringLiteral(pos, varRefField.variableName.value),
                    varRefField);
        } else {
            // Spread operator `...expr`: iterate the entries of the spread mapping and
            // copy each (key, value) pair into the new mapping.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type);
            types.setForeachTypedBindingPatternType(foreach);
            BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                    foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.type,
                    this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos);
            // The loop variable is an entry tuple: index 0 holds the key, index 1 the value.
            BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type;
            BLangIndexBasedAccess indexExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            indexExpr.pos = pos;
            indexExpr.expr = foreachVarRef;
            indexExpr.indexExpr = rewriteExpr(createIntLiteral(0));
            indexExpr.type = foreachVarRefType.tupleTypes.get(0);
            BLangIndexBasedAccess valueExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            valueExpr.pos = pos;
            valueExpr.expr = foreachVarRef;
            valueExpr.indexExpr = rewriteExpr(createIntLiteral(1));
            valueExpr.type = foreachVarRefType.tupleTypes.get(1);
            addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr);
            foreach.body = foreachBodyBlock;
            blockStmt.addStatement(foreach);
        }
    }
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef);
    stmtExpression.type = type;
    return stmtExpression;
}
/**
 * Appends a member-store statement ({@code mapping[index] = value;}) to the given
 * block, rewriting both the value and the index expressions.
 *
 * @param pos position to attach to the generated nodes
 * @param blockStmt block the assignment is appended to
 * @param mappingVarRef reference to the mapping being populated
 * @param indexExpr key/index expression
 * @param value value expression to store
 */
private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt,
                                           BLangExpression mappingVarRef, BLangExpression indexExpr,
                                           BLangExpression value) {
    BLangAssignment memberStore = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
    memberStore.expr = rewriteExpr(value);
    // Build the LHS `mapping[index]` access.
    BLangIndexBasedAccess lhsIndexAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    lhsIndexAccess.pos = pos;
    lhsIndexAccess.expr = mappingVarRef;
    lhsIndexAccess.indexExpr = rewriteExpr(indexExpr);
    lhsIndexAccess.type = value.type;
    memberStore.varRef = lhsIndexAccess;
}
/**
 * Extracts the literal-keyed entries from a desugared mapping constructor (as produced
 * by {@code rewriteMappingConstructor}): each assignment's literal index becomes a key
 * mapped to the assigned expression. Non-literal (computed) keys are skipped.
 *
 * @param desugaredMappingConst statement expression produced for a mapping constructor
 * @return map from literal key strings to their value expressions
 */
private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) {
    Map<String, BLangExpression> keyValuePairs = new HashMap<>();
    List<BLangStatement> stmts = ((BLangBlockStmt) desugaredMappingConst.stmt).stmts;
    // Skip index 0: the first statement defines the temporary mapping variable itself.
    for (BLangStatement stmt : stmts.subList(1, stmts.size())) {
        BLangAssignment assignmentStmt = (BLangAssignment) stmt;
        BLangExpression indexExpr = ((BLangIndexBasedAccess) assignmentStmt.varRef).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            keyValuePairs.put((String) ((BLangLiteral) indexExpr).value, assignmentStmt.expr);
        }
    }
    return keyValuePairs;
}
} | List<BLangExpression> exprs = new ArrayList<>(); | public void visit(BLangFunction funcNode) {
    // Desugar the function in its own environment; non-interface functions are first
    // guaranteed to end with an explicit return.
    SymbolEnv funcEnv = SymbolEnv.createFunctionEnv(funcNode, funcNode.symbol.scope, env);
    if (!funcNode.interfaceFunction) {
        addReturnIfNotPresent(funcNode);
    }
    // Keep the original symbol and work on a duplicate for the rest of the desugar pass.
    funcNode.originalFuncSymbol = funcNode.symbol;
    funcNode.symbol = ASTBuilderUtil.duplicateInvokableSymbol(funcNode.symbol);
    funcNode.requiredParams = rewrite(funcNode.requiredParams, funcEnv);
    funcNode.restParam = rewrite(funcNode.restParam, funcEnv);
    funcNode.workers = rewrite(funcNode.workers, funcEnv);
    if (funcNode.returnTypeNode != null && funcNode.returnTypeNode.getKind() != null) {
        funcNode.returnTypeNode = rewrite(funcNode.returnTypeNode, funcEnv);
    }
    // Collect transaction-participant annotations before the body is rewritten.
    List<BLangAnnotationAttachment> participantAnnotation
            = funcNode.annAttachments.stream()
            .filter(a -> Transactions.isTransactionsAnnotation(a.pkgAlias.value,
                    a.annotationName.value))
            .collect(Collectors.toList());
    funcNode.body = rewrite(funcNode.body, funcEnv);
    funcNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    if (funcNode.returnTypeNode != null) {
        funcNode.returnTypeAnnAttachments.forEach(attachment -> rewrite(attachment, env));
    }
    if (participantAnnotation.isEmpty()) {
        // Not a transaction participant: no further desugaring needed.
        result = funcNode;
        return;
    }
    // Transaction participant: wrap the function body in participant machinery.
    result = desugarParticipantFunction(funcNode, participantAnnotation);
}
/**
 * Desugars a transaction-participant function: the original body is moved into a
 * wrapper lambda, and the function is rewritten to return the result of calling the
 * transaction module's participant-begin function with the transaction block id, the
 * participant lambda, and onCommit/onAbort handler lambdas.
 *
 * @param funcNode the annotated participant function
 * @param participantAnnotation participant annotation attachments (only the first is read)
 * @return the same function node with its body replaced by the participant invocation
 */
private BLangFunction desugarParticipantFunction(BLangFunction funcNode,
                                                 List<BLangAnnotationAttachment> participantAnnotation) {
    BLangAnnotationAttachment annotation = participantAnnotation.get(0);
    BLangBlockFunctionBody onCommitBody = null;
    BLangBlockFunctionBody onAbortBody = null;
    // Parameters (and the receiver, if any) are captured by the generated lambdas,
    // so mark their symbols as closure variables.
    funcNode.requiredParams.forEach(bLangSimpleVariable -> bLangSimpleVariable.symbol.closure = true);
    if (funcNode.receiver != null) {
        funcNode.receiver.symbol.closure = true;
    }
    BType trxReturnType = BUnionType.create(null, symTable.errorType, symTable.anyType);
    BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
    // Handler lambdas for commit/abort; both take a single string transaction-id parameter.
    BLangLambdaFunction commitFunc = createLambdaFunction(funcNode.pos, "$anonOnCommitFunc$",
            ASTBuilderUtil.createTypeNode(symTable.nilType));
    BLangLambdaFunction abortFunc = createLambdaFunction(funcNode.pos, "$anonOnAbortFunc$",
            ASTBuilderUtil.createTypeNode(symTable.nilType));
    BLangSimpleVariable onCommitTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                    new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                            symTable.stringType, commitFunc.function.symbol));
    BLangSimpleVariable onAbortTrxVar = ASTBuilderUtil
            .createVariable(funcNode.pos, "$trxId$0", symTable.stringType, null,
                    new BVarSymbol(0, names.fromString("$trxId$0"), this.env.scope.owner.pkgID,
                            symTable.stringType, abortFunc.function.symbol));
    BLangSimpleVarRef trxIdOnCommitRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onCommitTrxVar.symbol);
    BLangSimpleVarRef trxIdOnAbortRef = ASTBuilderUtil.createVariableRef(funcNode.pos, onAbortTrxVar.symbol);
    // Pick up user-provided onCommit/onAbort handler functions from the annotation record
    // and build handler bodies that delegate to them.
    for (Map.Entry<String, BLangExpression> entry :
            getKeyValuePairs((BLangStatementExpression) annotation.expr).entrySet()) {
        switch (entry.getKey()) {
            case Transactions.TRX_ONCOMMIT_FUNC:
                BInvokableSymbol commitSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onCommit = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, commitSym, Lists.of(trxIdOnCommitRef),
                                Collections.emptyList(), symResolver);
                BLangStatement onCommitStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommit);
                onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onCommitStmt));
                break;
            case Transactions.TRX_ONABORT_FUNC:
                BInvokableSymbol abortSym = (BInvokableSymbol) ((BLangSimpleVarRef) entry.getValue()).symbol;
                BLangInvocation onAbort = ASTBuilderUtil
                        .createInvocationExprMethod(funcNode.pos, abortSym, Lists.of(trxIdOnAbortRef),
                                Collections.emptyList(), symResolver);
                BLangStatement onAbortStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbort);
                onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(onAbortStmt));
                break;
        }
    }
    // Default handler bodies: simply return ().
    if (onCommitBody == null) {
        onCommitBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onCommitBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    if (onAbortBody == null) {
        onAbortBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, onAbortBody);
        returnStmt.expr = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.nilType, Names.NIL_VALUE);
    }
    // Wire up bodies, parameter lists, and (string) -> () function types for both handlers.
    commitFunc.function.body = onCommitBody;
    commitFunc.function.requiredParams.add(onCommitTrxVar);
    commitFunc.type = new BInvokableType(Lists.of(onCommitTrxVar.symbol.type),
            commitFunc.function.symbol.type.getReturnType(), null);
    commitFunc.function.symbol.type = commitFunc.type;
    commitFunc.function.symbol.params = Lists.of(onCommitTrxVar.symbol);
    abortFunc.function.body = onAbortBody;
    abortFunc.function.requiredParams.add(onAbortTrxVar);
    abortFunc.type = new BInvokableType(Lists.of(onAbortTrxVar.symbol.type),
            abortFunc.function.symbol.type.getReturnType(), null);
    abortFunc.function.symbol.type = abortFunc.type;
    abortFunc.function.symbol.params = Lists.of(onAbortTrxVar.symbol);
    // Locate the transactions module among the package imports and resolve the
    // participant-begin function (local vs remote, depending on the function's flags).
    BSymbol trxModSym = env.enclPkg.imports
            .stream()
            .filter(importPackage -> importPackage.symbol.
                    pkgID.toString().equals(Names.TRANSACTION_ORG.value + Names.ORG_NAME_SEPARATOR.value
                            + Names.TRANSACTION_PACKAGE.value))
            .findAny().get().symbol;
    BInvokableSymbol invokableSymbol =
            (BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(trxModSym),
                    getParticipantFunctionName(funcNode));
    BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(funcNode.pos, symTable.stringType,
            getTransactionBlockId());
    // Wrap the original function body in a lambda that closes over the parameters.
    BLangLambdaFunction trxMainWrapperFunc = createLambdaFunction(funcNode.pos, "$anonTrxWrapperFunc$",
            Collections.emptyList(),
            funcNode.returnTypeNode,
            funcNode.body);
    for (BLangSimpleVariable var : funcNode.requiredParams) {
        trxMainWrapperFunc.function.closureVarSymbols.add(new ClosureVarSymbol(var.symbol, var.pos));
    }
    // The participant lambda defines the wrapper locally, invokes it, and returns the
    // result converted to error|any.
    BLangBlockFunctionBody trxMainBody = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos);
    BLangLambdaFunction trxMainFunc
            = createLambdaFunction(funcNode.pos, "$anonTrxParticipantFunc$", Collections.emptyList(),
            trxReturnNode, trxMainBody);
    trxMainWrapperFunc.capturedClosureEnv = trxMainFunc.function.clonedEnv;
    commitFunc.capturedClosureEnv = env.createClone();
    abortFunc.capturedClosureEnv = env.createClone();
    BVarSymbol wrapperSym = new BVarSymbol(0, names.fromString("$wrapper$1"), this.env.scope.owner.pkgID,
            trxMainWrapperFunc.type, trxMainFunc.function.symbol);
    BLangSimpleVariable wrapperFuncVar = ASTBuilderUtil.createVariable(funcNode.pos, "$wrapper$1",
            trxMainWrapperFunc.type, trxMainWrapperFunc,
            wrapperSym);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(funcNode.pos, trxMainBody);
    variableDef.var = wrapperFuncVar;
    BLangSimpleVarRef wrapperVarRef = rewrite(ASTBuilderUtil.createVariableRef(variableDef.pos,
            wrapperFuncVar.symbol), env);
    BLangInvocation wrapperInvocation = new BFunctionPointerInvocation(trxMainWrapperFunc.pos, wrapperVarRef,
            wrapperFuncVar.symbol,
            trxMainWrapperFunc.function.symbol.retType);
    BLangReturn wrapperReturn = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (wrapperInvocation, trxReturnNode.type));
    trxMainWrapperFunc.function.receiver = funcNode.receiver;
    trxMainFunc.function.receiver = funcNode.receiver;
    trxMainBody.stmts.add(wrapperReturn);
    rewrite(trxMainFunc.function, env);
    // Finally, replace the function body with a single return of the participant call:
    // return <retType> beginParticipant(blockId, trxMainFunc, commitFunc, abortFunc);
    List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, trxMainFunc, commitFunc, abortFunc);
    BLangInvocation participantInvocation
            = ASTBuilderUtil.createInvocationExprMethod(funcNode.pos, invokableSymbol, requiredArgs,
            Collections.emptyList(), symResolver);
    participantInvocation.type = ((BInvokableType) invokableSymbol.type).retType;
    BLangStatement stmt = ASTBuilderUtil.createReturnStmt(funcNode.pos, addConversionExprIfRequired
            (participantInvocation, funcNode.symbol.retType));
    funcNode.body = ASTBuilderUtil.createBlockFunctionBody(funcNode.pos, Lists.of(rewrite(stmt, env)));
    return funcNode;
}
/**
 * Picks the transaction participant-begin function name for the given function:
 * the remote variant for resource functions, the local variant otherwise.
 *
 * @param function the participant function
 * @return the name of the participant-begin function to invoke
 */
private Name getParticipantFunctionName(BLangFunction function) {
    boolean isResource = Symbols.isFlagOn(function.symbol.flags, Flags.RESOURCE);
    return isResource ? TRX_REMOTE_PARTICIPANT_BEGIN_FUNCTION : TRX_LOCAL_PARTICIPANT_BEGIN_FUNCTION;
}
@Override
public void visit(BLangResource resourceNode) {
    // Intentionally a no-op: resource nodes require no desugaring at this level.
}
/**
 * Desugars an annotation declaration by rewriting each of its attachments in the
 * current environment. The node itself is left as-is.
 */
public void visit(BLangAnnotation annotationNode) {
    for (BLangAnnotationAttachment attachment : annotationNode.annAttachments) {
        rewrite(attachment, env);
    }
}
public void visit(BLangAnnotationAttachment annAttachmentNode) {
    // Rewrite the attachment's value expression (if any) in the current environment.
    annAttachmentNode.expr = rewrite(annAttachmentNode.expr, env);
    result = annAttachmentNode;
}
/**
 * Desugars a simple variable. Variables not owned by an invokable or a let scope
 * have their initializer dropped here (only the declaration remains at this level);
 * for all other variables the type node, initializer (with a conversion inserted if
 * required), and annotation attachments are rewritten.
 */
@Override
public void visit(BLangSimpleVariable varNode) {
    boolean ownedByInvokable = (varNode.symbol.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE;
    boolean ownedByLet = (varNode.symbol.owner.tag & SymTag.LET) == SymTag.LET;
    if (!ownedByInvokable && !ownedByLet) {
        // Drop the initializer; only the declaration itself is kept here.
        varNode.expr = null;
        result = varNode;
        return;
    }
    if (varNode.typeNode != null && varNode.typeNode.getKind() != null) {
        varNode.typeNode = rewrite(varNode.typeNode, env);
    }
    BLangExpression rewrittenExpr = rewriteExpr(varNode.expr);
    if (rewrittenExpr != null) {
        rewrittenExpr = addConversionExprIfRequired(rewrittenExpr, varNode.type);
    }
    varNode.expr = rewrittenExpr;
    for (BLangAnnotationAttachment attachment : varNode.annAttachments) {
        rewrite(attachment, env);
    }
    result = varNode;
}
@Override
public void visit(BLangLetExpression letExpression) {
    // Desugar a let expression into a block of variable definitions followed by a
    // temporary variable holding the let body, wrapped in a statement expression.
    SymbolEnv prevEnv = this.env;
    // Evaluate the declarations in the let expression's own environment.
    this.env = letExpression.env;
    BLangExpression expr = letExpression.expr;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(letExpression.pos);
    for (BLangLetVariable letVariable : letExpression.letVarDeclarations) {
        BLangNode node = rewrite((BLangNode) letVariable.definitionNode, env);
        if (node.getKind() == NodeKind.BLOCK) {
            // A declaration may itself desugar to a block (e.g. destructuring);
            // splice its statements in directly.
            blockStmt.stmts.addAll(((BLangBlockStmt) node).stmts);
        } else {
            blockStmt.addStatement((BLangSimpleVariableDef) node);
        }
    }
    BLangSimpleVariableDef tempVarDef = createVarDef(String.format("$let_var_%d_$", letCount++),
            expr.type, expr, expr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.type = expr.type;
    result = rewrite(stmtExpr, env);
    // Restore the enclosing environment.
    this.env = prevEnv;
}
@Override
public void visit(BLangTupleVariable varNode) {
    // Pass-through: tuple variables are desugared as part of their enclosing
    // definition (see visit(BLangTupleVariableDef)).
    result = varNode;
}
@Override
public void visit(BLangRecordVariable varNode) {
    // Pass-through: record variables are desugared as part of their enclosing
    // definition (see visit(BLangRecordVariableDef)).
    result = varNode;
}
@Override
public void visit(BLangErrorVariable varNode) {
    // Pass-through: error variables are desugared as part of their enclosing
    // definition (see visit(BLangErrorVariableDef)).
    result = varNode;
}
/**
 * Desugars a block statement by rewriting its statements in a fresh environment
 * scoped to the block.
 */
@Override
public void visit(BLangBlockStmt block) {
    SymbolEnv blockScopedEnv = SymbolEnv.createBlockEnv(block, env);
    block.stmts = rewriteStmt(block.stmts, blockScopedEnv);
    result = block;
}
@Override
public void visit(BLangSimpleVariableDef varDefNode) {
    // Desugar the contained variable; the definition node itself is reused.
    varDefNode.var = rewrite(varDefNode.var, env);
    result = varDefNode;
}
@Override
public void visit(BLangTupleVariableDef varDefNode) {
    // Desugar a tuple binding `(a, b, ...rest) = expr` into a block:
    //   any[] tuple = expr;
    // followed by one variable definition per member reading its slot from `tuple`,
    // and the statements populating the rest binding (if present).
    varDefNode.var = rewrite(varDefNode.var, env);
    BLangTupleVariable tupleVariable = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    // Runtime representation of the tuple value: an array of `any`.
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(varDefNode.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    tuple.expr = tupleVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = tuple;
    createVarDefStmts(tupleVariable, blockStmt, tuple.symbol, null);
    createRestFieldVarDefStmts(tupleVariable, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}
/**
 * Generates the statements that populate the rest binding ({@code ...rest}) of a
 * tuple variable: defines the rest array initialized to an empty array literal, then
 * a foreach over the remaining source indices appending each member to the array.
 * Does nothing when the tuple variable has no rest binding.
 *
 * @param parentTupleVariable the tuple variable being destructured
 * @param blockStmt block the generated statements are appended to
 * @param tupleVarSymbol symbol of the runtime array holding the tuple value
 */
private void createRestFieldVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt blockStmt,
                                        BVarSymbol tupleVarSymbol) {
    final BLangSimpleVariable arrayVar = (BLangSimpleVariable) parentTupleVariable.restVariable;
    boolean isTupleType = parentTupleVariable.type.tag == TypeTags.TUPLE;
    DiagnosticPos pos = blockStmt.pos;
    if (arrayVar != null) {
        // rest = []
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = arrayVar.type;
        arrayVar.expr = arrayExpr;
        BLangSimpleVariableDef arrayVarDef = ASTBuilderUtil.createVariableDefStmt(arrayVar.pos, blockStmt);
        arrayVarDef.var = arrayVar;
        BLangExpression tupleExpr = parentTupleVariable.expr;
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, arrayVar.symbol);
        // The rest binding starts after the explicitly bound members.
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) (isTupleType ? ((BTupleType) parentTupleVariable.type).tupleTypes.size()
                : parentTupleVariable.memberVariables.size());
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        // foreach over startIndex ... length - 1 of the source value.
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // rest[rest.length()] = tuple[$foreach$i]  (appends to the rest array)
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(arrayVarRef,
                createLengthInvocation(pos, arrayVarRef));
        indexAccessExpr.type = (isTupleType ? ((BTupleType) parentTupleVariable.type).restType : symTable.anyType);
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
@Override
public void visit(BLangRecordVariableDef varDefNode) {
    // Desugar a record binding `{a, b} = expr` into a block:
    //   map<any> $map$0 = expr;
    // followed by one variable definition per field reading from `$map$0`.
    BLangRecordVariable varNode = varDefNode.var;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    // Runtime representation of the record value: a map of `any`.
    BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
    final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(varDefNode.pos, "$map$0", runTimeType,
            null, new BVarSymbol(0, names.fromString("$map$0"), this.env.scope.owner.pkgID,
                    runTimeType, this.env.scope.owner));
    mapVariable.expr = varDefNode.var.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = mapVariable;
    createVarDefStmts(varNode, blockStmt, mapVariable.symbol, null);
    result = rewrite(blockStmt, env);
}
@Override
public void visit(BLangErrorVariableDef varDefNode) {
    // Desugar an error binding `error(reason, detail) = expr` into a block:
    //   error $error$ = expr;
    // followed by variable definitions extracting the bound parts from `$error$`.
    BLangErrorVariable errorVariable = varDefNode.errorVariable;
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(varDefNode.pos);
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString("$error$"),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    final BLangSimpleVariable error = ASTBuilderUtil.createVariable(varDefNode.pos, errorVarSymbol.name.value,
            symTable.errorType, null, errorVarSymbol);
    error.expr = errorVariable.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(varDefNode.pos, blockStmt);
    variableDef.var = error;
    createVarDefStmts(errorVariable, blockStmt, error.symbol, null);
    result = rewrite(blockStmt, env);
}
/**
* This method iterates through each member of the tupleVar and creates the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* ((string, float) int)) ((a, b), c)) = (tuple)
*
* (a, b) is again a tuple, so it is a recursive var def creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
private void createVarDefStmts(BLangTupleVariable parentTupleVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    final List<BLangVariable> memberVars = parentTupleVariable.memberVariables;
    // One definition (or recursive expansion) per member, indexed by position.
    for (int index = 0; index < memberVars.size(); index++) {
        BLangVariable variable = memberVars.get(index);
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.intType, (long) index);
        // Simple binding: a single var def reading tuple[index].
        if (NodeKind.VARIABLE == variable.getKind()) {
            createSimpleVarDefStmt((BLangSimpleVariable) variable, parentBlockStmt, indexExpr, tupleVarSymbol,
                    parentIndexAccessExpr);
            continue;
        }
        // Nested tuple binding: recurse with an index access chained onto the
        // parent access expression (if any).
        if (variable.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) variable;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested record binding: same recursion, accessed as a map.
        if (variable.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested error binding: when the bound symbol is an array, index
        // access yields its element type; otherwise default to error.
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BType accessedElemType = symTable.errorType;
            if (tupleVarSymbol.type.tag == TypeTags.ARRAY) {
                BArrayType arrayType = (BArrayType) tupleVarSymbol.type;
                accessedElemType = arrayType.eType;
            }
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentTupleVariable.pos, accessedElemType, tupleVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, tupleVarSymbol, arrayAccessExpr);
        }
    }
}
/**
* Overloaded method to handle record variables.
* This method iterate through each member of the recordVar and create the relevant var def statements. This method
* does the check for node kind of each member and call the related var def creation method.
*
* Example:
* type Foo record {
* string name;
* (int, string) age;
* Address address;
* };
*
* Foo {name: a, age: (b, c), address: d} = {record literal}
*
* a is a simple var, so a simple var def will be created.
*
* (b, c) is a tuple, so it is a recursive var def creation.
*
* d is a record, so it is a recursive var def creation.
*
*/
private void createVarDefStmts(BLangRecordVariable parentRecordVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
    List<BLangRecordVariableKeyValue> variableList = parentRecordVariable.variableList;
    // One definition (or recursive expansion) per bound field, keyed by name.
    for (BLangRecordVariableKeyValue recordFieldKeyValue : variableList) {
        BLangVariable variable = recordFieldKeyValue.valueBindingPattern;
        BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variable.pos, symTable.stringType,
                recordFieldKeyValue.key.value);
        // Simple binding: a single var def reading record[fieldName].
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.VARIABLE) {
            createSimpleVarDefStmt((BLangSimpleVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    indexExpr, recordVarSymbol, parentIndexAccessExpr);
            continue;
        }
        // Nested tuple binding: recurse with a chained index access.
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.TUPLE_VARIABLE) {
            BLangTupleVariable tupleVariable = (BLangTupleVariable) recordFieldKeyValue.valueBindingPattern;
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                    new BArrayType(symTable.anyType), recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangTupleVariable) recordFieldKeyValue.valueBindingPattern,
                    parentBlockStmt, recordVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested record binding: recurse, accessed as a map.
        if (recordFieldKeyValue.valueBindingPattern.getKind() == NodeKind.RECORD_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, symTable.mapType, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangRecordVariable) recordFieldKeyValue.valueBindingPattern, parentBlockStmt,
                    recordVarSymbol, arrayAccessExpr);
            continue;
        }
        // Nested error binding.
        if (variable.getKind() == NodeKind.ERROR_VARIABLE) {
            BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                    parentRecordVariable.pos, variable.type, recordVarSymbol, indexExpr);
            if (parentIndexAccessExpr != null) {
                arrayAccessExpr.expr = parentIndexAccessExpr;
            }
            createVarDefStmts((BLangErrorVariable) variable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
        }
    }
    // Rest binding (`...rest`): filter the explicitly bound keys out of the
    // source map and assign the remainder to the rest variable.
    if (parentRecordVariable.restParam != null) {
        DiagnosticPos pos = parentBlockStmt.pos;
        BMapType restParamType = (BMapType) ((BLangVariable) parentRecordVariable.restParam).type;
        BLangSimpleVarRef variableReference;
        if (parentIndexAccessExpr != null) {
            // Nested pattern: capture the accessed sub-map into "$map$1" first.
            BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1",
                    parentIndexAccessExpr.type, null, new BVarSymbol(0, names.fromString("$map$1"),
                    this.env.scope.owner.pkgID, parentIndexAccessExpr.type, this.env.scope.owner));
            mapVariable.expr = parentIndexAccessExpr;
            BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
            variableDef.var = mapVariable;
            variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
        } else {
            // Top-level pattern: the first statement of the block is the temp
            // map variable definition created by the caller.
            variableReference = ASTBuilderUtil.createVariableRef(pos,
                    ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
        }
        List<String> keysToRemove = parentRecordVariable.variableList.stream()
                .map(var -> var.getKey().getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                keysToRemove, restParamType, parentBlockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
        BLangSimpleVariable restParam = (BLangSimpleVariable) parentRecordVariable.restParam;
        BLangSimpleVariableDef restParamVarDef = ASTBuilderUtil.createVariableDefStmt(pos,
                parentBlockStmt);
        restParamVarDef.var = restParam;
        restParamVarDef.var.type = restParamType;
        restParam.expr = varRef;
    }
}
/**
* This method will create the relevant var def statements for reason and details of the error variable.
* The var def statements are created by creating the reason() and detail() builtin methods.
*/
/**
 * Creates the var def statements for the reason and detail bindings of an error
 * binding pattern, using the error {@code reason()} and {@code detail()} lang-lib
 * functions on the bound error value.
 */
private void createVarDefStmts(BLangErrorVariable parentErrorVariable, BLangBlockStmt parentBlockStmt,
                               BVarSymbol errorVariableSymbol, BLangIndexBasedAccess parentIndexBasedAccess) {
    BVarSymbol convertedErrorVarSymbol;
    if (parentIndexBasedAccess != null) {
        // Nested pattern: materialize the accessed element into a fresh
        // error-typed temp var, converting from the container's element type.
        BType prevType = parentIndexBasedAccess.type;
        parentIndexBasedAccess.type = symTable.anyType;
        BLangSimpleVariableDef errorVarDef = createVarDef("$error$" + errorCount++,
                symTable.errorType,
                addConversionExprIfRequired(parentIndexBasedAccess, symTable.errorType),
                parentErrorVariable.pos);
        parentIndexBasedAccess.type = prevType;
        parentBlockStmt.addStatement(errorVarDef);
        convertedErrorVarSymbol = errorVarDef.var.symbol;
    } else {
        convertedErrorVarSymbol = errorVariableSymbol;
    }
    parentErrorVariable.reason.expr = generateErrorReasonBuiltinFunction(parentErrorVariable.reason.pos,
            parentErrorVariable.reason.type, convertedErrorVarSymbol, null);
    if (names.fromIdNode((parentErrorVariable.reason).name) == Names.IGNORE) {
        // Reason bound to '_': drop the binding entirely.
        parentErrorVariable.reason = null;
    } else {
        BLangSimpleVariableDef reasonVariableDef =
                ASTBuilderUtil.createVariableDefStmt(parentErrorVariable.reason.pos, parentBlockStmt);
        reasonVariableDef.var = parentErrorVariable.reason;
    }
    if ((parentErrorVariable.detail == null || parentErrorVariable.detail.isEmpty())
            && parentErrorVariable.restDetail == null) {
        return;
    }
    // NOTE: a detailMapType/detailType computation previously lived here; its
    // result was never read (dead locals), so it has been removed.
    parentErrorVariable.detailExpr = generateErrorDetailBuiltinFunction(
            parentErrorVariable.pos,
            convertedErrorVarSymbol, null);
    // Capture the detail() result once in "$error$detail" so each detail
    // binding reads from the same temp variable.
    BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail",
            parentErrorVariable.detailExpr.type, parentErrorVariable.detailExpr, parentErrorVariable.pos);
    detailTempVarDef.type = parentErrorVariable.detailExpr.type;
    parentBlockStmt.addStatement(detailTempVarDef);
    this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : parentErrorVariable.detail) {
        // Each detail entry becomes an index access on the detail temp var,
        // then a definition according to its binding-pattern kind.
        BLangExpression detailEntryVar = createErrorDetailVar(detailEntry, detailTempVarDef.var.symbol);
        createAndAddBoundVariableDef(parentBlockStmt, detailEntry, detailEntryVar);
    }
    if (parentErrorVariable.restDetail != null && !parentErrorVariable.restDetail.name.value.equals(IGNORE.value)) {
        // Rest binding: filter the explicitly bound keys out of the detail map
        // and assign the remainder to the rest variable.
        DiagnosticPos pos = parentErrorVariable.restDetail.pos;
        BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(
                pos, detailTempVarDef.var.symbol);
        List<String> keysToRemove = parentErrorVariable.detail.stream()
                .map(detail -> detail.key.getValue())
                .collect(Collectors.toList());
        BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVariable.pos, keysToRemove,
                parentErrorVariable.restDetail.type, parentBlockStmt);
        BLangSimpleVariableDef variableDefStmt = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
        variableDefStmt.var = ASTBuilderUtil.createVariable(pos,
                parentErrorVariable.restDetail.name.value,
                filteredDetail.type,
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol),
                parentErrorVariable.restDetail.symbol);
        BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(pos,
                ASTBuilderUtil.createVariableRef(pos, parentErrorVariable.restDetail.symbol),
                ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol));
        parentBlockStmt.addStatement(assignmentStmt);
    }
    rewrite(parentBlockStmt, env);
}
private BLangSimpleVariableDef forceCastIfApplicable(BVarSymbol errorVarySymbol, DiagnosticPos pos,
                                                     BType targetType) {
    // Temporary symbol that will hold the (possibly converted) value.
    BVarSymbol castTempSym = new BVarSymbol(Flags.PUBLIC, names.fromString("$cast$temp$"),
            this.env.enclPkg.packageID, targetType, this.env.scope.owner);
    BLangSimpleVarRef sourceRef = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    // Record targets are used as-is; every other target type gets a conversion
    // expression when one is required.
    BLangExpression initExpr = targetType.tag == TypeTags.RECORD
            ? sourceRef
            : addConversionExprIfRequired(sourceRef, targetType);
    BLangSimpleVariable castVar = ASTBuilderUtil.createVariable(pos, castTempSym.name.value, targetType,
            initExpr, castTempSym);
    return ASTBuilderUtil.createVariableDef(pos, castVar);
}
/**
 * Generates the statements that compute a "rest" map from {@code mapVarRef} by
 * removing the entries whose keys appear in {@code keysToRemove}:
 * cast to target type -> entries() -> filter(keys) -> map(back to values)
 * -> constructFrom(target type). Each intermediate result is captured in its
 * own temp variable appended to {@code parentBlockStmt}; the final variable
 * holding the filtered map is returned.
 */
private BLangSimpleVariable generateRestFilter(BLangSimpleVarRef mapVarRef, DiagnosticPos pos,
                                               List<String> keysToRemove, BType targetType,
                                               BLangBlockStmt parentBlockStmt) {
    BLangExpression typeCastExpr = addConversionExprIfRequired(mapVarRef, targetType);
    // Unique suffix so the temp names of different expansions do not collide.
    int restNum = annonVarCount++;
    String name = "$map$ref$" + restNum;
    BLangSimpleVariable mapVariable = defVariable(pos, targetType, parentBlockStmt, typeCastExpr, name);
    // entries() yields a map of (key, value) tuples.
    BLangInvocation entriesInvocation = generateMapEntriesInvocation(
            ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol), typeCastExpr.type);
    String entriesVarName = "$map$ref$entries$" + restNum;
    BType entriesType = new BMapType(TypeTags.MAP,
            new BTupleType(Arrays.asList(symTable.stringType, ((BMapType) targetType).constraint)), null);
    BLangSimpleVariable entriesInvocationVar = defVariable(pos, entriesType, parentBlockStmt,
            addConversionExprIfRequired(entriesInvocation, entriesType),
            entriesVarName);
    // filter() with a generated lambda that rejects the bound keys.
    BLangLambdaFunction filter = createFuncToFilterOutRestParam(keysToRemove, pos);
    BLangInvocation filterInvocation = generateMapFilterInvocation(pos, entriesInvocationVar, filter);
    String filteredEntriesName = "$filtered$detail$entries" + restNum;
    BLangSimpleVariable filteredVar = defVariable(pos, entriesType, parentBlockStmt, filterInvocation,
            filteredEntriesName);
    String filteredVarName = "$detail$filtered" + restNum;
    // map() with a generated lambda that projects the tuple back to its value.
    BLangLambdaFunction backToMapLambda = generateEntriesToMapLambda(pos);
    BLangInvocation mapInvocation = generateMapMapInvocation(pos, filteredVar, backToMapLambda);
    BLangSimpleVariable filtered = defVariable(pos, targetType, parentBlockStmt,
            mapInvocation,
            filteredVarName);
    String filteredRestVarName = "$restVar$" + restNum;
    // constructFrom() materializes the result as the requested target type.
    BLangInvocation constructed = generateConstructFromInvocation(pos, targetType, filtered.symbol);
    return defVariable(pos, targetType, parentBlockStmt,
            addConversionExprIfRequired(constructed, targetType),
            filteredRestVarName);
}
private BLangInvocation generateMapEntriesInvocation(BLangExpression expr, BType type) {
    // Build an `expr.entries()` lang-lib invocation on the given expression.
    BLangInvocation entriesCall = createInvocationNode("entries", new ArrayList<>(), type);
    entriesCall.symbol = symResolver.lookupLangLibMethod(type, names.fromString("entries"));
    entriesCall.expr = expr;
    entriesCall.requiredArgs = Lists.of(expr);
    entriesCall.langLibInvocation = true;
    entriesCall.type = entriesCall.symbol.type.getReturnType();
    return entriesCall;
}
private BLangInvocation generateMapMapInvocation(DiagnosticPos pos, BLangSimpleVariable filteredVar,
                                                 BLangLambdaFunction backToMapLambda) {
    // Build `filteredVar.map(backToMapLambda)` as a lang-lib invocation.
    BLangInvocation mapCall = createInvocationNode("map", new ArrayList<>(), filteredVar.type);
    mapCall.symbol = symResolver.lookupLangLibMethod(filteredVar.type, names.fromString("map"));
    mapCall.expr = ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol);
    mapCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, filteredVar.symbol));
    mapCall.type = mapCall.symbol.type.getReturnType();
    // The transformation lambda is appended after the receiver argument.
    mapCall.requiredArgs.add(backToMapLambda);
    return mapCall;
}
/**
 * Generates an anonymous function {@code function ((string, any) entry) returns any}
 * that returns the second element (the value) of a map-entry tuple. Used as the
 * projection lambda for the map() step of the rest-field filter pipeline.
 */
private BLangLambdaFunction generateEntriesToMapLambda(DiagnosticPos pos) {
    String anonfuncName = "$anonGetValFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    // Single (string, any) tuple parameter carrying the entry.
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
    BLangSimpleVariable inputParameter = ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(),
            null, keyValSymbol);
    function.requiredParams.add(inputParameter);
    BLangValueType anyType = new BLangValueType();
    anyType.typeKind = TypeKind.ANY;
    anyType.type = symTable.anyType;
    function.returnTypeNode = anyType;
    BLangBlockFunctionBody functionBlock = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = functionBlock;
    // Body: `var val = entry[1]; return val;`
    BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol,
                    ASTBuilderUtil
                            .createLiteral(pos, symTable.intType, (long) 1));
    BLangSimpleVariableDef tupSecondElem = createVarDef("val", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupSecondElem);
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    returnStmt.expr = ASTBuilderUtil.createVariableRef(pos, tupSecondElem.var.symbol);
    // Build and wire the invokable symbol for the generated function.
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value), env.enclPkg.packageID, function.type, env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            symTable.anyType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    // Register the generated function at package level so it gets emitted.
    env.enclPkg.addFunction(function);
    return createLambdaFunction(function, functionSymbol);
}
private BLangInvocation generateMapFilterInvocation(DiagnosticPos pos,
                                                    BLangSimpleVariable entriesInvocationVar,
                                                    BLangLambdaFunction filter) {
    // Build `entriesVar.filter(filterLambda)` as a lang-lib invocation.
    BLangInvocation filterCall = createInvocationNode("filter", new ArrayList<>(), entriesInvocationVar.type);
    filterCall.symbol = symResolver.lookupLangLibMethod(entriesInvocationVar.type, names.fromString("filter"));
    filterCall.expr = ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol);
    filterCall.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, entriesInvocationVar.symbol));
    filterCall.type = filterCall.symbol.type.getReturnType();
    // The predicate lambda is appended after the receiver argument.
    filterCall.requiredArgs.add(filter);
    return filterCall;
}
private BLangSimpleVariable defVariable(DiagnosticPos pos, BType varType, BLangBlockStmt parentBlockStmt,
                                        BLangExpression expression, String name) {
    // Create the variable backed by a public symbol owned by the current scope.
    Name symName = names.fromString(name);
    BVarSymbol symbol = new BVarSymbol(Flags.PUBLIC, symName, env.enclPkg.packageID, varType, env.scope.owner);
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(pos, name, varType, expression, symbol);
    // Append the definition to the enclosing block and register the symbol.
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos, variable);
    varDef.type = varType;
    parentBlockStmt.addStatement(varDef);
    env.scope.define(symName, variable.symbol);
    return variable;
}
private void createAndAddBoundVariableDef(BLangBlockStmt parentBlockStmt,
                                          BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                          BLangExpression detailEntryVar) {
    // Dispatch on the kind of binding pattern attached to this detail entry.
    switch (detailEntry.valueBindingPattern.getKind()) {
        case VARIABLE:
            BLangSimpleVariableDef simpleVarDef = createVarDef(
                    ((BLangSimpleVariable) detailEntry.valueBindingPattern).name.value,
                    detailEntry.valueBindingPattern.type,
                    detailEntryVar,
                    detailEntry.valueBindingPattern.pos);
            parentBlockStmt.addStatement(simpleVarDef);
            break;
        case RECORD_VARIABLE:
            BLangRecordVariableDef recordVarDef = ASTBuilderUtil.createRecordVariableDef(
                    detailEntry.valueBindingPattern.pos,
                    (BLangRecordVariable) detailEntry.valueBindingPattern);
            recordVarDef.var.expr = detailEntryVar;
            recordVarDef.type = symTable.recordType;
            parentBlockStmt.addStatement(recordVarDef);
            break;
        case TUPLE_VARIABLE:
            BLangTupleVariableDef tupleVarDef = ASTBuilderUtil.createTupleVariableDef(
                    detailEntry.valueBindingPattern.pos, (BLangTupleVariable) detailEntry.valueBindingPattern);
            parentBlockStmt.addStatement(tupleVarDef);
            break;
        default:
            // Other binding-pattern kinds are not handled here; nothing is added
            // (same as the original if-chain falling through).
            break;
    }
}
/**
 * Builds the expression that reads one detail entry from the detail temp
 * variable, i.e. an index access {@code $error$detail[key]} typed after the
 * entry's binding pattern.
 */
private BLangExpression createErrorDetailVar(BLangErrorVariable.BLangErrorDetailEntry detailEntry,
                                             BVarSymbol tempDetailVarSymbol) {
    BLangExpression detailEntryVar = createIndexBasedAccessExpr(
            detailEntry.valueBindingPattern.type,
            detailEntry.valueBindingPattern.pos,
            createStringLiteral(detailEntry.key.pos, detailEntry.key.value),
            tempDetailVarSymbol, null);
    if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Record the pre-conversion type of the access for later stages.
        BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
        bLangIndexBasedAccess.originalType = symTable.pureType;
    }
    return detailEntryVar;
}
/**
 * Folds the parts of a string template into a left-associative chain of ADD
 * binary expressions. Parts that are neither string nor XML are first wrapped
 * in a toString() invocation. Returns null when {@code exprs} is empty.
 */
private BLangExpression constructStringTemplateConcatExpression(List<BLangExpression> exprs) {
    BLangExpression concatExpr = null;
    BLangExpression currentExpr;
    for (BLangExpression expr : exprs) {
        currentExpr = expr;
        if (expr.type.tag != TypeTags.STRING && expr.type.tag != TypeTags.XML) {
            currentExpr = getToStringInvocationOnExpr(expr);
        }
        if (concatExpr == null) {
            // First part seeds the accumulator.
            concatExpr = currentExpr;
            continue;
        }
        // If either operand is XML the concatenation result is XML,
        // otherwise it is a string.
        BType binaryExprType =
                TypeTags.isXMLTypeTag(concatExpr.type.tag) || TypeTags.isXMLTypeTag(currentExpr.type.tag)
                        ? symTable.xmlType
                        : symTable.stringType;
        concatExpr =
                ASTBuilderUtil.createBinaryExpr(concatExpr.pos, concatExpr, currentExpr,
                        binaryExprType, OperatorKind.ADD, null);
    }
    return concatExpr;
}
/**
 * Wraps {@code expression} in an invocation of the lang.value toString
 * function, converting the argument to the function's parameter type first
 * when required.
 */
private BLangInvocation getToStringInvocationOnExpr(BLangExpression expression) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langValueModuleSymbol.scope
            .lookup(names.fromString(TO_STRING_FUNCTION_NAME)).symbol;
    // Avoid double-brace initialization: it creates an anonymous inner class
    // that retains a hidden reference to the enclosing instance.
    List<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(addConversionExprIfRequired(expression, symbol.params.get(0).type));
    return ASTBuilderUtil.createInvocationExprMethod(expression.pos, symbol, requiredArgs, new ArrayList<>(),
            symResolver);
}
private BLangInvocation generateErrorDetailBuiltinFunction(DiagnosticPos pos, BVarSymbol errorVarySymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    // Invoke detail() either on the parent access expression (nested pattern)
    // or directly on a reference to the error variable.
    BLangExpression receiver;
    if (parentIndexBasedAccess != null) {
        receiver = parentIndexBasedAccess;
    } else {
        receiver = ASTBuilderUtil.createVariableRef(pos, errorVarySymbol);
    }
    return createLangLibInvocationNode(ERROR_DETAIL_FUNCTION_NAME, receiver, new ArrayList<>(), null, pos);
}
private BLangInvocation generateErrorReasonBuiltinFunction(DiagnosticPos pos, BType reasonType,
                                                           BVarSymbol errorVarSymbol,
                                                           BLangIndexBasedAccess parentIndexBasedAccess) {
    // Invoke reason() either on the parent access expression (nested pattern)
    // or directly on a reference to the error variable.
    BLangExpression receiver;
    if (parentIndexBasedAccess != null) {
        receiver = parentIndexBasedAccess;
    } else {
        receiver = ASTBuilderUtil.createVariableRef(pos, errorVarSymbol);
    }
    return createLangLibInvocationNode(ERROR_REASON_FUNCTION_NAME, receiver, new ArrayList<>(), reasonType, pos);
}
/**
 * Builds a {@code targetType.constructFrom(source)} lang-lib invocation whose
 * receiver is a typedesc expression for {@code targetType}. The invocation's
 * static type is {@code targetType|error}, mirroring constructFrom's return.
 */
private BLangInvocation generateConstructFromInvocation(DiagnosticPos pos,
                                                        BType targetType,
                                                        BVarSymbol source) {
    BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
    BLangInvocation invocationNode = createInvocationNode(CONSTRUCT_FROM, new ArrayList<>(), typedescType);
    // The receiver is the typedesc of the target type.
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = targetType;
    typedescExpr.type = typedescType;
    invocationNode.expr = typedescExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(typedescType, names.fromString(CONSTRUCT_FROM));
    invocationNode.requiredArgs = Lists.of(typedescExpr, ASTBuilderUtil.createVariableRef(pos, source));
    invocationNode.type = BUnionType.create(null, targetType, symTable.errorType);
    return invocationNode;
}
/**
 * Generates an anonymous predicate {@code function ((string, any) entry) returns boolean}
 * that returns false when the entry's key is in {@code toRemoveList} and true
 * otherwise. Used as the filter lambda of the rest-field pipeline.
 */
private BLangLambdaFunction createFuncToFilterOutRestParam(List<String> toRemoveList, DiagnosticPos pos) {
    String anonfuncName = "$anonRestParamFilterFunc$" + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, anonfuncName);
    BVarSymbol keyValSymbol = new BVarSymbol(0, names.fromString("$lambdaArg$0"), this.env.scope.owner.pkgID,
            getStringAnyTupleType(), this.env.scope.owner);
    BLangBlockFunctionBody functionBlock = createAnonymousFunctionBlock(pos, function, keyValSymbol);
    // Body starts with `var key = entry[0];`.
    BLangIndexBasedAccess indexBasesAccessExpr =
            ASTBuilderUtil.createIndexBasesAccessExpr(pos, symTable.anyType, keyValSymbol, ASTBuilderUtil
                    .createLiteral(pos, symTable.intType, (long) 0));
    BLangSimpleVariableDef tupFirstElem = createVarDef("key", indexBasesAccessExpr.type,
            indexBasesAccessExpr, pos);
    functionBlock.addStatement(tupFirstElem);
    // One `if (key == <name>) { return false; }` per bound field name.
    for (String toRemoveItem : toRemoveList) {
        createIfStmt(pos, tupFirstElem.var.symbol, functionBlock, toRemoveItem);
    }
    // Fall-through `return true;` plus the function symbol wiring.
    BInvokableSymbol functionSymbol = createReturnTrueStatement(pos, function, functionBlock);
    return createLambdaFunction(function, functionSymbol);
}
private BLangLambdaFunction createFuncToFilterOutRestParam(BLangRecordVariable recordVariable, DiagnosticPos pos) {
    // Collect the names already bound by the record pattern; the generated
    // filter rejects those keys so only the remaining fields pass through.
    List<String> fieldNamesToRemove = new ArrayList<>();
    for (BLangRecordVariableKeyValue keyValue : recordVariable.variableList) {
        fieldNamesToRemove.add(keyValue.getKey().getValue());
    }
    return createFuncToFilterOutRestParam(fieldNamesToRemove, pos);
}
/**
 * Appends `if ((<param as string>) == key) { return false; }` to the generated
 * filter function's body, rejecting one bound key.
 */
private void createIfStmt(DiagnosticPos pos, BVarSymbol inputParamSymbol, BLangBlockFunctionBody blockStmt,
                          String key) {
    BLangSimpleVarRef firstElemRef = ASTBuilderUtil.createVariableRef(pos, inputParamSymbol);
    // The tuple element is typed `any`; convert it to string for the comparison.
    BLangExpression converted = addConversionExprIfRequired(firstElemRef, symTable.stringType);
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, blockStmt);
    BLangBlockStmt ifBlock = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>());
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(pos, ifBlock);
    returnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, false);
    ifStmt.body = ifBlock;
    // Condition: (converted == "key"), wrapped in a group expression.
    BLangGroupExpr groupExpr = new BLangGroupExpr();
    groupExpr.type = symTable.booleanType;
    BLangBinaryExpr binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, converted,
            ASTBuilderUtil.createLiteral(pos, symTable.stringType, key),
            symTable.booleanType, OperatorKind.EQUAL, null);
    binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            binaryExpr.opKind, binaryExpr.lhsExpr.type, binaryExpr.rhsExpr.type);
    groupExpr.expression = binaryExpr;
    ifStmt.expr = groupExpr;
}
BLangLambdaFunction createLambdaFunction(BLangFunction function, BInvokableSymbol functionSymbol) {
    // Wrap the given function in a lambda expression node typed after its symbol.
    BLangLambdaFunction lambda = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambda.type = functionSymbol.type;
    lambda.function = function;
    return lambda;
}
/**
 * Appends the fall-through `return true;` to a generated filter function's
 * body, then creates and wires its invokable symbol, rewrites the function and
 * registers it at package level. Returns the created symbol.
 */
private BInvokableSymbol createReturnTrueStatement(DiagnosticPos pos, BLangFunction function,
                                                   BLangBlockFunctionBody functionBlock) {
    BLangReturn trueReturnStmt = ASTBuilderUtil.createReturnStmt(pos, functionBlock);
    trueReturnStmt.expr = ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(function.flagSet),
            new Name(function.name.value),
            env.enclPkg.packageID, function.type,
            env.enclEnv.enclVarSym, true);
    functionSymbol.retType = function.returnTypeNode.type;
    functionSymbol.params = function.requiredParams.stream()
            .map(param -> param.symbol)
            .collect(Collectors.toList());
    functionSymbol.scope = env.scope;
    // Signature: function ((string, any)) returns boolean.
    functionSymbol.type = new BInvokableType(Collections.singletonList(getStringAnyTupleType()),
            getRestType(functionSymbol), symTable.booleanType, null);
    function.symbol = functionSymbol;
    rewrite(function, env);
    // Register the generated function at package level so it gets emitted.
    env.enclPkg.addFunction(function);
    return functionSymbol;
}
private BLangBlockFunctionBody createAnonymousFunctionBlock(DiagnosticPos pos, BLangFunction function,
                                                            BVarSymbol keyValSymbol) {
    // Register the single (string, any) tuple parameter.
    function.requiredParams.add(
            ASTBuilderUtil.createVariable(pos, null, getStringAnyTupleType(), null, keyValSymbol));
    // The generated predicate returns a boolean.
    BLangValueType returnType = new BLangValueType();
    returnType.typeKind = TypeKind.BOOLEAN;
    returnType.type = symTable.booleanType;
    function.returnTypeNode = returnType;
    // Attach an empty body block and hand it back for the caller to populate.
    BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(pos, new ArrayList<>());
    function.body = body;
    return body;
}
/**
 * Builds a fresh {@code (string, any)} tuple type, used as the parameter type
 * of the generated lambdas that operate on map entries.
 */
private BTupleType getStringAnyTupleType() {
    // Avoid double-brace initialization: it creates an anonymous inner class
    // that retains a hidden reference to the enclosing instance.
    List<BType> typeList = new ArrayList<>(2);
    typeList.add(symTable.stringType);
    typeList.add(symTable.anyType);
    return new BTupleType(typeList);
}
/**
* This method creates a simple variable def and assigns and array expression based on the given indexExpr.
*
* case 1: when there is no parent array access expression, but with the indexExpr : 1
* string s = x[1];
*
* case 2: when there is a parent array expression : x[2] and indexExpr : 3
* string s = x[2][3];
*
* case 3: when there is no parent array access expression, but with the indexExpr : name
* string s = x[name];
*
* case 4: when there is a parent map expression : x[name] and indexExpr : fName
* string s = x[name][fName];
*
* case 5: when there is a parent map expression : x[name] and indexExpr : 1
* string s = x[name][1];
*/
private void createSimpleVarDefStmt(BLangSimpleVariable simpleVariable, BLangBlockStmt parentBlockStmt,
                                    BLangLiteral indexExpr, BVarSymbol tupleVarSymbol,
                                    BLangIndexBasedAccess parentArrayAccessExpr) {
    // A binding to '_' (ignore) produces no definition at all.
    Name varName = names.fromIdNode(simpleVariable.name);
    if (varName == Names.IGNORE) {
        return;
    }
    final BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDefStmt(simpleVariable.pos,
            parentBlockStmt);
    simpleVariableDef.var = simpleVariable;
    // Initializer is container[indexExpr], chained onto the parent access
    // expression when this binding is nested.
    simpleVariable.expr = createIndexBasedAccessExpr(simpleVariable.type, simpleVariable.pos,
            indexExpr, tupleVarSymbol, parentArrayAccessExpr);
}
@Override
public void visit(BLangAssignment assignNode) {
    if (safeNavigateLHS(assignNode.varRef)) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) assignNode.varRef;
        // Mark the final access of the safe-navigation chain so the rewritten
        // assignment knows where the chain terminates.
        accessExpr.leafNode = true;
        result = rewriteSafeNavigationAssignment(accessExpr, assignNode.expr, assignNode.safeAssignment);
        result = rewrite(result, env);
        return;
    }
    assignNode.varRef = rewriteExpr(assignNode.varRef);
    // Bug fix: the RHS was previously rewritten twice (once standalone and
    // once more inside the conversion wrapper); rewrite it exactly once and
    // convert the result to the LHS type when required.
    assignNode.expr = addConversionExprIfRequired(rewriteExpr(assignNode.expr), assignNode.varRef.type);
    result = assignNode;
}
@Override
public void visit(BLangTupleDestructure tupleDestructure) {
    // Desugars a tuple destructuring assignment into a block: a temporary
    // "tuple" variable (typed any[]) capturing the RHS, then one assignment
    // per member ref, plus the rest-field assignment when present.
    final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(tupleDestructure.pos);
    BType runTimeType = new BArrayType(symTable.anyType);
    String name = "tuple";
    final BLangSimpleVariable tuple = ASTBuilderUtil.createVariable(tupleDestructure.pos, name, runTimeType, null,
            new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, runTimeType,
                    this.env.scope.owner));
    tuple.expr = tupleDestructure.expr;
    final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(tupleDestructure.pos,
            blockStmt);
    variableDef.var = tuple;
    createVarRefAssignmentStmts(tupleDestructure.varRef, blockStmt, tuple.symbol, null);
    createRestFieldAssignmentStmt(tupleDestructure, blockStmt, tuple.symbol);
    result = rewrite(blockStmt, env);
}
/**
 * Generates the statements that assign the rest portion of a tuple
 * destructuring (`...rest`): initializes the rest array to an empty literal,
 * then appends a foreach over the remaining source indices that copies each
 * element into the rest array. No-op when there is no rest param.
 */
private void createRestFieldAssignmentStmt(BLangTupleDestructure tupleDestructure, BLangBlockStmt blockStmt,
                                           BVarSymbol tupleVarSymbol) {
    BLangTupleVarRef tupleVarRef = tupleDestructure.varRef;
    DiagnosticPos pos = blockStmt.pos;
    if (tupleVarRef.restParam != null) {
        BLangExpression tupleExpr = tupleDestructure.expr;
        // restParam = [];
        BLangSimpleVarRef restParam = (BLangSimpleVarRef) tupleVarRef.restParam;
        BArrayType restParamType = (BArrayType) restParam.type;
        BLangArrayLiteral arrayExpr = createArrayLiteralExprNode();
        arrayExpr.type = restParamType;
        BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
        restParamAssignment.varRef = restParam;
        restParamAssignment.varRef.type = restParamType;
        restParamAssignment.expr = arrayExpr;
        // Iterate the int range [#bound-members, length(tuple) - 1].
        BLangLiteral startIndexLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
        startIndexLiteral.value = (long) tupleVarRef.expressions.size();
        startIndexLiteral.type = symTable.intType;
        BLangInvocation lengthInvocation = createLengthInvocation(pos, tupleExpr);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndexLiteral,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        // Loop variable "$foreach$i" over the source indices.
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos,
                "$foreach$i", foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.type, this.env.scope.owner);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        // Loop body: restParam[restParam.length()] = tuple[$foreach$i];
        // (indexing at the current length appends to the array).
        BLangIndexBasedAccess indexAccessExpr = ASTBuilderUtil.createIndexAccessExpr(restParam,
                createLengthInvocation(pos, restParam));
        indexAccessExpr.type = restParamType.eType;
        createSimpleVarRefAssignmentStmt(indexAccessExpr, foreachBody, foreachVarRef, tupleVarSymbol, null);
        foreach.body = foreachBody;
        blockStmt.addStatement(foreach);
    }
}
private BLangInvocation createLengthInvocation(DiagnosticPos pos, BLangExpression collection) {
    // Look up the lang-lib length() method for the collection's type and build
    // an invocation of it on the collection expression.
    BInvokableSymbol lengthSym = (BInvokableSymbol) symResolver
            .lookupLangLibMethod(collection.type, names.fromString(LENGTH_FUNCTION_NAME));
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprForMethod(pos, lengthSym,
            Lists.of(collection), symResolver);
    invocation.argExprs = invocation.requiredArgs;
    invocation.type = lengthSym.type.getReturnType();
    return invocation;
}
/**
* This method iterate through each member of the tupleVarRef and create the relevant var ref assignment statements.
* This method does the check for node kind of each member and call the related var ref creation method.
*
* Example:
* ((a, b), c)) = (tuple)
*
* (a, b) is again a tuple, so it is a recursive var ref creation.
*
* c is a simple var, so a simple var def will be created.
*
*/
    private void createVarRefAssignmentStmts(BLangTupleVarRef parentTupleVariable, BLangBlockStmt parentBlockStmt,
                                             BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        final List<BLangExpression> expressions = parentTupleVariable.expressions;
        for (int index = 0; index < expressions.size(); index++) {
            BLangExpression expression = expressions.get(index);
            if (NodeKind.SIMPLE_VARIABLE_REF == expression.getKind() ||
                    NodeKind.FIELD_BASED_ACCESS_EXPR == expression.getKind() ||
                    NodeKind.INDEX_BASED_ACCESS_EXPR == expression.getKind() ||
                    NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == expression.getKind()) {
                // Leaf reference: assign the value at this tuple position directly.
                BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(expression.pos, symTable.intType, (long) index);
                createSimpleVarRefAssignmentStmt((BLangVariableReference) expression, parentBlockStmt, indexExpr,
                        tupleVarSymbol, parentIndexAccessExpr);
                continue;
            }
            if (expression.getKind() == NodeKind.TUPLE_VARIABLE_REF) {
                // Nested tuple ref: build an index access for this position and recurse into the members.
                BLangTupleVarRef tupleVarRef = (BLangTupleVarRef) expression;
                BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(tupleVarRef.pos, symTable.intType, (long) index);
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVarRef.pos,
                        new BArrayType(symTable.anyType), tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    // Chain onto the enclosing access so deeper levels index into the right container.
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts((BLangTupleVarRef) expression, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
                continue;
            }
            if (expression.getKind() == NodeKind.RECORD_VARIABLE_REF) {
                // Nested record ref: recurse, then register a type definition for the record's type.
                BLangRecordVarRef recordVarRef = (BLangRecordVarRef) expression;
                BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(recordVarRef.pos, symTable.intType,
                        (long) index);
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, symTable.mapType, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts((BLangRecordVarRef) expression, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
                TypeDefBuilderHelper.addTypeDefinition(recordVarRef.type, recordVarRef.type.tsymbol,
                        TypeDefBuilderHelper.createRecordTypeNode(
                                (BRecordType) recordVarRef.type,
                                env.enclPkg.packageID, symTable, recordVarRef.pos),
                        env);
                continue;
            }
            if (expression.getKind() == NodeKind.ERROR_VARIABLE_REF) {
                // Nested error ref: recurse into the error binding pattern members.
                BLangErrorVarRef errorVarRef = (BLangErrorVarRef) expression;
                BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(errorVarRef.pos, symTable.intType,
                        (long) index);
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentTupleVariable.pos, expression.type, tupleVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts((BLangErrorVarRef) expression, parentBlockStmt, tupleVarSymbol,
                        arrayAccessExpr);
            }
        }
    }
    /**
     * Creates an assignment statement that assigns an array-access expression (built from the given
     * indexExpr) to the given variable reference.
     */
    private void createSimpleVarRefAssignmentStmt(BLangVariableReference simpleVarRef, BLangBlockStmt parentBlockStmt,
                                                  BLangExpression indexExpr, BVarSymbol tupleVarSymbol,
                                                  BLangIndexBasedAccess parentArrayAccessExpr) {
        if (simpleVarRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            Name varName = names.fromIdNode(((BLangSimpleVarRef) simpleVarRef).variableName);
            if (varName == Names.IGNORE) {
                // `_` in the binding pattern: nothing to assign.
                return;
            }
        }
        BLangExpression assignmentExpr = createIndexBasedAccessExpr(simpleVarRef.type, simpleVarRef.pos,
                indexExpr, tupleVarSymbol, parentArrayAccessExpr);
        // Insert a conversion when the accessed value's static type differs from the target's type.
        assignmentExpr = addConversionExprIfRequired(assignmentExpr, simpleVarRef.type);
        final BLangAssignment assignmentStmt = ASTBuilderUtil.createAssignmentStmt(parentBlockStmt.pos,
                parentBlockStmt);
        assignmentStmt.varRef = simpleVarRef;
        assignmentStmt.expr = assignmentExpr;
    }
    /**
     * Builds an index-based access expression over the given symbol, optionally chained onto a parent
     * access, casting the result to {@code varType} when that type is a value type.
     */
    private BLangExpression createIndexBasedAccessExpr(BType varType, DiagnosticPos varPos, BLangExpression indexExpr,
                                                       BVarSymbol tupleVarSymbol, BLangIndexBasedAccess parentExpr) {
        BLangIndexBasedAccess arrayAccess = ASTBuilderUtil.createIndexBasesAccessExpr(varPos,
                symTable.anyType, tupleVarSymbol, indexExpr);
        arrayAccess.originalType = varType;
        if (parentExpr != null) {
            arrayAccess.expr = parentExpr;
        }
        final BLangExpression assignmentExpr;
        if (types.isValueType(varType)) {
            // Value types need an explicit cast from the `any`-typed container element.
            BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
            castExpr.expr = arrayAccess;
            castExpr.type = varType;
            assignmentExpr = castExpr;
        } else {
            assignmentExpr = arrayAccess;
        }
        return assignmentExpr;
    }
    /**
     * Desugars a record destructuring statement into a block that first binds the RHS to a temporary
     * `$map$0` variable and then assigns each referenced field out of it.
     */
    @Override
    public void visit(BLangRecordDestructure recordDestructure) {
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(recordDestructure.pos);
        BType runTimeType = new BMapType(TypeTags.MAP, symTable.anyType, null);
        String name = "$map$0";
        final BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(recordDestructure.pos, name, runTimeType,
                null, new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                        runTimeType, this.env.scope.owner));
        mapVariable.expr = recordDestructure.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.
                createVariableDefStmt(recordDestructure.pos, blockStmt);
        variableDef.var = mapVariable;
        createVarRefAssignmentStmts(recordDestructure.varRef, blockStmt, mapVariable.symbol, null);
        result = rewrite(blockStmt, env);
    }
    /**
     * Desugars an error destructuring statement into a block that binds the RHS error to a temporary
     * `$error$` variable and then assigns reason/detail/rest references out of it.
     */
    @Override
    public void visit(BLangErrorDestructure errorDestructure) {
        final BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(errorDestructure.pos);
        String name = "$error$";
        final BLangSimpleVariable errorVar = ASTBuilderUtil.createVariable(errorDestructure.pos, name,
                symTable.errorType, null, new BVarSymbol(0, names.fromString(name),
                        this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
        errorVar.expr = errorDestructure.expr;
        final BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(errorDestructure.pos,
                blockStmt);
        variableDef.var = errorVar;
        createVarRefAssignmentStmts(errorDestructure.varRef, blockStmt, errorVar.symbol, null);
        result = rewrite(blockStmt, env);
    }
    /**
     * Generates assignment statements for every field referenced by a record var-ref, recursing into
     * nested record/tuple/error refs, and finally handles the rest parameter (`...rest`) by filtering
     * already-extracted keys out of the source map.
     */
    private void createVarRefAssignmentStmts(BLangRecordVarRef parentRecordVarRef, BLangBlockStmt parentBlockStmt,
                                             BVarSymbol recordVarSymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        final List<BLangRecordVarRefKeyValue> variableRefList = parentRecordVarRef.recordRefFields;
        for (BLangRecordVarRefKeyValue varRefKeyValue : variableRefList) {
            BLangExpression variableReference = varRefKeyValue.variableReference;
            // Fields are accessed by name (string key), not by position.
            BLangLiteral indexExpr = ASTBuilderUtil.createLiteral(variableReference.pos, symTable.stringType,
                    varRefKeyValue.variableName.getValue());
            if (NodeKind.SIMPLE_VARIABLE_REF == variableReference.getKind() ||
                    NodeKind.FIELD_BASED_ACCESS_EXPR == variableReference.getKind() ||
                    NodeKind.INDEX_BASED_ACCESS_EXPR == variableReference.getKind() ||
                    NodeKind.XML_ATTRIBUTE_ACCESS_EXPR == variableReference.getKind()) {
                createSimpleVarRefAssignmentStmt((BLangVariableReference) variableReference, parentBlockStmt,
                        indexExpr, recordVarSymbol, parentIndexAccessExpr);
                continue;
            }
            if (NodeKind.RECORD_VARIABLE_REF == variableReference.getKind()) {
                BLangRecordVarRef recordVariable = (BLangRecordVarRef) variableReference;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(
                        parentRecordVarRef.pos, symTable.mapType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts(recordVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (NodeKind.TUPLE_VARIABLE_REF == variableReference.getKind()) {
                BLangTupleVarRef tupleVariable = (BLangTupleVarRef) variableReference;
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(tupleVariable.pos,
                        symTable.tupleType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts(tupleVariable, parentBlockStmt, recordVarSymbol, arrayAccessExpr);
                continue;
            }
            if (NodeKind.ERROR_VARIABLE_REF == variableReference.getKind()) {
                BLangIndexBasedAccess arrayAccessExpr = ASTBuilderUtil.createIndexBasesAccessExpr(variableReference.pos,
                        symTable.errorType, recordVarSymbol, indexExpr);
                if (parentIndexAccessExpr != null) {
                    arrayAccessExpr.expr = parentIndexAccessExpr;
                }
                createVarRefAssignmentStmts((BLangErrorVarRef) variableReference, parentBlockStmt, recordVarSymbol,
                        arrayAccessExpr);
            }
        }
        if (parentRecordVarRef.restParam != null) {
            DiagnosticPos pos = parentBlockStmt.pos;
            BMapType restParamType = (BMapType) ((BLangSimpleVarRef) parentRecordVarRef.restParam).type;
            BLangSimpleVarRef variableReference;
            if (parentIndexAccessExpr != null) {
                // Nested level: materialise the sub-map into `$map$1` so the rest filter has a simple ref.
                BLangSimpleVariable mapVariable = ASTBuilderUtil.createVariable(pos, "$map$1", restParamType,
                        null, new BVarSymbol(0, names.fromString("$map$1"), this.env.scope.owner.pkgID,
                                restParamType, this.env.scope.owner));
                mapVariable.expr = parentIndexAccessExpr;
                BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDefStmt(pos, parentBlockStmt);
                variableDef.var = mapVariable;
                variableReference = ASTBuilderUtil.createVariableRef(pos, mapVariable.symbol);
            } else {
                // Top level: reuse the `$map$0` temp defined as the first statement of the block.
                variableReference = ASTBuilderUtil.createVariableRef(pos,
                        ((BLangSimpleVariableDef) parentBlockStmt.stmts.get(0)).var.symbol);
            }
            BLangSimpleVarRef restParam = (BLangSimpleVarRef) parentRecordVarRef.restParam;
            // Remove every explicitly-destructured field from the map that becomes the rest value.
            List<String> keysToRemove = parentRecordVarRef.recordRefFields.stream()
                    .map(field -> field.variableName.value)
                    .collect(Collectors.toList());
            BLangSimpleVariable filteredDetail = generateRestFilter(variableReference, pos,
                    keysToRemove, restParamType, parentBlockStmt);
            BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, filteredDetail.symbol);
            BLangAssignment restParamAssignment = ASTBuilderUtil.createAssignmentStmt(pos, parentBlockStmt);
            restParamAssignment.varRef = restParam;
            restParamAssignment.varRef.type = restParamType;
            restParamAssignment.expr = varRef;
        }
    }
    /**
     * Generates assignment statements for an error var-ref: assigns the reason (unless it is `_`),
     * extracts each named detail entry from a `$error$detail$N` temp, and assigns the filtered
     * remainder to the rest variable if one is bound.
     */
    private void createVarRefAssignmentStmts(BLangErrorVarRef parentErrorVarRef, BLangBlockStmt parentBlockStmt,
                                             BVarSymbol errorVarySymbol, BLangIndexBasedAccess parentIndexAccessExpr) {
        if (parentErrorVarRef.reason.getKind() != NodeKind.SIMPLE_VARIABLE_REF ||
                names.fromIdNode(((BLangSimpleVarRef) parentErrorVarRef.reason).variableName) != Names.IGNORE) {
            BLangAssignment reasonAssignment = ASTBuilderUtil
                    .createAssignmentStmt(parentBlockStmt.pos, parentBlockStmt);
            reasonAssignment.expr = generateErrorReasonBuiltinFunction(parentErrorVarRef.reason.pos,
                    symTable.stringType, errorVarySymbol, parentIndexAccessExpr);
            reasonAssignment.expr = addConversionExprIfRequired(reasonAssignment.expr, parentErrorVarRef.reason.type);
            reasonAssignment.varRef = parentErrorVarRef.reason;
        }
        if (parentErrorVarRef.detail.isEmpty() && isIgnoredErrorRefRestVar(parentErrorVarRef)) {
            // No detail bindings and the rest var is absent or `_`: nothing more to extract.
            return;
        }
        BLangInvocation errorDetailBuiltinFunction = generateErrorDetailBuiltinFunction(parentErrorVarRef.pos,
                errorVarySymbol,
                parentIndexAccessExpr);
        BLangSimpleVariableDef detailTempVarDef = createVarDef("$error$detail$" + errorCount++,
                symTable.detailType, errorDetailBuiltinFunction,
                parentErrorVarRef.pos);
        detailTempVarDef.type = symTable.detailType;
        parentBlockStmt.addStatement(detailTempVarDef);
        this.env.scope.define(names.fromIdNode(detailTempVarDef.var.name), detailTempVarDef.var.symbol);
        List<String> extractedKeys = new ArrayList<>();
        for (BLangNamedArgsExpression detail : parentErrorVarRef.detail) {
            extractedKeys.add(detail.name.value);
            BLangVariableReference ref = (BLangVariableReference) detail.expr;
            BLangExpression detailEntryVar = createIndexBasedAccessExpr(ref.type, ref.pos,
                    createStringLiteral(detail.name.pos, detail.name.value),
                    detailTempVarDef.var.symbol, null);
            if (detailEntryVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
                BLangIndexBasedAccess bLangIndexBasedAccess = (BLangIndexBasedAccess) detailEntryVar;
                bLangIndexBasedAccess.originalType = symTable.pureType;
            }
            BLangAssignment detailAssignment = ASTBuilderUtil.createAssignmentStmt(ref.pos, parentBlockStmt);
            detailAssignment.varRef = ref;
            detailAssignment.expr = detailEntryVar;
        }
        if (!isIgnoredErrorRefRestVar(parentErrorVarRef)) {
            BLangSimpleVarRef detailVarRef = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                    detailTempVarDef.var.symbol);
            // Rest var receives the detail map with all explicitly-extracted keys removed.
            BLangSimpleVariable filteredDetail = generateRestFilter(detailVarRef, parentErrorVarRef.restVar.pos,
                    extractedKeys,
                    parentErrorVarRef.restVar.type, parentBlockStmt);
            BLangAssignment restAssignment = ASTBuilderUtil.createAssignmentStmt(parentErrorVarRef.restVar.pos,
                    parentBlockStmt);
            restAssignment.varRef = parentErrorVarRef.restVar;
            restAssignment.expr = ASTBuilderUtil.createVariableRef(parentErrorVarRef.restVar.pos,
                    filteredDetail.symbol);
        }
        BErrorType errorType = (BErrorType) parentErrorVarRef.type;
        if (errorType.detailType.getKind() == TypeKind.RECORD) {
            // Record detail types need an initializer function defined for later codegen phases.
            BRecordTypeSymbol tsymbol = (BRecordTypeSymbol) errorType.detailType.tsymbol;
            tsymbol.initializerFunc = createRecordInitFunc();
            tsymbol.scope.define(tsymbol.initializerFunc.funcName, tsymbol.initializerFunc.symbol);
        }
    }
private boolean isIgnoredErrorRefRestVar(BLangErrorVarRef parentErrorVarRef) {
if (parentErrorVarRef.restVar == null) {
return true;
}
if (parentErrorVarRef.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return (((BLangSimpleVarRef) parentErrorVarRef.restVar).variableName.value.equals(IGNORE.value));
}
return false;
}
    /**
     * Desugars `abort` into `return -1` — transaction bodies become int-returning functions whose
     * return value signals status (see the transaction desugaring: 0 = success, 1 = retry, -1 = abort).
     */
    @Override
    public void visit(BLangAbort abortNode) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(abortNode.pos, symTable.intType, -1L);
        result = rewrite(returnStmt, env);
    }
    /**
     * Desugars `retry` into `return 1` — the transaction status code for a retry request.
     */
    @Override
    public void visit(BLangRetry retryNode) {
        BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(retryNode.pos, symTable.intType, 1L);
        result = rewrite(returnStmt, env);
    }
    // `continue` needs no desugaring; pass it through unchanged.
    @Override
    public void visit(BLangContinue nextNode) {
        result = nextNode;
    }
    // `break` needs no desugaring; pass it through unchanged.
    @Override
    public void visit(BLangBreak breakNode) {
        result = breakNode;
    }
    // Rewrite the returned expression, if any; the return statement itself is unchanged.
    @Override
    public void visit(BLangReturn returnNode) {
        if (returnNode.expr != null) {
            returnNode.expr = rewriteExpr(returnNode.expr);
        }
        result = returnNode;
    }
    // Rewrite the panicked expression; the panic statement itself is unchanged.
    @Override
    public void visit(BLangPanic panicNode) {
        panicNode.expr = rewriteExpr(panicNode.expr);
        result = panicNode;
    }
    // Delegate to the XMLNS declaration rewrite; the statement wrapper is unchanged.
    @Override
    public void visit(BLangXMLNSStatement xmlnsStmtNode) {
        xmlnsStmtNode.xmlnsDecl = rewrite(xmlnsStmtNode.xmlnsDecl, env);
        result = xmlnsStmtNode;
    }
@Override
public void visit(BLangXMLNS xmlnsNode) {
BLangXMLNS generatedXMLNSNode;
xmlnsNode.namespaceURI = rewriteExpr(xmlnsNode.namespaceURI);
BSymbol ownerSymbol = xmlnsNode.symbol.owner;
if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
generatedXMLNSNode = new BLangLocalXMLNS();
} else {
generatedXMLNSNode = new BLangPackageXMLNS();
}
generatedXMLNSNode.namespaceURI = xmlnsNode.namespaceURI;
generatedXMLNSNode.prefix = xmlnsNode.prefix;
generatedXMLNSNode.symbol = xmlnsNode.symbol;
result = generatedXMLNSNode;
}
    /**
     * Desugars a compound assignment (`x += e` etc.) into a plain assignment `x = x <op> e`.
     * For index-based targets the index expressions are hoisted into `$tempN$` variables first so
     * each index is evaluated exactly once, then the access chain is rebuilt from those temps.
     */
    public void visit(BLangCompoundAssignment compoundAssignment) {
        BLangVariableReference varRef = compoundAssignment.varRef;
        if (compoundAssignment.varRef.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
            // Simple target: a single assignment of the precomputed modified expression suffices.
            if (varRef.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                varRef = ASTBuilderUtil.createVariableRef(compoundAssignment.varRef.pos, varRef.symbol);
                varRef.lhsVar = true;
            }
            result = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, rewriteExpr(varRef),
                    rewriteExpr(compoundAssignment.modifiedExpr));
            return;
        }
        List<BLangStatement> statements = new ArrayList<>();
        List<BLangSimpleVarRef> varRefs = new ArrayList<>();
        List<BType> types = new ArrayList<>();
        // Walk the access chain from outermost to innermost, hoisting each index expression into a
        // temp var def; lists are built with add(0, ...) so they end up innermost-first.
        do {
            BLangSimpleVariableDef tempIndexVarDef = createVarDef("$temp" + ++indexExprCount + "$",
                    ((BLangIndexBasedAccess) varRef).indexExpr.type, ((BLangIndexBasedAccess) varRef).indexExpr,
                    compoundAssignment.pos);
            BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(tempIndexVarDef.pos,
                    tempIndexVarDef.var.symbol);
            statements.add(0, tempIndexVarDef);
            varRefs.add(0, tempVarRef);
            types.add(0, varRef.type);
            varRef = (BLangVariableReference) ((BLangIndexBasedAccess) varRef).expr;
        } while (varRef.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR);
        // Rebuild the access chain over the temp index refs, innermost-first.
        BLangVariableReference var = varRef;
        for (int ref = 0; ref < varRefs.size(); ref++) {
            var = ASTBuilderUtil.createIndexAccessExpr(var, varRefs.get(ref));
            var.type = types.get(ref);
        }
        var.type = compoundAssignment.varRef.type;
        BLangExpression rhsExpression = ASTBuilderUtil.createBinaryExpr(compoundAssignment.pos, var,
                compoundAssignment.expr, compoundAssignment.type, compoundAssignment.opKind, null);
        rhsExpression.type = compoundAssignment.modifiedExpr.type;
        BLangAssignment assignStmt = ASTBuilderUtil.createAssignmentStmt(compoundAssignment.pos, var,
                rhsExpression);
        statements.add(assignStmt);
        BLangBlockStmt bLangBlockStmt = ASTBuilderUtil.createBlockStmt(compoundAssignment.pos, statements);
        result = rewrite(bLangBlockStmt, env);
    }
    // Rewrite the wrapped expression; the statement node itself is unchanged.
    @Override
    public void visit(BLangExpressionStmt exprStmtNode) {
        exprStmtNode.expr = rewriteExpr(exprStmtNode.expr);
        result = exprStmtNode;
    }
    // Rewrite condition, then-body, and (possibly null) else branch in place.
    @Override
    public void visit(BLangIf ifNode) {
        ifNode.expr = rewriteExpr(ifNode.expr);
        ifNode.body = rewrite(ifNode.body, env);
        ifNode.elseStmt = rewrite(ifNode.elseStmt, env);
        result = ifNode;
    }
    /**
     * Desugars a match statement into a block that binds the matched expression to a generated
     * variable and then dispatches through an equivalent if-else chain.
     */
    @Override
    public void visit(BLangMatch matchStmt) {
        BLangBlockStmt matchBlockStmt = (BLangBlockStmt) TreeBuilder.createBlockNode();
        matchBlockStmt.pos = matchStmt.pos;
        String matchExprVarName = GEN_VAR_PREFIX.value;
        // Evaluate the matched expression once into a temp so patterns can test it repeatedly.
        BLangSimpleVariable matchExprVar = ASTBuilderUtil.createVariable(matchStmt.expr.pos,
                matchExprVarName, matchStmt.expr.type, matchStmt.expr, new BVarSymbol(0,
                        names.fromString(matchExprVarName),
                        this.env.scope.owner.pkgID, matchStmt.expr.type, this.env.scope.owner));
        BLangSimpleVariableDef matchExprVarDef = ASTBuilderUtil.createVariableDef(matchBlockStmt.pos, matchExprVar);
        matchBlockStmt.stmts.add(matchExprVarDef);
        matchBlockStmt.stmts.add(generateIfElseStmt(matchStmt, matchExprVar));
        rewrite(matchBlockStmt, this.env);
        result = matchBlockStmt;
    }
    /**
     * Desugars a foreach loop: the collection is bound to `$data$`, an iterator is obtained (via the
     * lang-lib iterator for built-in collection types, or the object's own iterator function for
     * iterable objects), and the loop body is rewritten into a while over iterator results.
     */
    @Override
    public void visit(BLangForeach foreach) {
        BLangBlockStmt blockNode;
        BVarSymbol dataSymbol = new BVarSymbol(0, names.fromString("$data$"), this.env.scope.owner.pkgID,
                foreach.collection.type, this.env.scope.owner);
        BLangSimpleVariable dataVariable = ASTBuilderUtil.createVariable(foreach.pos, "$data$",
                foreach.collection.type, foreach.collection, dataSymbol);
        BLangSimpleVariableDef dataVarDef = ASTBuilderUtil.createVariableDef(foreach.pos, dataVariable);
        BVarSymbol collectionSymbol = dataVariable.symbol;
        switch (foreach.collection.type.tag) {
            case TypeTags.STRING:
            case TypeTags.ARRAY:
            case TypeTags.TUPLE:
            case TypeTags.XML:
            case TypeTags.MAP:
            case TypeTags.STREAM:
            case TypeTags.RECORD:
                BInvokableSymbol iteratorSymbol = getLangLibIteratorInvokableSymbol(collectionSymbol);
                blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, true);
                break;
            case TypeTags.OBJECT:
                // Iterable object: use its user-defined __iterator() attached function.
                iteratorSymbol = getIterableObjectIteratorInvokableSymbol(collectionSymbol);
                blockNode = desugarForeachWithIteratorDef(foreach, dataVarDef, collectionSymbol, iteratorSymbol, false);
                break;
            default:
                // Non-iterable collection type: keep only the data var def (semantic errors reported earlier).
                blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
                blockNode.stmts.add(0, dataVarDef);
                break;
        }
        rewrite(blockNode, this.env);
        result = blockNode;
    }
    /**
     * Builds the desugared foreach block: defines the iterator variable, converts the loop to a
     * while statement, and prepends the data variable definition.
     */
    private BLangBlockStmt desugarForeachWithIteratorDef(BLangForeach foreach,
                                                         BLangSimpleVariableDef dataVariableDefinition,
                                                         BVarSymbol collectionSymbol,
                                                         BInvokableSymbol iteratorInvokableSymbol,
                                                         boolean isIteratorFuncFromLangLib) {
        BLangSimpleVariableDef iteratorVarDef = getIteratorVariableDefinition(foreach.pos, collectionSymbol,
                iteratorInvokableSymbol, isIteratorFuncFromLangLib);
        BLangBlockStmt blockNode = desugarForeachToWhile(foreach, iteratorVarDef);
        // Data var must be defined before the iterator def that reads it.
        blockNode.stmts.add(0, dataVariableDefinition);
        return blockNode;
    }
public BInvokableSymbol getIterableObjectIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
BObjectTypeSymbol typeSymbol = (BObjectTypeSymbol) collectionSymbol.type.tsymbol;
BAttachedFunction iteratorFunc = null;
for (BAttachedFunction func : typeSymbol.attachedFuncs) {
if (func.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
iteratorFunc = func;
break;
}
}
BAttachedFunction function = iteratorFunc;
return function.symbol;
}
    // Resolves the lang-lib iterator function for a built-in collection type.
    BInvokableSymbol getLangLibIteratorInvokableSymbol(BVarSymbol collectionSymbol) {
        return (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionSymbol.type,
                names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    }
    /**
     * Converts a foreach into a while loop: `$result$ = iterator.next()` drives the loop, the
     * condition is a type test that `$result$` matches the expected record shape, and the loop
     * variable is bound from `$result$.value` at the top of the body.
     */
    private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
        BVarSymbol iteratorSymbol = varDef.var.symbol;
        BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
                foreach.nillableResultType, this.env.scope.owner);
        BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
                foreach.nillableResultType, iteratorSymbol, resultSymbol);
        BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
        BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
        // Loop while the iterator result is still of the non-nil result type (i.e. not exhausted).
        BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
                .createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
        BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
        whileNode.pos = foreach.pos;
        whileNode.expr = typeTestExpr;
        whileNode.body = foreach.body;
        BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
        VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
        BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
        valueAccessExpr.expr = addConversionExprIfRequired(valueAccessExpr.expr,
                types.getSafeType(valueAccessExpr.expr.type, true, false));
        variableDefinitionNode.getVariable()
                .setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
        // Body order: bind loop variable from $result$.value first, then advance the iterator.
        whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
        whileNode.body.stmts.add(1, resultAssignment);
        BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
        blockNode.addStatement(varDef);
        blockNode.addStatement(resultVariableDefinition);
        blockNode.addStatement(whileNode);
        return blockNode;
    }
    /**
     * Wraps a resolved type in a nameless user-defined type node, used where a type-node AST form
     * (rather than just a BType) is required, e.g. in generated type-test expressions.
     */
    private BLangType getUserDefineTypeNode(BType type) {
        BLangUserDefinedType recordType =
                new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
                        ASTBuilderUtil.createIdentifier(null, ""));
        recordType.type = type;
        return recordType;
    }
    // Rewrite condition and body in place; the while node itself is unchanged.
    @Override
    public void visit(BLangWhile whileNode) {
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
    }
    /**
     * Desugars a lock block into: lock -> trap(body) into `$errorResult` -> unlock -> re-panic if
     * the trapped result is an error. Trapping ensures the unlock always runs before a panic from
     * the body propagates.
     */
    @Override
    public void visit(BLangLock lockNode) {
        BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
        BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
        blockStmt.addStatement(lockStmt);
        enclLocks.push(lockStmt);
        BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
        BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
        // Body becomes a statement-expression yielding nil, wrapped in a trap so panics surface as errors.
        BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
        statementExpression.type = symTable.nilType;
        BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
        trapExpr.type = nillableError;
        trapExpr.expr = statementExpression;
        BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
                this.env.scope.owner.pkgID, nillableError, this.env.scope.owner);
        BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
                nillableError, trapExpr, nillableErrorVarSymbol);
        BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
        blockStmt.addStatement(simpleVariableDef);
        // Unlock happens before the error check so the lock is released on both paths.
        BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
        blockStmt.addStatement(unLockStmt);
        BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
        BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = lockNode.pos;
        panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
        ifBody.addStatement(panicNode);
        BLangTypeTestExpr isErrorTest =
                ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
        isErrorTest.type = symTable.booleanType;
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
        blockStmt.addStatement(ifelse);
        result = rewrite(blockStmt, env);
        enclLocks.pop();
    }
    // Generated lock statements need no further desugaring.
    @Override
    public void visit(BLangLockStmt lockStmt) {
        result = lockStmt;
    }
    // Generated unlock statements need no further desugaring.
    @Override
    public void visit(BLangUnLockStmt unLockStmt) {
        result = unLockStmt;
    }
@Override
public void visit(BLangTransaction transactionNode) {
DiagnosticPos pos = transactionNode.pos;
BType trxReturnType = symTable.intType;
BType otherReturnType = symTable.nilType;
BLangType trxReturnNode = ASTBuilderUtil.createTypeNode(trxReturnType);
BLangType otherReturnNode = ASTBuilderUtil.createTypeNode(otherReturnType);
DiagnosticPos invPos = transactionNode.pos;
/* transaction block code will be desugar to function which returns int. Return value determines the status of
the transaction code.
ex.
0 = successful
1 = retry
-1 = abort
Since transaction block code doesn't return anything, we need to add return statement at end of the
block unless we have abort or retry statement.
*/
DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src,
invPos.eLine, invPos.eLine, invPos.sCol, invPos.sCol);
BLangStatement statement = null;
if (!transactionNode.transactionBody.stmts.isEmpty()) {
statement = transactionNode.transactionBody.stmts.get(transactionNode.transactionBody.stmts.size() - 1);
}
if (statement == null || !(statement.getKind() == NodeKind.ABORT) && !(statement.getKind() == NodeKind.ABORT)) {
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(returnStmtPos, trxReturnType, 0L);
transactionNode.transactionBody.addStatement(returnStmt);
}
if (transactionNode.abortedBody == null) {
transactionNode.abortedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.committedBody == null) {
transactionNode.committedBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.onRetryBody == null) {
transactionNode.onRetryBody = ASTBuilderUtil.createBlockStmt(transactionNode.pos);
}
if (transactionNode.retryCount == null) {
transactionNode.retryCount = ASTBuilderUtil.createLiteral(pos, symTable.intType, 3L);
}
BLangLambdaFunction trxMainFunc = createLambdaFunction(pos, "$anonTrxMainFunc$", Collections.emptyList(),
trxReturnNode, transactionNode.transactionBody.stmts,
env, transactionNode.transactionBody.scope);
BLangLambdaFunction trxOnRetryFunc = createLambdaFunction(pos, "$anonTrxOnRetryFunc$", Collections.emptyList(),
otherReturnNode, transactionNode.onRetryBody.stmts,
env, transactionNode.onRetryBody.scope);
BLangLambdaFunction trxCommittedFunc = createLambdaFunction(pos, "$anonTrxCommittedFunc$",
Collections.emptyList(), otherReturnNode,
transactionNode.committedBody.stmts, env,
transactionNode.committedBody.scope);
BLangLambdaFunction trxAbortedFunc = createLambdaFunction(pos, "$anonTrxAbortedFunc$", Collections.emptyList(),
otherReturnNode, transactionNode.abortedBody.stmts,
env, transactionNode.abortedBody.scope);
trxMainFunc.capturedClosureEnv = env.createClone();
trxOnRetryFunc.capturedClosureEnv = env.createClone();
trxCommittedFunc.capturedClosureEnv = env.createClone();
trxAbortedFunc.capturedClosureEnv = env.createClone();
PackageID packageID = new PackageID(Names.BALLERINA_ORG, Names.TRANSACTION_PACKAGE, Names.EMPTY);
BPackageSymbol transactionPkgSymbol = new BPackageSymbol(packageID, null, 0);
BInvokableSymbol invokableSymbol =
(BInvokableSymbol) symResolver.lookupSymbolInMainSpace(symTable.pkgEnvMap.get(transactionPkgSymbol),
TRX_INITIATOR_BEGIN_FUNCTION);
BLangLiteral transactionBlockId = ASTBuilderUtil.createLiteral(pos, symTable.stringType,
getTransactionBlockId());
List<BLangExpression> requiredArgs = Lists.of(transactionBlockId, transactionNode.retryCount, trxMainFunc,
trxOnRetryFunc,
trxCommittedFunc, trxAbortedFunc);
BLangInvocation trxInvocation = ASTBuilderUtil.createInvocationExprMethod(pos, invokableSymbol,
requiredArgs,
Collections.emptyList(),
symResolver);
BLangExpressionStmt stmt = ASTBuilderUtil.createExpressionStmt(pos, ASTBuilderUtil.createBlockStmt(pos));
stmt.expr = trxInvocation;
result = rewrite(stmt, env);
}
    // Builds a package-qualified, monotonically-numbered id for a transaction block.
    private String getTransactionBlockId() {
        return env.enclPkg.packageID.orgName + "$" + env.enclPkg.packageID.name + "$"
                + transactionIndex++;
    }
    /**
     * Creates and defines a named lambda function with the given parameters, return type, and
     * pre-built body, registering it in the enclosing package.
     */
    private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                     List<BLangSimpleVariable> lambdaFunctionVariable,
                                                     TypeNode returnType, BLangFunctionBody lambdaBody) {
        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
        lambdaFunction.function = func;
        func.requiredParams.addAll(lambdaFunctionVariable);
        func.setReturnTypeNode(returnType);
        func.desugaredReturnType = true;
        defineFunction(func, env.enclPkg);
        // Re-read the params after definition so symbols created during define are reflected.
        lambdaFunctionVariable = func.requiredParams;
        func.body = lambdaBody;
        func.desugared = false;
        lambdaFunction.pos = pos;
        List<BType> paramTypes = new ArrayList<>();
        lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
        lambdaFunction.type = new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
                null);
        return lambdaFunction;
    }
    /**
     * Overload that wraps a statement list into a block body (rewritten in a fresh function-body
     * env with the given scope) before delegating to the body-based overload.
     */
    private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                     List<BLangSimpleVariable> lambdaFunctionVariable,
                                                     TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                     SymbolEnv env, Scope trxScope) {
        BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
        body.scope = trxScope;
        SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
        body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
        return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
    }
    /**
     * Overload that creates and defines a parameter-less lambda function shell (no body attached
     * here; callers set it later).
     */
    private BLangLambdaFunction createLambdaFunction(DiagnosticPos pos, String functionNamePrefix,
                                                     TypeNode returnType) {
        BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
        BLangFunction func = ASTBuilderUtil.createFunction(pos, functionNamePrefix + lambdaFunctionCount++);
        lambdaFunction.function = func;
        func.setReturnTypeNode(returnType);
        func.desugaredReturnType = true;
        defineFunction(func, env.enclPkg);
        func.desugared = false;
        lambdaFunction.pos = pos;
        return lambdaFunction;
    }
    /**
     * Defines a generated function in the target package's symbol space and adds it to the
     * package's function and top-level node lists so later phases process it.
     */
    private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
        final BPackageSymbol packageSymbol = targetPkg.symbol;
        final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
        symbolEnter.defineNode(funcNode, packageEnv);
        packageEnv.enclPkg.functions.add(funcNode);
        packageEnv.enclPkg.topLevelNodes.add(funcNode);
    }
    // fork/join needs no desugaring at this stage; pass it through unchanged.
    @Override
    public void visit(BLangForkJoin forkJoin) {
        result = forkJoin;
    }
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.type.tag == TypeTags.ARRAY && ((BArrayType) literalExpr.type).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
    /**
     * Expands a blob literal (`base64 \`...\`` or `base16 \`...\``) into an array literal whose
     * elements are the decoded byte values.
     */
    private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
        // result[0] = encoding prefix, result[1] = payload between the backticks.
        String[] result = getBlobTextValue((String) literalExpr.value);
        byte[] values;
        if (BASE_64.equals(result[0])) {
            values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
        } else {
            values = hexStringToByteArray(result[1]);
        }
        BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        arrayLiteralNode.type = literalExpr.type;
        arrayLiteralNode.pos = literalExpr.pos;
        arrayLiteralNode.exprs = new ArrayList<>();
        for (byte b : values) {
            arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
        }
        return arrayLiteralNode;
    }
/**
 * Splits the textual form of a blob literal (e.g. {@code base64 `...`}) into
 * its encoding name and payload.
 *
 * @param blobLiteralNodeText raw literal text, possibly containing spaces
 * @return a two-element array: [0] the encoding prefix before the first
 *         backtick, [1] the content between the first and last backticks
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    // replace(), not replaceAll(): the target is a literal space, no regex needed.
    String nodeText = blobLiteralNodeText.replace(" ", "");
    // Hoist the first-backtick search instead of scanning twice.
    int firstBacktick = nodeText.indexOf('`');
    String[] result = new String[2];
    result[0] = nodeText.substring(0, firstBacktick);
    result[1] = nodeText.substring(firstBacktick + 1, nodeText.lastIndexOf('`'));
    return result;
}
/**
 * Decodes a hexadecimal string into its byte values, two hex digits per byte.
 * NOTE(review): assumes an even-length string of valid hex digits (presumably
 * guaranteed by the lexer) — an odd length throws StringIndexOutOfBoundsException
 * and invalid digits produce garbage bytes via Character.digit's -1; confirm.
 */
private static byte[] hexStringToByteArray(String str) {
    int length = str.length();
    byte[] decoded = new byte[length / 2];
    for (int index = 0; index < length; index += 2) {
        int highNibble = Character.digit(str.charAt(index), 16);
        int lowNibble = Character.digit(str.charAt(index + 1), 16);
        decoded[index / 2] = (byte) ((highNibble << 4) + lowNibble);
    }
    return decoded;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
// Desugars a list constructor into the concrete literal node matching its contextual type.
// Branch order matters: tuple and top-level json are checked before the json-element case.
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
if (listConstructor.type.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.JSON) {
// json-typed constructor: wrap as a JSON array literal of json[].
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.type));
result = rewriteExpr(expr);
} else if (getElementType(listConstructor.type).tag == TypeTags.JSON) {
// Array whose (possibly nested) element type is json: keep the declared type.
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
} else if (listConstructor.type.tag == TypeTags.TYPEDESC) {
// The constructor is used as a type-descriptor value, not as data.
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.type = symTable.typeDesc;
result = rewriteExpr(typedescExpr);
} else {
// Default: a plain array literal.
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.type);
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType literalType = arrayLiteral.type;
    if (literalType.tag == TypeTags.JSON) {
        // json-typed literal: wrap as a JSON array literal of json[].
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(literalType));
    } else if (getElementType(literalType).tag == TypeTags.JSON) {
        // Array whose (possibly nested) element type is json: keep the declared type.
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, literalType);
    } else {
        result = arrayLiteral;
    }
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        // The literal is used as a type-descriptor value; desugar accordingly.
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.type = symTable.typeDesc;
        result = rewriteExpr(typedescExpr);
        return;
    }
    // Cast each member up to any, honoring an already-present implicit conversion.
    for (BLangExpression member : tupleLiteral.exprs) {
        BType memberType = member.impConversionExpr == null ? member.type : member.impConversionExpr.type;
        types.setImplicitCastExpr(member, memberType, symTable.anyType);
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    if (!groupExpr.isTypedescExpr) {
        // A plain parenthesized expression desugars to its inner expression.
        result = rewriteExpr(groupExpr.expression);
        return;
    }
    // The group is used as a type-descriptor value.
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = groupExpr.typedescType;
    typedescExpr.type = symTable.typeDesc;
    result = rewriteExpr(typedescExpr);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    // Move computed-key fields after literal-key fields; the sort is stable, so
    // relative order within each group is preserved.
    recordLiteral.fields.sort(
            (first, second) -> Boolean.compare(isComputedKey(first), isComputedKey(second)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
// Desugars a simple variable reference into the concrete ref node determined by
// the symbol's kind and its owner (local, field, package-level, function, type).
BLangSimpleVarRef genVarRefExpr = varRefExpr;
// XML namespace prefix used as an expression: produce a QName string expression.
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.type = symTable.stringType;
result = qnameExpr;
return;
}
// Unresolved reference (e.g. '_'): nothing to desugar.
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
// Type-narrowed variables carry a clone symbol; restore the original symbol.
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
// Function referenced as a value (function pointer).
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
!((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
// Type referenced as a value (but not a constant): load the type.
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
// Owned by a function/let scope: a local variable.
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
// Owned by an object/record: a field reference.
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
// Simple-literal constants are inlined as literals instead of package var refs.
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.type));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
}
genVarRefExpr.type = varRefExpr.type;
genVarRefExpr.pos = varRefExpr.pos;
// Assignment targets (and '_') take the symbol's declared type and skip conversion.
if ((varRefExpr.lhsVar)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
genVarRefExpr.type = varRefExpr.symbol.type;
result = genVarRefExpr;
return;
}
genVarRefExpr.lhsVar = varRefExpr.lhsVar;
// Read position: use the declared type and convert to the expected (narrowed) type.
BType targetType = genVarRefExpr.type;
genVarRefExpr.type = genVarRefExpr.symbol.type;
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
// Desugars a field access (a.b) into the concrete access node determined by the
// static type of the accessed expression (object, record, lax/json, map, xml).
if (safeNavigate(fieldAccessExpr)) {
// Optional/safe navigation (a?.b chains) gets its own match-based rewrite.
result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
return;
}
BLangAccessExpression targetVarRef = fieldAccessExpr;
BType varRefType = fieldAccessExpr.expr.type;
fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
if (!types.isSameType(fieldAccessExpr.expr.type, varRefType)) {
// Rewriting may have changed the static type; convert back to the original.
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
}
BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.pos, fieldAccessExpr.field.value);
int varRefTypeTag = varRefType.tag;
// NOTE(review): the union cases below only inspect the FIRST member type — presumably
// the checker guarantees homogeneous unions here; confirm.
if (varRefTypeTag == TypeTags.OBJECT ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
// Attached method referenced as a value.
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false);
}
} else if (varRefTypeTag == TypeTags.RECORD ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
&& ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false);
}
} else if (types.isLax(varRefType)) {
// Lax typing (json and friends): field access may fail at runtime.
if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
// map<xml>: needs the error-producing map-access rewrite.
result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
return;
}
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else {
// xml.attr / xml._ access goes through the langlib helpers.
targetVarRef = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
}
} else if (varRefTypeTag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.fieldKind);
}
targetVarRef.lhsVar = fieldAccessExpr.lhsVar;
targetVarRef.type = fieldAccessExpr.type;
targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
result = targetVarRef;
}
/**
 * Rewrites a lax field access on a map (e.g. map&lt;xml&gt; m; m.key) into a statement
 * expression that yields either the value or an {map}InvalidKey error:
 *
 *   T|error $mapAccessResult$;
 *   T? $mapAccess = m["key"];
 *   if ($mapAccess is ()) { $mapAccessResult$ = error("{...map}InvalidKey", key="key"); }
 *   else                  { $mapAccessResult$ = $mapAccess; }
 *   => $mapAccessResult$
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
BLangStatementExpression statementExpression = new BLangStatementExpression();
BLangBlockStmt block = new BLangBlockStmt();
statementExpression.stmt = block;
// Overall result type: the accessed type or error.
BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.type, symTable.errorType);
DiagnosticPos pos = fieldAccessExpr.pos;
BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
block.addStatement(result);
BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
resultRef.type = fieldAccessType;
statementExpression.type = fieldAccessType;
// Raw map access yields T|() — nil signals a missing key.
BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.type, symTable.nilType);
mapAccessExpr.type = xmlOrNil;
BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
block.addStatement(mapResult);
// if (is-like nil) -> error branch, else -> value branch.
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
ifStmt.expr = isLikeNilExpr;
BLangBlockStmt resultNilBody = new BLangBlockStmt();
ifStmt.body = resultNilBody;
BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
ifStmt.elseStmt = resultHasValueBody;
// Construct error("{ballerina/lang.map}InvalidKey", key=<field name>).
BLangInvocation errorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue("error");
errorInvocation.name = name;
errorInvocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
errorInvocation.symbol = symTable.errorConstructor;
errorInvocation.type = symTable.errorType;
ArrayList<BLangExpression> errorCtorArgs = new ArrayList<>();
errorInvocation.requiredArgs = errorCtorArgs;
errorCtorArgs.add(createStringLiteral(pos, "{" + BLangConstants.MAP_LANG_LIB + "}InvalidKey"));
BLangNamedArgsExpression message = new BLangNamedArgsExpression();
message.name = ASTBuilderUtil.createIdentifier(pos, "key");
message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
errorCtorArgs.add(message);
BLangSimpleVariableDef errorDef =
createVarDef("_$_invalid_key_error", symTable.errorType, errorInvocation, pos);
resultNilBody.addStatement(errorDef);
BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
errorVarAssignment.varRef = resultRef;
errorVarAssignment.expr = errorRef;
// Else branch: propagate the accessed value.
BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
pos, resultHasValueBody);
mapResultAssignment.varRef = resultRef;
mapResultAssignment.expr = mapResultRef;
statementExpression.expr = resultRef;
return statementExpression;
}
/**
 * Rewrites field access on an xml value into the corresponding langlib call:
 * {@code x._} becomes the element-name getter, any other field becomes an
 * attribute access (with the field expanded to a {ns}local QName when it was
 * written with a namespace prefix).
 */
private BLangAccessExpression rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
ArrayList<BLangExpression> args = new ArrayList<>();
String fieldName = fieldAccessExpr.field.value;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
// ns:attr form — expand the prefix into the full namespace URI.
BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
}
// "_" is the special element-name accessor, not an attribute.
if (fieldName.equals("_")) {
return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
}
BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
args.add(attributeNameLiteral);
// Second arg tells the helper whether this was an optional (?.name) access.
args.add(isOptionalAccessToLiteral(fieldAccessExpr));
return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
new ArrayList<>());
}
/**
 * Creates a (rewritten) boolean literal indicating whether the given field
 * access uses the optional-access form.
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    boolean isOptional = fieldAccessExpr.isOptionalFieldAccess();
    return rewrite(createLiteral(fieldAccessExpr.pos, symTable.booleanType, isOptional), env);
}
/**
 * Builds an expanded XML qualified name of the form {namespaceURI}localName.
 */
private String createExpandedQName(String nsURI, String localName) {
    return new StringBuilder("{").append(nsURI).append('}').append(localName).toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
// Desugars an index access (a[i]) into the concrete access node determined by
// the static type of the accessed expression (map, mapping, list, string, xml).
if (safeNavigate(indexAccessExpr)) {
// Safe-navigation chains get their own match-based rewrite.
result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
return;
}
BLangVariableReference targetVarRef = indexAccessExpr;
indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
BType varRefType = indexAccessExpr.expr.type;
indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
if (!types.isSameType(indexAccessExpr.expr.type, varRefType)) {
// Rewriting may have changed the static type; convert back to the original.
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
}
if (varRefType.tag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr, indexAccessExpr.indexExpr);
} else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
// Records and other mapping subtypes (nil/error lifted out first).
targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr, (BVarSymbol) indexAccessExpr.symbol, false);
} else if (types.isSubTypeOfList(varRefType)) {
// Arrays and tuples.
targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (types.isAssignable(varRefType, symTable.stringType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
}
targetVarRef.lhsVar = indexAccessExpr.lhsVar;
targetVarRef.type = indexAccessExpr.type;
result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
// Desugars a function/method invocation: reorders named/defaultable args,
// rewrites arguments, and turns object/record method calls into attached-
// function invocations with the receiver prepended to the argument list.
BLangInvocation genIExpr = iExpr;
if (iExpr.symbol != null && iExpr.symbol.kind == SymbolKind.ERROR_CONSTRUCTOR) {
// NOTE(review): no return here — rewriteErrorConstructor mutates iExpr and the
// rewritten args are then processed by the common path below; result is also
// reassigned later. Confirm this fall-through is intentional.
result = rewriteErrorConstructor(iExpr);
}
reorderArguments(iExpr);
iExpr.requiredArgs = rewriteExprs(iExpr.requiredArgs);
fixNonRestArgTypeCastInTypeParamInvocation(iExpr);
iExpr.restArgs = rewriteExprs(iExpr.restArgs);
annotationDesugar.defineStatementAnnotations(iExpr.annAttachments, iExpr.pos, iExpr.symbol.pkgID,
iExpr.symbol.owner, env);
if (iExpr.functionPointerInvocation) {
visitFunctionPointerInvocation(iExpr);
return;
}
iExpr.expr = rewriteExpr(iExpr.expr);
result = genIExpr;
if (iExpr.expr == null) {
// No receiver expression: either a plain function call, or a call whose
// receiver is only known via exprSymbol (materialize a var ref for it).
fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
if (iExpr.exprSymbol == null) {
return;
}
iExpr.expr = ASTBuilderUtil.createVariableRef(iExpr.pos, iExpr.exprSymbol);
iExpr.expr = rewriteExpr(iExpr.expr);
}
switch (iExpr.expr.type.tag) {
case TypeTags.OBJECT:
case TypeTags.RECORD:
// Method call on an object/record (but not a langlib call): the receiver
// becomes the first required argument of an attached-function invocation.
if (!iExpr.langLibInvocation) {
List<BLangExpression> argExprs = new ArrayList<>(iExpr.requiredArgs);
argExprs.add(0, iExpr.expr);
BLangAttachedFunctionInvocation attachedFunctionInvocation =
new BLangAttachedFunctionInvocation(iExpr.pos, argExprs, iExpr.restArgs, iExpr.symbol,
iExpr.type, iExpr.expr, iExpr.async);
attachedFunctionInvocation.actionInvocation = iExpr.actionInvocation;
attachedFunctionInvocation.name = iExpr.name;
attachedFunctionInvocation.annAttachments = iExpr.annAttachments;
result = genIExpr = attachedFunctionInvocation;
}
break;
}
fixTypeCastInTypeParamInvocation(iExpr, genIExpr);
}
/**
 * For lang-lib invocations, inserts conversions on the non-receiver required
 * arguments to match the declared parameter types (index 0 is the receiver,
 * hence skipped).
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int argIndex = 1; argIndex < args.size(); argIndex++) {
        args.set(argIndex, addConversionExprIfRequired(args.get(argIndex), params.get(argIndex).type));
    }
}
/**
 * For lang-lib calls and calls whose return type contains a type parameter,
 * restores the invocation's type to the symbol's declared return type and wraps
 * the invocation in a cast back to the originally inferred type.
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
if (iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(((BInvokableSymbol) iExpr.symbol).retType)) {
BType originalInvType = genIExpr.type;
genIExpr.type = ((BInvokableSymbol) genIExpr.symbol).retType;
BLangExpression expr = addConversionExprIfRequired(genIExpr, originalInvType);
// If a conversion node was produced, it already wraps the invocation.
if (expr.getKind() == NodeKind.TYPE_CONVERSION_EXPR) {
this.result = expr;
return;
}
// No conversion was deemed necessary; force an explicit cast node anyway so
// downstream codegen sees the originally inferred type.
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
conversionExpr.expr = genIExpr;
conversionExpr.targetType = originalInvType;
conversionExpr.type = originalInvType;
conversionExpr.pos = genIExpr.pos;
this.result = conversionExpr;
}
}
/**
 * Rewrites an error(...) constructor call: normalizes the reason argument to
 * string, collects the named arguments into an (immutable) detail record, and
 * leaves the invocation with [reason, detail] as its required arguments.
 */
private BLangInvocation rewriteErrorConstructor(BLangInvocation iExpr) {
BLangExpression reasonExpr = iExpr.requiredArgs.get(0);
// Drop a non-string implicit conversion on the reason so it can be re-cast to string.
if (reasonExpr.impConversionExpr != null &&
reasonExpr.impConversionExpr.targetType.tag != TypeTags.STRING) {
reasonExpr.impConversionExpr = null;
}
reasonExpr = addConversionExprIfRequired(reasonExpr, symTable.stringType);
reasonExpr = rewriteExpr(reasonExpr);
// Move the (rewritten) reason to the end; named args are filtered out below,
// leaving [reason] and then [reason, detail].
iExpr.requiredArgs.remove(0);
iExpr.requiredArgs.add(reasonExpr);
BLangExpression errorDetail;
BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(iExpr.pos,
((BErrorType) iExpr.symbol.type).detailType);
List<BLangExpression> namedArgs = iExpr.requiredArgs.stream()
.filter(a -> a.getKind() == NodeKind.NAMED_ARGS_EXPR)
.collect(Collectors.toList());
if (namedArgs.isEmpty()) {
// No detail fields: the detail is an empty (readonly-cloned) record.
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.type);
} else {
// Each named argument becomes a key-value field of the detail record.
for (BLangExpression arg : namedArgs) {
BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) arg;
BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
symTable.stringType, namedArg.name.value));
if (recordLiteral.type.tag == TypeTags.RECORD) {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
} else {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.type);
}
recordLiteral.fields.add(member);
iExpr.requiredArgs.remove(arg);
}
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), ((BErrorType) iExpr.symbol.type).detailType);
}
iExpr.requiredArgs.add(errorDetail);
return iExpr;
}
public void visit(BLangTypeInit typeInitExpr) {
    // Stream construction and object construction desugar along different paths.
    if (typeInitExpr.type.tag == TypeTags.STREAM) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
        return;
    }
    result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
}
/**
 * Desugars {@code new T(...)} into a statement expression that allocates the
 * object, invokes its generated init function, and — when init can return an
 * error — selects either the error or the object as the overall result:
 *
 *   T $obj$ = new;            // allocation
 *   T|error? $temp$ = $obj$.$init$(...);
 *   T|error $result$;
 *   if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
 *   => $result$
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
typeInitExpr.desugared = true;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
// The concrete object type (unwraps T|error unions produced by checked init).
BType objType = getObjectType(typeInitExpr.type);
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
blockStmt.addStatement(objVarDef);
// Redirect the init invocation to the compiler-generated init function, with
// the freshly allocated object as the receiver.
typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
if (typeInitExpr.initInvocation.type.tag == TypeTags.NIL) {
// init cannot fail: just call it for its side effects and yield the object.
BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
initInvExpr.expr = typeInitExpr.initInvocation;
typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
stmtExpr.type = objVarRef.symbol.type;
return stmtExpr;
}
// init may return an error: capture its result and branch on it.
BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.type,
typeInitExpr.initInvocation, typeInitExpr.pos);
blockStmt.addStatement(initInvRetValVarDef);
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.type, null, typeInitExpr.pos);
blockStmt.addStatement(resultVarDef);
BLangSimpleVarRef initRetValVarRefInCondition =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(typeInitExpr.pos, initRetValVarRefInCondition, getErrorTypeNode());
isErrorTest.type = symTable.booleanType;
// then: $result$ = <error from init>
BLangSimpleVarRef thenInitRetValVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, initInvRetValVarDef.var.symbol);
BLangSimpleVarRef thenResultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangAssignment errAssignment =
ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, thenResultVarRef, thenInitRetValVarRef);
thenStmt.addStatement(errAssignment);
// else: $result$ = $obj$
BLangSimpleVarRef elseResultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangAssignment objAssignment =
ASTBuilderUtil.createAssignmentStmt(typeInitExpr.pos, elseResultVarRef, objVarRef);
BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
elseStmt.addStatement(objAssignment);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(typeInitExpr.pos, isErrorTest, thenStmt, elseStmt);
blockStmt.addStatement(ifelse);
BLangSimpleVarRef resultVarRef =
ASTBuilderUtil.createVariableRef(typeInitExpr.pos, resultVarDef.var.symbol);
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.type = resultVarRef.symbol.type;
return stmtExpr;
}
/**
 * Desugars {@code new stream<T, E>(iteratorObj)} into a call to the internal
 * constructStream function, passing a typedesc for the constraint type and the
 * iterator object supplied as the first constructor argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
.lookup(Names.CONSTRUCT_STREAM).symbol;
BType targetType = ((BStreamType) typeInitExpr.type).constraint;
BType errorType = ((BStreamType) typeInitExpr.type).error;
// typedesc<T> carrying the stream's constraint type.
BType typedescType = new BTypedescType(targetType, symTable.typeDesc.tsymbol);
BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = targetType;
typedescExpr.type = typedescType;
BLangExpression iteratorObj = typeInitExpr.argsExpr.get(0);
BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
typeInitExpr.pos, symbol, new ArrayList<>(Lists.of(typedescExpr, iteratorObj)),
symResolver);
streamConstructInvocation.type = new BStreamType(TypeTags.STREAM, targetType, errorType, null);
return streamConstructInvocation;
}
/**
 * Defines a simple variable of the given type with the given initializer,
 * reusing an existing symbol with the same name from the current scope when
 * one resolves.
 * NOTE(review): the declared variable name is wrapped as "$name$", while the
 * symbol lookup/creation uses the unwrapped name — callers that already pass
 * a wrapped name (e.g. "$obj$") end up with "$$obj$$"; confirm intended.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr, DiagnosticPos pos) {
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    if (varSym == null || varSym == symTable.notFoundSymbol) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable variable =
            ASTBuilderUtil.createVariable(pos, "$" + name + "$", type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = variable;
    varDef.type = variable.type;
    return varDef;
}
/**
 * Extracts the object type from the given type: the type itself when it is an
 * object, or the first object member of a union (noType when the union has none).
 *
 * @throws IllegalStateException if the type is neither an object nor a union
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
/**
 * Creates an error type node whose resolved type is the built-in error type.
 */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.type = symTable.errorType;
    return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
/*
* First desugar to if-else:
*
* T $result$;
* if () {
* $result$ = thenExpr;
* } else {
* $result$ = elseExpr;
* }
*
*/
// Result variable holding whichever branch's value is selected.
BLangSimpleVariableDef resultVarDef = createVarDef("$ternary_result$", ternaryExpr.type, null, ternaryExpr.pos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
// then: $result$ = thenExpr
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
thenBody.addStatement(thenAssignment);
// else: $result$ = elseExpr
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
// Wrap the whole thing as a statement expression yielding $result$.
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.type = ternaryExpr.type;
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        // wait f1 | f2 | ... — flatten the binary tree into a flat future list.
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
/**
 * Recursively flattens the operand tree of a wait binary expression into the
 * given list and returns that same list.
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> collected) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, collected);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, collected);
    return collected;
}
/**
 * Helper for flattening wait expressions: recurses into nested binary
 * expressions and collects (rewritten) leaf expressions.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> collected) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, collected);
        return;
    }
    collected.add(rewriteExpr(expr));
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Rewrite each key-value pair: the value expression when present, otherwise
    // the key expression (shorthand form).
    waitExpr.keyValuePairs.forEach(keyValue -> {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    });
    result = rewriteExpr(new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.type));
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    BLangExpression trapped = rewriteExpr(trapExpr.expr);
    // A nil-typed trapped expression needs no conversion to the trap's result type.
    trapExpr.expr = trapped.type.tag == TypeTags.NIL
            ? trapped
            : addConversionExprIfRequired(trapped, trapExpr.type);
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
// Desugars binary expressions: range operators become IntRange construction,
// logical AND/OR get short-circuit handling, and mixed-type operands are
// reconciled with explicit casts (byte->int, numeric widening, string/xml concat).
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
// a ..< b is a ... (b - 1) on the IntRange end.
binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
}
result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
return;
}
if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
visitBinaryLogicalExpr(binaryExpr);
return;
}
OperatorKind binaryOpKind = binaryExpr.opKind;
// Arithmetic/bitwise ops mixing byte and int may need widening before rewrite.
if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
checkByteTypeIncompatibleOperations(binaryExpr);
}
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
result = binaryExpr;
int rhsExprTypeTag = binaryExpr.rhsExpr.type.tag;
int lhsExprTypeTag = binaryExpr.lhsExpr.type.tag;
// Equality between byte and int: widen the byte side to int.
if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
return;
}
if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
}
// Same operand types: nothing further to reconcile.
if (lhsExprTypeTag == rhsExprTypeTag) {
return;
}
// string + xml: lift the string side to an xml text literal; string + other: cast.
if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
binaryExpr.lhsExpr.pos, symTable.xmlType);
return;
}
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
return;
}
if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
binaryExpr.rhsExpr.pos, symTable.xmlType);
return;
}
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
return;
}
// Numeric widening precedence: decimal first, then float.
if (lhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
return;
}
if (rhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
return;
}
if (lhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.type);
return;
}
if (rhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.type);
}
}
/**
 * Builds an invocation of the internal createIntRange function used to desugar
 * range expressions (a ... b and a ..&lt; b).
 */
private BLangInvocation replaceWithIntRange(DiagnosticPos pos, BLangExpression lhsExpr, BLangExpression rhsExpr) {
    BInvokableSymbol rangeFuncSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            pos, rangeFuncSymbol, new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    rangeInvocation.type = symTable.intRangeType;
    return rangeInvocation;
}
/**
 * For arithmetic/bitwise binary operations whose expected result type is int,
 * widens any byte operand to int by inserting a conversion.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = binaryExpr.lhsExpr.type.tag;
    int rhsTag = binaryExpr.rhsExpr.type.tag;
    // Nothing to widen unless at least one operand is a byte.
    if (lhsTag != TypeTags.BYTE && rhsTag != TypeTags.BYTE) {
        return;
    }
    if (binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Reports whether the given binary expression is a bitwise shift operation
 * ({@code <<}, {@code >>} or {@code >>>}). When true, both operands of the
 * expression are subsequently converted to 'int'.
 * <p>
 * Examples where this returns true (expected type 'int'):
 * {@code a >> b}, {@code a << b}, {@code a >> i}, {@code i << j}.
 *
 * @param binaryExpr the binary expression to inspect
 * @return true when the operator is one of the three shift operators
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
// Desugars `lhs ?: rhs` into a match expression: when lhs matches nil the (rewritten)
// rhs is the result, otherwise lhs itself is.
public void visit(BLangElvisExpr elvisExpr) {
BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
rewriteExpr(elvisExpr.rhsExpr)));
matchExpr.type = elvisExpr.type;
matchExpr.pos = elvisExpr.pos;
result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // `~x` is rewritten into an equivalent XOR binary expression; every other
    // unary operator only needs its operand desugared.
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
    } else {
        unaryExpr.expr = rewriteExpr(unaryExpr.expr);
        result = unaryExpr;
    }
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise XOR binary
 * expression: {@code ~a} becomes {@code a ^ -1} for ints and {@code a ^ 0xff} for bytes.
 * Example: ~ 11110011 -> 00001100 since 11110011 ^ 11111111 -> 00001100.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final DiagnosticPos pos = unaryExpr.pos;
    final boolean isByte = unaryExpr.type.tag == TypeTags.BYTE;
    final BType operandType = isByte ? symTable.byteType : symTable.intType;
    // XOR against an all-ones mask of the operand's width flips every bit.
    final BLangExpression allOnesMask = isByte
            ? ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL)
            : ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);

    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    xorExpr.rhsExpr = allOnesMask;
    xorExpr.type = operandType;
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion that carries only annotation attachments and no target type node
    // reduces to its inner expression.
    boolean annotationOnly = conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty();
    if (annotationOnly) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
// Register the lambda at package level so its function gets emitted; the node itself
// is returned unchanged.
env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
result = bLangLambdaFunction;
}
// Desugars an arrow function (`(x) => expr`) into a regular lambda function: a new
// BLangFunction is synthesized with a `return expr;` body, its symbol and parameter
// symbols are defined in a fresh function scope, and the resulting lambda is rewritten
// and registered with the enclosing package.
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
bLangFunction.setName(bLangArrowFunction.functionName);
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.pos = bLangArrowFunction.pos;
bLangFunction.addFlag(Flag.LAMBDA);
lambdaFunction.function = bLangFunction;
// The return type node is synthesized from the arrow body expression's resolved type.
BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
returnType.type = bLangArrowFunction.body.expr.type;
bLangFunction.setReturnTypeNode(returnType);
bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
bLangArrowFunction.params.forEach(bLangFunction::addParameter);
lambdaFunction.parent = bLangArrowFunction.parent;
lambdaFunction.type = bLangArrowFunction.funcType;
// Create and define the invokable symbol for the synthesized function.
BLangFunction funcNode = lambdaFunction.function;
BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
new Name(funcNode.name.value), env.enclPkg.symbol.pkgID, bLangArrowFunction.funcType,
env.enclEnv.enclVarSym, true);
SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
// Define each required parameter's symbol in the function scope while collecting them.
List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
Scope enclScope = invokableEnv.scope;
varNode.symbol.kind = SymbolKind.FUNCTION;
varNode.symbol.owner = invokableEnv.scope.owner;
enclScope.define(varNode.symbol.name, varNode.symbol);
}).map(varNode -> varNode.symbol).collect(Collectors.toList());
funcSymbol.params = paramSymbols;
funcSymbol.restParam = getRestSymbol(funcNode);
funcSymbol.retType = funcNode.returnTypeNode.type;
List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
funcNode.type = new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.type, null);
lambdaFunction.function.pos = bLangArrowFunction.pos;
lambdaFunction.function.body.pos = bLangArrowFunction.pos;
// Keep the defining environment so closure captures can be resolved later.
lambdaFunction.capturedClosureEnv = env;
rewrite(lambdaFunction.function, env);
env.enclPkg.addFunction(lambdaFunction.function);
bLangArrowFunction.function = lambdaFunction.function;
result = rewriteExpr(lambdaFunction);
}
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    // Attach the symbol to the node and share one fresh scope, owned by the symbol,
    // between the symbol and the invokable environment.
    Scope functionScope = new Scope(funcSymbol);
    funcSymbol.scope = functionScope;
    invokableEnv.scope = functionScope;
    invokableNode.symbol = funcSymbol;
}
@Override
public void visit(BLangXMLQName xmlQName) {
// XML qualified names require no desugaring.
result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
// Desugar the attribute's name and value expressions in place.
xmlAttribute.name = rewriteExpr(xmlAttribute.name);
xmlAttribute.value = rewriteExpr(xmlAttribute.value);
result = xmlAttribute;
}
// Desugars an XML element literal: rewrites its tag names, children and attributes,
// then materializes each namespace-declaration attribute (xmlns) as an inline XMLNS
// node on the element.
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
while (attributesItr.hasNext()) {
BLangXMLAttribute attribute = attributesItr.next();
// Only xmlns-declaration attributes become namespace nodes.
if (!attribute.isNamespaceDeclr) {
continue;
}
BLangXMLNS xmlns;
// Package-level declarations and local declarations use distinct node types.
if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
xmlns = new BLangPackageXMLNS();
} else {
xmlns = new BLangLocalXMLNS();
}
xmlns.namespaceURI = attribute.value.concatExpr;
xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
xmlns.symbol = attribute.symbol;
xmlElementLiteral.inlineNamespaces.add(xmlns);
}
result = xmlElementLiteral;
}
// The following visits desugar the text fragments of XML/string-template literals
// into a single string-concatenation expression.
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
xmlCommentLiteral.concatExpr = rewriteExpr(
constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
// Processing instructions also have a target expression that needs rewriting.
xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
xmlProcInsLiteral.dataConcatExpr =
rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
xmlQuotedString.concatExpr = rewriteExpr(
constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
// A string template desugars directly to the concatenation expression itself.
result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
// Worker interaction nodes: sent values are cloned so each worker sees an
// independent copy, per value-transfer semantics.
@Override
public void visit(BLangWorkerSend workerSendNode) {
workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.type);
if (workerSendNode.keyExpr != null) {
workerSendNode.keyExpr = rewriteExpr(workerSendNode.keyExpr);
}
result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.type);
result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
if (workerReceiveNode.keyExpr != null) {
workerReceiveNode.keyExpr = rewriteExpr(workerReceiveNode.keyExpr);
}
result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
// Collect the distinct worker names targeted by the cached send statements.
workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
.stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
result = workerFlushExpr;
}
// Desugars an XML attribute access (`x@[...]` / `x@`). An access with an index or on
// the assignment LHS is kept as-is; otherwise the marked-desugared node is rewritten
// again so a later visit can expand it.
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
if (xmlAttributeAccessExpr.indexExpr != null
&& xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
}
xmlAttributeAccessExpr.desugared = true;
// invoke desugar again within if cond to re-process the node when neither
// lhsVar nor an index expression applies.
if (xmlAttributeAccessExpr.lhsVar || xmlAttributeAccessExpr.indexExpr != null) {
result = xmlAttributeAccessExpr;
} else {
result = rewriteExpr(xmlAttributeAccessExpr);
}
}
// The nodes below are produced by earlier desugaring phases and are already in their
// final form; each visit is an identity pass-through.
@Override
public void visit(BLangLocalVarRef localVarRef) {
result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // `x.<elem>` desugars into a call to the internal getElements lang-lib function,
    // passing the expanded element-name filters as rest arguments.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> expandedFilters = expandFilters(xmlElementAccess.filters);
    BLangInvocation getElementsCall = createLanglibXMLInvocation(xmlElementAccess.pos,
            XML_INTERNAL_GET_ELEMENTS, xmlElementAccess.expr, new ArrayList<>(), expandedFilters);
    result = rewriteExpr(getElementsCall);
}
// Expands XML element filters into string-literal arguments: each filter name is
// qualified with its resolved namespace URI (or the default namespace when the
// filter's prefix cannot be resolved and the name is not the `*` wildcard).
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
ArrayList<BLangExpression> args = new ArrayList<>();
for (BLangXMLElementFilter filter : filters) {
BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
if (nsSymbol == symTable.notFoundSymbol) {
// No explicit prefix: use the default namespace unless the name is the wildcard.
if (defaultNS != null && !filter.name.equals("*")) {
String expandedName = createExpandedQName(defaultNS, filter.name);
args.add(createStringLiteral(filter.elemNamePos, expandedName));
} else {
args.add(createStringLiteral(filter.elemNamePos, filter.name));
}
} else {
// Prefix resolved: expand to `{uri}name` form.
BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
args.add(stringLiteral);
}
}
return args;
}
// Builds an invocation of a lang.xml library method on the given expression.
// The receiver expression is also passed as the first required argument, matching
// the lang-lib calling convention; `restArgs` become the call's rest arguments.
private BLangInvocation createLanglibXMLInvocation(DiagnosticPos pos, String functionName,
BLangExpression invokeOnExpr,
ArrayList<BLangExpression> args,
ArrayList<BLangExpression> restArgs) {
invokeOnExpr = rewriteExpr(invokeOnExpr);
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
name.pos = pos;
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
invocationNode.expr = invokeOnExpr;
invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
requiredArgs.add(invokeOnExpr);
requiredArgs.addAll(args);
invocationNode.requiredArgs = requiredArgs;
invocationNode.restArgs = rewriteExprs(restArgs);
invocationNode.type = ((BInvokableType) invocationNode.symbol.type).getReturnType();
invocationNode.langLibInvocation = true;
return invocationNode;
}
// Desugars XML navigation (`x/**/<e>`, `x/*`, `x/<e>[i]`) into the matching internal
// lang.xml function call: selectDescendants, children, or getFilteredChildrenFlat.
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
result = rewriteExpr(invocationNode);
} else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
result = rewriteExpr(invocationNode);
} else {
BLangExpression childIndexExpr;
// -1 signals "no index" to the runtime function.
if (xmlNavigation.childIndex == null) {
childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
} else {
childIndexExpr = xmlNavigation.childIndex;
}
ArrayList<BLangExpression> args = new ArrayList<>();
args.add(rewriteExpr(childIndexExpr));
BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
result = rewriteExpr(invocationNode);
}
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
// Only the tested expression needs desugaring.
assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
// Already produced by visitFunctionPointerInvocation; pass through.
result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
// Desugar the referenced type node; the expression itself is unchanged.
typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
result = typedescExpr;
}
// Normalizes an int range so both bounds are inclusive: exclusive start/end bounds
// are replaced by adjusted expressions, then both bounds are desugared.
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
if (!intRangeExpression.includeStart) {
intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
}
if (!intRangeExpression.includeEnd) {
intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
}
intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
result = intRangeExpression;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
// A rest-args expression (`...expr`) reduces to its inner expression.
result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
// A named argument (`name = expr`) reduces to its value expression.
bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
result = bLangNamedArgsExpression.expr;
}
// Desugars a match expression into a statement expression: a temp result variable is
// declared, a match STATEMENT assigns the matching clause's (converted) expression to
// it, and the temp variable reference is the overall value.
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
// Ensure there is a catch-all clause so the match statement is exhaustive.
addMatchExprDefaultCase(bLangMatchExpression);
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
matchTempResultVarName, bLangMatchExpression.type, null,
new BVarSymbol(0, names.fromString(matchTempResultVarName), this.env.scope.owner.pkgID,
bLangMatchExpression.type, this.env.scope.owner));
BLangSimpleVariableDef tempResultVarDef =
ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
tempResultVarDef.desugared = true;
BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
// Turn each expression clause into a statement clause that assigns the temp variable.
for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
pattern.expr = rewriteExpr(pattern.expr);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.type);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
}
stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
patternClauses));
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
statementExpr.type = bLangMatchExpression.type;
result = rewriteExpr(statementExpr);
}
// `check` and `checkpanic` share one desugaring; the flag selects the error branch
// behavior (return the error vs. panic).
@Override
public void visit(BLangCheckedExpr checkedExpr) {
visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
visitCheckAndCheckPanicExpr(checkedExpr, true);
}
// Desugars `check expr` / `checkpanic expr` into a statement expression: a temp
// variable plus a match statement whose success clause stores the value and whose
// error clause either returns or panics, per isCheckPanic.
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
String checkedExprVarName = GEN_VAR_PREFIX.value;
BLangSimpleVariable checkedExprVar = ASTBuilderUtil.createVariable(checkedExpr.pos,
checkedExprVarName, checkedExpr.type, null, new BVarSymbol(0,
names.fromString(checkedExprVarName),
this.env.scope.owner.pkgID, checkedExpr.type, this.env.scope.owner));
BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
checkedExprVarDef.desugared = true;
// Success clause: bind the non-error value to the temp variable.
BLangMatchTypedBindingPatternClause patternSuccessCase =
getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
checkedExprVar.symbol, null);
// Error clause: propagate (return) or panic, depending on isCheckPanic.
BLangMatchTypedBindingPatternClause patternErrorCase = getSafeAssignErrorPattern(checkedExpr.pos,
this.env.scope.owner, checkedExpr.equivalentErrorTypeList, isCheckPanic);
BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
new ArrayList<BLangMatchTypedBindingPatternClause>() {{
add(patternSuccessCase);
add(patternErrorCase);
}});
BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
new ArrayList<BLangStatement>() {{
add(checkedExprVarDef);
add(matchStmt);
}});
BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
checkedExpr.pos, checkedExprVar.symbol);
BLangStatementExpression statementExpr = createStatementExpression(
generatedStmtBlock, tempCheckedExprVarRef);
statementExpr.type = checkedExpr.type;
result = rewriteExpr(statementExpr);
}
// A service constructor desugars to an empty type-init of the service's generated
// type definition; service annotations are rewritten as a side effect.
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
serviceConstructorExpr.serviceNode.serviceTypeDefinition.symbol.type);
serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = rewriteExpr(typeInit);
}
/**
 * Desugars a type-test expression ({@code expr is T}): a value-typed operand is
 * wrapped in a conversion to {@code any} before the runtime type check, then both
 * the operand and the type node are rewritten.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.type)) {
        // Fix: the conversion expression returned here was previously discarded
        // (the call's result was ignored), so value-typed operands were never
        // actually wrapped. Keep the wrapped expression.
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
// Desugars annotation access (`expr.@annot`) into a synthetic ANNOT_ACCESS binary
// expression whose rhs is the annotation's runtime alias string.
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
binaryExpr.pos = annotAccessExpr.pos;
binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
binaryExpr.lhsExpr = annotAccessExpr.expr;
binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
annotAccessExpr.annotationSymbol.bvmAlias());
binaryExpr.type = annotAccessExpr.type;
// A synthetic operator symbol is created on the fly for ANNOT_ACCESS.
binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
new BInvokableType(Lists.of(binaryExpr.lhsExpr.type,
binaryExpr.rhsExpr.type),
annotAccessExpr.type, null), null);
result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
// Only the tested expression needs desugaring.
isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
// Rewrite both the value expression and the preceding statement.
bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
// Query desugaring is delegated to the dedicated QueryDesugar pass.
BLangStatementExpression stmtExpr = queryDesugar.desugarQueryExpr(queryExpr, env);
result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangQueryAction queryAction) {
BLangStatementExpression stmtExpr = queryDesugar.desugarQueryAction(queryAction, env);
result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
result = jsonArrayLiteral;
}
// Desugars a constant declaration: simple-literal constants (nil/boolean and the
// numeric/string tags below BOOLEAN — relies on TypeTags ordering) are replaced by a
// literal built from the resolved constant value; other constants keep their
// expression, which is rewritten.
@Override
public void visit(BLangConstant constant) {
BConstantSymbol constSymbol = constant.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
// Only nil may legitimately carry a null value at this point.
if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
throw new IllegalStateException();
}
BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
constSymbol.value.value);
constant.expr = rewriteExpr(literal);
} else {
constant.expr = rewriteExpr(constant.expr);
}
constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
// Nothing to desugar.
result = ignoreExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
// A constant reference is replaced by a literal carrying its resolved value.
result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.type, constantRef.value);
}
// Builds the statement `$iterator$ = <collection>.iterator()` used by foreach/query
// desugaring, wiring the given iterator function symbol into the invocation.
BLangSimpleVariableDef getIteratorVariableDefinition(DiagnosticPos pos, BVarSymbol collectionSymbol,
BInvokableSymbol iteratorInvokableSymbol,
boolean isIteratorFuncFromLangLib) {
BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
iteratorInvocation.pos = pos;
iteratorInvocation.expr = dataReference;
iteratorInvocation.symbol = iteratorInvokableSymbol;
iteratorInvocation.type = iteratorInvokableSymbol.retType;
// The collection is also the first (receiver) argument of the iterator call.
iteratorInvocation.argExprs = Lists.of(dataReference);
iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
iteratorInvokableSymbol.retType, this.env.scope.owner);
BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds the statement {@code $result$ = $iterator$.next()} as a variable
 * definition, typed with the nillable iterator-record result type.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(DiagnosticPos pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextCall = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, "$result$", nillableResultType, nextCall, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVar);
}
// Builds the statement `$result$ = $iterator$.next()` as an assignment (for use
// inside the generated loop body, after the first next() call).
BLangAssignment getIteratorNextAssignment(DiagnosticPos pos,
BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
// Strip nil from the receiver's type: inside the loop the iterator is known non-nil.
nextInvocation.expr.type = types.getSafeType(nextInvocation.expr.type, true, false);
return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
// Builds the invocation node `$iterator$.next()`, resolving the `next` method from
// the iterator object's attached functions.
BLangInvocation createIteratorNextInvocation(DiagnosticPos pos, BVarSymbol iteratorSymbol) {
BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
nextInvocation.pos = pos;
nextInvocation.name = nextIdentifier;
nextInvocation.expr = iteratorReferenceInNext;
// The receiver is also passed as the first required argument.
nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
nextInvocation.argExprs = nextInvocation.requiredArgs;
nextInvocation.symbol = nextFuncSymbol;
nextInvocation.type = nextFuncSymbol.retType;
return nextInvocation;
}
/**
 * Finds the attached function named {@code next} on the given iterator object type.
 *
 * @param iteratorType the iterator's object type
 * @return the attached {@code next} function, or null when none is defined
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals("next")) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Builds the field access {@code $result$.value}, typed as the loop variable's type.
 */
BLangFieldBasedAccess getValueAccessExpression(DiagnosticPos pos, BType varType, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueField = ASTBuilderUtil.createIdentifier(pos, "value");
    BLangFieldBasedAccess valueAccess = ASTBuilderUtil.createFieldAccessExpr(resultRef, valueField);
    valueAccess.pos = pos;
    valueAccess.type = varType;
    valueAccess.originalType = varType;
    return valueAccess;
}
/**
 * Wraps an arrow function's single body expression in a {@code return expr;}
 * statement inside a new block function body.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BLangExpression bodyExpr = bLangArrowFunction.body.expr;
    BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
    returnStmt.pos = bodyExpr.pos;
    returnStmt.setExpression(bodyExpr);
    BlockFunctionBodyNode blockBody = TreeBuilder.createBlockFunctionBodyNode();
    blockBody.addStatement(returnStmt);
    return blockBody;
}
// Builds an invocation of a function resolved by name from the root scope (built-in
// functions), with the given arguments and an explicitly supplied return type.
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
// Resolve the function symbol from the root (built-in) scope by name.
invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
invocationNode.type = retType;
invocationNode.requiredArgs = args;
return invocationNode;
}
// Builds a lang-lib method invocation `onExpr.functionName(args...)`. The receiver is
// also passed as the first required argument, per the lang-lib calling convention;
// when retType is null the symbol's declared return type is used.
private BLangInvocation createLangLibInvocationNode(String functionName,
BLangExpression onExpr,
List<BLangExpression> args,
BType retType,
DiagnosticPos pos) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
name.pos = pos;
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
invocationNode.expr = onExpr;
// The method is resolved against the receiver expression's type.
invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.type, names.fromString(functionName));
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
requiredArgs.add(onExpr);
requiredArgs.addAll(args);
invocationNode.requiredArgs = requiredArgs;
invocationNode.type = retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType;
invocationNode.langLibInvocation = true;
return invocationNode;
}
/** Creates a fresh, empty {@code any[]} array literal node. */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = new BArrayType(symTable.anyType);
    arrayLiteral.exprs = new ArrayList<>();
    return arrayLiteral;
}
// Desugars a function-pointer call: the callee is materialized either as a simple
// variable reference (local/global pointer) or a field access (pointer stored in a
// field), then wrapped into a BFunctionPointerInvocation node.
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
BLangVariableReference expr;
if (iExpr.expr == null) {
expr = new BLangSimpleVarRef();
} else {
BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
fieldBasedAccess.expr = iExpr.expr;
fieldBasedAccess.field = iExpr.name;
expr = fieldBasedAccess;
}
expr.symbol = iExpr.symbol;
expr.type = iExpr.symbol.type;
BLangExpression rewritten = rewriteExpr(expr);
result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps the given expression in a {@code clone()} lang-lib call, converted to the
 * LHS type. Value-typed and error-typed expressions are returned unchanged —
 * no clone is inserted for them.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall =
            createLangLibInvocationNode("clone", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps the given expression in a {@code cloneReadOnly()} lang-lib call, converted
 * to the LHS type. Value-typed and error-typed expressions are returned unchanged —
 * no clone is inserted for them.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.type) || expr.type.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneReadOnlyCall =
            createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.type, expr.pos);
    return addConversionExprIfRequired(cloneReadOnlyCall, lhsType);
}
// Core rewrite driver for arbitrary AST nodes: dispatches the node to its visit
// method under the given environment and returns the node the visit stored in
// `result`. Already-desugared nodes are returned as-is. The previous environment is
// restored afterwards, so nested rewrites are safe.
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
if (node == null) {
return null;
}
if (node.desugared) {
return node;
}
SymbolEnv previousEnv = this.env;
this.env = env;
node.accept(this);
BLangNode resultNode = this.result;
// Clear `result` so a stale value can never leak into the next rewrite.
this.result = null;
resultNode.desugared = true;
this.env = previousEnv;
return (E) resultNode;
}
// Rewrite driver for expressions. When the node carries an implicit conversion
// expression, that wrapper is rewritten instead (and detached first to avoid
// re-processing). Already-desugared expressions are returned as-is.
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
if (node == null) {
return null;
}
if (node.desugared) {
return node;
}
BLangExpression expr = node;
if (node.impConversionExpr != null) {
expr = node.impConversionExpr;
node.impConversionExpr = null;
}
expr.accept(this);
BLangNode resultNode = this.result;
this.result = null;
resultNode.desugared = true;
return (E) resultNode;
}
// Rewrite driver for statements. Maintains a linked chain of statement links
// (parent/child) around the rewrite so generated statements can be stitched into
// the enclosing statement sequence.
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
if (statement == null) {
return null;
}
BLangStatementLink link = new BLangStatementLink();
link.parent = currentLink;
currentLink = link;
BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
// Connect the rewritten statement into the link chain, then pop the link.
link.statement = stmt;
stmt.statementLink = link;
currentLink = link.parent;
return (E) stmt;
}
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
// Desugars every expression in the list in place.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
// Builds a string literal node carrying the given value at the given position.
private BLangLiteral createStringLiteral(DiagnosticPos pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
// Builds an int literal node for the given value (no position is attached here).
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.value = value;
    intLiteral.type = symTable.intType;
    return intLiteral;
}
// Builds a byte literal node; bytes are stored as their unsigned int value.
private BLangLiteral createByteLiteral(DiagnosticPos pos, Byte value) {
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
// Wraps the given expression in a type-conversion node casting it to targetType.
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    BLangTypeConversionExpr castExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    castExpr.pos = expr.pos;
    castExpr.expr = expr;
    castExpr.type = targetType;
    castExpr.targetType = targetType;
    return castExpr;
}
// Strips array dimensions, returning the innermost (non-array) element type.
private BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Appends an explicit nil return to a block function body when the function's return type is
 * nilable, it has no workers, and the body does not already end with a return statement.
 *
 * @param invokableNode the function/invokable whose body may need a trailing {@code return}
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    // Native functions and non-block bodies (e.g. expression-bodied functions) are left alone.
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        // Anchor the synthetic return at the closing position of the function.
        DiagnosticPos invPos = invokableNode.pos;
        DiagnosticPos returnStmtPos = new DiagnosticPos(invPos.src, invPos.eLine, invPos.eLine, invPos.sCol,
                invPos.sCol);
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 * <p>
 * Positional and named arguments are rearranged into declared-parameter order, a spread
 * vararg ({@code ...x}) is captured into a temporary so it is evaluated exactly once, and any
 * trailing rest arguments are packed into an array literal for the rest parameter.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Holds a reference to a temporary capturing the spread vararg expression, if one is needed.
    BLangExpression varargRef = null;
    BLangBlockStmt blockStmt = null;
    int restArgCount = restArgs.size();
    // A spread vararg supplied while required/defaultable params are still unfilled: evaluate it
    // once into a synthetic variable so its members can fill those params positionally.
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        DiagnosticPos varargExpPos = expr.pos;
        BType varargVarType = expr.type;
        String varargVarName = DESUGARED_VARARG_KEY + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.type = varargVarType;
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-order the named args and fill defaultable params (possibly from the vararg temp).
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no spread vararg at the tail — pack individual rest args into an array literal.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.type = arrayType;
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: a single spread vararg — slice off the part consumed by required params, if any.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // No params were filled from the vararg; nothing to rewrite.
            return;
        }
        // Prepend the vararg-capturing block to the first required arg so it runs first.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.type = firstNonRestArg.type;
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            return;
        }
        // The leading members of the vararg went to params; slice the remainder for the rest param.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation sliceInvocation =
                createLangLibInvocationNode(SLICE_LANGLIB_METHOD, varargRef,
                        new ArrayList<BLangExpression>() {{
                            add(startIndex);
                        }},
                        varargRef.type, varargRef.pos);
        restArgs.remove(0);
        restArgs.add(addConversionExprIfRequired(sliceInvocation, invokableSymbol.restParam.type));
        return;
    }
    // Case 3: individual rest args followed by a spread vararg — build an array from the
    // individual args, then push() the spread vararg's members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.type = restParamType;
    BType elemType = restParamType.eType;
    DiagnosticPos pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = restParamType;
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    // push() takes the spread expression as a rest arg, not a required arg.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.type = restParamType;
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rebuilds {@code iExpr.requiredArgs} so each declared parameter has an argument in declared
 * order: a positional arg if given, else a matching named arg, else a member of the captured
 * vararg, else an ignore-expression placeholder (defaultable param left to its default).
 *
 * @param iExpr           the invocation being rewritten
 * @param invokableSymbol the callee's symbol, supplying the declared parameter list
 * @param varargRef       reference to the captured spread vararg, or null when there is none
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new HashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.type;
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional arg present at this index.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Named arg matches this parameter.
            args.add(namedArgs.get(param.name.value));
        } else if (varargRef == null) {
            // No arg supplied: insert a placeholder so the default value kicks in downstream.
            BLangExpression expr = new BLangIgnoreExpr();
            expr.type = param.type;
            args.add(expr);
        } else {
            // Fill from the next member of the captured spread vararg.
            BLangIndexBasedAccess memberAccessExpr =
                    (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            memberAccessExpr.pos = varargRef.pos;
            memberAccessExpr.expr = varargRef;
            memberAccessExpr.indexExpr = rewriteExpr(createIntLiteral(varargIndex));
            memberAccessExpr.type = tupleTypedVararg ? ((BTupleType) varargType).tupleTypes.get(varargIndex) :
                    ((BArrayType) varargType).eType;
            varargIndex++;
            args.add(addConversionExprIfRequired(memberAccessExpr, param.type));
        }
    }
    iExpr.requiredArgs = args;
}
/**
 * Builds the error-branch match pattern for a checked (safe-assignment) expression: the error
 * is bound to a fresh variable and either returned (when every possible error is assignable to
 * the enclosing function's return type and this is not a checkpanic) or panicked.
 *
 * @param pos                  position for the generated nodes
 * @param invokableSymbol      symbol of the enclosing function (supplies the return type)
 * @param equivalentErrorTypes the error types the checked expression may produce
 * @param isCheckPanicExpr     true for {@code checkpanic}, forcing a panic instead of a return
 */
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(
        DiagnosticPos pos, BSymbol invokableSymbol, List<BType> equivalentErrorTypes, boolean isCheckPanicExpr) {
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};
    // Only return the error if every possible error type is assignable to some return type member.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar = ASTBuilderUtil.createVariable(pos,
            patternFailureCaseVarName, symTable.errorType, null, new BVarSymbol(0,
                    names.fromString(patternFailureCaseVarName),
                    this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner));
    BLangVariableReference patternFailureCaseVarRef = ASTBuilderUtil.createVariableRef(pos,
            patternFailureCaseVar.symbol);
    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = pos;
    if (!isCheckPanicExpr && returnOnError) {
        // return the caught error
        BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
        returnStmt.pos = pos;
        returnStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(returnStmt);
    } else {
        // panic with the caught error
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = pos;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }
    return ASTBuilderUtil.createMatchStatementPattern(pos, patternFailureCaseVar, patternBlockFailureCase);
}
/**
 * Builds the success-branch match pattern for a safe assignment: the matched value is bound to
 * a fresh variable and then assigned to the target (either the declared variable's symbol or
 * the original LHS expression).
 */
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(DiagnosticPos pos, BType lhsType,
        boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    String successVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable successVar = ASTBuilderUtil.createVariable(pos,
            successVarName, lhsType, null, new BVarSymbol(0,
                    names.fromString(successVarName),
                    this.env.scope.owner.pkgID, lhsType, this.env.scope.owner));
    // For a var definition, assign to the declared symbol; otherwise assign to the given LHS.
    BLangExpression assignTarget = isVarDef ? ASTBuilderUtil.createVariableRef(pos, varSymbol) : lhsExpr;
    BLangVariableReference successVarRef = ASTBuilderUtil.createVariableRef(pos, successVar.symbol);
    BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos,
            assignTarget, successVarRef, false);
    BLangBlockStmt successBlock = ASTBuilderUtil.createBlockStmt(pos,
            new ArrayList<BLangStatement>() {{
                add(successAssignment);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(pos, successVar, successBlock);
}
/**
 * Lowers a match statement into a chain of if/else-if statements, one branch per pattern
 * clause. A final guaranteed-match clause becomes a plain 'else' block.
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> clauses = matchStmt.patternClauses;
    BLangIf rootIf = generateIfElseStmt(clauses.get(0), matchExprVar);
    BLangIf tailIf = rootIf;
    int lastIndex = clauses.size() - 1;
    for (int i = 1; i <= lastIndex; i++) {
        BLangMatchBindingPatternClause clause = clauses.get(i);
        if (i == lastIndex && clause.isLastPattern) {
            tailIf.elseStmt = getMatchPatternElseBody(clause, matchExprVar);
        } else {
            tailIf.elseStmt = generateIfElseStmt(clause, matchExprVar);
            tailIf = (BLangIf) tailIf.elseStmt;
        }
    }
    return rootIf;
}
/**
 * Generate an if-else statement from the given match statement.
 * <p>
 * For typed patterns the body is prefixed with a binding-variable definition. For structured
 * patterns a forced-cast temp and the structured binding definition are inserted — either into
 * the body, or into a statement-expression ANDed with the condition when a type guard exists
 * (so the bindings are in scope while evaluating the guard).
 *
 * @param pattern match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }
    BType expectedType = matchExprVar.type;
    if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
        BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
        expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        // Cast the matched value to the pattern's synthesized type before destructuring it.
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        if (structuredPattern.typeGuardExpr != null) {
            // Guarded pattern: wrap the binding defs + guard into a statement expression and AND
            // it with the is-like check, so bindings are available to the guard expression.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                    structuredPattern.typeGuardExpr);
            stmtExpr.type = symTable.booleanType;
            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                            (BOperatorSymbol) symResolver
                                    .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                            symTable.booleanType));
        } else {
            // Unguarded pattern: prepend the cast temp and the binding defs to the clause body.
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }
    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
/**
 * Returns the body of a typed match pattern clause, with a definition of the pattern's binding
 * variable (initialised from the match expression) inserted at the top — unless the binding
 * variable is the ignore pattern '_'.
 */
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
        BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedClause.variable.name.value.equals(Names.IGNORE.value)) {
        // Nothing to bind for '_'.
        return typedClause.body;
    }
    BLangSimpleVarRef matchVarRef = ASTBuilderUtil.createVariableRef(typedClause.pos,
            matchExprVar.symbol);
    BLangExpression initExpr = addConversionExprIfRequired(matchVarRef, typedClause.variable.type);
    BLangSimpleVariable boundVar = ASTBuilderUtil.createVariable(typedClause.pos, "",
            typedClause.variable.type, initExpr, typedClause.variable.symbol);
    BLangSimpleVariableDef boundVarDef = ASTBuilderUtil.createVariableDef(boundVar.pos, boundVar);
    typedClause.body.stmts.add(0, boundVarDef);
    return typedClause.body;
}
/**
 * Returns the body for a pattern clause used as the final 'else' of a lowered match. For a
 * structured pattern, a definition destructuring the match value into the binding variables
 * is inserted at the top of the body first.
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
        BLangSimpleVariable matchExprVar) {
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE != pattern.getKind()) {
        return pattern.body;
    }
    BLangMatchStructuredBindingPatternClause structuredPattern =
            (BLangMatchStructuredBindingPatternClause) pattern;
    structuredPattern.bindingPatternVariable.expr =
            ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
    BLangStatement varDefStmt;
    switch (structuredPattern.bindingPatternVariable.getKind()) {
        case TUPLE_VARIABLE:
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            break;
        case RECORD_VARIABLE:
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            break;
        case ERROR_VARIABLE:
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            break;
        default:
            varDefStmt = ASTBuilderUtil.createVariableDef(pattern.pos,
                    (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
    }
    structuredPattern.body.stmts.add(0, varDefStmt);
    return structuredPattern.body;
}
/**
 * Wraps {@code expr} in an unchecked type-conversion node targeting {@code lhsType} unless the
 * conversion is unnecessary (same type, an implicit cast was attached, or one of the listed
 * special cases where the value is usable as-is).
 *
 * @param expr    the expression to convert
 * @param lhsType the type expected by the consumer of the expression
 * @return the original expression or a conversion wrapper around it
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.type;
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // May attach an implicit conversion to expr as a side effect; if it did, we are done.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Fall back to an explicit, unchecked conversion node.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.type = lhsType;
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    return conversionExpr;
}
/**
 * Builds the boolean condition testing whether the match value bound to {@code varSymbol}
 * matches the given pattern clause. Union pattern types are handled by OR-ing one per-member
 * test per union member.
 *
 * @param patternClause the pattern clause being lowered
 * @param varSymbol     symbol of the temp variable holding the match expression value
 * @return a boolean-typed condition expression
 */
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
        BVarSymbol varSymbol) {
    BType patternType;
    // Pick the type to test against, depending on the pattern clause kind.
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            BLangMatchStaticBindingPatternClause staticPattern =
                    (BLangMatchStaticBindingPatternClause) patternClause;
            patternType = staticPattern.literal.type;
            break;
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            BLangMatchStructuredBindingPatternClause structuredPattern =
                    (BLangMatchStructuredBindingPatternClause) patternClause;
            patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
            break;
        default:
            BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
            patternType = simplePattern.variable.type;
            break;
    }
    BLangExpression binaryExpr;
    BType[] memberTypes;
    if (patternType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) patternType;
        memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
    } else {
        memberTypes = new BType[1];
        memberTypes[0] = patternType;
    }
    if (memberTypes.length == 1) {
        binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
    } else {
        // Fold the per-member tests into a chain of OR expressions.
        BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
        BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                        lhsExpr.type, rhsExpr.type));
        for (int i = 2; i < memberTypes.length; i++) {
            lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
            rhsExpr = binaryExpr;
            binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
                    symTable.booleanType, OperatorKind.OR,
                    (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                            lhsExpr.type, rhsExpr.type));
        }
    }
    return binaryExpr;
}
/**
 * Computes (and, for record/error patterns, synthesizes and registers) the type implied by a
 * structured binding pattern variable: a tuple type for tuple patterns, a fresh anonymous
 * record type for record patterns, a fresh error type for error patterns, and the variable's
 * own type otherwise. Synthesized types are added as type definitions to the current package.
 *
 * @param bindingPatternVariable the binding pattern variable to derive a type for
 * @return the type matching the shape of the binding pattern
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest binding's array element type becomes the tuple's rest type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        // Synthesize an anonymous record type with one required field per binding entry.
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + recordCount++),
                        env.enclPkg.symbol.pkgID, null, env.scope.owner);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        List<BField> fields = new ArrayList<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            // Field types are derived recursively from the nested binding patterns.
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName,
                    env.enclPkg.symbol.pkgID, fieldType, recordSymbol);
            fields.add(new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // A rest binding constrains the rest field type; otherwise default to anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BMapType) ((BLangSimpleVariable) recordVariable.restParam).type).constraint :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                recordVarType,
                bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest detail binding: any detail mapping will do.
            detailType = symTable.detailType;
        } else {
            // Synthesize a detail record type from the detail entry bindings.
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol,
                ((BErrorType) errorVariable.type).reasonType,
                detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
        return errorType;
    }
    // Simple binding variable: no synthesis needed.
    return bindingPatternVariable.type;
}
/**
 * Builds the record type node for an error binding pattern's detail type, creating one field
 * per detail entry. Entries without a bound symbol get a fresh pure-typed placeholder symbol.
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol entrySymbol = entry.valueBindingPattern.symbol;
        if (entrySymbol == null) {
            // No symbol bound for this entry — fabricate one.
            entrySymbol = new BVarSymbol(
                    Flags.PUBLIC,
                    names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID,
                    symTable.pureType,
                    null);
        }
        fieldList.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                entrySymbol.name.value,
                entry.valueBindingPattern.type,
                entry.valueBindingPattern.expr,
                entrySymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
/**
 * Synthesizes the anonymous detail record type for an error binding pattern, with one public
 * field per detail entry. The record is sealed when there is no rest detail binding.
 *
 * @param detail     the error pattern's detail entry bindings
 * @param restDetail the rest detail binding, or null when absent
 * @param errorNo    counter used to give the synthesized type a unique name
 * @return the synthesized detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
        BLangSimpleVariable restDetail, int errorNo) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString("$anonErrorType$" + errorNo + "$detailType"),
            env.enclPkg.symbol.pkgID, null, null);
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "." +
                    detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);
    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;
    if (restDetail == null) {
        // Without a rest binding the detail record is closed.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        // Field type is derived recursively from the entry's binding pattern.
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(
                Flags.PUBLIC, fieldName, detailRecordTypeSymbol.pkgID, fieldType, detailRecordTypeSymbol);
        detailRecordType.fields.add(new BField(fieldName, detailEntry.key.pos, fieldSym));
        detailRecordTypeSymbol.scope.define(fieldName, fieldSym);
    }
    return detailRecordType;
}
// Builds an empty, nil-returning init function for a synthesized record type.
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType);
}
// Wraps the given error type in a type node for use in a type definition.
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType typeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    typeNode.type = errorType;
    return typeNode;
}
/**
 * Builds the single-type test expression for one pattern clause: equality against a static
 * literal, an is-like check for structured patterns, an equality-with-nil check for the nil
 * type, or an is-assignable check otherwise.
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
        BVarSymbol varSymbol, BType patternType) {
    DiagnosticPos pos = patternClause.pos;
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            return createBinaryExpression(pos, varRef,
                    ((BLangMatchStaticBindingPatternClause) patternClause).literal);
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
        default:
            break;
    }
    if (patternType == symTable.nilType) {
        // nil is tested with an equality check against the nil literal.
        BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, varRef, nilLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    }
    return createIsAssignableExpression(pos, varSymbol, patternType);
}
/**
 * Builds the boolean test comparing the matched value against a static pattern expression.
 * Group expressions are unwrapped, binary patterns (e.g. {@code a | b}) are recursively ORed,
 * the ignore pattern '_' becomes an {@code is any} type test, and anything else becomes an
 * equality comparison.
 *
 * @param pos        position for the generated nodes
 * @param varRef     reference to the temp holding the match value
 * @param expression the static pattern expression to compare against
 * @return a boolean-typed test expression
 */
private BLangExpression createBinaryExpression(DiagnosticPos pos, BLangSimpleVarRef varRef,
        BLangExpression expression) {
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        // Parentheses carry no semantics here; recurse into the inner expression.
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        // A composite pattern: test each side and OR the results.
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // '_' matches anything: lower it to `varRef is any`.
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.type = symTable.anyType;
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.type, expression.type);
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct operator for this type pair; fall back to anydata equality.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.type,
                            binaryExpr);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
// Builds an is-assignable test checking the variable's value against patternType.
private BLangIsAssignableExpr createIsAssignableExpression(DiagnosticPos pos,
        BVarSymbol varSymbol,
        BType patternType) {
    BLangSimpleVarRef ref = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    return ASTBuilderUtil.createIsAssignableExpr(pos, ref, patternType, symTable.booleanType, names);
}
// Builds an is-like test checking whether expr's shape matches the given type.
private BLangIsLikeExpr createIsLikeExpression(DiagnosticPos pos, BLangExpression expr, BType type) {
    BLangType typeNode = ASTBuilderUtil.createTypeNode(type);
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, typeNode, symTable.booleanType);
}
// Converts a variable's initializer into an assignment statement targeting that variable.
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef target = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    target.pos = variable.pos;
    target.variableName = variable.name;
    target.symbol = variable.symbol;
    target.type = variable.type;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(target);
    return assignment;
}
/**
 * Builds (and desugars, within the given function's init environment) an assignment of a
 * field's initializer to {@code symbol.&lt;field&gt;}, e.g. {@code self.x = &lt;init expr&gt;}.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
        BVarSymbol symbol) {
    BLangSimpleVarRef receiverRef = ASTBuilderUtil.createVariableRef(variable.pos, symbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(receiverRef, variable.name);
    fieldAccess.symbol = variable.symbol;
    fieldAccess.type = variable.type;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(fieldAccess);
    // Desugar the assignment in the function's own scope before returning it.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Adds a synthetic default pattern clause to a match expression so every member type of
 * the matched expression's (union) type is covered by some clause. The default clause
 * simply yields the bound value itself, typed as the union of all member types not
 * accepted by any existing pattern. No clause is added if coverage is already complete.
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes;
    List<BType> unmatchedTypes = new ArrayList<>();
    // Expand a union into its members; otherwise use a single candidate type.
    // NOTE(review): the else-branch reads bLangMatchExpression.type rather than
    // bLangMatchExpression.expr.type — confirm this asymmetry is intentional.
    if (bLangMatchExpression.expr.type.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.type;
        exprTypes = new ArrayList<>(unionType.getMemberTypes());
    } else {
        exprTypes = Lists.of(bLangMatchExpression.type);
    }
    // Collect every candidate type not assignable to any existing pattern variable.
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            if (this.types.isAssignable(type, pattern.variable.type)) {
                assignable = true;
                break;
            }
        }
        if (!assignable) {
            unmatchedTypes.add(type);
        }
    }
    if (unmatchedTypes.isEmpty()) {
        return;
    }
    // Default pattern variable type: the lone unmatched type, or a union of them.
    BType defaultPatternType;
    if (unmatchedTypes.size() == 1) {
        defaultPatternType = unmatchedTypes.get(0);
    } else {
        defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
    }
    String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
    BLangSimpleVariable patternMatchCaseVar = ASTBuilderUtil.createVariable(bLangMatchExpression.pos,
            patternCaseVarName, defaultPatternType, null, new BVarSymbol(0, names.fromString(patternCaseVarName),
            this.env.scope.owner.pkgID, defaultPatternType, this.env.scope.owner));
    // The default clause evaluates to the bound variable itself (identity).
    BLangMatchExprPatternClause defaultPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    defaultPattern.variable = patternMatchCaseVar;
    defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
    defaultPattern.pos = bLangMatchExpression.pos;
    bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * Returns true if this access expression, or any access expression deeper in its chain,
 * uses nil/error safe navigation and is eligible for the safe-navigation rewrite.
 * LHS expressions and root references (null receiver) never qualify.
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.lhsVar || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Recurse into the receiver only when it is itself an access expression.
    NodeKind receiverKind = accessExpr.expr.getKind();
    return (receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR)
            && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Rewrites a safe-navigation access chain (e.g. {@code a?.b!.c}) into a statement
 * expression: a temp-result variable definition followed by the nested match statements
 * produced by {@link #handleSafeNavigation}. The per-rewrite navigation state (match
 * stack, access stack, success pattern, pending assignment) is reset afterwards.
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.type;
    // Temp variable that every generated pattern body assigns the final result into.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar = ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName,
            accessExpr.type, null, new BVarSymbol(0, names.fromString(matchTempResultVarName),
            this.env.scope.owner.pkgID, accessExpr.type, this.env.scope.owner));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    // Builds the nested match statements; the outermost one ends up first on the stack.
    handleSafeNavigation(accessExpr, accessExpr.type, tempResultVar);
    BLangMatch matcEXpr = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.type = originalExprType;
    // Reset the rewrite state so subsequent safe-navigation rewrites start clean.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successPattern = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
/**
 * Recursively rewrites one safe-navigation chain into nested match statements.
 * Walks to the root of the chain first; then, for each link that uses nil/error safe
 * navigation, creates a match statement with nil/error patterns (which assign to the
 * shared temp result variable) plus a success pattern. Each inner match statement is
 * chained into the body of the enclosing link's success pattern. Populates
 * {@code matchStmtStack} and {@code successPattern} as a side effect.
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        return;
    }
    // Depth-first: rewrite the receiver chain before handling this link.
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR ||
            kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }
    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Plain (non-safe) access: restore the original, non-lifted type. XML access
        // may still yield an error, so errors stay in the type for XML.
        BType originalType = accessExpr.originalType;
        if (TypeTags.isXMLTypeTag(originalType.tag)) {
            accessExpr.type = BUnionType.create(null, originalType, symTable.errorType);
        } else {
            accessExpr.type = originalType;
        }
        // Hook the final access expression into the innermost success-pattern assignment.
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.type);
        }
        return;
    }
    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but { <--- parent match expr
     * error e => e,
     * T t => t.y but { <--- current expr
     * error e => e,
     * R r => r.z
     * }
     * }
     */
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
    // Nil lifting: `() => tempResult = ();`
    if (accessExpr.nilSafeNavigation) {
        matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
    }
    // Error lifting: `error e => tempResult = e;`
    if (accessExpr.errorSafeNavigation) {
        matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
        matchStmt.type = type;
        matchStmt.pos = accessExpr.pos;
    }
    BLangMatchTypedBindingPatternClause successPattern =
            getSuccessPattern(accessExpr, tempResultVar, accessExpr.errorSafeNavigation);
    matchStmt.patternClauses.add(successPattern);
    this.matchStmtStack.push(matchStmt);
    // Nest this match statement inside the parent link's success-pattern body.
    if (this.successPattern != null) {
        this.successPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
/**
 * Builds the {@code error e => tempResult = e;} pattern clause used when error-lifting
 * a safe-navigation link.
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errVarSymbol = new BVarSymbol(0, names.fromString(errorPatternVarName),
            this.env.scope.owner.pkgID, symTable.errorType, this.env.scope.owner);
    BLangSimpleVariable errVar = ASTBuilderUtil.createVariable(expr.pos, errorPatternVarName,
            symTable.errorType, null, errVarSymbol);
    // Pattern body assigns the bound error into the shared temp-result variable.
    BLangVariableReference resultRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangSimpleVarRef boundErrorRef = ASTBuilderUtil.createVariableRef(expr.pos, errVar.symbol);
    BLangAssignment assignment =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultRef, boundErrorRef, false);
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errVar, body);
}
/**
 * Builds a match-expression nil pattern clause ({@code () => expr}) whose bound
 * variable is the ignore identifier {@code _}.
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(DiagnosticPos pos,
                                                                       BLangExpression expr) {
    String ignoredVarName = IGNORE.toString();
    BVarSymbol ignoredSymbol = new BVarSymbol(0, names.fromString(ignoredVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilBoundVar = ASTBuilderUtil.createVariable(pos, ignoredVarName, symTable.nilType,
            null, ignoredSymbol);

    BLangMatchExprPatternClause nullPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nullPattern.pos = pos;
    nullPattern.variable = nilBoundVar;
    nullPattern.expr = expr;
    return nullPattern;
}
/**
 * Builds the {@code () n => tempResult = n;} pattern clause used when nil-lifting a
 * safe-navigation link.
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nilVarSymbol = new BVarSymbol(0, names.fromString(nullPatternVarName),
            this.env.scope.owner.pkgID, symTable.nilType, this.env.scope.owner);
    BLangSimpleVariable nilBoundVar = ASTBuilderUtil.createVariable(expr.pos, nullPatternVarName,
            symTable.nilType, null, nilVarSymbol);
    // Pattern body assigns the bound nil value into the shared temp-result variable.
    BLangVariableReference resultRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangSimpleVarRef boundNilRef = ASTBuilderUtil.createVariableRef(expr.pos, nilBoundVar.symbol);
    BLangAssignment assignment =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultRef, boundNilRef, false);
    BLangBlockStmt body = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nilBoundVar, body);
}
/**
 * Builds the success pattern clause of a safe-navigation match statement: binds the
 * non-nil/non-error value to a fresh variable, re-targets the access expression onto
 * that variable, and assigns the access result into the shared temp result variable.
 * Mutates {@code accessExpr} in place and records the generated assignment in
 * {@code safeNavigationAssignment} so the final chain link can install the real RHS.
 *
 * @param accessExpr    the access-expression link being rewritten (mutated)
 * @param tempResultVar variable accumulating the overall navigation result
 * @param liftError     whether to lift the error type from the receiver type
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BLangAccessExpression accessExpr,
        BLangSimpleVariable tempResultVar, boolean liftError) {
    BType type = types.getSafeType(accessExpr.expr.type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    BVarSymbol successPatternSymbol;
    // Function-typed values need an invokable symbol so calls through them resolve.
    if (type.tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                this.env.scope.owner.pkgID, type, this.env.scope.owner);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
            type, null, successPatternSymbol);
    // Re-point the access onto the freshly bound pattern variable; after the rewrite
    // this link itself no longer performs safe navigation.
    accessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    // XML access may still produce error/nil, so keep them in the type for XML.
    if (TypeTags.isXMLTypeTag(accessExpr.expr.type.tag)) {
        accessExpr.type = BUnionType.create(null, accessExpr.originalType, symTable.errorType, symTable.nilType);
    } else {
        accessExpr.type = accessExpr.originalType;
    }
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(accessExpr, tempResultVarRef.type);
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    // Remembered so the final (non-safe) link can replace the assignment RHS later.
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
/**
 * Returns true if the LHS access chain contains a nullable receiver anywhere along it,
 * meaning the assignment needs nil-guard statements before the actual store.
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression containerRef = ((BLangAccessExpression) expr).expr;
    // Nullable at this link, or anywhere deeper in the chain.
    return containerRef.type.isNullable() || safeNavigateLHS(containerRef);
}
/**
 * Rewrites an assignment whose LHS contains nullable access links into a block:
 * nil-guard statements for each link of the receiver chain, then the actual assignment
 * onto a cloned (non-lifted) access expression.
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    this.accessExprStack = new Stack<>();
    List<BLangStatement> guardedStmts = new ArrayList<>();
    createLHSSafeNavigation(guardedStmts, accessExpr.expr);
    guardedStmts.add(ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr));
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, guardedStmts);
}
/**
 * Generates nil-guard statements for each link of an LHS access chain, pushing the
 * (possibly rewritten) expression for each link onto {@code accessExprStack} for the
 * caller to consume. For a nullable link: if it is nil, either auto-initialize it with
 * an empty mapping (defaultable map/record, non-root) or panic with a
 * NullReferenceException error.
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
    NodeKind kind = expr.getKind();
    boolean root = false;
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
        // Recurse to the chain root first; then adopt its rewritten receiver.
        createLHSSafeNavigation(stmts, accessExpr.expr);
        accessExpr.expr = accessExprStack.pop();
    } else {
        root = true;
    }
    // An invocation link is evaluated once into an intermediate variable so the call
    // is not repeated by both the nil guard and the later assignment.
    if (expr.getKind() == NodeKind.INVOCATION) {
        BLangInvocation invocation = (BLangInvocation) expr;
        BVarSymbol interMediateSymbol = new BVarSymbol(0, names.fromString(GEN_VAR_PREFIX.value
                + "i_intermediate"), this.env.scope.owner.pkgID, invocation.type, this.env.scope.owner);
        BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
                interMediateSymbol.name.value, invocation.type, invocation, interMediateSymbol);
        BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
                intermediateVariable);
        stmts.add(intermediateVariableDefinition);
        expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
    }
    if (expr.type.isNullable()) {
        // if (expr is ()) { <init or panic> }
        BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
        isNillTest.type = symTable.booleanType;
        BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
        // Continue with a clone typed as the nil-lifted type.
        expr = cloneExpression(expr);
        expr.type = types.getSafeType(expr.type, true, false);
        if (isDefaultableMappingType(expr.type) && !root) {
            // Auto-vivify: assign an empty mapping literal when the link is nil.
            BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            jsonLiteral.type = expr.type;
            jsonLiteral.pos = expr.pos;
            BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
                    expr, jsonLiteral);
            thenStmt.addStatement(assignment);
        } else {
            // Not defaultable (or chain root): panic with a null-reference error.
            BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
            literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
            literal.type = symTable.stringType;
            BLangInvocation errorCtorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
            errorCtorInvocation.pos = expr.pos;
            errorCtorInvocation.argExprs.add(literal);
            errorCtorInvocation.requiredArgs.add(literal);
            errorCtorInvocation.type = symTable.errorType;
            errorCtorInvocation.symbol = symTable.errorConstructor;
            BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
            panicNode.expr = errorCtorInvocation;
            panicNode.pos = expr.pos;
            thenStmt.addStatement(panicNode);
        }
        BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
        stmts.add(ifelse);
    }
    accessExprStack.push(expr);
}
/**
 * Builds a type node representing the nil type {@code ()}.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.type = symTable.nilType;
    nilTypeNode.typeKind = TypeKind.NIL;
    return nilTypeNode;
}
/**
 * Clones a variable-reference-like expression: a fresh ref for a simple variable
 * reference, or a cloned access chain for field/index access and invocations.
 *
 * @throws IllegalStateException for any other node kind
 */
private BLangVariableReference cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR
            || kind == NodeKind.INVOCATION) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Clones a field/index access chain for reuse (e.g. on the LHS of a desugared
 * safe-navigation assignment). The clone is stripped of safe-navigation flags and is
 * typed with the original (non-lifted) type; its receiver gets the nil-lifted type.
 *
 * @throws IllegalStateException for unsupported node kinds, including invocations
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangVariableReference varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
            kind == NodeKind.INVOCATION) {
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    // The cloned receiver carries the nil-lifted (safe) type.
    varRef.type = types.getSafeType(originalAccessExpr.expr.type, true, false);
    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        case INVOCATION:
            // Previously this assigned null and fell through to the field copies below,
            // guaranteeing a NullPointerException. Fail fast with a clear error instead,
            // consistent with the default case.
            throw new IllegalStateException("Cloning invocation expressions is not supported");
        default:
            throw new IllegalStateException();
    }
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.lhsVar = originalAccessExpr.lhsVar;
    accessExpr.symbol = originalAccessExpr.symbol;
    // The clone is never itself a safe navigation.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.type = originalAccessExpr.originalType;
    return accessExpr;
}
/**
 * Returns an int-typed {@code expr + 1} expression — converts an exclusive range start
 * into an inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
            symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Returns an int-typed {@code expr - 1} expression — converts an exclusive range end
 * into an inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
            symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Builds a boolean literal node with the given value.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral booleanLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    booleanLiteral.type = symTable.booleanType;
    booleanLiteral.value = value;
    return booleanLiteral;
}
/**
 * Returns true if the (nil-lifted) type is a mapping type that can be default-initialized
 * with an empty mapping literal: json, map, or record.
 */
private boolean isDefaultableMappingType(BType type) {
    int safeTag = types.getSafeType(type, true, false).tag;
    return safeTag == TypeTags.JSON || safeTag == TypeTags.MAP || safeTag == TypeTags.RECORD;
}
/**
 * Creates, registers, and desugars the generated initializer function for an object
 * type. The function is recorded on the object's type symbol as its generated
 * initializer and on the type node itself; its return type is forced to nil.
 */
private BLangFunction createInitFunctionForObjectType(BLangObjectTypeNode structureTypeNode, SymbolEnv env) {
    BLangFunction generatedInit =
            TypeDefBuilderHelper.createInitFunctionForStructureType(structureTypeNode, env,
                    Names.GENERATED_INIT_SUFFIX, names, symTable);
    BObjectTypeSymbol objectSymbol = ((BObjectTypeSymbol) structureTypeNode.type.tsymbol);
    objectSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX,
            generatedInit.symbol, (BInvokableType) generatedInit.type);
    structureTypeNode.generatedInitFunction = generatedInit;
    generatedInit.returnTypeNode.type = symTable.nilType;
    return rewrite(generatedInit, env);
}
/**
 * Desugars a short-circuiting logical binary expression (AND/OR) into an if-else over a
 * temp result variable, wrapped in a statement expression. This preserves the lazy
 * evaluation of the RHS.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     * $result$ = rhsExpr;
     * } else {
     * $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     * $result$ = true;
     * } else {
     * $result$ = rhsExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.type, null, binaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    // Then-branch (LHS true): AND evaluates the RHS; OR short-circuits to true.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangExpression thenResult;
    if (binaryExpr.opKind == OperatorKind.AND) {
        thenResult = binaryExpr.rhsExpr;
    } else {
        thenResult = getBooleanLiteral(true);
    }
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);
    // Else-branch (LHS false): AND short-circuits to false; OR evaluates the RHS.
    BLangExpression elseResult;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    if (binaryExpr.opKind == OperatorKind.AND) {
        elseResult = getBooleanLiteral(false);
    } else {
        elseResult = binaryExpr.rhsExpr;
    }
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);
    // Wrap everything into `{ T $result$; if (...) {...} else {...} } -> $result$`.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.type = binaryExpr.type;
    result = rewriteExpr(stmtExpr);
}
/**
 * Split package init function into several smaller functions.
 *
 * @param packageNode package node
 * @param env symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
    // Nothing to do for small init functions, or on non-JVM targets.
    if (funcBody.stmts.size() < methodSize || !isJvmTarget) {
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;
    List<BLangFunction> generatedFunctions = new ArrayList<>();
    List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
    funcBody.stmts.clear();
    BLangFunction newFunc = initFunction;
    BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;

    // Phase 1: distribute the leading statements (up to the first variable definition)
    // into methodSize-sized chunks, each in its own intermediate init function.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        if (stmts.get(i).getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }

    // Phase 2: from the first variable definition onwards. Service-constructor
    // assignments mark safe split points once the size threshold is exceeded; stop
    // early at a listener assignment (the remainder must stay together).
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
            // Fixed: a nested `if` here re-tested the size condition that the branch
            // condition above already guarantees; the redundant check was removed.
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            newFuncBody.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            break;
        }
    }
    newFuncBody.stmts.addAll(chunkStmts);

    // Phase 3: remaining statements (from the listener assignment onwards), again in
    // methodSize-sized chunks.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);

    // Chain the functions: each ends with `check <nextFunc>();`.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);
        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                        createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                new ArrayList<>(), symTable.errorOrNilType),
                        symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
        BLangExpressionStmt expressionStmt = ASTBuilderUtil
                .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;
        // The original init function (j == 0) is rewritten/registered by the caller.
        if (j > 0) {
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }
    // Rewrite and register the last generated function as well.
    if (generatedFunctions.size() > 1) {
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }
    return generatedFunctions.get(0);
}
/**
 * Create an intermediate package init function.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 * @return the newly created (empty) init function, with its symbol registered
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    // Unique suffix per generated function: <init>$0, <init>$1, ...
    Name funcName = new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++);
    BLangFunction initFunction = ASTBuilderUtil
            .createInitFunctionWithErrorOrNilReturn(pkgNode.pos, pkgNode.symbol.pkgID.toString(),
                    funcName, symTable);
    createInvokableSymbol(initFunction, env);
    return initFunction;
}
/**
 * Returns the rest parameter's type of the given invokable symbol, or null when the
 * symbol is null or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    if (invokableSymbol == null || invokableSymbol.restParam == null) {
        return null;
    }
    return invokableSymbol.restParam.type;
}
/**
 * Returns the rest parameter's type of the given function node, or null when the
 * function is null or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.type;
}
/**
 * Returns the rest parameter's symbol of the given function node, or null when the
 * function is null or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    if (function == null || function.restParam == null) {
        return null;
    }
    return function.restParam.symbol;
}
/**
 * Returns true if the record field is a key-value field whose key is a computed
 * (bracketed) expression.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Desugars a mapping constructor (map/record literal) into a block statement: a
 * variable definition seeded with an empty map/struct literal, one member store per
 * explicit field, and a foreach that copies entries for every spread-operator field.
 * The block is wrapped in a statement expression evaluating to the constructed mapping.
 */
private BLangStatementExpression rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.type;
    DiagnosticPos pos = mappingConstructorExpr.pos;
    // Seed literal: struct literal for records, map literal otherwise.
    BLangRecordLiteral recordLiteral = type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type) :
            new BLangMapLiteral(pos, type);
    String name = DESUGARED_MAPPING_CONSTR_KEY + this.annonVarCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
            this.env.scope.owner);
    BLangSimpleVariable var = createVariable(pos, name, type, recordLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.type = type;
    BLangBlockStmt blockStmt = createBlockStmt(pos);
    blockStmt.stmts.add(varDef);
    BLangSimpleVarRef mappingVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression keyExpr = key.expr;
            // Fixed: replaced a nested ternary that ended with a stray extra
            // semicolon with an equivalent, readable if/else chain.
            BLangExpression indexExpr;
            if (key.computedKey) {
                // Computed key `[expr]`: use the expression as-is.
                indexExpr = keyExpr;
            } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                // Bare identifier key: use the identifier text as a string key.
                indexExpr = createStringLiteral(pos, ((BLangSimpleVarRef) keyExpr).variableName.value);
            } else {
                indexExpr = (BLangLiteral) keyExpr;
            }
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef, indexExpr, keyValueField.valueExpr);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Variable-name field `{x}`: key is the variable's name, value the reference.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            addMemberStoreForKeyValuePair(pos, blockStmt, mappingVarRef,
                    createStringLiteral(pos, varRefField.variableName.value),
                    varRefField);
        } else {
            // Spread operator `...expr`: copy each [key, value] entry via a foreach.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
            foreach.pos = pos;
            foreach.collection = generateMapEntriesInvocation(spreadOpField.expr, spreadOpField.expr.type);
            types.setForeachTypedBindingPatternType(foreach);
            BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i", foreach.varType);
            foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                    this.env.scope.owner.pkgID, foreachVariable.type,
                    this.env.scope.owner);
            BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
            foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
            foreach.isDeclaredWithVar = true;
            BLangBlockStmt foreachBodyBlock = ASTBuilderUtil.createBlockStmt(pos);
            // Entry tuple: element [0] is the key, element [1] is the value.
            BTupleType foreachVarRefType = (BTupleType) foreachVarRef.type;
            BLangIndexBasedAccess indexExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            indexExpr.pos = pos;
            indexExpr.expr = foreachVarRef;
            indexExpr.indexExpr = rewriteExpr(createIntLiteral(0));
            indexExpr.type = foreachVarRefType.tupleTypes.get(0);
            BLangIndexBasedAccess valueExpr = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
            valueExpr.pos = pos;
            valueExpr.expr = foreachVarRef;
            valueExpr.indexExpr = rewriteExpr(createIntLiteral(1));
            valueExpr.type = foreachVarRefType.tupleTypes.get(1);
            addMemberStoreForKeyValuePair(pos, foreachBodyBlock, mappingVarRef, indexExpr, valueExpr);
            foreach.body = foreachBodyBlock;
            blockStmt.addStatement(foreach);
        }
    }
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, mappingVarRef);
    stmtExpression.type = type;
    return stmtExpression;
}
/**
 * Appends a {@code mappingVarRef[indexExpr] = value;} statement to the given block.
 * Both the value and the index expression are desugared via rewriteExpr.
 */
private void addMemberStoreForKeyValuePair(DiagnosticPos pos, BLangBlockStmt blockStmt,
                                           BLangExpression mappingVarRef, BLangExpression indexExpr,
                                           BLangExpression value) {
    // createAssignmentStmt attaches the new statement to blockStmt.
    BLangAssignment memberStore = ASTBuilderUtil.createAssignmentStmt(pos, blockStmt);
    memberStore.expr = rewriteExpr(value);
    BLangIndexBasedAccess memberAccess = (BLangIndexBasedAccess) TreeBuilder.createIndexBasedAccessNode();
    memberAccess.pos = pos;
    memberAccess.expr = mappingVarRef;
    memberAccess.indexExpr = rewriteExpr(indexExpr);
    memberAccess.type = value.type;
    memberStore.varRef = memberAccess;
}
/**
 * Extracts the literal-keyed entries of a desugared mapping constructor, mapping each
 * string key to the expression assigned at that key. The first statement (the mapping
 * variable definition) is skipped; computed (non-literal) keys are ignored.
 */
private Map<String, BLangExpression> getKeyValuePairs(BLangStatementExpression desugaredMappingConst) {
    List<BLangStatement> stmts = ((BLangBlockStmt) desugaredMappingConst.stmt).stmts;
    Map<String, BLangExpression> keyValuePairs = new HashMap<>();
    for (BLangStatement stmt : stmts.subList(1, stmts.size())) {
        BLangAssignment assignment = (BLangAssignment) stmt;
        BLangExpression indexExpr = ((BLangIndexBasedAccess) assignment.varRef).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            keyValuePairs.put((String) ((BLangLiteral) indexExpr).value, assignment.expr);
        }
    }
    return keyValuePairs;
}
} | class Desugar extends BLangNodeVisitor {
// Key used to cache the singleton Desugar instance in the CompilerContext.
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
new CompilerContext.Key<>();
// Names of runtime/langlib helper functions invoked from desugared code.
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
// Error reason used when a nil receiver is dereferenced (see createLHSSafeNavigation).
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
private static final String SLICE_LANGLIB_METHOD = "slice";
private static final String PUSH_LANGLIB_METHOD = "push";
// Name prefix for generated variables holding desugared vararg values.
private static final String DESUGARED_VARARG_KEY = "$vararg$";
// XML-internal operation names used when desugaring XML navigation expressions.
public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants";
public static final String XML_INTERNAL_CHILDREN = "children";
public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat";
public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting";
public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute";
public static final String XML_INTERNAL_GET_ELEMENTS = "getElements";
// Collaborating compiler-phase singletons, resolved from the CompilerContext.
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private QueryDesugar queryDesugar;
private AnnotationDesugar annotationDesugar;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
// Output slot of the most recent visit(...) call (visitor pattern).
private BLangNode result;
private NodeCloner nodeCloner;
private SemanticAnalyzer semanticAnalyzer;
private BLangStatementLink currentLink;
public Stack<BLangLockStmt> enclLocks = new Stack<>();
// Symbol environment of the scope currently being desugared.
private SymbolEnv env;
// Counters used to generate unique names for synthesized constructs.
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprCount = 0;
private int letCount = 0;
private int varargCount = 0;
// Per-rewrite state for safe-navigation desugaring (see rewriteSafeNavigationExpr).
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
// Set to true in the constructor; guards JVM-target-only desugaring paths.
static boolean isJvmTarget = false;
/**
 * Returns the per-compilation Desugar instance from the context, creating and
 * registering it on first use (the constructor stores itself under DESUGAR_KEY).
 */
public static Desugar getInstance(CompilerContext context) {
    Desugar instance = context.get(DESUGAR_KEY);
    return instance != null ? instance : new Desugar(context);
}
/**
 * Initializes the desugar phase: registers this instance in the compiler context and
 * resolves all collaborating phase singletons from it.
 */
private Desugar(CompilerContext context) {
    isJvmTarget = true;
    context.put(DESUGAR_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.closureDesugar = ClosureDesugar.getInstance(context);
    this.queryDesugar = QueryDesugar.getInstance(context);
    this.annotationDesugar = AnnotationDesugar.getInstance(context);
    this.types = Types.getInstance(context);
    // Fixed: 'names' used to be assigned twice with the identical value.
    this.names = Names.getInstance(context);
    this.serviceDesugar = ServiceDesugar.getInstance(context);
    this.nodeCloner = NodeCloner.getInstance(context);
    this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
}
public BLangPackage perform(BLangPackage pkgNode) {
    // Prime the annotation map before any rewriting takes place.
    annotationDesugar.initializeAnnotationMap(pkgNode);
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(pkgNode.symbol);
    return rewrite(pkgNode, pkgEnv);
}
// Hoists functions attached to object/record type definitions to package level so
// later phases treat them as ordinary top-level functions, and creates the
// generated-init (objects) / init (records) functions for each such type.
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
    for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
        if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
            continue;
        }
        if (typeDef.symbol.tag == SymTag.OBJECT) {
            BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
            // Hoist each attached function that is not already registered at package level.
            objectTypeNode.functions.forEach(f -> {
                if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
                    pkgNode.functions.add(f);
                    pkgNode.topLevelNodes.add(f);
                }
            });
            // Abstract objects are never instantiated, so no init function is generated.
            if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
                continue;
            }
            BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env);
            tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                    tempGeneratedInitFunction.symbol.scope, env);
            // Analyze the freshly generated init so its symbols are resolved before desugaring.
            this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
            objectTypeNode.generatedInitFunction = tempGeneratedInitFunction;
            pkgNode.functions.add(objectTypeNode.generatedInitFunction);
            pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction);
            if (objectTypeNode.initFunction != null) {
                pkgNode.functions.add(objectTypeNode.initFunction);
                pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
            }
        } else if (typeDef.symbol.tag == SymTag.RECORD) {
            BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
            // Records get a synthesized init function, rewritten immediately.
            recordTypeNode.initFunction = rewrite(
                    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                    env);
            pkgNode.functions.add(recordTypeNode.initFunction);
            pkgNode.topLevelNodes.add(recordTypeNode.initFunction);
        }
    }
}
// Creates the compiler-generated initializer for an object type. When a user-defined
// init exists, the generated init's signature (params, rest param, return type) is
// made to mirror it so the generated init can delegate to the user's init.
private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
    BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env);
    if (objectTypeNode.initFunction == null) {
        // No user-defined init: the bare generated init is sufficient.
        return generatedInitFunc;
    }
    BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc;
    BAttachedFunction generatedInitializerFunc =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc;
    addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
            generatedInitializerFunc);
    addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc);
    // The return-type node must be copied before its resolved type is read below.
    generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode;
    generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type;
    ((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes;
    ((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType;
    ((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType;
    generatedInitializerFunc.type = initializerFunc.type;
    // Mark as not-yet-desugared so the rewrite pass will visit it.
    generatedInitFunc.desugared = false;
    return generatedInitFunc;
}
// Mirrors every required parameter of the user-defined init onto the generated init
// function (fresh symbols, cloned default-value expressions).
private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                      BAttachedFunction generatedInitializerFunc) {
    // Iterating an empty list is a no-op, so no explicit empty-check is needed.
    for (BLangSimpleVariable param : initFunction.requiredParams) {
        BVarSymbol mirroredSymbol = new BVarSymbol(0, names.fromString(param.name.getValue()),
                param.symbol.pkgID, param.type, param.symbol.owner);
        BLangSimpleVariable mirroredParam = ASTBuilderUtil.createVariable(initFunction.pos,
                param.name.getValue(), param.type, createRequiredParamExpr(param.expr), mirroredSymbol);
        generatedInitFunc.requiredParams.add(mirroredParam);
        generatedInitializerFunc.symbol.params.add(mirroredParam.symbol);
    }
}
// Produces a copy of a parameter's default-value expression that is safe to attach
// to the generated init function. Lambdas are rebuilt; other expressions are cloned.
private BLangExpression createRequiredParamExpr(BLangExpression expr) {
    if (expr == null) {
        return null;
    }
    if (expr.getKind() == NodeKind.LAMBDA) {
        // Rebuild lambdas rather than cloning so they get a fresh function wrapper.
        BLangFunction fn = ((BLangLambdaFunction) expr).function;
        return createLambdaFunction(fn.pos, fn.name.value, fn.requiredParams, fn.returnTypeNode, fn.body);
    }
    BLangExpression cloned = this.nodeCloner.clone(expr);
    if (cloned.getKind() == NodeKind.ARROW_EXPR) {
        // Rename the cloned arrow function ($name$) to avoid clashing with the original.
        BLangArrowFunction arrow = (BLangArrowFunction) cloned;
        BLangIdentifier fnName = (BLangIdentifier) arrow.functionName;
        arrow.functionName = ASTBuilderUtil.createIdentifier(fnName.pos, "$" + fnName.getValue() + "$");
    }
    return cloned;
}
// Mirrors the user-defined init's rest parameter (if any) onto the generated init.
private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
                                                  BAttachedFunction generatedInitializerFunc) {
    BLangSimpleVariable restParam = initFunction.restParam;
    if (restParam == null) {
        // Nothing to mirror when the user-defined init declares no rest parameter.
        return;
    }
    BVarSymbol restSymbol = new BVarSymbol(0, names.fromString(restParam.name.getValue()),
            restParam.symbol.pkgID, restParam.type, restParam.symbol.owner);
    generatedInitFunc.restParam = ASTBuilderUtil.createVariable(initFunction.pos,
            restParam.name.getValue(), restParam.type, null, restSymbol);
    generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol;
}
/**
 * Creates the package's init, start and stop functions and their invokable symbols.
 * XML namespace declarations are folded into the init function body.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 */
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
    String pkgAlias = pkgNode.symbol.pkgID.toString();
    pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, pkgAlias,
                                                                                 Names.INIT_FUNCTION_SUFFIX,
                                                                                 symTable);
    // Namespace declarations take effect when the module initializes.
    BLangBlockFunctionBody initBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
        initBody.addStatement(createNamespaceDeclrStatement(xmlns));
    }
    pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, pkgAlias,
                                                                                  Names.START_FUNCTION_SUFFIX,
                                                                                  symTable);
    pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, pkgAlias,
                                                                          Names.STOP_FUNCTION_SUFFIX);
    createInvokableSymbol(pkgNode.initFunction, env);
    createInvokableSymbol(pkgNode.startFunction, env);
    createInvokableSymbol(pkgNode.stopFunction, env);
}
// Appends the module init's terminating statement: either `return <user init>();`
// when a user-defined init exists, or a plain nil return otherwise.
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    Optional<BLangFunction> userDefInit = pkgNode.functions.stream()
            .filter(fn -> !fn.attachedFunction
                    && fn.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
            .findFirst();
    if (!userDefInit.isPresent()) {
        // No user-defined init: module init simply returns nil.
        addNilReturnStatement(initFnBody);
        return;
    }
    BLangFunction initFn = userDefInit.get();
    BLangInvocation initCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    initCall.pos = pkgNode.initFunction.pos;
    BLangIdentifier fnName = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    fnName.setLiteral(false);
    fnName.setValue(initFn.name.value);
    initCall.name = fnName;
    initCall.symbol = initFn.symbol;
    BLangIdentifier alias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    alias.setLiteral(false);
    alias.setValue(pkgNode.packageID.name.value);
    initCall.pkgAlias = alias;
    initCall.type = initFn.returnTypeNode.type;
    initCall.requiredArgs = Collections.emptyList();
    // Propagate the user init's result (error? or nil) out of the module init.
    BLangReturn ret = (BLangReturn) TreeBuilder.createReturnNode();
    ret.pos = pkgNode.initFunction.pos;
    ret.expr = initCall;
    initFnBody.stmts.add(ret);
}
/**
 * Creates an invokable symbol for the given function node and attaches it,
 * wiring up the return type, parameter symbols and a fresh scope.
 *
 * @param bLangFunction function node
 * @param env Symbol environment
 */
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
    // Prefer the already-resolved return type; resolve the node only when absent.
    BType returnType = bLangFunction.returnTypeNode.type != null
            ? bLangFunction.returnTypeNode.type
            : symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env);
    BInvokableType invokableType =
            new BInvokableType(new ArrayList<>(), getRestType(bLangFunction), returnType, null);
    BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
            new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true);
    functionSymbol.retType = returnType;
    bLangFunction.requiredParams.forEach(param -> functionSymbol.params.add(param.symbol));
    functionSymbol.scope = new Scope(functionSymbol);
    bLangFunction.symbol = functionSymbol;
}
/**
 * Appends a nil return statement to the given block.
 *
 * @param bLangBlockStmt block statement node
 */
private void addNilReturnStatement(BlockNode bLangBlockStmt) {
    bLangBlockStmt.addStatement(
            ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType));
}
/**
 * Create a namespace declaration statement for an XMLNS node.
 *
 * @param xmlns XMLNS node
 * @return XMLNS statement wrapping the given declaration
 */
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
    BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
    xmlnsStmt.xmlnsDecl = xmlns;
    xmlnsStmt.pos = xmlns.pos;
    return xmlnsStmt;
}
@Override
public void visit(BLangPackage pkgNode) {
    // Desugaring runs once per package; bail out if already completed.
    if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
        result = pkgNode;
        return;
    }
    createPackageInitFunctions(pkgNode, env);
    addAttachedFunctionsToPackageLevel(pkgNode, env);
    // Literal constants contribute their associated (finite) type definitions
    // to the package-level type-definition list.
    pkgNode.constants.stream()
            .filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
                    constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
            .forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
    BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
    // Map-typed constants are initialized inside the module init with an immutable
    // (cloneReadOnly) copy of their value.
    for (BLangConstant constant : pkgNode.constants) {
        if (constant.symbol.type.tag == TypeTags.MAP) {
            BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
            constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
                    pkgNode.initFunction.symbol.scope, env));
            BLangInvocation frozenConstValExpr =
                    createLangLibInvocationNode(
                            "cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
            BLangAssignment constInit =
                    ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
            initFnBody.stmts.add(constInit);
        }
    }
    // Global variable initializers also run inside the module init function.
    pkgNode.globalVars.forEach(globalVar -> {
        BLangAssignment assignment = createAssignmentStmt(globalVar);
        if (assignment.expr != null) {
            initFnBody.stmts.add(assignment);
        }
    });
    pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
    annotationDesugar.rewritePackageAnnotations(pkgNode, env);
    // Must run before the init function is split/rewritten below so the user-defined
    // init invocation is part of the init body.
    addUserDefinedModuleInitInvocationAndReturn(pkgNode);
    // Rewrite type definitions in precedence order, then the remaining top-level constructs.
    pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
    pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
    pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
    pkgNode.constants = rewrite(pkgNode.constants, env);
    pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
    pkgNode.functions = rewrite(pkgNode.functions, env);
    serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
    ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
    addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
    // Split the init function (see splitInitFunction) before desugaring the
    // lifecycle functions themselves.
    pkgNode.initFunction = splitInitFunction(pkgNode, env);
    pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
    pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
    pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
    closureDesugar.visit(pkgNode);
    // Testable packages are desugared in their own package environments.
    for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
        rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
    }
    pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
    initFuncIndex = 0;
    result = pkgNode;
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Desugar the imported package within its own package environment.
    SymbolEnv pkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(pkgEnv.node, pkgEnv);
    result = importPkgNode;
}
@Override
public void visit(BLangTypeDefinition typeDef) {
    // Only object and record type nodes need their bodies desugared here.
    NodeKind typeNodeKind = typeDef.typeNode.getKind();
    if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    for (BLangAnnotationAttachment attachment : typeDef.annAttachments) {
        rewrite(attachment, env);
    }
    result = typeDef;
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
    // Fold referenced (inherited) fields into the object's own field list.
    objectTypeNode.fields.addAll(objectTypeNode.referencedFields);
    if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
        // Abstract objects have no generated init; nothing further to desugar here.
        result = objectTypeNode;
        return;
    }
    for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }
    // Add a default-value assignment for each field that has an initializer
    // expression but no statement already registered for it.
    Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
    for (BLangSimpleVariable field : objectTypeNode.fields) {
        if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
            initFuncStmts.put(field.symbol,
                    createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
                            objectTypeNode.generatedInitFunction.receiver.symbol));
        }
    }
    // Splice the field-init statements at the front of the generated init body,
    // preserving their order.
    BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody generatedInitFnBody =
            (BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
    int i;
    for (i = 0; i < initStmts.length; i++) {
        generatedInitFnBody.stmts.add(i, initStmts[i]);
    }
    if (objectTypeNode.initFunction != null) {
        // The statement right after the spliced block is the generated init's return;
        // make it return the user-defined init's result.
        ((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
                createUserDefinedInitInvocation(objectTypeNode);
    }
    for (BLangFunction fn : objectTypeNode.functions) {
        rewrite(fn, this.env);
    }
    rewrite(objectTypeNode.generatedInitFunction, this.env);
    rewrite(objectTypeNode.initFunction, this.env);
    result = objectTypeNode;
}
// Builds the invocation of the user-defined init from inside the generated init,
// forwarding all required parameters and (if present) the rest parameter.
private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
    ArrayList<BLangExpression> paramRefs = new ArrayList<>();
    for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
        paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
    }
    BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
            ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
            paramRefs, Collections.emptyList(), symResolver);
    if (objectTypeNode.generatedInitFunction.restParam != null) {
        // Forward the rest parameter as a spread (...) argument.
        BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
                objectTypeNode.generatedInitFunction.restParam.symbol);
        BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
        bLangRestArgsExpression.expr = restVarRef;
        bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
        bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
        bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
        invocation.restArgs.add(bLangRestArgsExpression);
    }
    // The receiver of the call is the generated init's own receiver (self).
    invocation.exprSymbol =
            ((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;
    return rewriteExpr(invocation);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
    // Fold referenced (included) fields into the record's own field list.
    recordTypeNode.fields.addAll(recordTypeNode.referencedFields);
    for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
        bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
    }
    if (recordTypeNode.initFunction == null) {
        // Synthesize and register an init function when one was not created earlier.
        recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
                names, symTable);
        env.enclPkg.addFunction(recordTypeNode.initFunction);
        env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
    }
    // Add default-value assignments for non-optional fields with initializer
    // expressions that are not already covered.
    for (BLangSimpleVariable field : recordTypeNode.fields) {
        if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) &&
                !Symbols.isOptional(field.symbol) && field.expr != null) {
            recordTypeNode.initFunction.initFunctionStmts
                    .put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
                            recordTypeNode.initFunction.receiver.symbol));
        }
    }
    // Splice the field-init statements at the front of the init function body.
    BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
            .values().toArray(new BLangStatement[0]);
    BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
    for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
        initFnBody.stmts.add(i, initStmts[i]);
    }
    if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
        // Local anonymous records are lifted to a package-level type definition and
        // replaced in place by a reference to that definition.
        BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
        TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode,
                env);
        recordTypeNode.desugared = true;
        result = userDefinedType;
        return;
    }
    result = recordTypeNode;
}
// Replaces a local anonymous record type node with a reference to its generated type name.
private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
    String generatedTypeName = recordTypeNode.symbol.name.value;
    return ASTBuilderUtil.createUserDefineTypeNode(generatedTypeName, recordTypeNode.type, recordTypeNode.pos);
}
@Override
public void visit(BLangArrayType arrayType) {
    // Desugar the element type in place.
    arrayType.elemtype = rewrite(arrayType.elemtype, env);
    result = arrayType;
}
@Override
public void visit(BLangConstrainedType constrainedType) {
    // Desugar the constraint type in place.
    constrainedType.constraint = rewrite(constrainedType.constraint, env);
    result = constrainedType;
}
@Override
public void visit(BLangStreamType streamType) {
    // Desugar both the constraint and the error type nodes of the stream.
    streamType.constraint = rewrite(streamType.constraint, env);
    streamType.error = rewrite(streamType.error, env);
    result = streamType;
}
@Override
public void visit(BLangValueType valueType) {
    // Value types need no desugaring; pass through unchanged.
    result = valueType;
}
@Override
public void visit(BLangUserDefinedType userDefinedType) {
    // User-defined type references need no desugaring; pass through unchanged.
    result = userDefinedType;
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    // Desugar every member type and swap in the rewritten list wholesale.
    List<BLangType> desugaredMembers = new ArrayList<>();
    for (BLangType member : unionTypeNode.memberTypeNodes) {
        desugaredMembers.add(rewrite(member, env));
    }
    unionTypeNode.memberTypeNodes = desugaredMembers;
    result = unionTypeNode;
}
@Override
public void visit(BLangErrorType errorType) {
    // Desugar the error's detail type in place.
    errorType.detailType = rewrite(errorType.detailType, env);
    result = errorType;
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    // NOTE(review): unlike the return type below, the rewritten param type nodes are
    // discarded here (only the return value of rewrite is unused) — confirm this is
    // intentional and not a missed assignment back to param.typeNode.
    functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env));
    functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);
    result = functionTypeNode;
}
@Override
public void visit(BLangBuiltInRefTypeNode refTypeNode) {
    // Built-in reference types need no desugaring; pass through unchanged.
    result = refTypeNode;
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Desugar each member type, then the rest-parameter type (null-safe via rewrite).
    List<BLangType> desugaredMembers = new ArrayList<>();
    for (BLangType member : tupleTypeNode.memberTypeNodes) {
        desugaredMembers.add(rewrite(member, env));
    }
    tupleTypeNode.memberTypeNodes = desugaredMembers;
    tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);
    result = tupleTypeNode;
}
@Override
public void visit(BLangBlockFunctionBody body) {
    // Statements are rewritten inside a dedicated function-body environment.
    SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
    body.stmts = rewriteStmt(body.stmts, bodyEnv);
    result = body;
}
@Override
public void visit(BLangExprFunctionBody exprBody) {
    // Desugar `=> expr` bodies into a block body containing a single return.
    BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>());
    // NOTE(review): createReturnStmt(pos, body) presumably appends the return to
    // `body` — confirm against ASTBuilderUtil.
    BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body);
    returnStmt.expr = rewriteExpr(exprBody.expr);
    result = body;
}
@Override
public void visit(BLangExternalFunctionBody body) {
    // External bodies carry no statements; only their annotations need rewriting.
    body.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = body;
}
@Override | class Desugar extends BLangNodeVisitor {
private static final CompilerContext.Key<Desugar> DESUGAR_KEY =
new CompilerContext.Key<>();
private static final String QUERY_TABLE_WITH_JOIN_CLAUSE = "queryTableWithJoinClause";
private static final String QUERY_TABLE_WITHOUT_JOIN_CLAUSE = "queryTableWithoutJoinClause";
private static final String BASE_64 = "base64";
private static final String ERROR_REASON_FUNCTION_NAME = "reason";
private static final String ERROR_DETAIL_FUNCTION_NAME = "detail";
private static final String TO_STRING_FUNCTION_NAME = "toString";
private static final String LENGTH_FUNCTION_NAME = "length";
private static final String ERROR_REASON_NULL_REFERENCE_ERROR = "NullReferenceException";
private static final String CONSTRUCT_FROM = "constructFrom";
private static final String SLICE_LANGLIB_METHOD = "slice";
private static final String PUSH_LANGLIB_METHOD = "push";
private static final String DESUGARED_VARARG_KEY = "$vararg$";
public static final String XML_INTERNAL_SELECT_DESCENDANTS = "selectDescendants";
public static final String XML_INTERNAL_CHILDREN = "children";
public static final String XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT = "getFilteredChildrenFlat";
public static final String XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING = "getElementNameNilLifting";
public static final String XML_INTERNAL_GET_ATTRIBUTE = "getAttribute";
public static final String XML_INTERNAL_GET_ELEMENTS = "getElements";
private SymbolTable symTable;
private SymbolResolver symResolver;
private final SymbolEnter symbolEnter;
private ClosureDesugar closureDesugar;
private QueryDesugar queryDesugar;
private AnnotationDesugar annotationDesugar;
private Types types;
private Names names;
private ServiceDesugar serviceDesugar;
private BLangNode result;
private NodeCloner nodeCloner;
private SemanticAnalyzer semanticAnalyzer;
private BLangStatementLink currentLink;
public Stack<BLangLockStmt> enclLocks = new Stack<>();
private SymbolEnv env;
private int lambdaFunctionCount = 0;
private int transactionIndex = 0;
private int recordCount = 0;
private int errorCount = 0;
private int annonVarCount = 0;
private int initFuncIndex = 0;
private int indexExprCount = 0;
private int letCount = 0;
private int varargCount = 0;
private Stack<BLangMatch> matchStmtStack = new Stack<>();
Stack<BLangExpression> accessExprStack = new Stack<>();
private BLangMatchTypedBindingPatternClause successPattern;
private BLangAssignment safeNavigationAssignment;
static boolean isJvmTarget = false;
public static Desugar getInstance(CompilerContext context) {
Desugar desugar = context.get(DESUGAR_KEY);
if (desugar == null) {
desugar = new Desugar(context);
}
return desugar;
}
private Desugar(CompilerContext context) {
isJvmTarget = true;
context.put(DESUGAR_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.closureDesugar = ClosureDesugar.getInstance(context);
this.queryDesugar = QueryDesugar.getInstance(context);
this.annotationDesugar = AnnotationDesugar.getInstance(context);
this.types = Types.getInstance(context);
this.names = Names.getInstance(context);
this.names = Names.getInstance(context);
this.serviceDesugar = ServiceDesugar.getInstance(context);
this.nodeCloner = NodeCloner.getInstance(context);
this.semanticAnalyzer = SemanticAnalyzer.getInstance(context);
}
public BLangPackage perform(BLangPackage pkgNode) {
annotationDesugar.initializeAnnotationMap(pkgNode);
SymbolEnv env = this.symTable.pkgEnvMap.get(pkgNode.symbol);
return rewrite(pkgNode, env);
}
private void addAttachedFunctionsToPackageLevel(BLangPackage pkgNode, SymbolEnv env) {
for (BLangTypeDefinition typeDef : pkgNode.typeDefinitions) {
if (typeDef.typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
continue;
}
if (typeDef.symbol.tag == SymTag.OBJECT) {
BLangObjectTypeNode objectTypeNode = (BLangObjectTypeNode) typeDef.typeNode;
objectTypeNode.functions.forEach(f -> {
if (!pkgNode.objAttachedFunctions.contains(f.symbol)) {
pkgNode.functions.add(f);
pkgNode.topLevelNodes.add(f);
}
});
if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
continue;
}
BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(objectTypeNode, env);
tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
tempGeneratedInitFunction.symbol.scope, env);
this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
objectTypeNode.generatedInitFunction = tempGeneratedInitFunction;
pkgNode.functions.add(objectTypeNode.generatedInitFunction);
pkgNode.topLevelNodes.add(objectTypeNode.generatedInitFunction);
if (objectTypeNode.initFunction != null) {
pkgNode.functions.add(objectTypeNode.initFunction);
pkgNode.topLevelNodes.add(objectTypeNode.initFunction);
}
} else if (typeDef.symbol.tag == SymTag.RECORD) {
BLangRecordTypeNode recordTypeNode = (BLangRecordTypeNode) typeDef.typeNode;
recordTypeNode.initFunction = rewrite(
TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
pkgNode.functions.add(recordTypeNode.initFunction);
pkgNode.topLevelNodes.add(recordTypeNode.initFunction);
}
}
}
private BLangFunction createGeneratedInitializerFunction(BLangObjectTypeNode objectTypeNode, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForObjectType(objectTypeNode, env);
if (objectTypeNode.initFunction == null) {
return generatedInitFunc;
}
BAttachedFunction initializerFunc = ((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc;
BAttachedFunction generatedInitializerFunc =
((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc;
addRequiredParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc,
generatedInitializerFunc);
addRestParamsToGeneratedInitFunction(objectTypeNode.initFunction, generatedInitFunc, generatedInitializerFunc);
generatedInitFunc.returnTypeNode = objectTypeNode.initFunction.returnTypeNode;
generatedInitializerFunc.symbol.retType = generatedInitFunc.returnTypeNode.type;
((BInvokableType) generatedInitFunc.symbol.type).paramTypes = initializerFunc.type.paramTypes;
((BInvokableType) generatedInitFunc.symbol.type).retType = initializerFunc.type.retType;
((BInvokableType) generatedInitFunc.symbol.type).restType = initializerFunc.type.restType;
generatedInitializerFunc.type = initializerFunc.type;
generatedInitFunc.desugared = false;
return generatedInitFunc;
}
private void addRequiredParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
BAttachedFunction generatedInitializerFunc) {
if (initFunction.requiredParams.isEmpty()) {
return;
}
for (BLangSimpleVariable requiredParameter : initFunction.requiredParams) {
BLangSimpleVariable var =
ASTBuilderUtil.createVariable(initFunction.pos,
requiredParameter.name.getValue(), requiredParameter.type,
createRequiredParamExpr(requiredParameter.expr),
new BVarSymbol(0, names.fromString(requiredParameter.name.getValue()),
requiredParameter.symbol.pkgID,
requiredParameter.type, requiredParameter.symbol.owner));
generatedInitFunc.requiredParams.add(var);
generatedInitializerFunc.symbol.params.add(var.symbol);
}
}
private BLangExpression createRequiredParamExpr(BLangExpression expr) {
if (expr == null) {
return null;
}
if (expr.getKind() == NodeKind.LAMBDA) {
BLangFunction func = ((BLangLambdaFunction) expr).function;
return createLambdaFunction(func.pos, func.name.value, func.requiredParams, func.returnTypeNode, func.body);
}
BLangExpression expression = this.nodeCloner.clone(expr);
if (expression.getKind() == NodeKind.ARROW_EXPR) {
BLangIdentifier func = (BLangIdentifier) ((BLangArrowFunction) expression).functionName;
((BLangArrowFunction) expression).functionName = ASTBuilderUtil.createIdentifier(func.pos,
"$" + func.getValue() + "$");
}
return expression;
}
private void addRestParamsToGeneratedInitFunction(BLangFunction initFunction, BLangFunction generatedInitFunc,
BAttachedFunction generatedInitializerFunc) {
if (initFunction.restParam == null) {
return;
}
BLangSimpleVariable restParam = initFunction.restParam;
generatedInitFunc.restParam =
ASTBuilderUtil.createVariable(initFunction.pos,
restParam.name.getValue(), restParam.type, null, new BVarSymbol(0,
names.fromString(restParam.name.getValue()), restParam.symbol.pkgID,
restParam.type, restParam.symbol.owner));
generatedInitializerFunc.symbol.restParam = generatedInitFunc.restParam.symbol;
}
/**
* Create package init functions.
*
* @param pkgNode package node
* @param env symbol environment of package
*/
private void createPackageInitFunctions(BLangPackage pkgNode, SymbolEnv env) {
String alias = pkgNode.symbol.pkgID.toString();
pkgNode.initFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
Names.INIT_FUNCTION_SUFFIX,
symTable);
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
for (BLangXMLNS xmlns : pkgNode.xmlnsList) {
initFnBody.addStatement(createNamespaceDeclrStatement(xmlns));
}
pkgNode.startFunction = ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
Names.START_FUNCTION_SUFFIX,
symTable);
pkgNode.stopFunction = ASTBuilderUtil.createInitFunctionWithNilReturn(pkgNode.pos, alias,
Names.STOP_FUNCTION_SUFFIX);
createInvokableSymbol(pkgNode.initFunction, env);
createInvokableSymbol(pkgNode.startFunction, env);
createInvokableSymbol(pkgNode.stopFunction, env);
}
private void addUserDefinedModuleInitInvocationAndReturn(BLangPackage pkgNode) {
Optional<BLangFunction> userDefInitOptional = pkgNode.functions.stream()
.filter(bLangFunction -> !bLangFunction.attachedFunction &&
bLangFunction.name.value.equals(Names.USER_DEFINED_INIT_SUFFIX.value))
.findFirst();
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
if (!userDefInitOptional.isPresent()) {
addNilReturnStatement(initFnBody);
return;
}
BLangFunction userDefInit = userDefInitOptional.get();
BLangInvocation userDefInitInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
userDefInitInvocation.pos = pkgNode.initFunction.pos;
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(userDefInit.name.value);
userDefInitInvocation.name = name;
userDefInitInvocation.symbol = userDefInit.symbol;
BLangIdentifier pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
pkgAlias.setLiteral(false);
pkgAlias.setValue(pkgNode.packageID.name.value);
userDefInitInvocation.pkgAlias = pkgAlias;
userDefInitInvocation.type = userDefInit.returnTypeNode.type;
userDefInitInvocation.requiredArgs = Collections.emptyList();
BLangReturn returnStmt = (BLangReturn) TreeBuilder.createReturnNode();
returnStmt.pos = pkgNode.initFunction.pos;
returnStmt.expr = userDefInitInvocation;
initFnBody.stmts.add(returnStmt);
}
/**
* Create invokable symbol for function.
*
* @param bLangFunction function node
* @param env Symbol environment
*/
private void createInvokableSymbol(BLangFunction bLangFunction, SymbolEnv env) {
BType returnType = bLangFunction.returnTypeNode.type == null ?
symResolver.resolveTypeNode(bLangFunction.returnTypeNode, env) : bLangFunction.returnTypeNode.type;
BInvokableType invokableType = new BInvokableType(new ArrayList<>(), getRestType(bLangFunction),
returnType, null);
BInvokableSymbol functionSymbol = Symbols.createFunctionSymbol(Flags.asMask(bLangFunction.flagSet),
new Name(bLangFunction.name.value), env.enclPkg.packageID, invokableType, env.enclPkg.symbol, true);
functionSymbol.retType = returnType;
for (BLangVariable param : bLangFunction.requiredParams) {
functionSymbol.params.add(param.symbol);
}
functionSymbol.scope = new Scope(functionSymbol);
bLangFunction.symbol = functionSymbol;
}
/**
* Add nil return statement.
*
* @param bLangBlockStmt block statement node
*/
private void addNilReturnStatement(BlockNode bLangBlockStmt) {
BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(((BLangNode) bLangBlockStmt).pos, symTable.nilType);
bLangBlockStmt.addStatement(returnStmt);
}
/**
* Create namespace declaration statement for XMNLNS.
*
* @param xmlns XMLNS node
* @return XMLNS statement
*/
private BLangXMLNSStatement createNamespaceDeclrStatement(BLangXMLNS xmlns) {
BLangXMLNSStatement xmlnsStmt = (BLangXMLNSStatement) TreeBuilder.createXMLNSDeclrStatementNode();
xmlnsStmt.xmlnsDecl = xmlns;
xmlnsStmt.pos = xmlns.pos;
return xmlnsStmt;
}
@Override
public void visit(BLangPackage pkgNode) {
// Desugaring runs at most once per package; bail out if already processed.
if (pkgNode.completedPhases.contains(CompilerPhase.DESUGAR)) {
result = pkgNode;
return;
}
createPackageInitFunctions(pkgNode, env);
addAttachedFunctionsToPackageLevel(pkgNode, env);
// Literal constants carry an associated type definition that must be desugared with the rest.
pkgNode.constants.stream()
.filter(constant -> constant.expr.getKind() == NodeKind.LITERAL ||
constant.expr.getKind() == NodeKind.NUMERIC_LITERAL)
.forEach(constant -> pkgNode.typeDefinitions.add(constant.associatedTypeDefinition));
BLangBlockStmt serviceAttachments = serviceDesugar.rewriteServiceVariables(pkgNode.services, env);
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) pkgNode.initFunction.body;
// Map-typed constants are initialized in the module init function with a read-only clone.
for (BLangConstant constant : pkgNode.constants) {
if (constant.symbol.type.tag == TypeTags.MAP) {
BLangSimpleVarRef constVarRef = ASTBuilderUtil.createVariableRef(constant.pos, constant.symbol);
constant.expr = rewrite(constant.expr, SymbolEnv.createTypeEnv(constant.typeNode,
pkgNode.initFunction.symbol.scope, env));
BLangInvocation frozenConstValExpr =
createLangLibInvocationNode(
"cloneReadOnly", constant.expr, new ArrayList<>(), constant.expr.type, constant.pos);
BLangAssignment constInit =
ASTBuilderUtil.createAssignmentStmt(constant.pos, constVarRef, frozenConstValExpr);
initFnBody.stmts.add(constInit);
}
}
// Global variable initializers become assignments inside the module init function.
pkgNode.globalVars.forEach(globalVar -> {
BLangAssignment assignment = createAssignmentStmt(globalVar);
if (assignment.expr != null) {
initFnBody.stmts.add(assignment);
}
});
pkgNode.services.forEach(service -> serviceDesugar.engageCustomServiceDesugar(service, env));
annotationDesugar.rewritePackageAnnotations(pkgNode, env);
addUserDefinedModuleInitInvocationAndReturn(pkgNode);
// Type definitions are rewritten in precedence order.
pkgNode.typeDefinitions.sort(Comparator.comparing(t -> t.precedence));
pkgNode.typeDefinitions = rewrite(pkgNode.typeDefinitions, env);
pkgNode.xmlnsList = rewrite(pkgNode.xmlnsList, env);
pkgNode.constants = rewrite(pkgNode.constants, env);
pkgNode.globalVars = rewrite(pkgNode.globalVars, env);
pkgNode.functions = rewrite(pkgNode.functions, env);
serviceDesugar.rewriteListeners(pkgNode.globalVars, env, pkgNode.startFunction, pkgNode.stopFunction);
ASTBuilderUtil.appendStatements(serviceAttachments, (BLangBlockFunctionBody) pkgNode.initFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.startFunction.body);
addNilReturnStatement((BLangBlockFunctionBody) pkgNode.stopFunction.body);
// Split the (potentially large) init function into smaller chained functions before rewriting it.
pkgNode.initFunction = splitInitFunction(pkgNode, env);
pkgNode.initFunction = rewrite(pkgNode.initFunction, env);
pkgNode.startFunction = rewrite(pkgNode.startFunction, env);
pkgNode.stopFunction = rewrite(pkgNode.stopFunction, env);
closureDesugar.visit(pkgNode);
// Testable (test) packages are desugared in their own environments.
for (BLangTestablePackage testablePkg : pkgNode.getTestablePkgs()) {
rewrite(testablePkg, this.symTable.pkgEnvMap.get(testablePkg.symbol));
}
pkgNode.completedPhases.add(CompilerPhase.DESUGAR);
initFuncIndex = 0;
result = pkgNode;
}
@Override
public void visit(BLangImportPackage importPkgNode) {
    // Desugar the imported package within its own package environment.
    SymbolEnv importedPkgEnv = this.symTable.pkgEnvMap.get(importPkgNode.symbol);
    rewrite(importedPkgEnv.node, importedPkgEnv);
    result = importPkgNode;
}
@Override
public void visit(BLangTypeDefinition typeDef) {
    // Only object and record type nodes carry members that require desugaring.
    NodeKind typeNodeKind = typeDef.typeNode.getKind();
    if (typeNodeKind == NodeKind.OBJECT_TYPE || typeNodeKind == NodeKind.RECORD_TYPE) {
        typeDef.typeNode = rewrite(typeDef.typeNode, env);
    }
    for (BLangAnnotationAttachment attachment : typeDef.annAttachments) {
        rewrite(attachment, env);
    }
    result = typeDef;
}
@Override
public void visit(BLangObjectTypeNode objectTypeNode) {
// Pull fields inherited from referenced types into this object's own field list.
objectTypeNode.fields.addAll(objectTypeNode.referencedFields);
// Abstract objects have no generated init function to populate; nothing more to do.
if (objectTypeNode.flagSet.contains(Flag.ABSTRACT)) {
result = objectTypeNode;
return;
}
for (BLangSimpleVariable bLangSimpleVariable : objectTypeNode.fields) {
bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
}
// Fields with default-value expressions become assignments in the generated init function
// (unless an init statement already exists for that field).
Map<BSymbol, BLangStatement> initFuncStmts = objectTypeNode.generatedInitFunction.initFunctionStmts;
for (BLangSimpleVariable field : objectTypeNode.fields) {
if (!initFuncStmts.containsKey(field.symbol) && field.expr != null) {
initFuncStmts.put(field.symbol,
createStructFieldUpdate(objectTypeNode.generatedInitFunction, field,
objectTypeNode.generatedInitFunction.receiver.symbol));
}
}
// Prepend the collected field-initialization statements to the generated init body.
BLangStatement[] initStmts = initFuncStmts.values().toArray(new BLangStatement[0]);
BLangBlockFunctionBody generatedInitFnBody =
(BLangBlockFunctionBody) objectTypeNode.generatedInitFunction.body;
int i;
for (i = 0; i < initStmts.length; i++) {
generatedInitFnBody.stmts.add(i, initStmts[i]);
}
// If the user wrote an init method, the generated init delegates to it via the return expression
// of the statement that follows the inserted initializers.
if (objectTypeNode.initFunction != null) {
((BLangReturn) generatedInitFnBody.stmts.get(i)).expr =
createUserDefinedInitInvocation(objectTypeNode);
}
for (BLangFunction fn : objectTypeNode.functions) {
rewrite(fn, this.env);
}
rewrite(objectTypeNode.generatedInitFunction, this.env);
rewrite(objectTypeNode.initFunction, this.env);
result = objectTypeNode;
}
/**
 * Builds the invocation of the user-defined {@code init} method that the generated init
 * function delegates to, forwarding the generated init's required parameters and, if present,
 * its rest parameter as a spread (rest-args) expression.
 *
 * @param objectTypeNode the object type whose user-defined init is invoked
 * @return the rewritten invocation expression
 */
private BLangInvocation createUserDefinedInitInvocation(BLangObjectTypeNode objectTypeNode) {
ArrayList<BLangExpression> paramRefs = new ArrayList<>();
// Forward each required parameter of the generated init to the user-defined init.
for (BLangSimpleVariable var : objectTypeNode.generatedInitFunction.requiredParams) {
paramRefs.add(ASTBuilderUtil.createVariableRef(objectTypeNode.pos, var.symbol));
}
BLangInvocation invocation = ASTBuilderUtil.createInvocationExprMethod(objectTypeNode.pos,
((BObjectTypeSymbol) objectTypeNode.symbol).initializerFunc.symbol,
paramRefs, Collections.emptyList(), symResolver);
// A rest parameter is forwarded as a rest-args (spread) expression.
if (objectTypeNode.generatedInitFunction.restParam != null) {
BLangSimpleVarRef restVarRef = ASTBuilderUtil.createVariableRef(objectTypeNode.pos,
objectTypeNode.generatedInitFunction.restParam.symbol);
BLangRestArgsExpression bLangRestArgsExpression = new BLangRestArgsExpression();
bLangRestArgsExpression.expr = restVarRef;
bLangRestArgsExpression.pos = objectTypeNode.generatedInitFunction.pos;
bLangRestArgsExpression.type = objectTypeNode.generatedInitFunction.restParam.type;
bLangRestArgsExpression.expectedType = bLangRestArgsExpression.type;
invocation.restArgs.add(bLangRestArgsExpression);
}
// The receiver of the generated initializer also serves as the receiver of this call.
invocation.exprSymbol =
((BObjectTypeSymbol) objectTypeNode.symbol).generatedInitializerFunc.symbol.receiverSymbol;
return rewriteExpr(invocation);
}
@Override
public void visit(BLangRecordTypeNode recordTypeNode) {
// Pull fields inherited from referenced types into this record's own field list.
recordTypeNode.fields.addAll(recordTypeNode.referencedFields);
for (BLangSimpleVariable bLangSimpleVariable : recordTypeNode.fields) {
bLangSimpleVariable.typeNode = rewrite(bLangSimpleVariable.typeNode, env);
}
// Ensure the record has an init function to host field default-value assignments.
if (recordTypeNode.initFunction == null) {
recordTypeNode.initFunction = TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env,
names, symTable);
env.enclPkg.addFunction(recordTypeNode.initFunction);
env.enclPkg.topLevelNodes.add(recordTypeNode.initFunction);
}
// Non-optional fields with default values are initialized inside the init function.
for (BLangSimpleVariable field : recordTypeNode.fields) {
if (!recordTypeNode.initFunction.initFunctionStmts.containsKey(field.symbol) &&
!Symbols.isOptional(field.symbol) && field.expr != null) {
recordTypeNode.initFunction.initFunctionStmts
.put(field.symbol, createStructFieldUpdate(recordTypeNode.initFunction, field,
recordTypeNode.initFunction.receiver.symbol));
}
}
// Prepend the collected field-initialization statements to the init function body.
BLangStatement[] initStmts = recordTypeNode.initFunction.initFunctionStmts
.values().toArray(new BLangStatement[0]);
BLangBlockFunctionBody initFnBody = (BLangBlockFunctionBody) recordTypeNode.initFunction.body;
for (int i = 0; i < recordTypeNode.initFunction.initFunctionStmts.size(); i++) {
initFnBody.stmts.add(i, initStmts[i]);
}
// Local anonymous records are lifted to a module-level type definition and the node is
// replaced by a reference to that definition.
if (recordTypeNode.isAnonymous && recordTypeNode.isLocal) {
BLangUserDefinedType userDefinedType = desugarLocalAnonRecordTypeNode(recordTypeNode);
TypeDefBuilderHelper.addTypeDefinition(recordTypeNode.type, recordTypeNode.type.tsymbol, recordTypeNode,
env);
recordTypeNode.desugared = true;
result = userDefinedType;
return;
}
result = recordTypeNode;
}
// Replaces a local anonymous record type node with a reference to its generated type name.
private BLangUserDefinedType desugarLocalAnonRecordTypeNode(BLangRecordTypeNode recordTypeNode) {
return ASTBuilderUtil.createUserDefineTypeNode(recordTypeNode.symbol.name.value, recordTypeNode.type,
recordTypeNode.pos);
}
@Override
public void visit(BLangArrayType arrayType) {
// Desugar the element type in place.
arrayType.elemtype = rewrite(arrayType.elemtype, env);
result = arrayType;
}
@Override
public void visit(BLangConstrainedType constrainedType) {
// Desugar the constraint type in place.
constrainedType.constraint = rewrite(constrainedType.constraint, env);
result = constrainedType;
}
@Override
public void visit(BLangStreamType streamType) {
// Desugar both the element constraint and the error type of the stream.
streamType.constraint = rewrite(streamType.constraint, env);
streamType.error = rewrite(streamType.error, env);
result = streamType;
}
@Override
public void visit(BLangValueType valueType) {
// Built-in value types need no desugaring.
result = valueType;
}
@Override
public void visit(BLangUserDefinedType userDefinedType) {
// References to user-defined types need no further desugaring here.
result = userDefinedType;
}
@Override
public void visit(BLangUnionTypeNode unionTypeNode) {
    // Rewrite each member type node and replace the member list wholesale.
    List<BLangType> desugaredMembers = new ArrayList<>(unionTypeNode.memberTypeNodes.size());
    for (BLangType member : unionTypeNode.memberTypeNodes) {
        desugaredMembers.add(rewrite(member, env));
    }
    unionTypeNode.memberTypeNodes = desugaredMembers;
    result = unionTypeNode;
}
@Override
public void visit(BLangErrorType errorType) {
// Desugar the detail type in place.
errorType.detailType = rewrite(errorType.detailType, env);
result = errorType;
}
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
// Desugar each parameter's type node and the return type node.
functionTypeNode.params.forEach(param -> rewrite(param.typeNode, env));
functionTypeNode.returnTypeNode = rewrite(functionTypeNode.returnTypeNode, env);
result = functionTypeNode;
}
@Override
public void visit(BLangBuiltInRefTypeNode refTypeNode) {
// Built-in reference types need no desugaring.
result = refTypeNode;
}
@Override
public void visit(BLangTupleTypeNode tupleTypeNode) {
    // Rewrite each member type node, then the rest-parameter type.
    List<BLangType> desugaredMembers = new ArrayList<>(tupleTypeNode.memberTypeNodes.size());
    for (BLangType member : tupleTypeNode.memberTypeNodes) {
        desugaredMembers.add(rewrite(member, env));
    }
    tupleTypeNode.memberTypeNodes = desugaredMembers;
    tupleTypeNode.restParamType = rewrite(tupleTypeNode.restParamType, env);
    result = tupleTypeNode;
}
@Override
public void visit(BLangBlockFunctionBody body) {
// Statements are rewritten within a dedicated function-body environment.
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
body.stmts = rewriteStmt(body.stmts, bodyEnv);
result = body;
}
@Override
public void visit(BLangExprFunctionBody exprBody) {
// An expression body (`=> expr`) is desugared into a block body (`{ return expr; }`).
BLangBlockFunctionBody body = ASTBuilderUtil.createBlockFunctionBody(exprBody.pos, new ArrayList<>());
BLangReturn returnStmt = ASTBuilderUtil.createReturnStmt(exprBody.pos, body);
returnStmt.expr = rewriteExpr(exprBody.expr);
result = body;
}
@Override
public void visit(BLangExternalFunctionBody body) {
    // Only the annotation attachments of an external body require rewriting.
    body.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = body;
}
@Override |
Add a comment here to explain that this is the special case where we split after removing the root from a path that is just the root. | private String[] splitToElements(String str) {
String[] arr = str.split(this.parentFileSystem.getSeparator());
if (arr.length == 1 && arr[0].isEmpty()) {
return new String[0];
}
return arr;
} | if (arr.length == 1 && arr[0].isEmpty()) { | private String[] splitToElements(String str) {
String[] arr = str.split(this.parentFileSystem.getSeparator());
/*
This is a special case where we split after removing the root from a path that is just the root. Or otherwise
have an empty path.
*/
if (arr.length == 1 && arr[0].isEmpty()) {
return new String[0];
}
return arr;
} | class AzurePath implements Path {
private final ClientLogger logger = new ClientLogger(AzurePath.class);
// Marks the end of the root component (the container name), e.g. "container:".
private static final String ROOT_DIR_SUFFIX = ":";
// The file system this path belongs to; also supplies the path separator.
private final AzureFileSystem parentFileSystem;
// Textual form of the path: elements joined by the separator with empty elements removed.
private final String pathString;
/**
 * Builds the path string by splitting all inputs on the file system separator, discarding empty
 * elements, and joining the remainder; then validates that the root-identifying character
 * ({@code ROOT_DIR_SUFFIX}) appears only as the final character of the first element.
 */
AzurePath(AzureFileSystem parentFileSystem, String s, String... strings) {
    if (strings == null) {
        strings = new String[0];
    }
    this.parentFileSystem = parentFileSystem;
    /*
    Plain loops instead of reactive pipelines: the previous implementation subscribed to (and
    blocked on) a Flux three separate times just to split and validate in-memory strings.
    */
    Deque<String> elements = new ArrayDeque<>();
    for (String element : s.split(this.parentFileSystem.getSeparator())) {
        if (!element.isEmpty()) {
            elements.add(element);
        }
    }
    for (String str : strings) {
        for (String element : str.split(this.parentFileSystem.getSeparator())) {
            if (!element.isEmpty()) {
                elements.add(element);
            }
        }
    }
    this.pathString = String.join(this.parentFileSystem.getSeparator(), elements);
    String[] elementArr = elements.toArray(new String[0]);
    // ROOT_DIR_SUFFIX may not appear anywhere after the first element (checked first, matching
    // the validation order of the previous implementation).
    for (int i = 1; i < elementArr.length; i++) {
        if (elementArr[i].contains(ROOT_DIR_SUFFIX)) {
            throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX
                + " is an invalid character except to identify the root element of this path if there is one."));
        }
    }
    // Within the first element, ROOT_DIR_SUFFIX may only be the final character (marking the root).
    if (elementArr.length > 0 && elementArr[0].contains(ROOT_DIR_SUFFIX)
        && elementArr[0].indexOf(ROOT_DIR_SUFFIX) < elementArr[0].length() - 1) {
        throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX
            + " may only be used as the last character in the root component of a path"));
    }
}
/**
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem() {
return this.parentFileSystem;
}
/**
* A path is considered absolute in this file system if it contains a root component.
*
* {@inheritDoc}
*/
@Override
public boolean isAbsolute() {
return this.getRoot() != null;
}
/**
 * The root component of this path also identifies the Azure Storage Container in which the file is stored. This
 * method will not validate that the root component corresponds to an actual file store/container in this
 * file system. It will simply return the root component of the path if one is present and syntactically valid.
 *
 * {@inheritDoc}
 */
@Override
public Path getRoot() {
// The root, if present, is the first element and ends with ROOT_DIR_SUFFIX (":").
String firstElement = pathString.split(parentFileSystem.getSeparator())[0];
if (firstElement.endsWith(ROOT_DIR_SUFFIX)) {
return this.parentFileSystem.getPath(firstElement);
}
return null;
}
/**
 * {@inheritDoc}
 */
@Override
public Path getFileName() {
    // A path that is empty or consists only of a root has no file name.
    if (this.withoutRoot().isEmpty()) {
        return null;
    }
    // Direct array access instead of blocking on a reactive pipeline for a "last element" lookup.
    String[] elements = this.splitToElements();
    return this.parentFileSystem.getPath(elements[elements.length - 1]);
}
/**
 * {@inheritDoc}
 */
@Override
public Path getParent() {
/*
If this path only has one element, there is no parent. Note the root is included in the parent, so we don't
use getNameCount here.
*/
if (this.splitToElements().length == 1) {
return null;
}
// Drop everything from the last separator onward to obtain the parent path string.
return this.parentFileSystem.getPath(
this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator())));
}
/**
 * {@inheritDoc}
 */
@Override
public int getNameCount() {
// The root component is not a name element, so only elements after the root are counted.
return this.splitToElements(this.withoutRoot()).length;
}
/**
 * {@inheritDoc}
 */
@Override
public Path getName(int i) {
    if (i < 0 || i >= this.getNameCount()) {
        // Include the offending index and route through the logger, consistent with the
        // validation failures elsewhere in this class.
        throw Utility.logError(logger,
            new IllegalArgumentException(String.format("Index %d is out of bounds", i)));
    }
    return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]);
}
/**
 * {@inheritDoc}
 */
@Override
public Path subpath(int begin, int end) {
    if (begin < 0 || begin >= this.getNameCount()
        || end <= begin || end > this.getNameCount()) {
        // Route through the logger, consistent with the other validation failures in this class.
        throw Utility.logError(logger, new IllegalArgumentException(
            String.format("Values of begin: %d and end: %d are invalid", begin, end)));
    }
    // Plain array slicing instead of a blocking Flux skip/take pipeline.
    String[] subnames = Arrays.copyOfRange(this.splitToElements(this.withoutRoot()), begin, end);
    return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames));
}
/**
 * In this implementation, a root component starts with another root component if the two root components are
 * equivalent strings. In other words, if the files are stored in the same container.
 *
 * {@inheritDoc}
 */
@Override
public boolean startsWith(Path path) {
/*
There can only be one instance of a file system with a given id, so comparing object identity is equivalent
to checking ids here.
*/
if (path.getFileSystem() != this.parentFileSystem) {
return false;
}
// The other path must be an element-wise prefix of this path (root included).
String[] thisPathElements = this.splitToElements();
String[] otherPathElements = ((AzurePath) path).splitToElements();
if (otherPathElements.length > thisPathElements.length) {
return false;
}
for (int i = 0; i < otherPathElements.length; i++) {
if (!otherPathElements[i].equals(thisPathElements[i])) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public boolean startsWith(String s) {
return this.startsWith(this.parentFileSystem.getPath(s));
}
/**
 * In this implementation, a root component ends with another root component if the two root components are
 * equivalent strings. In other words, if the files are stored in the same container.
 *
 * {@inheritDoc}
 */
@Override
public boolean endsWith(Path path) {
/*
There can only be one instance of a file system with a given id, so comparing object identity is equivalent
to checking ids here.
*/
if (path.getFileSystem() != this.parentFileSystem) {
return false;
}
String[] thisPathElements = this.splitToElements();
String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator());
if (otherPathElements.length > thisPathElements.length) {
return false;
}
// An absolute other path can only be a suffix if it equals this entire path.
if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) {
return false;
}
// Compare element-by-element starting from the end of both paths.
for (int i = 1; i <= otherPathElements.length; i++) {
if (!otherPathElements[otherPathElements.length - i]
.equals(thisPathElements[thisPathElements.length - i])) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public boolean endsWith(String s) {
return this.endsWith(this.parentFileSystem.getPath(s));
}
/**
 * This file system follows the standard practice mentioned in the original docs.
 *
 * {@inheritDoc}
 */
@Override
public Path normalize() {
    Deque<String> stack = new ArrayDeque<>();
    String[] pathElements = this.splitToElements();
    Path root = this.getRoot();
    String rootStr = root == null ? null : root.toString();
    for (String element : pathElements) {
        if (element.equals(".")) {
            // "." is always redundant.
            continue;
        } else if (element.equals("..")) {
            if (rootStr != null) {
                // The root is always the first element pushed, so ".." directly under the
                // root is a no-op; otherwise pop the element it cancels.
                if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) {
                    continue;
                } else {
                    stack.removeLast();
                }
            } else {
                if (stack.isEmpty()) {
                    // A relative path may legitimately begin with "..".
                    stack.addLast(element);
                } else if (stack.peekLast().equals("..")) {
                    // Fix: inspect the most recently pushed element (peekLast), not the head of
                    // the deque (peek == peekFirst). With peek(), "name/.." pairs occurring after
                    // a leading ".." failed to cancel (e.g. "../a/.." normalized to "../a/.."
                    // instead of "..").
                    stack.addLast(element);
                } else {
                    stack.removeLast();
                }
            }
        } else {
            stack.addLast(element);
        }
    }
    return this.parentFileSystem.getPath("", Arrays.copyOf(stack.toArray(), stack.size(), String[].class));
}
/**
* If the other path has a root component, it is considered absolute, and it is returned.
*
* {@inheritDoc}
*/
@Override
public Path resolve(Path path) {
if (path.isAbsolute()) {
return path;
}
if (path.getNameCount() == 0) {
return this;
}
return this.parentFileSystem.getPath(this.toString(), path.toString());
}
/**
* {@inheritDoc}
*/
@Override
public Path resolve(String s) {
return this.resolve(this.parentFileSystem.getPath(s));
}
/**
* {@inheritDoc}
*/
@Override
public Path resolveSibling(Path path) {
if (path.isAbsolute()) {
return path;
}
Path parent = this.getParent();
return parent == null ? path : parent.resolve(path);
}
/**
* {@inheritDoc}
*/
@Override
public Path resolveSibling(String s) {
return this.resolveSibling(this.parentFileSystem.getPath(s));
}
/**
 * If both paths have a root component, it is still possible to relativize one against the other.
 *
 * {@inheritDoc}
 */
@Override
public Path relativize(Path path) {
if (path.getRoot() == null ^ this.getRoot() == null) {
throw Utility.logError(logger,
new IllegalArgumentException("Both paths must be absolute or neither can be"));
}
// Work on normalized forms so "." and ".." elements do not distort the result.
AzurePath thisNormalized = (AzurePath) this.normalize();
Path otherNormalized = path.normalize();
Deque<String> deque = new ArrayDeque<>(
Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator())));
int i = 0;
String[] thisElements = thisNormalized.splitToElements();
// Strip the prefix common to both paths.
while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) {
deque.removeFirst();
i++;
}
// Each remaining element of this path requires a ".." to walk up before descending.
while (i < thisElements.length) {
deque.addFirst("..");
i++;
}
return this.parentFileSystem.getPath("", Arrays.copyOf(deque.toArray(), deque.size(), String[].class));
}
/**
* No authority component is defined for the {@code URI} returned by this method. This implementation offers the
* same equivalence guarantee as the default provider.
*
* {@inheritDoc}
*/
@Override
public URI toUri() {
try {
return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(),
null, null);
} catch (URISyntaxException e) {
throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e));
}
}
/**
* {@inheritDoc}
*/
@Override
public Path toAbsolutePath() {
if (this.isAbsolute()) {
return this;
}
return this.parentFileSystem.getDefaultDirectory().resolve(this);
}
/**
* Unsupported.
* <p>
* {@inheritDoc}
*/
@Override
public Path toRealPath(LinkOption... linkOptions) throws IOException {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public File toFile() {
throw new UnsupportedOperationException();
}
/**
* Unsupported.
* <p>
* {@inheritDoc}
*/
@Override
public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers)
throws IOException {
throw new UnsupportedOperationException();
}
/**
* Unsupported.
* <p>
* {@inheritDoc}
*/
@Override
public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*/
@Override
public Iterator<Path> iterator() {
return Flux.fromArray(this.splitToElements(this.withoutRoot()))
.map(s -> this.parentFileSystem.getPath(s))
.toIterable()
.iterator();
}
/**
* This result of this method is identical to a string comparison on the underlying path strings.
*
* {@inheritDoc}
*/
@Override
public int compareTo(Path path) {
if (!(path instanceof AzurePath)) {
throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath."));
}
return this.pathString.compareTo(((AzurePath) path).pathString);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return this.pathString;
}
/**
* A path is considered equal to another path if it is associated with the same file system instance and if the
* path strings are equivalent.
*
* {@inheritDoc}
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
AzurePath paths = (AzurePath) o;
return Objects.equals(parentFileSystem, paths.parentFileSystem)
&& Objects.equals(pathString, paths.pathString);
}
@Override
public int hashCode() {
return Objects.hash(parentFileSystem, pathString);
}
boolean validRoot(String fileStoreName) {
Boolean validRootName = Flux.fromIterable(parentFileSystem.getFileStores())
.map(FileStore::name)
.hasElement(fileStoreName)
.block();
return validRootName != null && validRootName;
}
/**
 * Returns this path's string with any root component (and the separator that follows it) removed.
 */
private String withoutRoot() {
    Path root = this.getRoot();
    String remainder = root == null
        ? this.pathString
        : this.pathString.substring(root.toString().length());
    // Strip the separator that trailed the root, if present.
    return remainder.startsWith(this.parentFileSystem.getSeparator())
        ? remainder.substring(1)
        : remainder;
}
private String[] splitToElements() {
return this.splitToElements(this.pathString);
}
} | class AzurePath implements Path {
private final ClientLogger logger = new ClientLogger(AzurePath.class);
private static final String ROOT_DIR_SUFFIX = ":";
private final AzureFileSystem parentFileSystem;
private final String pathString;
AzurePath(AzureFileSystem parentFileSystem, String first, String... more) {
this.parentFileSystem = parentFileSystem;
/*
Break all strings into their respective elements and remove empty elements. This has the effect of stripping
any trailing, leading, or internal delimiters so there are no duplicates/empty elements when we join.
*/
List<String> elements = new ArrayList<>(Arrays.asList(first.split(parentFileSystem.getSeparator())));
if (more != null) {
for (String next : more) {
elements.addAll(Arrays.asList(next.split(parentFileSystem.getSeparator())));
}
}
elements.removeIf(String::isEmpty);
this.pathString = String.join(this.parentFileSystem.getSeparator(), elements);
for (int i = 0; i < elements.size(); i++) {
String element = elements.get(i);
/*
If there is a root component, it must be the first element. A root component takes the format of
"<fileStoreName>:". The ':', or ROOT_DIR_SUFFIX, if present, can only appear once, and can only be the last
character of the first element.
*/
if (i == 0) {
if (element.contains(ROOT_DIR_SUFFIX) && element.indexOf(ROOT_DIR_SUFFIX) < element.length() - 1) {
throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " may"
+ " only be used as the last character in the root component of a path"));
}
} else if (element.contains(ROOT_DIR_SUFFIX)) {
throw Utility.logError(logger, new InvalidPathException(this.pathString, ROOT_DIR_SUFFIX + " is an "
+ "invalid character except to identify the root element of this path if there is one."));
}
}
}
/**
* {@inheritDoc}
*/
@Override
public FileSystem getFileSystem() {
return this.parentFileSystem;
}
/**
* A path is considered absolute in this file system if it contains a root component.
*
* {@inheritDoc}
*/
@Override
public boolean isAbsolute() {
return this.getRoot() != null;
}
/**
* The root component of this path also identifies the Azure Storage Container in which the file is stored. This
* method will not validate that the root component corresponds to an actual file store/container in this
* file system. It will simply return the root component of the path if one is present and syntactically valid.
*
* {@inheritDoc}
*/
@Override
public Path getRoot() {
String firstElement = this.splitToElements()[0];
if (firstElement.endsWith(ROOT_DIR_SUFFIX)) {
return this.parentFileSystem.getPath(firstElement);
}
return null;
}
/**
* {@inheritDoc}
*/
@Override
public Path getFileName() {
if (this.withoutRoot().isEmpty()) {
return null;
} else {
List<String> elements = Arrays.asList(this.splitToElements());
return this.parentFileSystem.getPath(elements.get(elements.size() - 1));
}
}
/**
* {@inheritDoc}
*/
@Override
public Path getParent() {
/*
If this path only has one element, there is no parent. Note the root is included in the parent, so we don't
use getNameCount here.
*/
if (this.splitToElements().length == 1) {
return null;
}
return this.parentFileSystem.getPath(
this.pathString.substring(0, this.pathString.lastIndexOf(this.parentFileSystem.getSeparator())));
}
/**
* {@inheritDoc}
*/
@Override
public int getNameCount() {
return this.splitToElements(this.withoutRoot()).length;
}
/**
* {@inheritDoc}
*/
@Override
public Path getName(int i) {
if (i < 0 || i >= this.getNameCount()) {
throw Utility.logError(logger, new IllegalArgumentException(String.format("Index %d is out of bounds", i)));
}
return this.parentFileSystem.getPath(this.splitToElements(this.withoutRoot())[i]);
}
/**
* {@inheritDoc}
*/
@Override
public Path subpath(int begin, int end) {
if (begin < 0 || begin >= this.getNameCount()
|| end <= begin || end > this.getNameCount()) {
throw Utility.logError(logger,
new IllegalArgumentException(String.format("Values of begin: %d and end: %d are invalid", begin, end)));
}
String[] subnames = Stream.of(this.splitToElements(this.withoutRoot()))
.skip(begin)
.limit(end - begin)
.toArray(String[]::new);
return this.parentFileSystem.getPath(String.join(this.parentFileSystem.getSeparator(), subnames));
}
/**
* In this implementation, a root component starts with another root component if the two root components are
* equivalent strings. In other words, if the files are stored in the same container.
*
* {@inheritDoc}
*/
@Override
public boolean startsWith(Path path) {
if (!path.getFileSystem().equals(this.parentFileSystem)) {
return false;
}
String[] thisPathElements = this.splitToElements();
String[] otherPathElements = ((AzurePath) path).splitToElements();
if (otherPathElements.length > thisPathElements.length) {
return false;
}
for (int i = 0; i < otherPathElements.length; i++) {
if (!otherPathElements[i].equals(thisPathElements[i])) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public boolean startsWith(String s) {
return this.startsWith(this.parentFileSystem.getPath(s));
}
/**
* In this implementation, a root component ends with another root component if the two root components are
* equivalent strings. In other words, if the files are stored in the same container.
*
* {@inheritDoc}
*/
@Override
public boolean endsWith(Path path) {
/*
There can only be one instance of a file system with a given id, so comparing object identity is equivalent
to checking ids here.
*/
if (path.getFileSystem() != this.parentFileSystem) {
return false;
}
String[] thisPathElements = this.splitToElements();
String[] otherPathElements = ((AzurePath) path).pathString.split(this.parentFileSystem.getSeparator());
if (otherPathElements.length > thisPathElements.length) {
return false;
}
if (path.getRoot() != null && otherPathElements.length != thisPathElements.length) {
return false;
}
for (int i = 1; i <= otherPathElements.length; i++) {
if (!otherPathElements[otherPathElements.length - i]
.equals(thisPathElements[thisPathElements.length - i])) {
return false;
}
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public boolean endsWith(String s) {
return this.endsWith(this.parentFileSystem.getPath(s));
}
/**
 * This file system follows the standard practice mentioned in the original docs.
 *
 * {@inheritDoc}
 */
@Override
public Path normalize() {
    Deque<String> stack = new ArrayDeque<>();
    String[] pathElements = this.splitToElements();
    Path root = this.getRoot();
    String rootStr = root == null ? null : root.toString();
    for (String element : pathElements) {
        if (element.equals(".")) {
            // "." is always redundant.
            continue;
        } else if (element.equals("..")) {
            if (rootStr != null) {
                // The root is always the first element pushed, so ".." directly under the
                // root is a no-op; otherwise pop the element it cancels.
                if (!stack.isEmpty() && stack.peekLast().equals(rootStr)) {
                    continue;
                } else {
                    stack.removeLast();
                }
            } else {
                if (stack.isEmpty()) {
                    // A relative path may legitimately begin with "..".
                    stack.addLast(element);
                } else if (stack.peekLast().equals("..")) {
                    // Fix: inspect the most recently pushed element (peekLast), not the head of
                    // the deque (peek == peekFirst). With peek(), "name/.." pairs occurring after
                    // a leading ".." failed to cancel (e.g. "../a/.." normalized to "../a/.."
                    // instead of "..").
                    stack.addLast(element);
                } else {
                    stack.removeLast();
                }
            }
        } else {
            stack.addLast(element);
        }
    }
    return this.parentFileSystem.getPath("", stack.toArray(new String[0]));
}
/**
* If the other path has a root component, it is considered absolute, and it is returned.
*
* {@inheritDoc}
*/
@Override
public Path resolve(Path path) {
if (path.isAbsolute()) {
return path;
}
if (path.getNameCount() == 0) {
return this;
}
return this.parentFileSystem.getPath(this.toString(), path.toString());
}
/**
* {@inheritDoc}
*/
@Override
public Path resolve(String s) {
return this.resolve(this.parentFileSystem.getPath(s));
}
/**
* {@inheritDoc}
*/
@Override
public Path resolveSibling(Path path) {
if (path.isAbsolute()) {
return path;
}
Path parent = this.getParent();
return parent == null ? path : parent.resolve(path);
}
/**
* {@inheritDoc}
*/
@Override
public Path resolveSibling(String s) {
return this.resolveSibling(this.parentFileSystem.getPath(s));
}
/**
* If both paths have a root component, it is still possible to relativize one against the other.
*
* {@inheritDoc}
*/
@Override
public Path relativize(Path path) {
if (path.getRoot() == null ^ this.getRoot() == null) {
throw Utility.logError(logger,
new IllegalArgumentException("Both paths must be absolute or neither can be"));
}
AzurePath thisNormalized = (AzurePath) this.normalize();
Path otherNormalized = path.normalize();
Deque<String> deque = new ArrayDeque<>(
Arrays.asList(otherNormalized.toString().split(this.parentFileSystem.getSeparator())));
int i = 0;
String[] thisElements = thisNormalized.splitToElements();
while (i < thisElements.length && !deque.isEmpty() && thisElements[i].equals(deque.peekFirst())) {
deque.removeFirst();
i++;
}
while (i < thisElements.length) {
deque.addFirst("..");
i++;
}
return this.parentFileSystem.getPath("", deque.toArray(new String[0]));
}
/**
* No authority component is defined for the {@code URI} returned by this method. This implementation offers the
* same equivalence guarantee as the default provider.
*
* {@inheritDoc}
*/
@Override
public URI toUri() {
try {
return new URI(this.parentFileSystem.provider().getScheme(), null, "/" + this.toAbsolutePath().toString(),
null, null);
} catch (URISyntaxException e) {
throw Utility.logError(logger, new IllegalStateException("Unable to create valid URI from path", e));
}
}
/**
* {@inheritDoc}
*/
@Override
public Path toAbsolutePath() {
if (this.isAbsolute()) {
return this;
}
return this.parentFileSystem.getDefaultDirectory().resolve(this);
}
/**
 * Unsupported.
 * <p>
 * {@inheritDoc}
 *
 * @throws UnsupportedOperationException always; symbolic links are not supported by this file system.
 */
@Override
public Path toRealPath(LinkOption... linkOptions) throws IOException {
throw new UnsupportedOperationException("Symbolic links are not supported.");
}
/**
 * Unsupported.
 * <p>
 * {@inheritDoc}
 *
 * @throws UnsupportedOperationException always.
 */
@Override
public File toFile() {
throw new UnsupportedOperationException();
}
/**
 * Unsupported.
 * <p>
 * {@inheritDoc}
 *
 * @throws UnsupportedOperationException always; watch events are not supported.
 */
@Override
public WatchKey register(WatchService watchService, WatchEvent.Kind<?>[] kinds, WatchEvent.Modifier... modifiers)
throws IOException {
throw new UnsupportedOperationException("WatchEvents are not supported.");
}
/**
 * Unsupported.
 * <p>
 * {@inheritDoc}
 *
 * @throws UnsupportedOperationException always; watch events are not supported.
 */
@Override
public WatchKey register(WatchService watchService, WatchEvent.Kind<?>... kinds) throws IOException {
throw new UnsupportedOperationException("WatchEvents are not supported.");
}
/**
 * Returns an iterator over the name elements of this path, excluding any root component.
 * <p>
 * {@inheritDoc}
 */
@Override
public Iterator<Path> iterator() {
// NOTE(review): the previous javadoc said "Unsupported", but this method is fully implemented —
// it splits the root-less path string into elements and wraps each element as a one-element path.
return Arrays.asList(Stream.of(this.splitToElements(this.withoutRoot()))
.map(s -> this.parentFileSystem.getPath(s))
.toArray(Path[]::new))
.iterator();
}
/**
 * This result of this method is identical to a string comparison on the underlying path strings.
 *
 * {@inheritDoc}
 */
@Override
public int compareTo(Path path) {
    // Only AzurePath instances are comparable; the Path contract permits a ClassCastException here.
    if (!(path instanceof AzurePath)) {
        throw Utility.logError(logger, new ClassCastException("Other path is not an instance of AzurePath."));
    }
    AzurePath other = (AzurePath) path;
    return this.pathString.compareTo(other.pathString);
}
/**
 * Returns the raw string backing this path.
 *
 * {@inheritDoc}
 */
@Override
public String toString() {
return this.pathString;
}
/**
 * A path is considered equal to another path if it is associated with the same file system instance and if the
 * path strings are equivalent.
 *
 * {@inheritDoc}
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null) {
        return false;
    }
    // Exact class match (not instanceof) so the relation stays symmetric for subclasses.
    if (getClass() != o.getClass()) {
        return false;
    }
    AzurePath that = (AzurePath) o;
    return Objects.equals(this.parentFileSystem, that.parentFileSystem)
        && Objects.equals(this.pathString, that.pathString);
}
// Kept consistent with equals(Object): both are derived from parentFileSystem and pathString.
@Override
public int hashCode() {
return Objects.hash(parentFileSystem, pathString);
}
/**
 * Returns whether the given name matches the name of one of this file system's file stores,
 * i.e. whether it is a valid root for a path on this file system.
 */
boolean validRoot(String fileStoreName) {
    for (FileStore store : parentFileSystem.getFileStores()) {
        if (fileStoreName.equals(store.name())) {
            return true;
        }
    }
    return false;
}
/**
 * Returns this path's string with any root component and a leading separator stripped off.
 */
private String withoutRoot() {
    String result = this.pathString;
    Path root = this.getRoot();
    if (root != null) {
        // Drop the root prefix from the front of the path string.
        result = result.substring(root.toString().length());
    }
    if (result.startsWith(this.parentFileSystem.getSeparator())) {
        result = result.substring(1);
    }
    return result;
}
// Splits this path's full string (including any root component) into its element strings,
// delegating to the String-based overload defined elsewhere in this class.
private String[] splitToElements() {
return this.splitToElements(this.pathString);
}
} |
Maybe it would be better to open a JIRA issue for supporting UPSERT (as a placeholder, simply stating that the semantics need to be defined first) and then link to that ticket from here rather than the one that causes us to disallow it? What do you think? | public void validateInsert(SqlInsert insert) {
super.validateInsert(insert);
if (insert.isUpsert()) {
throw new ValidationException("UPSERT INTO statement is not supported");
}
} | public void validateInsert(SqlInsert insert) {
if (insert.isUpsert()) {
throw new ValidationException(
"UPSERT INTO statement is not supported. Please use INSERT INTO instead.");
}
} | class FlinkCalciteSqlValidator extends SqlValidatorImpl {
private SqlNode sqlNodeForExpectedOutputType;
private RelDataType expectedOutputType;
public FlinkCalciteSqlValidator(
SqlOperatorTable opTab,
SqlValidatorCatalogReader catalogReader,
RelDataTypeFactory typeFactory,
SqlValidator.Config config) {
super(opTab, catalogReader, typeFactory, config);
}
public void setExpectedOutputType(SqlNode sqlNode, RelDataType expectedOutputType) {
this.sqlNodeForExpectedOutputType = sqlNode;
this.expectedOutputType = expectedOutputType;
}
public Optional<RelDataType> getExpectedOutputType(SqlNode sqlNode) {
if (sqlNode == sqlNodeForExpectedOutputType) {
return Optional.of(expectedOutputType);
}
return Optional.empty();
}
@Override
public void validateLiteral(SqlLiteral literal) {
// Flink's DECIMAL type caps precision at DecimalType.MAX_PRECISION; reject decimal
// literals that exceed it before delegating to the default validation.
if (literal.getTypeName() == DECIMAL) {
final BigDecimal decimal = literal.getValueAs(BigDecimal.class);
if (decimal.precision() > DecimalType.MAX_PRECISION) {
throw newValidationError(
literal, Static.RESOURCE.numberLiteralOutOfRange(decimal.toString()));
}
}
super.validateLiteral(literal);
}
@Override
protected void validateJoin(SqlJoin join, SqlValidatorScope scope) {
if (join.getJoinType() == JoinType.LEFT
&& SqlUtil.stripAs(join.getRight()).getKind() == SqlKind.COLLECTION_TABLE) {
SqlNode right = SqlUtil.stripAs(join.getRight());
if (right instanceof SqlBasicCall) {
SqlBasicCall call = (SqlBasicCall) right;
SqlNode operand0 = call.operand(0);
if (operand0 instanceof SqlBasicCall
&& ((SqlBasicCall) operand0).getOperator()
instanceof SqlWindowTableFunction) {
return;
}
}
final SqlNode condition = join.getCondition();
if (condition != null
&& (!SqlUtil.isLiteral(condition)
|| ((SqlLiteral) condition).getValueAs(Boolean.class)
!= Boolean.TRUE)) {
throw new ValidationException(
String.format(
"Left outer joins with a table function do not accept a predicate such as %s. "
+ "Only literal TRUE is accepted.",
condition));
}
}
super.validateJoin(join, scope);
}
@Override
public void validateColumnListParams(
SqlFunction function, List<RelDataType> argTypes, List<SqlNode> operands) {
}
@Override
} | class FlinkCalciteSqlValidator extends SqlValidatorImpl {
private SqlNode sqlNodeForExpectedOutputType;
private RelDataType expectedOutputType;
public FlinkCalciteSqlValidator(
SqlOperatorTable opTab,
SqlValidatorCatalogReader catalogReader,
RelDataTypeFactory typeFactory,
SqlValidator.Config config) {
super(opTab, catalogReader, typeFactory, config);
}
public void setExpectedOutputType(SqlNode sqlNode, RelDataType expectedOutputType) {
this.sqlNodeForExpectedOutputType = sqlNode;
this.expectedOutputType = expectedOutputType;
}
public Optional<RelDataType> getExpectedOutputType(SqlNode sqlNode) {
if (sqlNode == sqlNodeForExpectedOutputType) {
return Optional.of(expectedOutputType);
}
return Optional.empty();
}
@Override
public void validateLiteral(SqlLiteral literal) {
if (literal.getTypeName() == DECIMAL) {
final BigDecimal decimal = literal.getValueAs(BigDecimal.class);
if (decimal.precision() > DecimalType.MAX_PRECISION) {
throw newValidationError(
literal, Static.RESOURCE.numberLiteralOutOfRange(decimal.toString()));
}
}
super.validateLiteral(literal);
}
@Override
protected void validateJoin(SqlJoin join, SqlValidatorScope scope) {
if (join.getJoinType() == JoinType.LEFT
&& SqlUtil.stripAs(join.getRight()).getKind() == SqlKind.COLLECTION_TABLE) {
SqlNode right = SqlUtil.stripAs(join.getRight());
if (right instanceof SqlBasicCall) {
SqlBasicCall call = (SqlBasicCall) right;
SqlNode operand0 = call.operand(0);
if (operand0 instanceof SqlBasicCall
&& ((SqlBasicCall) operand0).getOperator()
instanceof SqlWindowTableFunction) {
return;
}
}
final SqlNode condition = join.getCondition();
if (condition != null
&& (!SqlUtil.isLiteral(condition)
|| ((SqlLiteral) condition).getValueAs(Boolean.class)
!= Boolean.TRUE)) {
throw new ValidationException(
String.format(
"Left outer joins with a table function do not accept a predicate such as %s. "
+ "Only literal TRUE is accepted.",
condition));
}
}
super.validateJoin(join, scope);
}
@Override
public void validateColumnListParams(
SqlFunction function, List<RelDataType> argTypes, List<SqlNode> operands) {
}
@Override
} | |
I don't mind either. I like your substitution in principle but agree we shouldn't have substitutions when we can avoid them... my suggestion was more hypothetical about contributing a similar patch upstream: if the driver could store the `Version` initialized in a static block, we wouldn't need patching. But I wonder if there's a reason they went to such lengths of lazily initializing it (?). Regarding the `@Fold` annotation... it seems it has been moved in GraalVM into an internal module - but don't worry it's not important. In principle the Fold annotation allows (allowed?) a method computation to be replaced with a constant, whose value is defined by running that method once during build time. | void addNativeImageResources(BuildProducer<NativeImageResourceBuildItem> resources) {
resources.produce(new NativeImageResourceBuildItem("mariadb.properties", "driver.properties"));
} | resources.produce(new NativeImageResourceBuildItem("mariadb.properties", "driver.properties")); | void addNativeImageResources(BuildProducer<NativeImageResourceBuildItem> resources) {
resources.produce(new NativeImageResourceBuildItem("mariadb.properties"));
} | class JDBCMariaDBProcessor {
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(Feature.JDBC_MARIADB);
}
@BuildStep
void registerDriver(BuildProducer<JdbcDriverBuildItem> jdbcDriver) {
jdbcDriver.produce(
new JdbcDriverBuildItem(DatabaseKind.MARIADB, "org.mariadb.jdbc.Driver", "org.mariadb.jdbc.MariaDbDataSource"));
}
@BuildStep
DevServicesDatasourceConfigurationHandlerBuildItem devDbHandler() {
return DevServicesDatasourceConfigurationHandlerBuildItem.jdbc(DatabaseKind.MARIADB);
}
@BuildStep
void configureAgroalConnection(BuildProducer<AdditionalBeanBuildItem> additionalBeans,
Capabilities capabilities) {
if (capabilities.isPresent(Capability.AGROAL)) {
additionalBeans.produce(new AdditionalBeanBuildItem.Builder().addBeanClass(MariaDBAgroalConnectionConfigurer.class)
.setDefaultScope(BuiltinScope.APPLICATION.getName())
.setUnremovable()
.build());
}
}
@BuildStep
void registerServiceBinding(Capabilities capabilities,
BuildProducer<ServiceProviderBuildItem> serviceProvider,
BuildProducer<DefaultDataSourceDbKindBuildItem> dbKind) {
if (capabilities.isPresent(Capability.KUBERNETES_SERVICE_BINDING)) {
serviceProvider.produce(
new ServiceProviderBuildItem("io.quarkus.kubernetes.service.binding.runtime.ServiceBindingConverter",
MariaDBServiceBindingConverter.class.getName()));
}
dbKind.produce(new DefaultDataSourceDbKindBuildItem(DatabaseKind.MARIADB));
}
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
} | class JDBCMariaDBProcessor {
@BuildStep
FeatureBuildItem feature() {
return new FeatureBuildItem(Feature.JDBC_MARIADB);
}
@BuildStep
void registerDriver(BuildProducer<JdbcDriverBuildItem> jdbcDriver, BuildProducer<DefaultDataSourceDbKindBuildItem> dbKind) {
jdbcDriver.produce(
new JdbcDriverBuildItem(DatabaseKind.MARIADB, "org.mariadb.jdbc.Driver", "org.mariadb.jdbc.MariaDbDataSource"));
dbKind.produce(new DefaultDataSourceDbKindBuildItem(DatabaseKind.MARIADB));
}
@BuildStep
DevServicesDatasourceConfigurationHandlerBuildItem devDbHandler() {
return DevServicesDatasourceConfigurationHandlerBuildItem.jdbc(DatabaseKind.MARIADB);
}
@BuildStep
void configureAgroalConnection(BuildProducer<AdditionalBeanBuildItem> additionalBeans,
Capabilities capabilities) {
if (capabilities.isPresent(Capability.AGROAL)) {
additionalBeans.produce(new AdditionalBeanBuildItem.Builder().addBeanClass(MariaDBAgroalConnectionConfigurer.class)
.setDefaultScope(BuiltinScope.APPLICATION.getName())
.setUnremovable()
.build());
}
}
@BuildStep
void registerAuthenticationPlugins(BuildProducer<ServiceProviderBuildItem> serviceProvider) {
serviceProvider
.produce(ServiceProviderBuildItem.allProvidersFromClassPath("org.mariadb.jdbc.plugin.AuthenticationPlugin"));
}
@BuildStep(onlyIf = NativeOrNativeSourcesBuild.class)
@BuildStep
void registerServiceBinding(Capabilities capabilities,
BuildProducer<ServiceProviderBuildItem> serviceProvider) {
if (capabilities.isPresent(Capability.KUBERNETES_SERVICE_BINDING)) {
serviceProvider.produce(
new ServiceProviderBuildItem("io.quarkus.kubernetes.service.binding.runtime.ServiceBindingConverter",
MariaDBServiceBindingConverter.class.getName()));
}
}
} |
Should we also add a unit test for ALTER TABLE? | public void testNormal() throws DdlException {
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl1\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsNoException(() -> createTable("create table test.tbl2\n" + "(k1 int, k2 int)\n"
+ "duplicate key(k1)\n" + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl3\n" + "(k1 varchar(40), k2 int)\n" + "duplicate key(k1)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl4\n" + "(k1 varchar(40), k2 int, v1 int sum)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.tbl5\n" + "(k1 varchar(40), k2 int, v1 int sum)\n" + "aggregate key(k1,k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.tbl6\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl7\n" + "(k1 varchar(40), k2 int)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1');"));
ConfigBase.setMutableConfig("disable_storage_medium_check", "true");
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tb7(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 properties('replication_num' = '1', 'storage_medium' = 'ssd');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.compression1(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 \n"
+ "properties('replication_num' = '1', 'compression' = 'lz4f');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.compression2(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 \n"
+ "properties('replication_num' = '1', 'compression' = 'snappy');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_type' = 'int');"));
/**
* create table with list partition
*/
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl9\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\"),\n"
+ "partition p2 values in (\"2\")\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl10\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\", \"3\", \"5\"),\n"
+ "partition p2 values in (\"2\", \"4\", \"6\"),\n"
+ "partition p3 values in (\"7\", \"8\")\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl11\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl12\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
+ "partition p3 values in ((\"3\", \"tianjin\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl13\n"
+ "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_col' = 'v1');"));
Database db = Env.getCurrentInternalCatalog().getDbOrDdlException("default_cluster:test");
OlapTable tbl6 = (OlapTable) db.getTableOrDdlException("tbl6");
Assert.assertTrue(tbl6.getColumn("k1").isKey());
Assert.assertTrue(tbl6.getColumn("k2").isKey());
Assert.assertTrue(tbl6.getColumn("k3").isKey());
OlapTable tbl7 = (OlapTable) db.getTableOrDdlException("tbl7");
Assert.assertTrue(tbl7.getColumn("k1").isKey());
Assert.assertFalse(tbl7.getColumn("k2").isKey());
Assert.assertTrue(tbl7.getColumn("k2").getAggregationType() == AggregateType.NONE);
OlapTable tbl8 = (OlapTable) db.getTableOrDdlException("tbl8");
Assert.assertTrue(tbl8.getColumn("k1").isKey());
Assert.assertTrue(tbl8.getColumn("k2").isKey());
Assert.assertFalse(tbl8.getColumn("v1").isKey());
Assert.assertTrue(tbl8.getColumn(Column.SEQUENCE_COL).getAggregationType() == AggregateType.REPLACE);
OlapTable tbl13 = (OlapTable) db.getTableOrDdlException("tbl13");
Assert.assertTrue(tbl13.getColumn(Column.SEQUENCE_COL).getAggregationType() == AggregateType.REPLACE);
Assert.assertTrue(tbl13.getColumn(Column.SEQUENCE_COL).getType() == Type.INT);
Assert.assertEquals(tbl13.getSequenceMapCol(), "v1");
} | ExceptionChecker | public void testNormal() throws DdlException {
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl1\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsNoException(() -> createTable("create table test.tbl2\n" + "(k1 int, k2 int)\n"
+ "duplicate key(k1)\n" + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl3\n" + "(k1 varchar(40), k2 int)\n" + "duplicate key(k1)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.tbl4\n" + "(k1 varchar(40), k2 int, v1 int sum)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.tbl5\n" + "(k1 varchar(40), k2 int, v1 int sum)\n" + "aggregate key(k1,k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.tbl6\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl7\n" + "(k1 varchar(40), k2 int)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1');"));
ConfigBase.setMutableConfig("disable_storage_medium_check", "true");
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tb7(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 properties('replication_num' = '1', 'storage_medium' = 'ssd');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.compression1(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 \n"
+ "properties('replication_num' = '1', 'compression' = 'lz4f');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.compression2(key1 int, key2 varchar(10)) \n"
+ "distributed by hash(key1) buckets 1 \n"
+ "properties('replication_num' = '1', 'compression' = 'snappy');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_type' = 'int');"));
/**
* create table with list partition
*/
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl9\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\"),\n"
+ "partition p2 values in (\"2\")\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl10\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\", \"3\", \"5\"),\n"
+ "partition p2 values in (\"2\", \"4\", \"6\"),\n"
+ "partition p3 values in (\"7\", \"8\")\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl11\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl12\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
+ "partition p3 values in ((\"3\", \"tianjin\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsNoException(() -> createTable("create table test.tbl13\n"
+ "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_col' = 'v1');"));
Database db = Env.getCurrentInternalCatalog().getDbOrDdlException("default_cluster:test");
OlapTable tbl6 = (OlapTable) db.getTableOrDdlException("tbl6");
Assert.assertTrue(tbl6.getColumn("k1").isKey());
Assert.assertTrue(tbl6.getColumn("k2").isKey());
Assert.assertTrue(tbl6.getColumn("k3").isKey());
OlapTable tbl7 = (OlapTable) db.getTableOrDdlException("tbl7");
Assert.assertTrue(tbl7.getColumn("k1").isKey());
Assert.assertFalse(tbl7.getColumn("k2").isKey());
Assert.assertTrue(tbl7.getColumn("k2").getAggregationType() == AggregateType.NONE);
OlapTable tbl8 = (OlapTable) db.getTableOrDdlException("tbl8");
Assert.assertTrue(tbl8.getColumn("k1").isKey());
Assert.assertTrue(tbl8.getColumn("k2").isKey());
Assert.assertFalse(tbl8.getColumn("v1").isKey());
Assert.assertTrue(tbl8.getColumn(Column.SEQUENCE_COL).getAggregationType() == AggregateType.REPLACE);
OlapTable tbl13 = (OlapTable) db.getTableOrDdlException("tbl13");
Assert.assertTrue(tbl13.getColumn(Column.SEQUENCE_COL).getAggregationType() == AggregateType.REPLACE);
Assert.assertTrue(tbl13.getColumn(Column.SEQUENCE_COL).getType() == Type.INT);
Assert.assertEquals(tbl13.getSequenceMapCol(), "v1");
} | class CreateTableTest {
private static String runningDir = "fe/mocked/CreateTableTest2/" + UUID.randomUUID().toString() + "/";
private static ConnectContext connectContext;
@BeforeClass
public static void beforeClass() throws Exception {
Config.disable_storage_medium_check = true;
UtFrameUtils.createDorisCluster(runningDir);
connectContext = UtFrameUtils.createDefaultCtx();
String createDbStmtStr = "create database test;";
CreateDbStmt createDbStmt = (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt(createDbStmtStr, connectContext);
Env.getCurrentEnv().createDb(createDbStmt);
}
@AfterClass
public static void tearDown() {
File file = new File(runningDir);
file.delete();
}
private static void createTable(String sql) throws Exception {
CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(sql, connectContext);
Env.getCurrentEnv().createTable(createTableStmt);
}
@Test
public void testDuplicateCreateTable() throws Exception {
// Creating the same colocate table twice with "if not exists" must leave the tablet
// inverted index and the colocate group index unchanged after the second attempt.
Env env = Env.getCurrentEnv();
String sql = "create table if not exists test.tbl1_colocate\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1','colocate_with'='test'); ";
createTable(sql);
// Snapshot index state after the first (successful) creation.
Set<Long> tabletIdSetAfterCreateFirstTable = env.getTabletInvertedIndex().getReplicaMetaTable().rowKeySet();
Set<TabletMeta> tabletMetaSetBeforeCreateFirstTable =
new HashSet<>(env.getTabletInvertedIndex().getTabletMetaTable().values());
Set<Long> colocateTableIdBeforeCreateFirstTable = env.getColocateTableIndex().getTable2Group().keySet();
Assert.assertTrue(colocateTableIdBeforeCreateFirstTable.size() > 0);
Assert.assertTrue(tabletIdSetAfterCreateFirstTable.size() > 0);
// Second creation should be a no-op thanks to "if not exists".
createTable(sql);
Set<Long> tabletIdSetAfterDuplicateCreateTable1 = env.getTabletInvertedIndex().getReplicaMetaTable().rowKeySet();
Set<Long> tabletIdSetAfterDuplicateCreateTable2 = env.getTabletInvertedIndex().getBackingReplicaMetaTable().columnKeySet();
Set<Long> tabletIdSetAfterDuplicateCreateTable3 = env.getTabletInvertedIndex().getTabletMetaMap().keySet();
Set<TabletMeta> tabletIdSetAfterDuplicateCreateTable4 =
new HashSet<>(env.getTabletInvertedIndex().getTabletMetaTable().values());
// Every view of the inverted index must be identical to the post-first-creation snapshot.
Assert.assertTrue(tabletIdSetAfterCreateFirstTable.equals(tabletIdSetAfterDuplicateCreateTable1));
Assert.assertTrue(tabletIdSetAfterCreateFirstTable.equals(tabletIdSetAfterDuplicateCreateTable2));
Assert.assertTrue(tabletIdSetAfterCreateFirstTable.equals(tabletIdSetAfterDuplicateCreateTable3));
Assert.assertTrue(tabletMetaSetBeforeCreateFirstTable.equals(tabletIdSetAfterDuplicateCreateTable4));
// The colocate group membership must also be unchanged.
Set<Long> colocateTableIdAfterCreateFirstTable = env.getColocateTableIndex().getTable2Group().keySet();
Assert.assertTrue(colocateTableIdBeforeCreateFirstTable.equals(colocateTableIdAfterCreateFirstTable));
}
/**
 * Exercises a batch of invalid CREATE TABLE statements and asserts the expected
 * exception type and message for each case: distribution/partition column type
 * checks, short-key limits, replication policy failures, sequence-column rules,
 * list/range partition mismatches, dynamic partition restrictions, and random
 * distribution limits.
 *
 * NOTE(fix): the method previously carried two {@code @Test} annotations, which
 * is a compile error since JUnit's {@code @Test} is not repeatable.
 */
@Test
public void testAbnormal() throws DdlException {
    ExceptionChecker.expectThrowsWithMsg(DdlException.class,
            "Floating point type should not be used in distribution column",
            () -> createTable("create table test.atbl1\n" + "(k1 int, k2 float)\n" + "duplicate key(k1)\n"
                    + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
    ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
            "Floating point type column can not be partition column",
            () -> createTable("create table test.atbl3\n" + "(k1 int, k2 int, k3 float)\n" + "duplicate key(k1)\n"
                    + "partition by range(k3)\n" + "(partition p1 values less than(\"10\"))\n"
                    + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
    ExceptionChecker.expectThrowsWithMsg(DdlException.class,
            "Varchar should not in the middle of short keys",
            () -> createTable("create table test.atbl3\n" + "(k1 varchar(40), k2 int, k3 int)\n"
                    + "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
                    + "properties('replication_num' = '1', 'short_key' = '3');"));
    ExceptionChecker.expectThrowsWithMsg(DdlException.class, "Short key is too large. should less than: 3",
            () -> createTable("create table test.atbl4\n" + "(k1 int, k2 int, k3 int)\n"
                    + "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
                    + "properties('replication_num' = '1', 'short_key' = '4');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Failed to find 3 backends for policy",
                    () -> createTable("create table test.atbl5\n" + "(k1 int, k2 int, k3 int)\n"
                            + "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
                            + "properties('replication_num' = '3');"));
    ExceptionChecker.expectThrowsNoException(
            () -> createTable("create table test.atbl6\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
                    + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Table 'atbl6' already exists",
                    () -> createTable("create table test.atbl6\n" + "(k1 int, k2 int, k3 int)\n"
                            + "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
                            + "properties('replication_num' = '1');"));
    // Re-enable the storage medium check so SSD placement can fail below.
    ConfigBase.setMutableConfig("disable_storage_medium_check", "false");
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, " Failed to find 1 backends for policy:",
                    () -> createTable("create table test.tb7(key1 int, key2 varchar(10)) distributed by hash(key1) \n"
                            + "buckets 1 properties('replication_num' = '1', 'storage_medium' = 'ssd');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "sequence column only support UNIQUE_KEYS",
                    () -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int sum)\n"
                            + "aggregate key(k1, k2)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
                            + "'function_column.sequence_type' = 'int');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "sequence type only support integer types and date types",
                    () -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
                            + "unique key(k1, k2)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
                            + "'function_column.sequence_type' = 'double');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "The sequence_col and sequence_type cannot be set at the same time",
                    () -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
                            + "unique key(k1, k2)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
                            + "'function_column.sequence_type' = 'int', 'function_column.sequence_col' = 'v1');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "The specified sequence column[v3] not exists",
                    () -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
                            + "unique key(k1, k2)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
                            + "'function_column.sequence_col' = 'v3');"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Sequence type only support integer types and date types",
                    () -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
                            + "unique key(k1, k2)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
                            + "'function_column.sequence_col' = 'k1');"));
    /**
     * create table with list partition
     */
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "Syntax error", () -> createTable("create table test.tbl9\n"
                    + "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
                    + "partition by list(k1)\n"
                    + "(\n"
                    + "partition p1 values in (\"1\"),\n"
                    + "partition p2 values in ()\n"
                    + ")\n"
                    + "distributed by hash(k2) buckets 1\n"
                    + "properties('replication_num' = '1');"));
    ExceptionChecker
            .expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[2] is not equal to partition column size[1]",
                    () -> createTable("create table test.tbl10\n"
                            + "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
                            + "partition by list(k1)\n"
                            + "(\n"
                            + "partition p1 values in (\"1\", \"3\", \"5\"),\n"
                            + "partition p2 values in (\"2\", \"4\", \"6\"),\n"
                            + "partition p3 values in ((\"7\", \"8\"))\n"
                            + ")\n"
                            + "distributed by hash(k2) buckets 1\n"
                            + "properties('replication_num' = '1');"));
    ExceptionChecker
            .expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[1] is not equal to partition column size[2]",
                    () -> createTable("create table test.tbl11\n"
                            + "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
                            + "partition by list(k1, k2)\n"
                            + "(\n"
                            + "partition p1 values in ((\"1\", \"beijing\")),\n"
                            + "partition p2 values in (\"2\", \"beijing\")\n"
                            + ")\n"
                            + "distributed by hash(k2) buckets 1\n"
                            + "properties('replication_num' = '1');"));
    ExceptionChecker
            .expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[3] is not equal to partition column size[2]",
                    () -> createTable("create table test.tbl12\n"
                            + "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
                            + "partition by list(k1, k2)\n"
                            + "(\n"
                            + "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
                            + "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
                            + "partition p3 values in ((\"3\", \"tianjin\", \"3\"))\n"
                            + ")\n"
                            + "distributed by hash(k2) buckets 1\n"
                            + "properties('replication_num' = '1');"));
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "Syntax error",
                    () -> createTable("create table test.tbl13\n"
                            + "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
                            + "partition by list(k1, k2)\n"
                            + "(\n"
                            + "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
                            + "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
                            + "partition p3 values in ()\n"
                            + ")\n"
                            + "distributed by hash(k2) buckets 1\n"
                            + "properties('replication_num' = '1');"));
    /**
     * create table with both list and range partition
     */
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "You can only use in values to create list partitions",
                    () -> createTable("CREATE TABLE test.tbl14 (\n"
                            + " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY LIST(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES less than (\"1\"),\n"
                            + " PARTITION p2 VALUES less than (\"2\"),\n"
                            + " partition p3 values less than (\"5\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "You can only use fixed or less than values to create range partitions",
                    () -> createTable("CREATE TABLE test.tbl15 (\n"
                            + " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY range(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES in (\"1\"),\n"
                            + " PARTITION p2 VALUES in (\"2\"),\n"
                            + " partition p3 values in (\"5\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "You can only use in values to create list partitions",
                    () -> createTable("CREATE TABLE test.tbl15 (\n"
                            + " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY LIST(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES in (\"1\"),\n"
                            + " PARTITION p2 VALUES in (\"2\"),\n"
                            + " partition p3 values less than (\"5\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "You can only use fixed or less than values to create range partitions",
                    () -> createTable("CREATE TABLE test.tbl16 (\n"
                            + " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY RANGE(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES less than (\"1\"),\n"
                            + " PARTITION p2 VALUES less than (\"2\"),\n"
                            + " partition p3 values in (\"5\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Invalid number format: beijing",
                    () -> createTable("CREATE TABLE test.tbl17 (\n"
                            + " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY range(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES less than (\"beijing\"),\n"
                            + " PARTITION p2 VALUES less than (\"shanghai\"),\n"
                            + " partition p3 values less than (\"tianjin\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Invalid number format: beijing",
                    () -> createTable("CREATE TABLE test.tbl18 (\n"
                            + " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
                            + ")\n"
                            + "PARTITION BY list(k1)\n"
                            + "(\n"
                            + " PARTITION p1 VALUES in (\"beijing\"),\n"
                            + " PARTITION p2 VALUES in (\"shanghai\"),\n"
                            + " partition p3 values in (\"tianjin\")\n"
                            + ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
                            + "PROPERTIES(\"replication_num\" = \"1\");"));
    /**
     * dynamic partition table
     */
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Only support dynamic partition properties on range partition table",
                    () -> createTable("CREATE TABLE test.tbl19\n"
                            + "(\n"
                            + " k1 DATE not null\n"
                            + ")\n"
                            + "PARTITION BY LIST(k1) ()\n"
                            + "DISTRIBUTED BY HASH(k1)\n"
                            + "PROPERTIES\n"
                            + "(\n"
                            + " \"dynamic_partition.enable\" = \"true\",\n"
                            + " \"dynamic_partition.time_unit\" = \"MONTH\",\n"
                            + " \"dynamic_partition.end\" = \"2\",\n"
                            + " \"dynamic_partition.prefix\" = \"p\",\n"
                            + " \"dynamic_partition.buckets\" = \"8\",\n"
                            + " \"dynamic_partition.start_day_of_month\" = \"3\"\n"
                            + ");\n"));
    ExceptionChecker
            .expectThrowsWithMsg(DdlException.class, "Only support dynamic partition properties on range partition table",
                    () -> createTable("CREATE TABLE test.tbl20\n"
                            + "(\n"
                            + " k1 DATE\n"
                            + ")\n"
                            + "DISTRIBUTED BY HASH(k1)\n"
                            + "PROPERTIES\n"
                            + "(\n"
                            + " \"dynamic_partition.enable\" = \"true\",\n"
                            + " \"dynamic_partition.time_unit\" = \"MONTH\",\n"
                            + " \"dynamic_partition.end\" = \"2\",\n"
                            + " \"dynamic_partition.prefix\" = \"p\",\n"
                            + " \"dynamic_partition.buckets\" = \"8\",\n"
                            + " \"dynamic_partition.start_day_of_month\" = \"3\"\n"
                            + ");"));
    ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
            "Create unique keys table should not contain random distribution desc",
            () -> createTable("CREATE TABLE test.tbl21\n"
                    + "(\n"
                    + " `k1` bigint(20) NULL COMMENT \"\",\n"
                    + " `k2` largeint(40) NULL COMMENT \"\",\n"
                    + " `v1` varchar(204) NULL COMMENT \"\",\n"
                    + " `v2` smallint(6) NULL DEFAULT \"10\" COMMENT \"\"\n"
                    + ") ENGINE=OLAP\n"
                    + "UNIQUE KEY(`k1`, `k2`)\n"
                    + "DISTRIBUTED BY RANDOM BUCKETS 32\n"
                    + "PROPERTIES (\n"
                    + "\"replication_allocation\" = \"tag.location.default: 1\"\n"
                    + ");"));
    ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
            "Create aggregate keys table with value columns of which aggregate type"
                    + " is REPLACE should not contain random distribution desc",
            () -> createTable("CREATE TABLE test.tbl22\n"
                    + "(\n"
                    + " `k1` bigint(20) NULL COMMENT \"\",\n"
                    + " `k2` largeint(40) NULL COMMENT \"\",\n"
                    + " `v1` bigint(20) REPLACE NULL COMMENT \"\",\n"
                    + " `v2` smallint(6) REPLACE_IF_NOT_NULL NULL DEFAULT \"10\" COMMENT \"\"\n"
                    + ") ENGINE=OLAP\n"
                    + "AGGREGATE KEY(`k1`, `k2`)\n"
                    + "DISTRIBUTED BY RANDOM BUCKETS 32\n"
                    + "PROPERTIES (\n"
                    + "\"replication_allocation\" = \"tag.location.default: 1\"\n"
                    + ");"));
}
/**
 * Verifies the 'data_sort.sort_type' / 'data_sort.col_num' table properties:
 * 'lexical' and 'zorder' sort types are accepted (with an optional column
 * count), while an out-of-range or empty 'data_sort.col_num' is rejected.
 */
@Test
public void testZOrderTable() {
    // Lexical sort type: accepted without extra properties.
    ExceptionChecker.expectThrowsNoException(() -> createTable(
            "create table test.zorder_tbl1\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
                    + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                    + "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
                    + " 'data_sort.sort_type' = 'lexical');"));
    // Z-order sort type: accepted with the default column count.
    ExceptionChecker.expectThrowsNoException(() -> createTable(
            "create table test.zorder_tbl2\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
                    + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                    + "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
                    + " 'data_sort.sort_type' = 'zorder');"));
    // Z-order with an explicit, valid column count.
    ExceptionChecker.expectThrowsNoException(() -> createTable(
            "create table test.zorder_tbl3\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
                    + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                    + "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
                    + " 'data_sort.sort_type' = 'zorder',"
                    + " 'data_sort.col_num' = '2');"));
    // col_num below the minimum of 2 is rejected.
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "z-order needs 2 columns at least, 3 columns at most",
                    () -> createTable("create table test.zorder_tbl4\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
                            + " 'data_sort.sort_type' = 'zorder',"
                            + " 'data_sort.col_num' = '1');"));
    // Empty col_num value is rejected as a malformed parameter.
    ExceptionChecker
            .expectThrowsWithMsg(AnalysisException.class, "param data_sort.col_num error",
                    () -> createTable("create table test.zorder_tbl4\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
                            + "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
                            + "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
                            + " 'data_sort.sort_type' = 'zorder',"
                            + " 'data_sort.col_num' = '');"));
}
/**
 * Verifies ARRAY column support in CREATE TABLE: simple and nested arrays up to
 * the maximum nesting depth of 9 are accepted, while depth 10 is rejected.
 */
@Test
public void testCreateTableWithArrayType() throws Exception {
    // Single-level array value column.
    ExceptionChecker.expectThrowsNoException(() -> {
        createTable("create table test.table1(k1 INT, k2 Array<int>) duplicate key (k1) "
                + "distributed by hash(k1) buckets 1 properties('replication_num' = '1');");
    });
    // Two-level nested array.
    ExceptionChecker.expectThrowsNoException(() -> {
        createTable("create table test.table2(k1 INT, k2 Array<Array<int>>) duplicate key (k1) "
                + "distributed by hash(k1) buckets 1 properties('replication_num' = '1');");
    });
    // Nesting depths 2 through 9 across various element types — all within the limit.
    ExceptionChecker.expectThrowsNoException(() -> {
        createTable("CREATE TABLE test.table3 (\n"
                + " `k1` INT(11) NULL COMMENT \"\",\n"
                + " `k2` ARRAY<ARRAY<SMALLINT>> NULL COMMENT \"\",\n"
                + " `k3` ARRAY<ARRAY<ARRAY<INT(11)>>> NULL COMMENT \"\",\n"
                + " `k4` ARRAY<ARRAY<ARRAY<ARRAY<BIGINT>>>> NULL COMMENT \"\",\n"
                + " `k5` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<CHAR>>>>> NULL COMMENT \"\",\n"
                + " `k6` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<VARCHAR(20)>>>>>> NULL COMMENT \"\",\n"
                + " `k7` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DATE>>>>>>> NULL COMMENT \"\",\n"
                + " `k8` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DATETIME>>>>>>>> NULL COMMENT \"\",\n"
                + " `k11` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DECIMAL(20, 6)>>>>>>>>> NULL COMMENT \"\"\n"
                + ") ENGINE=OLAP\n"
                + "DUPLICATE KEY(`k1`)\n"
                + "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n"
                + "PROPERTIES (\n"
                + "\"replication_allocation\" = \"tag.location.default: 1\"\n"
                + ");");
    });
    // Depth 10 exceeds the maximum nesting depth and must be rejected.
    ExceptionChecker.expectThrowsWithMsg(AnalysisException.class, "Type exceeds the maximum nesting depth of 9",
            () -> {
                createTable("CREATE TABLE test.table4 (\n"
                        + " `k1` INT(11) NULL COMMENT \"\",\n"
                        + " `k2` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DECIMAL(20, 6)>>>>>>>>>> NULL COMMENT \"\"\n"
                        + ") ENGINE=OLAP\n"
                        + "DUPLICATE KEY(`k1`)\n"
                        + "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n"
                        + "PROPERTIES (\n"
                        + "\"replication_allocation\" = \"tag.location.default: 1\"\n"
                        + ");");
            });
    // Array column in an unpartitioned table.
    ExceptionChecker.expectThrowsNoException(() -> {
        createTable("create table test.table5(\n"
                + "\tk1 int,\n"
                + "\tv1 array<int>\n"
                + ") distributed by hash(k1) buckets 1\n"
                + "properties(\"replication_num\" = \"1\");");
    });
}
} | class CreateTableTest {
// Unique scratch directory for this run's mocked FE instance; removed in tearDown().
private static String runningDir = "fe/mocked/CreateTableTest2/" + UUID.randomUUID().toString() + "/";
// Shared connect context used to parse and execute statements in all test cases.
private static ConnectContext connectContext;
/**
 * Boots a single mocked Doris cluster for the whole test class and creates the
 * "test" database that every test case targets.
 */
@BeforeClass
public static void beforeClass() throws Exception {
    // Relax the storage medium check so tables can be placed on the mocked backend.
    Config.disable_storage_medium_check = true;
    UtFrameUtils.createDorisCluster(runningDir);
    connectContext = UtFrameUtils.createDefaultCtx();
    CreateDbStmt stmt =
            (CreateDbStmt) UtFrameUtils.parseAndAnalyzeStmt("create database test;", connectContext);
    Env.getCurrentEnv().createDb(stmt);
}
/**
 * Removes the mocked FE scratch directory after all tests.
 * File.delete() alone cannot remove a non-empty directory, so the whole
 * runningDir tree is deleted bottom-up.
 */
@AfterClass
public static void tearDown() {
    deleteRecursively(new File(runningDir));
}

/** Depth-first delete of a file tree; best-effort cleanup, failures are ignored. */
private static void deleteRecursively(File file) {
    File[] children = file.listFiles();
    if (children != null) {
        for (File child : children) {
            deleteRecursively(child);
        }
    }
    file.delete();
}
/**
 * Parses, analyzes and executes a CREATE TABLE statement against the current Env.
 *
 * @param sql a complete CREATE TABLE statement
 * @throws Exception if parsing, analysis or table creation fails
 */
private static void createTable(String sql) throws Exception {
    CreateTableStmt createTableStmt = (CreateTableStmt) UtFrameUtils.parseAndAnalyzeStmt(sql, connectContext);
    Env.getCurrentEnv().createTable(createTableStmt);
}
/**
 * Creates the same colocate table twice with "if not exists" and verifies that
 * the second (duplicate) creation is a no-op: the tablet inverted index and the
 * colocate table index must be unchanged.
 */
@Test
public void testDuplicateCreateTable() throws Exception {
    Env env = Env.getCurrentEnv();
    String sql = "create table if not exists test.tbl1_colocate\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
            + "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1','colocate_with'='test'); ";
    createTable(sql);
    // Snapshot index state after the FIRST creation. Copy the live key-set views
    // into HashSets so the comparisons below are real before/after checks rather
    // than comparing a view of the map against itself.
    Set<Long> tabletIdsAfterFirstCreate =
            new HashSet<>(env.getTabletInvertedIndex().getReplicaMetaTable().rowKeySet());
    Set<TabletMeta> tabletMetasAfterFirstCreate =
            new HashSet<>(env.getTabletInvertedIndex().getTabletMetaTable().values());
    Set<Long> colocateTableIdsAfterFirstCreate =
            new HashSet<>(env.getColocateTableIndex().getTable2Group().keySet());
    Assert.assertFalse(colocateTableIdsAfterFirstCreate.isEmpty());
    Assert.assertFalse(tabletIdsAfterFirstCreate.isEmpty());
    // Re-running the identical "if not exists" statement must change nothing.
    createTable(sql);
    Assert.assertEquals(tabletIdsAfterFirstCreate,
            env.getTabletInvertedIndex().getReplicaMetaTable().rowKeySet());
    Assert.assertEquals(tabletIdsAfterFirstCreate,
            env.getTabletInvertedIndex().getBackingReplicaMetaTable().columnKeySet());
    Assert.assertEquals(tabletIdsAfterFirstCreate,
            env.getTabletInvertedIndex().getTabletMetaMap().keySet());
    Assert.assertEquals(tabletMetasAfterFirstCreate,
            new HashSet<>(env.getTabletInvertedIndex().getTabletMetaTable().values()));
    Assert.assertEquals(colocateTableIdsAfterFirstCreate,
            env.getColocateTableIndex().getTable2Group().keySet());
}
// Fix: a second @Test annotation on the same method is a compile error
// (JUnit's @Test is not repeatable); keep exactly one.
@Test
public void testAbnormal() throws DdlException {
ExceptionChecker.expectThrowsWithMsg(DdlException.class,
"Floating point type should not be used in distribution column",
() -> createTable("create table test.atbl1\n" + "(k1 int, k2 float)\n" + "duplicate key(k1)\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
"Floating point type column can not be partition column",
() -> createTable("create table test.atbl3\n" + "(k1 int, k2 int, k3 float)\n" + "duplicate key(k1)\n"
+ "partition by range(k3)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker.expectThrowsWithMsg(DdlException.class,
"Varchar should not in the middle of short keys",
() -> createTable("create table test.atbl3\n" + "(k1 varchar(40), k2 int, k3 int)\n"
+ "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
+ "properties('replication_num' = '1', 'short_key' = '3');"));
ExceptionChecker.expectThrowsWithMsg(DdlException.class, "Short key is too large. should less than: 3",
() -> createTable("create table test.atbl4\n" + "(k1 int, k2 int, k3 int)\n"
+ "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
+ "properties('replication_num' = '1', 'short_key' = '4');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Failed to find 3 backends for policy",
() -> createTable("create table test.atbl5\n" + "(k1 int, k2 int, k3 int)\n"
+ "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
+ "properties('replication_num' = '3');"));
ExceptionChecker.expectThrowsNoException(
() -> createTable("create table test.atbl6\n" + "(k1 int, k2 int)\n" + "duplicate key(k1)\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1'); "));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Table 'atbl6' already exists",
() -> createTable("create table test.atbl6\n" + "(k1 int, k2 int, k3 int)\n"
+ "duplicate key(k1, k2, k3)\n" + "distributed by hash(k1) buckets 1\n"
+ "properties('replication_num' = '1');"));
ConfigBase.setMutableConfig("disable_storage_medium_check", "false");
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, " Failed to find 1 backends for policy:",
() -> createTable("create table test.tb7(key1 int, key2 varchar(10)) distributed by hash(key1) \n"
+ "buckets 1 properties('replication_num' = '1', 'storage_medium' = 'ssd');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "sequence column only support UNIQUE_KEYS",
() -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int sum)\n"
+ "aggregate key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_type' = 'int');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "sequence type only support integer types and date types",
() -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_type' = 'double');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "The sequence_col and sequence_type cannot be set at the same time",
() -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_type' = 'int', 'function_column.sequence_col' = 'v1');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "The specified sequence column[v3] not exists",
() -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_col' = 'v3');"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Sequence type only support integer types and date types",
() -> createTable("create table test.atbl8\n" + "(k1 varchar(40), k2 int, v1 int)\n"
+ "unique key(k1, k2)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k2) buckets 1\n" + "properties('replication_num' = '1',\n"
+ "'function_column.sequence_col' = 'k1');"));
/**
* create table with list partition
*/
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "Syntax error", () -> createTable("create table test.tbl9\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\"),\n"
+ "partition p2 values in ()\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[2] is not equal to partition column size[1]",
() -> createTable("create table test.tbl10\n"
+ "(k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int)\n"
+ "partition by list(k1)\n"
+ "(\n"
+ "partition p1 values in (\"1\", \"3\", \"5\"),\n"
+ "partition p2 values in (\"2\", \"4\", \"6\"),\n"
+ "partition p3 values in ((\"7\", \"8\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[1] is not equal to partition column size[2]",
() -> createTable("create table test.tbl11\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\")),\n"
+ "partition p2 values in (\"2\", \"beijing\")\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsWithMsg(IllegalArgumentException.class, "partition key desc list size[3] is not equal to partition column size[2]",
() -> createTable("create table test.tbl12\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
+ "partition p3 values in ((\"3\", \"tianjin\", \"3\"))\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "Syntax error",
() -> createTable("create table test.tbl13\n"
+ "(k1 int not null, k2 varchar(128) not null, k3 int, v1 int, v2 int)\n"
+ "partition by list(k1, k2)\n"
+ "(\n"
+ "partition p1 values in ((\"1\", \"beijing\"), (\"1\", \"shanghai\")),\n"
+ "partition p2 values in ((\"2\", \"beijing\"), (\"2\", \"shanghai\")),\n"
+ "partition p3 values in ()\n"
+ ")\n"
+ "distributed by hash(k2) buckets 1\n"
+ "properties('replication_num' = '1');"));
/**
* create table with both list and range partition
*/
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "You can only use in values to create list partitions",
() -> createTable("CREATE TABLE test.tbl14 (\n"
+ " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY LIST(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES less than (\"1\"),\n"
+ " PARTITION p2 VALUES less than (\"2\"),\n"
+ " partition p3 values less than (\"5\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "You can only use fixed or less than values to create range partitions",
() -> createTable("CREATE TABLE test.tbl15 (\n"
+ " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY range(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES in (\"1\"),\n"
+ " PARTITION p2 VALUES in (\"2\"),\n"
+ " partition p3 values in (\"5\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "You can only use in values to create list partitions",
() -> createTable("CREATE TABLE test.tbl15 (\n"
+ " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY LIST(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES in (\"1\"),\n"
+ " PARTITION p2 VALUES in (\"2\"),\n"
+ " partition p3 values less than (\"5\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "You can only use fixed or less than values to create range partitions",
() -> createTable("CREATE TABLE test.tbl16 (\n"
+ " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY RANGE(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES less than (\"1\"),\n"
+ " PARTITION p2 VALUES less than (\"2\"),\n"
+ " partition p3 values in (\"5\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Invalid number format: beijing",
() -> createTable("CREATE TABLE test.tbl17 (\n"
+ " k1 int, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY range(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES less than (\"beijing\"),\n"
+ " PARTITION p2 VALUES less than (\"shanghai\"),\n"
+ " partition p3 values less than (\"tianjin\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Invalid number format: beijing",
() -> createTable("CREATE TABLE test.tbl18 (\n"
+ " k1 int not null, k2 varchar(128), k3 int, v1 int, v2 int\n"
+ ")\n"
+ "PARTITION BY list(k1)\n"
+ "(\n"
+ " PARTITION p1 VALUES in (\"beijing\"),\n"
+ " PARTITION p2 VALUES in (\"shanghai\"),\n"
+ " partition p3 values in (\"tianjin\")\n"
+ ")DISTRIBUTED BY HASH(k2) BUCKETS 10\n"
+ "PROPERTIES(\"replication_num\" = \"1\");"));
/**
* dynamic partition table
*/
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Only support dynamic partition properties on range partition table",
() -> createTable("CREATE TABLE test.tbl19\n"
+ "(\n"
+ " k1 DATE not null\n"
+ ")\n"
+ "PARTITION BY LIST(k1) ()\n"
+ "DISTRIBUTED BY HASH(k1)\n"
+ "PROPERTIES\n"
+ "(\n"
+ " \"dynamic_partition.enable\" = \"true\",\n"
+ " \"dynamic_partition.time_unit\" = \"MONTH\",\n"
+ " \"dynamic_partition.end\" = \"2\",\n"
+ " \"dynamic_partition.prefix\" = \"p\",\n"
+ " \"dynamic_partition.buckets\" = \"8\",\n"
+ " \"dynamic_partition.start_day_of_month\" = \"3\"\n"
+ ");\n"));
ExceptionChecker
.expectThrowsWithMsg(DdlException.class, "Only support dynamic partition properties on range partition table",
() -> createTable("CREATE TABLE test.tbl20\n"
+ "(\n"
+ " k1 DATE\n"
+ ")\n"
+ "DISTRIBUTED BY HASH(k1)\n"
+ "PROPERTIES\n"
+ "(\n"
+ " \"dynamic_partition.enable\" = \"true\",\n"
+ " \"dynamic_partition.time_unit\" = \"MONTH\",\n"
+ " \"dynamic_partition.end\" = \"2\",\n"
+ " \"dynamic_partition.prefix\" = \"p\",\n"
+ " \"dynamic_partition.buckets\" = \"8\",\n"
+ " \"dynamic_partition.start_day_of_month\" = \"3\"\n"
+ ");"));
ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
"Create unique keys table should not contain random distribution desc",
() -> createTable("CREATE TABLE test.tbl21\n"
+ "(\n"
+ " `k1` bigint(20) NULL COMMENT \"\",\n"
+ " `k2` largeint(40) NULL COMMENT \"\",\n"
+ " `v1` varchar(204) NULL COMMENT \"\",\n"
+ " `v2` smallint(6) NULL DEFAULT \"10\" COMMENT \"\"\n"
+ ") ENGINE=OLAP\n"
+ "UNIQUE KEY(`k1`, `k2`)\n"
+ "DISTRIBUTED BY RANDOM BUCKETS 32\n"
+ "PROPERTIES (\n"
+ "\"replication_allocation\" = \"tag.location.default: 1\"\n"
+ ");"));
ExceptionChecker.expectThrowsWithMsg(AnalysisException.class,
"Create aggregate keys table with value columns of which aggregate type"
+ " is REPLACE should not contain random distribution desc",
() -> createTable("CREATE TABLE test.tbl22\n"
+ "(\n"
+ " `k1` bigint(20) NULL COMMENT \"\",\n"
+ " `k2` largeint(40) NULL COMMENT \"\",\n"
+ " `v1` bigint(20) REPLACE NULL COMMENT \"\",\n"
+ " `v2` smallint(6) REPLACE_IF_NOT_NULL NULL DEFAULT \"10\" COMMENT \"\"\n"
+ ") ENGINE=OLAP\n"
+ "AGGREGATE KEY(`k1`, `k2`)\n"
+ "DISTRIBUTED BY RANDOM BUCKETS 32\n"
+ "PROPERTIES (\n"
+ "\"replication_allocation\" = \"tag.location.default: 1\"\n"
+ ");"));
}
@Test
public void testZOrderTable() {
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.zorder_tbl1\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
+ " 'data_sort.sort_type' = 'lexical');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.zorder_tbl2\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
+ " 'data_sort.sort_type' = 'zorder');"));
ExceptionChecker.expectThrowsNoException(() -> createTable(
"create table test.zorder_tbl3\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
+ " 'data_sort.sort_type' = 'zorder',"
+ " 'data_sort.col_num' = '2');"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "z-order needs 2 columns at least, 3 columns at most",
() -> createTable("create table test.zorder_tbl4\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
+ " 'data_sort.sort_type' = 'zorder',"
+ " 'data_sort.col_num' = '1');"));
ExceptionChecker
.expectThrowsWithMsg(AnalysisException.class, "param data_sort.col_num error",
() -> createTable("create table test.zorder_tbl4\n" + "(k1 varchar(40), k2 int, k3 int)\n" + "duplicate key(k1, k2, k3)\n"
+ "partition by range(k2)\n" + "(partition p1 values less than(\"10\"))\n"
+ "distributed by hash(k1) buckets 1\n" + "properties('replication_num' = '1',"
+ " 'data_sort.sort_type' = 'zorder',"
+ " 'data_sort.col_num' = '');"));
}
@Test
public void testCreateTableWithArrayType() throws Exception {
ExceptionChecker.expectThrowsNoException(() -> {
createTable("create table test.table1(k1 INT, k2 Array<int>) duplicate key (k1) "
+ "distributed by hash(k1) buckets 1 properties('replication_num' = '1');");
});
ExceptionChecker.expectThrowsNoException(() -> {
createTable("create table test.table2(k1 INT, k2 Array<Array<int>>) duplicate key (k1) "
+ "distributed by hash(k1) buckets 1 properties('replication_num' = '1');");
});
ExceptionChecker.expectThrowsNoException(() -> {
createTable("CREATE TABLE test.table3 (\n"
+ " `k1` INT(11) NULL COMMENT \"\",\n"
+ " `k2` ARRAY<ARRAY<SMALLINT>> NULL COMMENT \"\",\n"
+ " `k3` ARRAY<ARRAY<ARRAY<INT(11)>>> NULL COMMENT \"\",\n"
+ " `k4` ARRAY<ARRAY<ARRAY<ARRAY<BIGINT>>>> NULL COMMENT \"\",\n"
+ " `k5` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<CHAR>>>>> NULL COMMENT \"\",\n"
+ " `k6` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<VARCHAR(20)>>>>>> NULL COMMENT \"\",\n"
+ " `k7` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DATE>>>>>>> NULL COMMENT \"\",\n"
+ " `k8` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DATETIME>>>>>>>> NULL COMMENT \"\",\n"
+ " `k11` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DECIMAL(20, 6)>>>>>>>>> NULL COMMENT \"\"\n"
+ ") ENGINE=OLAP\n"
+ "DUPLICATE KEY(`k1`)\n"
+ "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n"
+ "PROPERTIES (\n"
+ "\"replication_allocation\" = \"tag.location.default: 1\"\n"
+ ");");
});
ExceptionChecker.expectThrowsWithMsg(AnalysisException.class, "Type exceeds the maximum nesting depth of 9",
() -> {
createTable("CREATE TABLE test.table4 (\n"
+ " `k1` INT(11) NULL COMMENT \"\",\n"
+ " `k2` ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<ARRAY<DECIMAL(20, 6)>>>>>>>>>> NULL COMMENT \"\"\n"
+ ") ENGINE=OLAP\n"
+ "DUPLICATE KEY(`k1`)\n"
+ "DISTRIBUTED BY HASH(`k1`) BUCKETS 3\n"
+ "PROPERTIES (\n"
+ "\"replication_allocation\" = \"tag.location.default: 1\"\n"
+ ");");
});
ExceptionChecker.expectThrowsNoException(() -> {
createTable("create table test.table5(\n"
+ "\tk1 int,\n"
+ "\tv1 array<int>\n"
+ ") distributed by hash(k1) buckets 1\n"
+ "properties(\"replication_num\" = \"1\");");
});
}
} |
Ok, let's start with `CLASS`... | private List<BeanInfo> findMatching(TypeAndQualifiers typeAndQualifiers) {
List<BeanInfo> resolved = new ArrayList<>();
Collection<BeanInfo> potentialBeans = typeAndQualifiers.type.kind() == CLASS
? beanDeployment.getBeansByType(typeAndQualifiers.type)
: beanDeployment.getBeans();
for (BeanInfo b : potentialBeans) {
if (Beans.matches(b, typeAndQualifiers)) {
resolved.add(b);
}
}
return resolved.isEmpty() ? Collections.emptyList() : resolved;
} | Collection<BeanInfo> potentialBeans = typeAndQualifiers.type.kind() == CLASS | private List<BeanInfo> findMatching(TypeAndQualifiers typeAndQualifiers) {
List<BeanInfo> resolved = new ArrayList<>();
Collection<BeanInfo> potentialBeans = typeAndQualifiers.type.kind() == CLASS
? beanDeployment.getBeansByType(typeAndQualifiers.type)
: beanDeployment.getBeans();
for (BeanInfo b : potentialBeans) {
if (Beans.matches(b, typeAndQualifiers)) {
resolved.add(b);
}
}
return resolved.isEmpty() ? Collections.emptyList() : resolved;
} | class BeanResolverImpl implements BeanResolver {
private final BeanDeployment beanDeployment;
private final Map<TypeAndQualifiers, List<BeanInfo>> resolved;
BeanResolverImpl(BeanDeployment beanDeployment) {
this.beanDeployment = beanDeployment;
this.resolved = new ConcurrentHashMap<>();
}
@Override
public Set<BeanInfo> resolveBeans(Type requiredType, AnnotationInstance... requiredQualifiers) {
Objects.requireNonNull(requiredType, "Required type must not be null");
Set<AnnotationInstance> qualifiers;
if (requiredQualifiers.length == 0) {
qualifiers = Collections.emptySet();
} else {
qualifiers = new HashSet<>();
Collections.addAll(qualifiers, requiredQualifiers);
}
TypeAndQualifiers typeAndQualifiers = new TypeAndQualifiers(requiredType, qualifiers);
List<BeanInfo> beans = findMatching(typeAndQualifiers);
Set<BeanInfo> ret;
if (beans.isEmpty()) {
ret = Collections.emptySet();
} else if (beans.size() == 1) {
ret = Collections.singleton(beans.get(0));
} else {
ret = new HashSet<>(beans);
}
return ret;
}
@Override
public BeanInfo resolveAmbiguity(Set<BeanInfo> beans) {
if (beans == null || beans.isEmpty()) {
return null;
}
if (beans.size() > 1) {
BeanInfo selected = Beans.resolveAmbiguity(beans);
if (selected != null) {
return selected;
}
throw new AmbiguousResolutionException(beans.toString());
} else {
return beans.iterator().next();
}
}
List<BeanInfo> resolve(TypeAndQualifiers typeAndQualifiers) {
return resolved.computeIfAbsent(typeAndQualifiers, this::findMatching);
}
List<BeanInfo> findTypeMatching(Type type) {
List<BeanInfo> resolved = new ArrayList<>();
Collection<BeanInfo> potentialBeans = type.kind() == CLASS ? beanDeployment.getBeansByType(type)
: beanDeployment.getBeans();
for (BeanInfo b : potentialBeans) {
if (Beans.matchesType(b, type)) {
resolved.add(b);
}
}
return resolved.isEmpty() ? Collections.emptyList() : resolved;
}
boolean matches(Type requiredType, Type beanType) {
return matchesNoBoxing(Types.box(requiredType), Types.box(beanType));
}
boolean matchesNoBoxing(Type requiredType, Type beanType) {
if (requiredType == beanType) {
return true;
}
if (ARRAY.equals(requiredType.kind())) {
if (ARRAY.equals(beanType.kind())) {
return matchesNoBoxing(requiredType.asArrayType().component(), beanType.asArrayType().component());
}
} else if (CLASS.equals(requiredType.kind())) {
if (CLASS.equals(beanType.kind())) {
return requiredType.name().equals(beanType.name());
} else if (PARAMETERIZED_TYPE.equals(beanType.kind())) {
if (!requiredType.name().equals(beanType.asParameterizedType().name())) {
return false;
}
return containsUnboundedTypeVariablesOrObjects(beanType.asParameterizedType().arguments());
}
} else if (PARAMETERIZED_TYPE.equals(requiredType.kind())) {
if (CLASS.equals(beanType.kind())) {
if (!beanType.name().equals(requiredType.asParameterizedType().name())) {
return false;
}
return containsUnboundedTypeVariablesOrObjects(requiredType.asParameterizedType().arguments());
} else if (PARAMETERIZED_TYPE.equals(beanType.kind())) {
if (!requiredType.name().equals(beanType.name())) {
return false;
}
List<Type> requiredTypeArguments = requiredType.asParameterizedType().arguments();
List<Type> beanTypeArguments = beanType.asParameterizedType().arguments();
if (requiredTypeArguments.size() != beanTypeArguments.size()) {
throw new IllegalArgumentException("Invalid argument combination " + requiredType + "; " + beanType);
}
for (int i = 0; i < requiredTypeArguments.size(); i++) {
if (!parametersMatch(requiredTypeArguments.get(i), beanTypeArguments.get(i))) {
return false;
}
}
return true;
}
} else if (WILDCARD_TYPE.equals(requiredType.kind())) {
return parametersMatch(requiredType, beanType);
}
return false;
}
boolean parametersMatch(Type requiredParameter, Type beanParameter) {
if (isActualType(requiredParameter) && isActualType(beanParameter)) {
/*
* the required type parameter and the bean type parameter are actual types with identical raw type, and, if the
* type is parameterized, the bean
* type parameter is assignable to the required type parameter according to these rules, or
*/
return matches(requiredParameter, beanParameter);
}
if (WILDCARD_TYPE.equals(requiredParameter.kind()) && isActualType(beanParameter)) {
/*
* the required type parameter is a wildcard, the bean type parameter is an actual type and the actual type is
* assignable to the upper bound, if
* any, of the wildcard and assignable from the lower bound, if any, of the wildcard, or
*/
return parametersMatch(requiredParameter.asWildcardType(), beanParameter);
}
if (WILDCARD_TYPE.equals(requiredParameter.kind()) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter is a wildcard, the bean type parameter is a type variable and the upper bound of the
* type variable is assignable to
* or assignable from the upper bound, if any, of the wildcard and assignable from the lower bound, if any, of the
* wildcard, or
*/
return parametersMatch(requiredParameter.asWildcardType(), beanParameter.asTypeVariable());
}
if (isActualType(requiredParameter) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter is an actual type, the bean type parameter is a type variable and the actual type is
* assignable to the upper bound,
* if any, of the type variable, or
*/
return parametersMatch(requiredParameter, beanParameter.asTypeVariable());
}
if (TYPE_VARIABLE.equals(requiredParameter.kind()) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter and the bean type parameter are both type variables and the upper bound of the
* required type parameter is assignable
* to the upper bound, if any, of the bean type parameter
*/
return parametersMatch(requiredParameter.asTypeVariable(), beanParameter.asTypeVariable());
}
return false;
}
boolean parametersMatch(WildcardType requiredParameter, Type beanParameter) {
return (lowerBoundsOfWildcardMatch(beanParameter, requiredParameter)
&& upperBoundsOfWildcardMatch(requiredParameter, beanParameter));
}
boolean parametersMatch(WildcardType requiredParameter, TypeVariable beanParameter) {
List<Type> beanParameterBounds = getUppermostTypeVariableBounds(beanParameter);
if (!lowerBoundsOfWildcardMatch(beanParameterBounds, requiredParameter)) {
return false;
}
List<Type> requiredUpperBounds = Collections.singletonList(requiredParameter.extendsBound());
return (boundsMatch(requiredUpperBounds, beanParameterBounds) || boundsMatch(beanParameterBounds, requiredUpperBounds));
}
boolean parametersMatch(Type requiredParameter, TypeVariable beanParameter) {
for (Type bound : getUppermostTypeVariableBounds(beanParameter)) {
if (!beanDeployment.getAssignabilityCheck().isAssignableFrom(bound, requiredParameter)) {
return false;
}
}
return true;
}
boolean parametersMatch(TypeVariable requiredParameter, TypeVariable beanParameter) {
return boundsMatch(getUppermostTypeVariableBounds(beanParameter), getUppermostTypeVariableBounds(requiredParameter));
}
/**
* Returns <tt>true</tt> iff for each bound T, there is at least one bound from <tt>stricterBounds</tt> assignable to T.
* This reflects that
* <tt>stricterBounds</tt> are at least as strict as <tt>bounds</tt> are.
*/
boolean boundsMatch(List<Type> bounds, List<Type> stricterBounds) {
bounds = getUppermostBounds(bounds);
stricterBounds = getUppermostBounds(stricterBounds);
for (Type bound : bounds) {
for (Type stricterBound : stricterBounds) {
if (!beanDeployment.getAssignabilityCheck().isAssignableFrom(bound, stricterBound)) {
return false;
}
}
}
return true;
}
boolean lowerBoundsOfWildcardMatch(Type parameter, WildcardType requiredParameter) {
return lowerBoundsOfWildcardMatch(singletonList(parameter), requiredParameter);
}
boolean lowerBoundsOfWildcardMatch(List<Type> beanParameterBounds, WildcardType requiredParameter) {
if (requiredParameter.superBound() != null) {
if (!boundsMatch(beanParameterBounds, singletonList(requiredParameter.superBound()))) {
return false;
}
}
return true;
}
boolean upperBoundsOfWildcardMatch(WildcardType requiredParameter, Type parameter) {
return boundsMatch(singletonList(requiredParameter.extendsBound()), singletonList(parameter));
}
/*
* TypeVariable bounds are treated specially - CDI assignability rules are applied. Standard Java covariant assignability
* rules are applied to all other
* types of bounds. This is not explicitly mentioned in the specification but is implied.
*/
List<Type> getUppermostTypeVariableBounds(TypeVariable bound) {
if (TYPE_VARIABLE.equals(bound.bounds().get(0).kind())) {
return getUppermostTypeVariableBounds(bound.bounds().get(0).asTypeVariable());
}
return bound.bounds();
}
List<Type> getUppermostBounds(List<Type> bounds) {
if (TYPE_VARIABLE.equals(bounds.get(0).kind())) {
return getUppermostTypeVariableBounds(bounds.get(0).asTypeVariable());
}
return bounds;
}
static boolean isActualType(Type type) {
return CLASS.equals(type.kind()) || PARAMETERIZED_TYPE.equals(type.kind()) || ARRAY.equals(type.kind());
}
static boolean containsUnboundedTypeVariablesOrObjects(List<Type> types) {
for (Type type : types) {
if (ClassType.OBJECT_TYPE.equals(type)) {
continue;
}
if (Kind.TYPE_VARIABLE.equals(type.kind())) {
List<Type> bounds = type.asTypeVariable().bounds();
if (bounds.isEmpty() || bounds.size() == 1 && ClassType.OBJECT_TYPE.equals(bounds.get(0))) {
continue;
}
}
return false;
}
return true;
}
} | class BeanResolverImpl implements BeanResolver {
private final BeanDeployment beanDeployment;
private final Map<TypeAndQualifiers, List<BeanInfo>> resolved;
BeanResolverImpl(BeanDeployment beanDeployment) {
this.beanDeployment = beanDeployment;
this.resolved = new ConcurrentHashMap<>();
}
@Override
public Set<BeanInfo> resolveBeans(Type requiredType, AnnotationInstance... requiredQualifiers) {
Objects.requireNonNull(requiredType, "Required type must not be null");
Set<AnnotationInstance> qualifiers;
if (requiredQualifiers.length == 0) {
qualifiers = Collections.emptySet();
} else {
qualifiers = new HashSet<>();
Collections.addAll(qualifiers, requiredQualifiers);
}
TypeAndQualifiers typeAndQualifiers = new TypeAndQualifiers(requiredType, qualifiers);
List<BeanInfo> beans = findMatching(typeAndQualifiers);
Set<BeanInfo> ret;
if (beans.isEmpty()) {
ret = Collections.emptySet();
} else if (beans.size() == 1) {
ret = Collections.singleton(beans.get(0));
} else {
ret = new HashSet<>(beans);
}
return ret;
}
@Override
public BeanInfo resolveAmbiguity(Set<BeanInfo> beans) {
if (beans == null || beans.isEmpty()) {
return null;
}
if (beans.size() > 1) {
BeanInfo selected = Beans.resolveAmbiguity(beans);
if (selected != null) {
return selected;
}
throw new AmbiguousResolutionException(beans.toString());
} else {
return beans.iterator().next();
}
}
List<BeanInfo> resolve(TypeAndQualifiers typeAndQualifiers) {
return resolved.computeIfAbsent(typeAndQualifiers, this::findMatching);
}
List<BeanInfo> findTypeMatching(Type type) {
List<BeanInfo> resolved = new ArrayList<>();
Collection<BeanInfo> potentialBeans = type.kind() == CLASS ? beanDeployment.getBeansByType(type)
: beanDeployment.getBeans();
for (BeanInfo b : potentialBeans) {
if (Beans.matchesType(b, type)) {
resolved.add(b);
}
}
return resolved.isEmpty() ? Collections.emptyList() : resolved;
}
boolean matches(Type requiredType, Type beanType) {
return matchesNoBoxing(Types.box(requiredType), Types.box(beanType));
}
boolean matchesNoBoxing(Type requiredType, Type beanType) {
if (requiredType == beanType) {
return true;
}
if (ARRAY.equals(requiredType.kind())) {
if (ARRAY.equals(beanType.kind())) {
return matchesNoBoxing(requiredType.asArrayType().component(), beanType.asArrayType().component());
}
} else if (CLASS.equals(requiredType.kind())) {
if (CLASS.equals(beanType.kind())) {
return requiredType.name().equals(beanType.name());
} else if (PARAMETERIZED_TYPE.equals(beanType.kind())) {
if (!requiredType.name().equals(beanType.asParameterizedType().name())) {
return false;
}
return containsUnboundedTypeVariablesOrObjects(beanType.asParameterizedType().arguments());
}
} else if (PARAMETERIZED_TYPE.equals(requiredType.kind())) {
if (CLASS.equals(beanType.kind())) {
if (!beanType.name().equals(requiredType.asParameterizedType().name())) {
return false;
}
return containsUnboundedTypeVariablesOrObjects(requiredType.asParameterizedType().arguments());
} else if (PARAMETERIZED_TYPE.equals(beanType.kind())) {
if (!requiredType.name().equals(beanType.name())) {
return false;
}
List<Type> requiredTypeArguments = requiredType.asParameterizedType().arguments();
List<Type> beanTypeArguments = beanType.asParameterizedType().arguments();
if (requiredTypeArguments.size() != beanTypeArguments.size()) {
throw new IllegalArgumentException("Invalid argument combination " + requiredType + "; " + beanType);
}
for (int i = 0; i < requiredTypeArguments.size(); i++) {
if (!parametersMatch(requiredTypeArguments.get(i), beanTypeArguments.get(i))) {
return false;
}
}
return true;
}
} else if (WILDCARD_TYPE.equals(requiredType.kind())) {
return parametersMatch(requiredType, beanType);
}
return false;
}
boolean parametersMatch(Type requiredParameter, Type beanParameter) {
if (isActualType(requiredParameter) && isActualType(beanParameter)) {
/*
* the required type parameter and the bean type parameter are actual types with identical raw type, and, if the
* type is parameterized, the bean
* type parameter is assignable to the required type parameter according to these rules, or
*/
return matches(requiredParameter, beanParameter);
}
if (WILDCARD_TYPE.equals(requiredParameter.kind()) && isActualType(beanParameter)) {
/*
* the required type parameter is a wildcard, the bean type parameter is an actual type and the actual type is
* assignable to the upper bound, if
* any, of the wildcard and assignable from the lower bound, if any, of the wildcard, or
*/
return parametersMatch(requiredParameter.asWildcardType(), beanParameter);
}
if (WILDCARD_TYPE.equals(requiredParameter.kind()) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter is a wildcard, the bean type parameter is a type variable and the upper bound of the
* type variable is assignable to
* or assignable from the upper bound, if any, of the wildcard and assignable from the lower bound, if any, of the
* wildcard, or
*/
return parametersMatch(requiredParameter.asWildcardType(), beanParameter.asTypeVariable());
}
if (isActualType(requiredParameter) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter is an actual type, the bean type parameter is a type variable and the actual type is
* assignable to the upper bound,
* if any, of the type variable, or
*/
return parametersMatch(requiredParameter, beanParameter.asTypeVariable());
}
if (TYPE_VARIABLE.equals(requiredParameter.kind()) && TYPE_VARIABLE.equals(beanParameter.kind())) {
/*
* the required type parameter and the bean type parameter are both type variables and the upper bound of the
* required type parameter is assignable
* to the upper bound, if any, of the bean type parameter
*/
return parametersMatch(requiredParameter.asTypeVariable(), beanParameter.asTypeVariable());
}
return false;
}
boolean parametersMatch(WildcardType requiredParameter, Type beanParameter) {
return (lowerBoundsOfWildcardMatch(beanParameter, requiredParameter)
&& upperBoundsOfWildcardMatch(requiredParameter, beanParameter));
}
boolean parametersMatch(WildcardType requiredParameter, TypeVariable beanParameter) {
List<Type> beanParameterBounds = getUppermostTypeVariableBounds(beanParameter);
if (!lowerBoundsOfWildcardMatch(beanParameterBounds, requiredParameter)) {
return false;
}
List<Type> requiredUpperBounds = Collections.singletonList(requiredParameter.extendsBound());
return (boundsMatch(requiredUpperBounds, beanParameterBounds) || boundsMatch(beanParameterBounds, requiredUpperBounds));
}
boolean parametersMatch(Type requiredParameter, TypeVariable beanParameter) {
for (Type bound : getUppermostTypeVariableBounds(beanParameter)) {
if (!beanDeployment.getAssignabilityCheck().isAssignableFrom(bound, requiredParameter)) {
return false;
}
}
return true;
}
boolean parametersMatch(TypeVariable requiredParameter, TypeVariable beanParameter) {
return boundsMatch(getUppermostTypeVariableBounds(beanParameter), getUppermostTypeVariableBounds(requiredParameter));
}
/**
* Returns <tt>true</tt> iff for each bound T, there is at least one bound from <tt>stricterBounds</tt> assignable to T.
* This reflects that
* <tt>stricterBounds</tt> are at least as strict as <tt>bounds</tt> are.
*/
boolean boundsMatch(List<Type> bounds, List<Type> stricterBounds) {
bounds = getUppermostBounds(bounds);
stricterBounds = getUppermostBounds(stricterBounds);
for (Type bound : bounds) {
for (Type stricterBound : stricterBounds) {
if (!beanDeployment.getAssignabilityCheck().isAssignableFrom(bound, stricterBound)) {
return false;
}
}
}
return true;
}
boolean lowerBoundsOfWildcardMatch(Type parameter, WildcardType requiredParameter) {
return lowerBoundsOfWildcardMatch(singletonList(parameter), requiredParameter);
}
boolean lowerBoundsOfWildcardMatch(List<Type> beanParameterBounds, WildcardType requiredParameter) {
if (requiredParameter.superBound() != null) {
if (!boundsMatch(beanParameterBounds, singletonList(requiredParameter.superBound()))) {
return false;
}
}
return true;
}
boolean upperBoundsOfWildcardMatch(WildcardType requiredParameter, Type parameter) {
return boundsMatch(singletonList(requiredParameter.extendsBound()), singletonList(parameter));
}
/*
* TypeVariable bounds are treated specially - CDI assignability rules are applied. Standard Java covariant assignability
* rules are applied to all other
* types of bounds. This is not explicitly mentioned in the specification but is implied.
*/
List<Type> getUppermostTypeVariableBounds(TypeVariable bound) {
if (TYPE_VARIABLE.equals(bound.bounds().get(0).kind())) {
return getUppermostTypeVariableBounds(bound.bounds().get(0).asTypeVariable());
}
return bound.bounds();
}
List<Type> getUppermostBounds(List<Type> bounds) {
if (TYPE_VARIABLE.equals(bounds.get(0).kind())) {
return getUppermostTypeVariableBounds(bounds.get(0).asTypeVariable());
}
return bounds;
}
static boolean isActualType(Type type) {
return CLASS.equals(type.kind()) || PARAMETERIZED_TYPE.equals(type.kind()) || ARRAY.equals(type.kind());
}
static boolean containsUnboundedTypeVariablesOrObjects(List<Type> types) {
for (Type type : types) {
if (ClassType.OBJECT_TYPE.equals(type)) {
continue;
}
if (Kind.TYPE_VARIABLE.equals(type.kind())) {
List<Type> bounds = type.asTypeVariable().bounds();
if (bounds.isEmpty() || bounds.size() == 1 && ClassType.OBJECT_TYPE.equals(bounds.get(0))) {
continue;
}
}
return false;
}
return true;
}
} |
The error msg is confusing. The `if` says `not a client local file`, but err msg says "not support local file from client"? | private void handleLoadStmt() {
try {
LoadStmt loadStmt = (LoadStmt) parsedStmt;
EtlJobType jobType = loadStmt.getEtlJobType();
if (jobType == EtlJobType.UNKNOWN) {
throw new DdlException("Unknown load job type");
}
if (jobType == EtlJobType.HADOOP) {
throw new DdlException("Load job by hadoop cluster is disabled."
+ " Try using broker load. See 'help broker load;'");
}
LoadManager loadManager = context.getEnv().getLoadManager();
if (jobType == EtlJobType.LOCAL_FILE) {
if (!context.getCapability().isClientLocalFile()) {
throw new DdlException("Doris server does not support load local file from mysql client.");
}
LoadJobRowResult submitResult = loadManager.executeMySqlLoadJobFromStmt(context, loadStmt);
context.getState().setOk(submitResult.getRecords(), submitResult.getWarnings(),
submitResult.toString());
} else {
loadManager.createLoadJobFromStmt(loadStmt);
context.getState().setOk();
}
} catch (UserException e) {
LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
} catch (Exception e) {
LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
}
} | throw new DdlException("Doris server does not support load local file from mysql client."); | private void handleLoadStmt() {
try {
LoadStmt loadStmt = (LoadStmt) parsedStmt;
EtlJobType jobType = loadStmt.getEtlJobType();
if (jobType == EtlJobType.UNKNOWN) {
throw new DdlException("Unknown load job type");
}
if (jobType == EtlJobType.HADOOP) {
throw new DdlException("Load job by hadoop cluster is disabled."
+ " Try using broker load. See 'help broker load;'");
}
LoadManager loadManager = context.getEnv().getLoadManager();
if (jobType == EtlJobType.LOCAL_FILE) {
if (!context.getCapability().supportClientLocalFile()) {
context.getState().setError(ErrorCode.ERR_NOT_ALLOWED_COMMAND, "This client is not support"
+ " to load client local file.");
return;
}
LoadJobRowResult submitResult = loadManager.executeMySqlLoadJobFromStmt(context, loadStmt);
context.getState().setOk(submitResult.getRecords(), submitResult.getWarnings(),
submitResult.toString());
} else {
loadManager.createLoadJobFromStmt(loadStmt);
context.getState().setOk();
}
} catch (UserException e) {
LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
} catch (Exception e) {
LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
}
} | class StmtExecutor implements ProfileWriter {
private static final Logger LOG = LogManager.getLogger(StmtExecutor.class);
private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0);
private static final int MAX_DATA_TO_SEND_FOR_TXN = 100;
private static final String NULL_VALUE_FOR_LOAD = "\\N";
private final Object writeProfileLock = new Object();
private ConnectContext context;
private final StatementContext statementContext;
private MysqlSerializer serializer;
private OriginStatement originStmt;
private StatementBase parsedStmt;
private Analyzer analyzer;
private RuntimeProfile profile;
private RuntimeProfile summaryProfile;
private RuntimeProfile plannerRuntimeProfile;
private volatile boolean isFinishedProfile = false;
private String queryType = "Query";
private volatile Coordinator coord = null;
private MasterOpExecutor masterOpExecutor = null;
private RedirectStatus redirectStatus = null;
private Planner planner;
private boolean isProxy;
private ShowResultSet proxyResultSet = null;
private Data.PQueryStatistics.Builder statisticsForAuditLog;
private boolean isCached;
private QueryPlannerProfile plannerProfile = new QueryPlannerProfile();
private String stmtName;
private PrepareStmt prepareStmt;
public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) {
this.context = context;
this.originStmt = originStmt;
this.serializer = context.getSerializer();
this.isProxy = isProxy;
this.statementContext = new StatementContext(context, originStmt);
this.context.setStatementContext(statementContext);
}
public StmtExecutor(ConnectContext context, String stmt) {
this(context, new OriginStatement(stmt, 0), false);
this.stmtName = stmt;
}
public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) {
this.context = ctx;
this.parsedStmt = parsedStmt;
this.originStmt = parsedStmt.getOrigStmt();
this.serializer = context.getSerializer();
this.isProxy = false;
if (parsedStmt instanceof LogicalPlanAdapter) {
this.statementContext = ((LogicalPlanAdapter) parsedStmt).getStatementContext();
this.statementContext.setConnectContext(ctx);
this.statementContext.setOriginStatement(originStmt);
this.statementContext.setParsedStatement(parsedStmt);
} else {
this.statementContext = new StatementContext(ctx, originStmt);
this.statementContext.setParsedStatement(parsedStmt);
}
this.context.setStatementContext(statementContext);
}
public static InternalService.PDataRow getRowStringValue(List<Expr> cols) throws UserException {
if (cols.isEmpty()) {
return null;
}
InternalService.PDataRow.Builder row = InternalService.PDataRow.newBuilder();
for (Expr expr : cols) {
if (!expr.isLiteralOrCastExpr()) {
throw new UserException(
"do not support non-literal expr in transactional insert operation: " + expr.toSql());
}
if (expr instanceof NullLiteral) {
row.addColBuilder().setValue(NULL_VALUE_FOR_LOAD);
} else if (expr instanceof ArrayLiteral) {
row.addColBuilder().setValue(expr.getStringValueForArray());
} else {
row.addColBuilder().setValue(expr.getStringValue());
}
}
return row.build();
}
public void setCoord(Coordinator coord) {
this.coord = coord;
}
public Analyzer getAnalyzer() {
return analyzer;
}
private void initProfile(QueryPlannerProfile plannerProfile, boolean waiteBeReport) {
RuntimeProfile queryProfile;
if (coord == null) {
queryProfile = new RuntimeProfile("Execution Profile " + DebugUtil.printId(context.queryId()));
} else {
queryProfile = coord.getQueryProfile();
}
if (profile == null) {
profile = new RuntimeProfile("Query");
summaryProfile = new RuntimeProfile("Summary");
profile.addChild(summaryProfile);
summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(context.getStartTime()));
updateSummaryProfile(waiteBeReport);
for (Map.Entry<String, String> entry : getSummaryInfo().entrySet()) {
summaryProfile.addInfoString(entry.getKey(), entry.getValue());
}
summaryProfile.addInfoString(ProfileManager.TRACE_ID, context.getSessionVariable().getTraceId());
plannerRuntimeProfile = new RuntimeProfile("Execution Summary");
summaryProfile.addChild(plannerRuntimeProfile);
profile.addChild(queryProfile);
} else {
updateSummaryProfile(waiteBeReport);
}
plannerProfile.initRuntimeProfile(plannerRuntimeProfile);
queryProfile.getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(plannerProfile.getQueryBeginTime()));
endProfile(waiteBeReport);
}
private void endProfile(boolean waitProfileDone) {
if (context != null && context.getSessionVariable().enableProfile() && coord != null) {
coord.endProfile(waitProfileDone);
}
}
private void updateSummaryProfile(boolean waiteBeReport) {
Preconditions.checkNotNull(summaryProfile);
long currentTimestamp = System.currentTimeMillis();
long totalTimeMs = currentTimestamp - context.getStartTime();
summaryProfile.addInfoString(ProfileManager.END_TIME,
waiteBeReport ? TimeUtils.longToTimeString(currentTimestamp) : "N/A");
summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs));
summaryProfile.addInfoString(ProfileManager.QUERY_STATE,
!waiteBeReport && context.getState().getStateType().equals(MysqlStateType.OK) ? "RUNNING" :
context.getState().toString());
}
/** Collects the key/value pairs shown in the profile "Summary" section, in display order. */
private Map<String, String> getSummaryInfo() {
    Map<String, String> summary = Maps.newLinkedHashMap();
    summary.put(ProfileManager.JOB_ID, "N/A");
    summary.put(ProfileManager.QUERY_ID, DebugUtil.printId(context.queryId()));
    summary.put(ProfileManager.QUERY_TYPE, queryType);
    summary.put(ProfileManager.DORIS_VERSION, Version.DORIS_BUILD_VERSION);
    summary.put(ProfileManager.USER, context.getQualifiedUser());
    summary.put(ProfileManager.DEFAULT_DB, context.getDatabase());
    summary.put(ProfileManager.SQL_STATEMENT, originStmt.originStmt);
    summary.put(ProfileManager.IS_CACHED, isCached ? "Yes" : "No");
    // Instance distribution is only known once a coordinator exists.
    Map<String, Integer> instancesPerBe;
    if (coord == null) {
        instancesPerBe = Maps.newTreeMap();
    } else {
        instancesPerBe = coord.getBeToInstancesNum();
    }
    int totalInstances = 0;
    for (Integer count : instancesPerBe.values()) {
        totalInstances += count;
    }
    summary.put(ProfileManager.TOTAL_INSTANCES_NUM, String.valueOf(totalInstances));
    summary.put(ProfileManager.INSTANCES_NUM_PER_BE, instancesPerBe.toString());
    summary.put(ProfileManager.PARALLEL_FRAGMENT_EXEC_INSTANCE,
            String.valueOf(context.sessionVariable.parallelExecInstanceNum));
    return summary;
}
/** Copies the summary key/value pairs onto the current tracing span, if it is recording. */
public void addProfileToSpan() {
    Span currentSpan = Span.fromContext(Context.current());
    if (!currentSpan.isRecording()) {
        return;
    }
    getSummaryInfo().forEach(currentSpan::setAttribute);
}
/** Returns the planner built for this statement. */
public Planner planner() {
    return this.planner;
}
/**
 * Decides whether this statement must be forwarded to the master FE.
 * The master never forwards; a non-master forwards queries while its local
 * metadata is not readable, and otherwise follows the statement's redirect policy.
 *
 * @return true when the statement should be executed on the master FE
 */
public boolean isForwardToMaster() {
    if (Env.getCurrentEnv().isMaster()) {
        return false;
    }
    // We are a non-master FE here, so the original's second "!isMaster()"
    // check was always true and has been dropped. Queries must be forwarded
    // when local metadata cannot be read yet.
    if (parsedStmt instanceof QueryStmt && !Env.getCurrentEnv().canRead()) {
        return true;
    }
    // Otherwise forward only if the statement's redirect policy says so.
    return redirectStatus != null && redirectStatus.isForwardToMaster();
}
/** Output packet from the master FE when the statement was forwarded; null otherwise. */
public ByteBuffer getOutputPacket() {
    return masterOpExecutor == null ? null : masterOpExecutor.getOutputPacket();
}
/** Result set built locally for a proxied request. */
public ShowResultSet getProxyResultSet() {
    return this.proxyResultSet;
}
/** Result set received from the master FE after forwarding; null when not forwarded. */
public ShowResultSet getShowResultSet() {
    return masterOpExecutor == null ? null : masterOpExecutor.getProxyResultSet();
}
/** MySQL state reported by the master FE for a forwarded statement, UNKNOWN when not forwarded. */
public String getProxyStatus() {
    return masterOpExecutor == null
            ? MysqlStateType.UNKNOWN.name()
            : masterOpExecutor.getProxyStatus();
}
/** Returns true when the parsed statement is a (legacy-planner) query statement. */
public boolean isQueryStmt() {
    // instanceof is already null-safe; the explicit null check was redundant.
    return parsedStmt instanceof QueryStmt;
}
/**
 * Used for audit in ConnectProcessor.
 * <p>
 * TODO: Three StatementBase interfaces are called when doing audit:
 * toDigest and needAuditEncryption when parsedStmt is not a query,
 * and isValuesOrConstantSelect when parsedStmt is an instance of InsertStmt.
 * toDigest: computes the statement fingerprint used for blocking some queries.
 * needAuditEncryption: when this interface returns true, the audit log records
 * the statement via its toSql function instead of the original string.
 * isValuesOrConstantSelect: when this interface returns true, the original
 * string is truncated at 1024 characters.
 *
 * @return the parsed and analyzed statement for the stale planner, or
 *         an unresolved LogicalPlan wrapped with a LogicalPlanAdapter for Nereids.
 */
public StatementBase getParsedStmt() {
return parsedStmt;
}
/** Generates a fresh query id and runs the statement inside an "execute" tracing span. */
public void execute() throws Exception {
    UUID uuid = UUID.randomUUID();
    TUniqueId freshQueryId = new TUniqueId(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
    Span executeSpan = context.getTracer().spanBuilder("execute").setParent(Context.current()).startSpan();
    try (Scope ignored = executeSpan.makeCurrent()) {
        execute(freshQueryId);
    } finally {
        executeSpan.end();
    }
}
// Executes the (possibly pre-parsed) statement under the given query id:
// 1) analyze it (Nereids first, optionally falling back to the legacy planner),
// 2) forward to the master FE when required, otherwise
// 3) dispatch to the handle*() method matching the statement type.
// Errors are recorded into the connection's QueryState; the finally block
// reverts single-statement session-variable overrides and aborts a dangling
// insert transaction on error.
public void execute(TUniqueId queryId) throws Exception {
context.setStartTime();
plannerProfile.setQueryBeginTime();
context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
context.setQueryId(queryId);
if (parsedStmt instanceof QueryStmt) {
context.getState().setIsQuery(true);
}
if (parsedStmt instanceof LogicalPlanAdapter) {
// Nereids path: anything that is neither EXPLAIN nor a Command is a query.
context.getState().setNereids(true);
if (parsedStmt.getExplainOptions() == null
&& !(((LogicalPlanAdapter) parsedStmt).getLogicalPlan() instanceof Command)) {
context.getState().setIsQuery(true);
}
}
try {
// Inside an explicit transaction only insert/commit/rollback are allowed.
if (context.isTxnModel() && !(parsedStmt instanceof InsertStmt)
&& !(parsedStmt instanceof TransactionStmt)) {
throw new TException("This is in a transaction, only insert, commit, rollback is acceptable.");
}
analyzeVariablesInStmt();
if (!context.isTxnModel()) {
Span queryAnalysisSpan =
context.getTracer().spanBuilder("query analysis").setParent(Context.current()).startSpan();
try (Scope scope = queryAnalysisSpan.makeCurrent()) {
try {
analyze(context.getSessionVariable().toThrift());
} catch (NereidsException e) {
// Nereids failed: optionally re-analyze with the legacy planner.
if (!context.getSessionVariable().enableFallbackToOriginalPlanner) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw e.getException();
}
LOG.warn("fall back to legacy planner, because: {}", e.getMessage(), e);
parsedStmt = null;
context.getState().setNereids(false);
analyze(context.getSessionVariable().toThrift());
}
} catch (Exception e) {
queryAnalysisSpan.recordException(e);
throw e;
} finally {
queryAnalysisSpan.end();
}
if (isForwardToMaster()) {
if (isProxy) {
// This request was already forwarded once; refusing avoids a forward loop.
throw new UserException("The statement has been forwarded to master FE("
+ Env.getCurrentEnv().getSelfNode().first + ") and failed to execute"
+ " because Master FE is not ready. You may need to check FE's status");
}
forwardToMaster();
if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
context.setQueryId(masterOpExecutor.getQueryId());
}
return;
} else {
LOG.debug("no need to transfer to Master. stmt: {}", context.getStmtId());
}
} else {
// Transaction model: analyze directly, no forwarding.
analyzer = new Analyzer(context.getEnv(), context);
parsedStmt.analyze(analyzer);
}
if (prepareStmt instanceof PrepareStmt) {
handlePrepareStmt();
return;
}
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof LogicalPlanAdapter) {
if (!parsedStmt.isExplain()) {
// Enforce SQL block rules: pattern match plus scan-size limitations.
try {
Env.getCurrentEnv().getSqlBlockRuleMgr().matchSql(
originStmt.originStmt, context.getSqlHash(), context.getQualifiedUser());
} catch (AnalysisException e) {
LOG.warn(e.getMessage());
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
return;
}
List<ScanNode> scanNodeList = planner.getScanNodes();
for (ScanNode scanNode : scanNodeList) {
if (scanNode instanceof OlapScanNode) {
OlapScanNode olapScanNode = (OlapScanNode) scanNode;
Env.getCurrentEnv().getSqlBlockRuleMgr().checkLimitations(
olapScanNode.getSelectedPartitionNum().longValue(),
olapScanNode.getSelectedTabletsNum(),
olapScanNode.getCardinality(),
context.getQualifiedUser());
}
}
}
// Queries are retried on RpcException up to max_query_retry_time times,
// each retry under a freshly generated query id.
int retryTime = Config.max_query_retry_time;
for (int i = 0; i < retryTime; i++) {
try {
if (i > 0) {
UUID uuid = UUID.randomUUID();
TUniqueId newQueryId = new TUniqueId(uuid.getMostSignificantBits(),
uuid.getLeastSignificantBits());
AuditLog.getQueryAudit().log("Query {} {} times with new query id: {}",
DebugUtil.printId(queryId), i, DebugUtil.printId(newQueryId));
context.setQueryId(newQueryId);
}
handleQueryStmt();
break;
} catch (RpcException e) {
if (i == retryTime - 1) {
throw e;
}
if (!context.getMysqlChannel().isSend()) {
LOG.warn("retry {} times. stmt: {}", (i + 1), parsedStmt.getOrigStmt().originStmt);
} else {
// Rows already reached the client; retrying would corrupt the stream.
throw e;
}
} finally {
endProfile(true);
QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
}
}
} else if (parsedStmt instanceof SetStmt) {
handleSetStmt();
} else if (parsedStmt instanceof EnterStmt) {
handleEnterStmt();
} else if (parsedStmt instanceof SwitchStmt) {
handleSwitchStmt();
} else if (parsedStmt instanceof UseStmt) {
handleUseStmt();
} else if (parsedStmt instanceof TransactionStmt) {
handleTransactionStmt();
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
handleCtasStmt();
} else if (parsedStmt instanceof InsertStmt) {
try {
handleInsertStmt();
if (!((InsertStmt) parsedStmt).getQueryStmt().isExplain()) {
queryType = "Insert";
}
} catch (Throwable t) {
LOG.warn("handle insert stmt fail: {}", t.getMessage());
throw t;
}
} else if (parsedStmt instanceof LoadStmt) {
handleLoadStmt();
} else if (parsedStmt instanceof DdlStmt) {
handleDdlStmt();
} else if (parsedStmt instanceof ShowStmt) {
handleShow();
} else if (parsedStmt instanceof KillStmt) {
handleKill();
} else if (parsedStmt instanceof ExportStmt) {
handleExportStmt();
} else if (parsedStmt instanceof UnlockTablesStmt) {
handleUnlockTablesStmt();
} else if (parsedStmt instanceof LockTablesStmt) {
handleLockTablesStmt();
} else if (parsedStmt instanceof UnsupportedStmt) {
handleUnsupportedStmt();
} else {
context.getState().setError(ErrorCode.ERR_NOT_SUPPORTED_YET, "Do not support this query.");
}
} catch (IOException e) {
// I/O failures are fatal for the connection and are rethrown.
LOG.warn("execute IOException. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, e.getMessage());
throw e;
} catch (UserException e) {
LOG.warn("execute Exception. {}, {}", context.getQueryIdentifier(),
e.getMessage());
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
} catch (Exception e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
e.getClass().getSimpleName() + ", msg: " + Util.getRootCauseMessage(e));
if (parsedStmt instanceof KillStmt) {
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
}
} finally {
// Revert single-statement SET_VAR overrides regardless of outcome.
try {
SessionVariable sessionVariable = context.getSessionVariable();
VariableMgr.revertSessionValue(sessionVariable);
sessionVariable.setIsSingleSetVar(false);
sessionVariable.clearSessionOriginValue();
} catch (DdlException e) {
LOG.warn("failed to revert Session value. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
}
// A failed non-txn-model INSERT that opened a transaction must abort it.
if (!context.isTxnModel() && parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (insertStmt.isTransactionBegin() && context.getState().getStateType() == MysqlStateType.ERR) {
try {
String errMsg = Strings.emptyToNull(context.getState().getErrorMessage());
Env.getCurrentGlobalTransactionMgr().abortTransaction(
insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
(errMsg == null ? "unknown reason" : errMsg));
} catch (Exception abortTxnException) {
LOG.warn("errors when abort txn. {}", context.getQueryIdentifier(), abortTxnException);
}
}
}
}
}
/**
 * Applies session-variable hints attached to a SELECT statement's select list
 * as single-statement (non-persistent) variable overrides. They are reverted
 * by the caller after execution.
 *
 * @throws DdlException if a hint refers to an unknown or invalid variable
 */
private void analyzeVariablesInStmt() throws DdlException {
    SessionVariable sessionVariable = context.getSessionVariable();
    // instanceof is null-safe; the explicit null check was redundant.
    if (parsedStmt instanceof SelectStmt) {
        SelectStmt selectStmt = (SelectStmt) parsedStmt;
        Map<String, String> optHints = selectStmt.getSelectList().getOptHints();
        if (optHints != null) {
            // Mark overrides as single-statement so they get reverted afterwards.
            sessionVariable.setIsSingleSetVar(true);
            // Iterate entries directly instead of keySet() followed by get().
            for (Map.Entry<String, String> hint : optHints.entrySet()) {
                VariableMgr.setVar(sessionVariable,
                        new SetVar(hint.getKey(), new StringLiteral(hint.getValue())));
            }
        }
    }
}
/** Delegates execution of the current statement to the master FE. */
private void forwardToMaster() throws Exception {
    LOG.debug("need to transfer to Master. stmt: {}", context.getStmtId());
    masterOpExecutor = new MasterOpExecutor(
            originStmt, context, redirectStatus, parsedStmt instanceof QueryStmt);
    masterOpExecutor.execute();
}
// Publishes the current runtime profile to ProfileManager. Guarded by
// writeProfileLock so intermediate and final writes cannot interleave; once a
// final write (isLastWriteProfile == true) has completed, later calls no-op.
@Override
public void writeProfile(boolean isLastWriteProfile) {
if (!context.getSessionVariable().enableProfile()) {
return;
}
synchronized (writeProfileLock) {
if (isFinishedProfile) {
// A final profile has already been published for this statement.
return;
}
initProfile(plannerProfile, isLastWriteProfile);
profile.computeTimeInChildProfile();
ProfileManager.getInstance().pushProfile(profile);
isFinishedProfile = isLastWriteProfile;
}
}
// Parses (if needed) and analyzes the statement, then generates the query
// plan. Handles EXECUTE of prepared statements, COM_STMT_PREPARE, SHOW-to-
// SELECT rewriting, table read-locking during planning, and a second analyze
// pass when materialized-view selection fails on the first attempt.
public void analyze(TQueryOptions tQueryOptions) throws UserException {
if (LOG.isDebugEnabled()) {
LOG.debug("begin to analyze stmt: {}, forwarded stmt id: {}",
context.getStmtId(), context.getForwardedStmtId());
}
boolean preparedStmtReanalyzed = false;
PrepareStmtContext preparedStmtCtx = null;
if (parsedStmt instanceof ExecuteStmt) {
// EXECUTE: bind argument values into the previously prepared statement.
ExecuteStmt execStmt = (ExecuteStmt) parsedStmt;
preparedStmtCtx = context.getPreparedStmt(execStmt.getName());
if (preparedStmtCtx == null) {
throw new UserException("Could not execute, since `" + execStmt.getName() + "` not exist");
}
preparedStmtCtx.stmt.asignValues(execStmt.getArgs());
parsedStmt = preparedStmtCtx.stmt.getInnerStmt();
planner = preparedStmtCtx.planner;
analyzer = preparedStmtCtx.analyzer;
Preconditions.checkState(parsedStmt.isAnalyzed());
LOG.debug("already prepared stmt: {}", preparedStmtCtx.stmtString);
if (!preparedStmtCtx.stmt.needReAnalyze()) {
// Cached plan is still valid; nothing more to do.
return;
}
preparedStmtReanalyzed = true;
preparedStmtCtx.stmt.analyze(analyzer);
}
parse();
if (isForwardToMaster()) {
// The master FE will do the analysis.
return;
}
analyzer = new Analyzer(context.getEnv(), context);
if (parsedStmt instanceof PrepareStmt || context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
if (context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
prepareStmt = new PrepareStmt(parsedStmt,
String.valueOf(context.getEnv().getNextStmtId()), true /*binary protocol*/);
} else {
prepareStmt = (PrepareStmt) parsedStmt;
}
prepareStmt.setContext(context);
prepareStmt.analyze(analyzer);
parsedStmt = prepareStmt.getInnerStmt();
}
if (parsedStmt instanceof ShowStmt) {
// Some SHOW statements are executed as equivalent SELECTs.
SelectStmt selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(analyzer);
if (selectStmt != null) {
setParsedStmt(selectStmt);
}
}
if (parsedStmt instanceof QueryStmt
|| parsedStmt instanceof InsertStmt
|| parsedStmt instanceof CreateTableAsSelectStmt
|| parsedStmt instanceof LogicalPlanAdapter) {
// Collect referenced tables so they can be read-locked during planning.
Map<Long, TableIf> tableMap = Maps.newTreeMap();
QueryStmt queryStmt;
Set<String> parentViewNameSet = Sets.newHashSet();
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
CreateTableAsSelectStmt parsedStmt = (CreateTableAsSelectStmt) this.parsedStmt;
queryStmt = parsedStmt.getQueryStmt();
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
insertStmt.getTables(analyzer, tableMap, parentViewNameSet);
}
List<TableIf> tables = Lists.newArrayList(tableMap.values());
int analyzeTimes = 2;
for (int i = 1; i <= analyzeTimes; i++) {
MetaLockUtils.readLockTables(tables);
try {
analyzeAndGenerateQueryPlan(tQueryOptions);
break;
} catch (MVSelectFailedException e) {
/*
* If there is MVSelectFailedException after the first planner,
* there will be error mv rewritten in query.
* So, the query should be reanalyzed without mv rewritten and planner again.
* Attention: Only error rewritten tuple is forbidden to mv rewrite in the second time.
*/
if (i == analyzeTimes) {
throw e;
} else {
resetAnalyzerAndStmt();
}
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
if (parsedStmt instanceof LogicalPlanAdapter) {
throw new NereidsException(new AnalysisException("Unexpected exception: " + e.getMessage(), e));
}
throw new AnalysisException("Unexpected exception: " + e.getMessage());
} finally {
MetaLockUtils.readUnlockTables(tables);
}
}
} else {
// Non-query statements: plain analyze, no table locking or planning.
try {
parsedStmt.analyze(analyzer);
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
}
if (preparedStmtReanalyzed) {
// Persist the refreshed plan/analyzer back into the prepared-stmt cache.
LOG.debug("update planner and analyzer after prepared statement reanalyzed");
preparedStmtCtx.planner = planner;
preparedStmtCtx.analyzer = analyzer;
Preconditions.checkNotNull(preparedStmtCtx.stmt);
preparedStmtCtx.analyzer.setPrepareStmt(preparedStmtCtx.stmt);
}
}
// Lazily parses originStmt into parsedStmt (no-op when a statement object was
// supplied up front), applies statement-level variable hints, and records the
// statement's redirect policy for master-forwarding decisions.
private void parse() throws AnalysisException, DdlException {
if (parsedStmt == null) {
SqlScanner input = new SqlScanner(new StringReader(originStmt.originStmt),
context.getSessionVariable().getSqlMode());
SqlParser parser = new SqlParser(input);
try {
StatementBase parsedStmt = setParsedStmt(SqlParserUtils.getStmt(parser, originStmt.idx));
parsedStmt.setOrigStmt(originStmt);
parsedStmt.setUserInfo(context.getCurrentUserIdentity());
} catch (Error e) {
// The generated parser can throw Error on pathological input;
// surface it as a plain syntax problem.
LOG.info("error happened when parsing stmt {}, id: {}", originStmt, context.getStmtId(), e);
throw new AnalysisException("sql parsing error, please check your sql");
} catch (AnalysisException e) {
// Prefer the parser's detailed syntax message when it has one.
String syntaxError = parser.getErrorMsg(originStmt.originStmt);
LOG.info("analysis exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), syntaxError, e);
if (syntaxError == null) {
throw e;
} else {
throw new AnalysisException(syntaxError, e);
}
} catch (Exception e) {
LOG.info("unexpected exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), parser.getErrorMsg(originStmt.originStmt), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
analyzeVariablesInStmt();
}
redirectStatus = parsedStmt.getRedirectStatus();
}
// Analyzes parsedStmt and builds the physical plan. For legacy-planner query
// and insert statements it also runs expression rewriting (constant folding,
// subquery rewriting, row-policy rewriting) and, when any rewrite changed the
// statement, re-analyzes it while preserving the original client-visible
// result types and column labels.
private void analyzeAndGenerateQueryPlan(TQueryOptions tQueryOptions) throws UserException {
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = null;
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
}
if (parsedStmt instanceof InsertStmt) {
queryStmt = (QueryStmt) ((InsertStmt) parsedStmt).getQueryStmt();
}
// Drop a degenerate (empty) ORDER BY clause before analysis.
if (queryStmt.getOrderByElements() != null && queryStmt.getOrderByElements().isEmpty()) {
queryStmt.removeOrderByElements();
}
}
parsedStmt.analyze(analyzer);
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
ExprRewriter rewriter = analyzer.getExprRewriter();
rewriter.reset();
if (context.getSessionVariable().isEnableFoldConstantByBe()) {
// Fold constants by delegating evaluation to BE.
parsedStmt.foldConstant(rewriter);
}
// Explain options must survive the reset/re-analyze cycle below.
ExplainOptions explainOptions = parsedStmt.getExplainOptions();
boolean reAnalyze = false;
parsedStmt.rewriteExprs(rewriter);
reAnalyze = rewriter.changed();
if (analyzer.containSubquery()) {
parsedStmt = setParsedStmt(StmtRewriter.rewrite(analyzer, parsedStmt));
reAnalyze = true;
}
// Row-policy rewriting for SELECT, each UNION/SET operand, and the
// query side of INSERT.
if (parsedStmt instanceof SelectStmt) {
if (StmtRewriter.rewriteByPolicy(parsedStmt, analyzer)) {
reAnalyze = true;
}
}
if (parsedStmt instanceof SetOperationStmt) {
List<SetOperationStmt.SetOperand> operands = ((SetOperationStmt) parsedStmt).getOperands();
for (SetOperationStmt.SetOperand operand : operands) {
if (StmtRewriter.rewriteByPolicy(operand.getQueryStmt(), analyzer)) {
reAnalyze = true;
}
}
}
if (parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = ((InsertStmt) parsedStmt).getQueryStmt();
if (queryStmt != null && StmtRewriter.rewriteByPolicy(queryStmt, analyzer)) {
reAnalyze = true;
}
}
if (reAnalyze) {
// Re-analyze the rewritten statement but keep the original result
// types and column labels so the client sees an unchanged schema.
List<Type> origResultTypes = Lists.newArrayList();
for (Expr e : parsedStmt.getResultExprs()) {
origResultTypes.add(e.getType());
}
List<String> origColLabels =
Lists.newArrayList(parsedStmt.getColLabels());
analyzer = new Analyzer(context.getEnv(), context);
if (prepareStmt != null) {
prepareStmt.reset();
prepareStmt.analyze(analyzer);
}
parsedStmt.reset();
parsedStmt.analyze(analyzer);
parsedStmt.castResultExprs(origResultTypes);
parsedStmt.setColLabels(origColLabels);
if (LOG.isTraceEnabled()) {
LOG.trace("rewrittenStmt: " + parsedStmt.toSql());
}
if (explainOptions != null) {
parsedStmt.setIsExplain(explainOptions);
}
}
}
plannerProfile.setQueryAnalysisFinishTime();
if (parsedStmt instanceof LogicalPlanAdapter) {
planner = new NereidsPlanner(statementContext);
} else {
planner = new OriginalPlanner(analyzer);
}
if (parsedStmt instanceof QueryStmt
|| parsedStmt instanceof InsertStmt
|| parsedStmt instanceof LogicalPlanAdapter) {
planner.plan(parsedStmt, tQueryOptions);
}
plannerProfile.setQueryPlanFinishTime();
}
/** Discards analysis state so the statement can be analyzed again from scratch. */
private void resetAnalyzerAndStmt() {
    analyzer = new Analyzer(context.getEnv(), context);
    StatementBase stmt = parsedStmt;
    stmt.reset();
    // Select lists may carry rewritten expressions; restore their original form too.
    if (stmt instanceof QueryStmt) {
        ((QueryStmt) stmt).resetSelectList();
    }
    if (stmt instanceof InsertStmt) {
        ((InsertStmt) stmt).getQueryStmt().resetSelectList();
    }
}
/** Cancels the running coordinator, if any. */
public void cancel() {
    // Snapshot the field once so a concurrent reassignment cannot cause an NPE.
    final Coordinator current = coord;
    if (current != null) {
        current.cancel();
    }
}
// Handles KILL <connection_id>: kills either this connection or, with the
// same user or global ADMIN privilege, another user's connection/query.
private void handleKill() throws DdlException {
KillStmt killStmt = (KillStmt) parsedStmt;
int id = killStmt.getConnectionId();
ConnectContext killCtx = context.getConnectScheduler().getContext(id);
if (killCtx == null) {
// NOTE(review): reportDdlException presumably throws; otherwise the
// dereference of killCtx below would NPE — confirm against ErrorReport.
ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, id);
}
if (context == killCtx) {
// Killing our own connection: just mark it killed.
context.setKilled();
} else {
// Killing someone else requires same user or global ADMIN privilege.
if (!killCtx.getQualifiedUser().equals(ConnectContext.get().getQualifiedUser())
&& !Env.getCurrentEnv().getAuth().checkGlobalPriv(ConnectContext.get(),
PrivPredicate.ADMIN)) {
ErrorReport.reportDdlException(ErrorCode.ERR_KILL_DENIED_ERROR, id);
}
killCtx.kill(killStmt.isConnectionKill());
}
context.getState().setOk();
}
/** Executes a SET statement; a DdlException is recorded as an error state. */
private void handleSetStmt() {
    try {
        SetStmt stmt = (SetStmt) parsedStmt;
        new SetExecutor(context, stmt).execute();
    } catch (DdlException e) {
        context.getState().setError(ErrorCode.ERR_LOCAL_VARIABLE, e.getMessage());
        return;
    }
    context.getState().setOk();
}
// Streams cached result rows to the client: sends the column definitions once
// (before the first row), then one MySQL packet per cached row. When isEos is
// true it also sets the EOF state and captures statistics from the last batch.
// Returns whether the column header has been sent by the time we return.
private boolean sendCachedValues(MysqlChannel channel, List<InternalService.PCacheValue> cacheValues,
SelectStmt selectStmt, boolean isSendFields, boolean isEos)
throws Exception {
RowBatch batch = null;
boolean isSend = isSendFields;
for (InternalService.PCacheValue value : cacheValues) {
TResultBatch resultBatch = new TResultBatch();
for (ByteString one : value.getRowsList()) {
resultBatch.addToRows(ByteBuffer.wrap(one.toByteArray()));
}
resultBatch.setPacketSeq(1);
resultBatch.setIsCompressed(false);
batch = new RowBatch();
batch.setBatch(resultBatch);
batch.setEos(true);
if (!isSend) {
// Column metadata must precede any row packet.
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
context.updateReturnRows(batch.getBatch().getRows().size());
}
if (isEos) {
if (batch != null) {
statisticsForAuditLog = batch.getQueryStatistics() == null
? null : batch.getQueryStatistics().toBuilder();
}
if (!isSend) {
// Even an empty result needs the column header before EOF.
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
context.getState().setEof();
}
return isSend;
}
/**
 * Handles a SelectStmt via the result cache. On a full cache hit the cached
 * rows are streamed directly; on a partial (partition) hit the cached left
 * range is sent first and the statement is rewritten and re-planned to fetch
 * only the missing range, after which the remainder is executed normally.
 */
private void handleCacheStmt(CacheAnalyzer cacheAnalyzer, MysqlChannel channel, SelectStmt selectStmt)
throws Exception {
InternalService.PFetchCacheResult cacheResult = cacheAnalyzer.getCacheData();
CacheMode mode = cacheAnalyzer.getCacheMode();
SelectStmt newSelectStmt = selectStmt;
boolean isSendFields = false;
if (cacheResult != null) {
isCached = true;
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Full) {
// Entire result is cached: stream it and finish.
sendCachedValues(channel, cacheResult.getValuesList(), newSelectStmt, isSendFields, true);
return;
}
if (mode == CacheMode.Partition) {
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Left) {
// Left part of the range is cached; send it before executing the rest.
isSendFields = sendCachedValues(channel, cacheResult.getValuesList(),
newSelectStmt, isSendFields, false);
}
// Re-analyze and re-plan the rewritten statement covering only the
// uncached range.
newSelectStmt = cacheAnalyzer.getRewriteStmt();
newSelectStmt.reset();
analyzer = new Analyzer(context.getEnv(), context);
newSelectStmt.analyze(analyzer);
if (parsedStmt instanceof LogicalPlanAdapter) {
planner = new NereidsPlanner(statementContext);
} else {
planner = new OriginalPlanner(analyzer);
}
planner.plan(newSelectStmt, context.getSessionVariable().toThrift());
}
}
sendResult(false, isSendFields, newSelectStmt, channel, cacheAnalyzer, cacheResult);
}
/**
 * Tries to answer a constant SELECT (all select items are literals) directly
 * on FE without dispatching to BE.
 *
 * @return true if the result was produced and sent here; false when any
 *         select item is not a literal and normal execution should continue
 */
private boolean handleSelectRequestInFe(SelectStmt parsedSelectStmt) throws IOException {
    List<SelectListItem> items = parsedSelectStmt.getSelectList().getItems();
    List<Column> columns = new ArrayList<>(items.size());
    List<String> columnLabels = parsedSelectStmt.getColLabels();
    List<String> row = new ArrayList<>();
    for (int i = 0; i < items.size(); i++) {
        Expr expr = items.get(i).getExpr();
        if (!(expr instanceof LiteralExpr)) {
            // Non-constant expression: fall back to the normal execution path.
            return false;
        }
        columns.add(new Column(columnLabels.get(i), expr.getType()));
        row.add(renderLiteral(expr));
    }
    ResultSetMetaData metadata = new CommonResultSet.CommonResultSetMetaData(columns);
    ResultSet resultSet = new CommonResultSet(metadata, Collections.singletonList(row));
    sendResultSet(resultSet);
    return true;
}

/** Renders a literal expression to its textual result-row form (null for NULL). */
private static String renderLiteral(Expr expr) {
    if (expr instanceof NullLiteral) {
        return null;
    }
    if (expr instanceof FloatLiteral) {
        return LiteralUtils.getStringValue((FloatLiteral) expr);
    }
    if (expr instanceof DecimalLiteral) {
        return ((DecimalLiteral) expr).getValue().toPlainString();
    }
    if (expr instanceof ArrayLiteral) {
        return LiteralUtils.getStringValue((ArrayLiteral) expr);
    }
    return expr.getStringValue();
}
// Executes a query statement: registers query detail for monitoring, then
// short-circuits EXPLAIN, constant SELECTs (answered on FE), cacheable
// SELECTs, and LIMIT 0; everything else goes through sendResult().
private void handleQueryStmt() throws Exception {
context.getMysqlChannel().reset();
Queriable queryStmt = (Queriable) parsedStmt;
QueryDetail queryDetail = new QueryDetail(context.getStartTime(),
DebugUtil.printId(context.queryId()),
context.getStartTime(), -1, -1,
QueryDetail.QueryMemState.RUNNING,
context.getDatabase(),
originStmt.originStmt);
context.setQueryDetail(queryDetail);
QueryDetailQueue.addOrUpdateQueryDetail(queryDetail);
if (queryStmt.isExplain()) {
String explainString = planner.getExplainString(queryStmt.getExplainOptions());
handleExplainStmt(explainString);
return;
}
// A SELECT without FROM whose items are all literals can be answered on FE.
if (parsedStmt instanceof SelectStmt && ((SelectStmt) parsedStmt).getTableRefs().isEmpty()) {
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (handleSelectRequestInFe(parsedSelectStmt)) {
return;
}
}
MysqlChannel channel = context.getMysqlChannel();
boolean isOutfileQuery = queryStmt.hasOutFileClause();
CacheAnalyzer cacheAnalyzer = new CacheAnalyzer(context, parsedStmt, planner);
if (cacheAnalyzer.enableCache() && !isOutfileQuery && queryStmt instanceof SelectStmt) {
handleCacheStmt(cacheAnalyzer, channel, (SelectStmt) queryStmt);
return;
}
// LIMIT 0: reply with only the column header and EOF, skipping execution.
if (parsedStmt instanceof SelectStmt) {
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (parsedSelectStmt.getLimit() == 0) {
LOG.info("ignore handle limit 0 ,sql:{}", parsedSelectStmt.toSql());
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
context.getState().setEof();
return;
}
}
sendResult(isOutfileQuery, false, queryStmt, channel, null, null);
}
// Dispatches the plan via a new Coordinator and streams result batches back
// to the client, interleaving the column header, cached right-range values
// (for partition-cache hits) and cache population. Tracing spans wrap the
// scheduling and result-fetching phases.
private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable queryStmt, MysqlChannel channel,
CacheAnalyzer cacheAnalyzer, InternalService.PFetchCacheResult cacheResult) throws Exception {
RowBatch batch;
coord = new Coordinator(context, analyzer, planner);
QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
coord.setProfileWriter(this);
Span queryScheduleSpan =
context.getTracer().spanBuilder("query schedule").setParent(Context.current()).startSpan();
try (Scope scope = queryScheduleSpan.makeCurrent()) {
coord.exec();
} catch (Exception e) {
queryScheduleSpan.recordException(e);
throw e;
} finally {
queryScheduleSpan.end();
}
plannerProfile.setQueryScheduleFinishTime();
writeProfile(false);
Span fetchResultSpan = context.getTracer().spanBuilder("fetch result").setParent(Context.current()).startSpan();
try (Scope scope = fetchResultSpan.makeCurrent()) {
while (true) {
// Time spent waiting on the coordinator counts as fetch-result time.
plannerProfile.setTempStartTime();
batch = coord.getNext();
plannerProfile.freshFetchResultConsumeTime();
if (batch.getBatch() != null) {
if (cacheAnalyzer != null) {
// Mirror rows into the cache while streaming them out.
cacheAnalyzer.copyRowBatch(batch);
}
plannerProfile.setTempStartTime();
if (!isSendFields) {
// Column header must precede the first row packet.
if (!isOutfileQuery) {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
isSendFields = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
plannerProfile.freshWriteResultConsumeTime();
context.updateReturnRows(batch.getBatch().getRows().size());
}
if (batch.isEos()) {
break;
}
}
if (cacheAnalyzer != null) {
// Append the cached right-hand range (if any), then persist the cache.
if (cacheResult != null && cacheAnalyzer.getHitRange() == Cache.HitRange.Right) {
isSendFields =
sendCachedValues(channel, cacheResult.getValuesList(), (SelectStmt) queryStmt, isSendFields,
false);
}
cacheAnalyzer.updateCache();
}
if (!isSendFields) {
// An empty result set still needs the column header before EOF.
if (!isOutfileQuery) {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
}
statisticsForAuditLog = batch.getQueryStatistics() == null ? null : batch.getQueryStatistics().toBuilder();
context.getState().setEof();
plannerProfile.setQueryFetchResultFinishTime();
} catch (Exception e) {
fetchResultSpan.recordException(e);
throw e;
} finally {
fetchResultSpan.end();
}
}
/** Fetches the waiting-transaction status locally on the master FE, otherwise via the master. */
private TWaitingTxnStatusResult getWaitingTxnStatus(TWaitingTxnStatusRequest request) throws Exception {
    if (Env.getCurrentEnv().isMaster()) {
        return Env.getCurrentGlobalTransactionMgr().getWaitingTxnStatus(request);
    }
    MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
    return masterTxnExecutor.getWaitingTxnStatus(request);
}
// Handles BEGIN / COMMIT / ROLLBACK in the lightweight insert-transaction
// model: BEGIN initializes a TransactionEntry; COMMIT flushes buffered rows,
// commits, and waits for the transaction to become visible; ROLLBACK aborts.
// An empty transaction (no rows) is simply discarded on commit/rollback.
private void handleTransactionStmt() throws Exception {
context.getMysqlChannel().reset();
context.getState().setOk(0, 0, "");
if (context.getTxnEntry() != null && context.getTxnEntry().getRowsInTransaction() == 0
&& (parsedStmt instanceof TransactionCommitStmt || parsedStmt instanceof TransactionRollbackStmt)) {
// Nothing was inserted: commit/rollback just clears the entry.
context.setTxnEntry(null);
} else if (parsedStmt instanceof TransactionBeginStmt) {
if (context.isTxnModel()) {
LOG.info("A transaction has already begin");
return;
}
TTxnParams txnParams = new TTxnParams();
txnParams.setNeedTxn(true).setEnablePipelineTxnLoad(Config.enable_pipeline_load)
.setThriftRpcTimeoutMs(5000).setTxnId(-1).setDb("").setTbl("");
// Strict insert mode tolerates no filtered rows; otherwise any ratio passes.
if (context.getSessionVariable().getEnableInsertStrict()) {
txnParams.setMaxFilterRatio(0);
} else {
txnParams.setMaxFilterRatio(1.0);
}
if (context.getTxnEntry() == null) {
context.setTxnEntry(new TransactionEntry());
}
TransactionEntry txnEntry = context.getTxnEntry();
txnEntry.setTxnConf(txnParams);
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.PREPARE.name());
sb.append("', 'txnId':'").append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} else if (parsedStmt instanceof TransactionCommitStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to commit");
return;
}
TTxnParams txnConf = context.getTxnEntry().getTxnConf();
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
if (context.getTxnEntry().getDataToSend().size() > 0) {
// Flush rows still buffered on FE before committing.
executor.sendData();
}
executor.commitTransaction();
// Wait for the transaction to become VISIBLE (or report why it failed).
TWaitingTxnStatusRequest request = new TWaitingTxnStatusRequest();
request.setDbId(txnConf.getDbId()).setTxnId(txnConf.getTxnId());
request.setLabelIsSet(false);
request.setTxnIdIsSet(true);
TWaitingTxnStatusResult statusResult = getWaitingTxnStatus(request);
TransactionStatus txnStatus = TransactionStatus.valueOf(statusResult.getTxnStatusId());
if (txnStatus == TransactionStatus.COMMITTED) {
throw new AnalysisException("transaction commit successfully, BUT data will be visible later.");
} else if (txnStatus != TransactionStatus.VISIBLE) {
String errMsg = "commit failed, rollback.";
if (statusResult.getStatus().isSetErrorMsgs()
&& statusResult.getStatus().getErrorMsgs().size() > 0) {
errMsg = String.join(". ", statusResult.getStatus().getErrorMsgs());
}
throw new AnalysisException(errMsg);
}
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(txnStatus.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
} finally {
// The transaction entry is cleared whether commit succeeded or not.
context.setTxnEntry(null);
}
} else if (parsedStmt instanceof TransactionRollbackStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to rollback");
return;
}
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
executor.abortTransaction();
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.ABORTED.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
} finally {
context.setTxnEntry(null);
}
} else {
throw new TException("parsedStmt type is not TransactionStmt");
}
}
/**
 * Executes one INSERT inside an open streaming transaction and returns the
 * number of rows it contributed. Only constant VALUES inserts are supported;
 * the rows are buffered on the txn entry and flushed in batches.
 *
 * @param insertStmt the analyzed INSERT targeting the transaction's table
 * @return number of rows taken from the VALUES clause
 */
public int executeForTxn(InsertStmt insertStmt)
        throws UserException, TException, InterruptedException, ExecutionException, TimeoutException {
    // Lazily open the transaction on the first INSERT of this session.
    if (context.isTxnIniting()) {
        beginTxn(insertStmt.getDb(), insertStmt.getTbl());
    }
    // A streaming transaction is pinned to a single (db, table) target.
    TTxnParams conf = context.getTxnEntry().getTxnConf();
    boolean sameTarget = conf.getDb().equals(insertStmt.getDb())
            && conf.getTbl().equals(insertStmt.getTbl());
    if (!sameTarget) {
        throw new TException("Only one table can be inserted in one transaction.");
    }
    QueryStmt queryStmt = insertStmt.getQueryStmt();
    if (!(queryStmt instanceof SelectStmt)) {
        throw new TException("queryStmt is not SelectStmt, insert command error");
    }
    TransactionEntry txnEntry = context.getTxnEntry();
    SelectStmt select = (SelectStmt) queryStmt;
    int insertedRows = 0;
    if (select.getValueList() != null) {
        int expectedColumns = txnEntry.getTable().getBaseSchema(false).size();
        // Validate every row up front so a malformed row aborts before any data is sent.
        for (List<Expr> row : select.getValueList().getRows()) {
            if (expectedColumns != row.size()) {
                throw new TException("Column count doesn't match value count");
            }
        }
        for (List<Expr> row : select.getValueList().getRows()) {
            insertedRows++;
            InternalService.PDataRow encoded = getRowStringValue(row);
            if (encoded == null) {
                continue;
            }
            List<InternalService.PDataRow> pending = txnEntry.getDataToSend();
            pending.add(encoded);
            // Flush in batches so the FE does not buffer unbounded row data.
            if (pending.size() >= MAX_DATA_TO_SEND_FOR_TXN) {
                new InsertStreamTxnExecutor(txnEntry).sendData();
            }
        }
    }
    txnEntry.setRowsInTransaction(txnEntry.getRowsInTransaction() + insertedRows);
    return insertedRows;
}
/**
 * Opens a new streaming-insert transaction against the given db/table and
 * primes the BE side via a stream-load "put" request.
 * On a master FE the transaction is begun locally; on a follower the begin
 * request is forwarded to the master via MasterTxnExecutor.
 */
private void beginTxn(String dbName, String tblName) throws UserException, TException,
InterruptedException, ExecutionException, TimeoutException {
TransactionEntry txnEntry = context.getTxnEntry();
TTxnParams txnConf = txnEntry.getTxnConf();
// Transaction timeout is taken from the session's query timeout.
long timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();
TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING;
// Resolve db/table eagerly; invalid names fail before any txn is begun.
Database dbObj = Env.getCurrentInternalCatalog()
.getDbOrException(dbName, s -> new TException("database is invalid for dbName: " + s));
Table tblObj = dbObj.getTableOrException(tblName, s -> new TException("table is invalid: " + s));
txnConf.setDbId(dbObj.getId()).setTbl(tblName).setDb(dbName);
txnEntry.setTable(tblObj);
txnEntry.setDb(dbObj);
String label = txnEntry.getLabel();
if (Env.getCurrentEnv().isMaster()) {
// Master path: begin the txn directly in the global transaction manager.
long txnId = Env.getCurrentGlobalTransactionMgr().beginTransaction(
txnConf.getDbId(), Lists.newArrayList(tblObj.getId()),
label, new TransactionState.TxnCoordinator(
TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
sourceType, timeoutSecond);
txnConf.setTxnId(txnId);
// Auth code is generated by the txn manager on the master.
String authCodeUuid = Env.getCurrentGlobalTransactionMgr().getTransactionState(
txnConf.getDbId(), txnConf.getTxnId()).getAuthCode();
txnConf.setAuthCodeUuid(authCodeUuid);
} else {
// Follower path: generate the auth code locally and forward the begin
// request to the master FE.
String authCodeUuid = UUID.randomUUID().toString();
MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
TLoadTxnBeginRequest request = new TLoadTxnBeginRequest();
request.setDb(txnConf.getDb()).setTbl(txnConf.getTbl()).setAuthCodeUuid(authCodeUuid)
.setCluster(dbObj.getClusterName()).setLabel(label).setUser("").setUserIp("").setPasswd("");
TLoadTxnBeginResult result = masterTxnExecutor.beginTxn(request);
txnConf.setTxnId(result.getTxnId());
txnConf.setAuthCodeUuid(authCodeUuid);
}
// Prepare the BE load channel: CSV stream-load in APPEND mode keyed by this query id.
TStreamLoadPutRequest request = new TStreamLoadPutRequest();
request.setTxnId(txnConf.getTxnId()).setDb(txnConf.getDb())
.setTbl(txnConf.getTbl())
.setFileType(TFileType.FILE_STREAM).setFormatType(TFileFormatType.FORMAT_CSV_PLAIN)
.setMergeType(TMergeType.APPEND).setThriftRpcTimeoutMs(5000).setLoadId(context.queryId());
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
executor.beginTransaction(request);
}
/**
 * Executes an INSERT statement (either inside an explicit transaction or as a
 * one-shot load), then reports label/txnId/status back to the client.
 * EXPLAIN requests are answered without running the load.
 */
private void handleInsertStmt() throws Exception {
// Drop any buffered response bytes before producing a new result.
if (context.getMysqlChannel() != null) {
context.getMysqlChannel().reset();
}
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (insertStmt.getQueryStmt().hasOutFileClause()) {
throw new DdlException("Not support OUTFILE clause in INSERT statement");
}
// EXPLAIN INSERT: emit the plan text and return without loading.
if (insertStmt.getQueryStmt().isExplain()) {
ExplainOptions explainOptions = insertStmt.getQueryStmt().getExplainOptions();
insertStmt.setIsExplain(explainOptions);
String explainString = planner.getExplainString(explainOptions);
handleExplainStmt(explainString);
return;
}
long createTime = System.currentTimeMillis();
Throwable throwable = null;
long txnId = -1;
String label = "";
long loadedRows = 0;
int filteredRows = 0;
// Pessimistic default; upgraded as the load progresses.
TransactionStatus txnStatus = TransactionStatus.ABORTED;
String errMsg = "";
TableType tblType = insertStmt.getTargetTable().getType();
if (context.isTxnModel()) {
// Explicit-transaction path: only constant VALUES inserts are allowed.
if (insertStmt.getQueryStmt() instanceof SelectStmt) {
if (((SelectStmt) insertStmt.getQueryStmt()).getTableRefs().size() > 0) {
throw new TException("Insert into ** select is not supported in a transaction");
}
}
txnStatus = TransactionStatus.PREPARE;
loadedRows = executeForTxn(insertStmt);
label = context.getTxnEntry().getLabel();
txnId = context.getTxnEntry().getTxnConf().getTxnId();
} else {
// One-shot load path: run the plan via a Coordinator, then commit.
label = insertStmt.getLabel();
LOG.info("Do insert [{}] with query id: {}", label, DebugUtil.printId(context.queryId()));
try {
coord = new Coordinator(context, analyzer, planner);
coord.setLoadZeroTolerance(context.getSessionVariable().getEnableInsertStrict());
coord.setQueryType(TQueryType.LOAD);
QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), coord);
coord.exec();
// join() returning true means execution ended before the timeout.
boolean notTimeout = coord.join(context.getSessionVariable().getQueryTimeoutS());
if (!coord.isDone()) {
coord.cancel();
if (notTimeout) {
errMsg = coord.getExecStatus().getErrorMsg();
ErrorReport.reportDdlException("There exists unhealthy backend. "
+ errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
} else {
ErrorReport.reportDdlException(ErrorCode.ERR_EXECUTE_TIMEOUT);
}
}
if (!coord.getExecStatus().ok()) {
errMsg = coord.getExecStatus().getErrorMsg();
LOG.warn("insert failed: {}", errMsg);
ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
}
LOG.debug("delta files is {}", coord.getDeltaUrls());
// Loaded/filtered row counts are reported by the BEs via load counters.
if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) {
loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL));
}
if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) {
filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL));
}
// In strict mode any filtered row fails the whole INSERT.
if (context.getSessionVariable().getEnableInsertStrict()) {
if (filteredRows > 0) {
context.getState().setError(ErrorCode.ERR_FAILED_WHEN_INSERT,
"Insert has filtered data in strict mode, tracking_url=" + coord.getTrackingUrl());
return;
}
}
// Non-OLAP targets (e.g. external tables) have no txn to commit.
if (tblType != TableType.OLAP && tblType != TableType.MATERIALIZED_VIEW) {
context.getState().setOk(loadedRows, filteredRows, null);
return;
}
// Commit; VISIBLE if publish finished within the wait, COMMITTED otherwise.
if (Env.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
insertStmt.getDbObj(), Lists.newArrayList(insertStmt.getTargetTable()),
insertStmt.getTransactionId(),
TabletCommitInfo.fromThrift(coord.getCommitInfos()),
context.getSessionVariable().getInsertVisibleTimeoutMs())) {
txnStatus = TransactionStatus.VISIBLE;
} else {
txnStatus = TransactionStatus.COMMITTED;
}
} catch (Throwable t) {
// Any failure aborts the txn; the abort itself is best-effort.
LOG.warn("handle insert stmt fail: {}", label, t);
try {
Env.getCurrentGlobalTransactionMgr().abortTransaction(
insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
t.getMessage() == null ? "unknown reason" : t.getMessage());
} catch (Exception abortTxnException) {
LOG.warn("errors when abort txn", abortTxnException);
}
if (!Config.using_old_load_usage_pattern) {
StringBuilder sb = new StringBuilder(t.getMessage());
if (!Strings.isNullOrEmpty(coord.getTrackingUrl())) {
sb.append(". url: " + coord.getTrackingUrl());
}
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, sb.toString());
return;
}
/*
* If config 'using_old_load_usage_pattern' is true.
* Doris will return a label to user, and user can use this label to check load job's status,
* which exactly like the old insert stmt usage pattern.
*/
throwable = t;
} finally {
endProfile(true);
QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
}
txnId = insertStmt.getTransactionId();
// Record the finished job so the label can be looked up later.
try {
context.getEnv().getLoadManager()
.recordFinishedLoadJob(label, txnId, insertStmt.getDb(), insertStmt.getTargetTable().getId(),
EtlJobType.INSERT, createTime, throwable == null ? "" : throwable.getMessage(),
coord.getTrackingUrl());
} catch (MetaNotFoundException e) {
LOG.warn("Record info of insert load with error {}", e.getMessage(), e);
errMsg = "Record info of insert load with error " + e.getMessage();
}
}
// Build the JSON-ish status line returned in the OK packet.
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name());
sb.append("', 'txnId':'").append(txnId).append("'");
if (tblType == TableType.MATERIALIZED_VIEW) {
sb.append("', 'rows':'").append(loadedRows).append("'");
}
if (!Strings.isNullOrEmpty(errMsg)) {
sb.append(", 'err':'").append(errMsg).append("'");
}
sb.append("}");
context.getState().setOk(loadedRows, filteredRows, sb.toString());
// Keep the session-level insert result in sync for later SHOW commands.
context.setOrUpdateInsertResult(txnId, label, insertStmt.getDb(), insertStmt.getTbl(),
txnStatus, loadedRows, filteredRows);
context.updateReturnRows((int) loadedRows);
}
/** Accepts a statement Doris does not implement and answers with a bare OK. */
private void handleUnsupportedStmt() {
    // Discard any half-written response, then reply OK.
    context.getMysqlChannel().reset();
    context.getState().setOk();
}

/** Handles SWITCH: changes the session's current catalog. */
private void handleSwitchStmt() throws AnalysisException {
    String catalogName = ((SwitchStmt) parsedStmt).getCatalogName();
    try {
        context.getEnv().changeCatalog(context, catalogName);
    } catch (DdlException e) {
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
        return;
    }
    context.getState().setOk();
}

/** Registers a prepared statement on the connection; binary protocol gets a PREPARE_OK reply. */
private void handlePrepareStmt() throws Exception {
    String name = prepareStmt.getName();
    LOG.debug("add prepared statement {}, isBinaryProtocol {}",
            name, prepareStmt.isBinaryProtocol());
    context.addPreparedStmt(name,
            new PrepareStmtContext(prepareStmt, context, planner, analyzer, name));
    if (prepareStmt.isBinaryProtocol()) {
        sendStmtPrepareOK();
    }
    context.getState().setOk();
}

/** Handles USE: optionally switches catalog, then changes the current database. */
private void handleUseStmt() throws AnalysisException {
    UseStmt useStmt = (UseStmt) parsedStmt;
    try {
        // A cluster must be selected before a database can be used.
        if (Strings.isNullOrEmpty(useStmt.getClusterName())) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER);
        }
        String catalogName = useStmt.getCatalogName();
        if (catalogName != null) {
            context.getEnv().changeCatalog(context, catalogName);
        }
        context.getEnv().changeDb(context, useStmt.getDatabase());
    } catch (DdlException e) {
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
        return;
    }
    context.getState().setOk();
}
/** Sends result-set metadata: a column count, one field packet per column, then EOF. */
private void sendMetaData(ResultSetMetaData metaData) throws IOException {
    // Column count packet.
    serializer.reset();
    serializer.writeVInt(metaData.getColumnCount());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    // One column-definition packet per column.
    for (Column col : metaData.getColumns()) {
        serializer.reset();
        serializer.writeField(col.getName(), col.getType().getPrimitiveType());
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    // Terminating EOF packet.
    serializer.reset();
    MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState());
    eofPacket.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}

/**
 * Sends the COM_STMT_PREPARE OK response: status byte, statement id,
 * result-column count (always 0 here) and placeholder-parameter count,
 * followed by one field packet per placeholder if any exist.
 */
private void sendStmtPrepareOK() throws IOException {
    serializer.reset();
    serializer.writeInt1(0);
    // The statement "name" is a numeric id on the binary-protocol path.
    // Was Integer.valueOf(...): boxed an Integer only to unbox it again.
    serializer.writeInt4(Integer.parseInt(prepareStmt.getName()));
    int numColumns = 0;
    serializer.writeInt2(numColumns);
    int numParams = prepareStmt.getColLabelsOfPlaceHolders().size();
    serializer.writeInt2(numParams);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    if (numParams > 0) {
        // Describe each placeholder so the client knows parameter types.
        sendFields(prepareStmt.getColLabelsOfPlaceHolders(),
                exprToType(prepareStmt.getSlotRefOfPlaceHolders()));
    }
    context.getState().setOk();
}

/** Sends a list of field-definition packets followed by an EOF packet. */
private void sendFields(List<String> colNames, List<PrimitiveType> types) throws IOException {
    serializer.reset();
    serializer.writeVInt(colNames.size());
    LOG.debug("sendFields {}", colNames.size());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    for (int i = 0; i < colNames.size(); ++i) {
        serializer.reset();
        serializer.writeField(colNames.get(i), types.get(i));
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    serializer.reset();
    MysqlEofPacket eofPacket = new MysqlEofPacket(context.getState());
    eofPacket.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/** Streams a complete result set (metadata, rows, EOF) to the MySQL client. */
public void sendResultSet(ResultSet resultSet) throws IOException {
    List<List<String>> rows = resultSet.getResultRows();
    context.updateReturnRows(rows.size());
    sendMetaData(resultSet.getMetaData());
    for (List<String> row : rows) {
        serializer.reset();
        for (String item : row) {
            // Both a real null and the FE's null sentinel encode as SQL NULL.
            boolean isNull = item == null || item.equals(FeConstants.null_string);
            if (isNull) {
                serializer.writeNull();
            } else {
                serializer.writeLenEncodedString(item);
            }
        }
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}

/** Runs a SHOW statement and delivers its result set (or stashes it for a proxy). */
private void handleShow() throws IOException, AnalysisException, DdlException {
    ShowResultSet resultSet = new ShowExecutor(context, (ShowStmt) parsedStmt).execute();
    if (resultSet == null) {
        // Some SHOW variants answer through the connection state directly.
        return;
    }
    if (isProxy) {
        // Forwarded request: keep the result for the proxy to relay.
        proxyResultSet = resultSet;
        return;
    }
    sendResultSet(resultSet);
}

/** UNLOCK TABLES is accepted but is a no-op. */
private void handleUnlockTablesStmt() {
}

/** LOCK TABLES is accepted but is a no-op. */
private void handleLockTablesStmt() {
}

/** Sends EXPLAIN output: one varchar column, one row per line of plan text. */
private void handleExplainStmt(String result) throws IOException {
    ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
            .addColumn(new Column("Explain String", ScalarType.createVarchar(20)))
            .build();
    sendMetaData(metaData);
    for (String item : result.split("\n")) {
        serializer.reset();
        serializer.writeLenEncodedString(item);
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Executes a DDL statement; failures are reported through the connection
 * state rather than rethrown.
 */
private void handleDdlStmt() {
try {
DdlExecutor.execute(context.getEnv(), (DdlStmt) parsedStmt);
context.getState().setOk();
} catch (QueryStateException e) {
// The DDL produced its own state (e.g. a warning); adopt it verbatim.
context.setState(e.getQueryState());
} catch (UserException e) {
LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
} catch (Exception e) {
// Catch-all so an unexpected failure never kills the connection thread.
LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
}
}
/** Handles ENTER: switches cluster and clears the current database. */
private void handleEnterStmt() {
final EnterStmt enterStmt = (EnterStmt) parsedStmt;
try {
context.getEnv().changeCluster(context, enterStmt.getClusterName());
context.setDatabase("");
} catch (DdlException e) {
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
return;
}
context.getState().setOk();
}
/** Handles EXPORT: registers an asynchronous export job. */
private void handleExportStmt() throws Exception {
ExportStmt exportStmt = (ExportStmt) parsedStmt;
context.getEnv().getExportMgr().addExportJob(exportStmt);
}
/**
 * Handles CREATE TABLE AS SELECT: creates the table, then runs the INSERT;
 * if the insert fails the freshly created table is dropped again.
 */
private void handleCtasStmt() {
CreateTableAsSelectStmt ctasStmt = (CreateTableAsSelectStmt) this.parsedStmt;
try {
DdlExecutor.execute(context.getEnv(), ctasStmt);
context.getState().setOk();
} catch (Exception e) {
LOG.warn("CTAS create table error, stmt={}", originStmt.originStmt, e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
}
// Only attempt the data load if the CREATE TABLE step succeeded.
if (MysqlStateType.OK.equals(context.getState().getStateType())) {
try {
parsedStmt = ctasStmt.getInsertStmt();
execute();
} catch (Exception e) {
// Roll back: drop the table we just created (best-effort).
LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql(), e);
DropTableStmt dropTableStmt = new DropTableStmt(true, ctasStmt.getCreateTableStmt().getDbTbl(), true);
try {
DdlExecutor.execute(context.getEnv(), dropTableStmt);
} catch (Exception ex) {
LOG.warn("CTAS drop table error, stmt={}", parsedStmt.toSql(), ex);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
"Unexpected exception: " + ex.getMessage());
}
}
}
}
/**
 * Returns the query statistics destined for the audit log, backfilling any
 * unset counter with zero so the log always sees every field.
 */
public Data.PQueryStatistics getQueryStatisticsForAuditLog() {
    if (statisticsForAuditLog == null) {
        statisticsForAuditLog = Data.PQueryStatistics.newBuilder();
    }
    Data.PQueryStatistics.Builder builder = statisticsForAuditLog;
    if (!builder.hasScanBytes()) {
        builder.setScanBytes(0L);
    }
    if (!builder.hasScanRows()) {
        builder.setScanRows(0L);
    }
    if (!builder.hasReturnedRows()) {
        builder.setReturnedRows(0L);
    }
    if (!builder.hasCpuMs()) {
        builder.setCpuMs(0L);
    }
    return builder.build();
}

/** Maps each expression to the primitive type of its resolved type. */
private List<PrimitiveType> exprToType(List<Expr> exprs) {
    List<PrimitiveType> types = new ArrayList<>(exprs.size());
    for (Expr expr : exprs) {
        types.add(expr.getType().getPrimitiveType());
    }
    return types;
}

/** Stores the parsed statement on both the executor and its statement context. */
private StatementBase setParsedStmt(StatementBase parsedStmt) {
    this.parsedStmt = parsedStmt;
    this.statementContext.setParsedStatement(parsedStmt);
    return parsedStmt;
}
/**
 * Runs this statement as an internal (FE-initiated) query and collects all
 * result rows in memory. Failures are logged and yield whatever rows were
 * gathered so far rather than throwing.
 */
public List<ResultRow> executeInternalQuery() {
try {
List<ResultRow> resultRows = new ArrayList<>();
analyzer = new Analyzer(context.getEnv(), context);
try {
analyze(context.getSessionVariable().toThrift());
} catch (UserException e) {
// Analysis failure: return the (empty) row list instead of raising.
LOG.warn("Internal SQL execution failed, SQL: {}", originStmt, e);
return resultRows;
}
planner.getFragments();
RowBatch batch;
coord = new Coordinator(context, analyzer, planner);
try {
// Registration is best-effort; the query still runs if it fails.
QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
} catch (UserException e) {
LOG.warn(e.getMessage(), e);
}
coord.setProfileWriter(this);
// Scheduling phase, wrapped in its own tracing span.
Span queryScheduleSpan = context.getTracer()
.spanBuilder("internal SQL schedule").setParent(Context.current()).startSpan();
try (Scope scope = queryScheduleSpan.makeCurrent()) {
coord.exec();
} catch (Exception e) {
queryScheduleSpan.recordException(e);
LOG.warn("Unexpected exception when SQL running", e);
} finally {
queryScheduleSpan.end();
}
// Fetch phase: drain batches until EOS.
Span fetchResultSpan = context.getTracer().spanBuilder("fetch internal SQL result")
.setParent(Context.current()).startSpan();
try (Scope scope = fetchResultSpan.makeCurrent()) {
while (true) {
batch = coord.getNext();
if (batch == null || batch.isEos()) {
return resultRows;
} else {
resultRows.addAll(convertResultBatchToResultRows(batch.getBatch()));
}
}
} catch (Exception e) {
LOG.warn("Unexpected exception when SQL running", e);
fetchResultSpan.recordException(e);
return resultRows;
} finally {
fetchResultSpan.end();
}
} finally {
// Always unregister, even on early returns above.
QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
}
}
/**
 * Decodes one thrift result batch into ResultRow objects using the parsed
 * statement's column labels and result-expression types.
 */
private List<ResultRow> convertResultBatchToResultRows(TResultBatch batch) {
    List<String> columns = parsedStmt.getColLabels();
    List<PrimitiveType> types = exprToType(parsedStmt.getResultExprs());
    List<ResultRow> resultRows = new ArrayList<>();
    for (ByteBuffer buffer : batch.getRows()) {
        // Each buffer holds one row of length-prefixed string values.
        InternalQueryBuffer queryBuffer = new InternalQueryBuffer(buffer.slice());
        List<String> values = Lists.newArrayList();
        for (int i = 0; i < columns.size(); i++) {
            values.add(queryBuffer.readStringWithLength());
        }
        resultRows.add(new ResultRow(columns, types, values));
    }
    return resultRows;
}
} | class StmtExecutor implements ProfileWriter {
private static final Logger LOG = LogManager.getLogger(StmtExecutor.class);
// Monotonic id assigned to each statement executed by this FE process.
private static final AtomicLong STMT_ID_GENERATOR = new AtomicLong(0);
// Max buffered rows before a txn INSERT flushes data to the BE.
private static final int MAX_DATA_TO_SEND_FOR_TXN = 100;
// CSV sentinel for SQL NULL in stream-load data.
private static final String NULL_VALUE_FOR_LOAD = "\\N";
private final Object writeProfileLock = new Object();
// Connection this statement runs on.
private ConnectContext context;
private final StatementContext statementContext;
private MysqlSerializer serializer;
// Original SQL text as received from the client.
private OriginStatement originStmt;
// Parsed (and possibly analyzed) statement tree.
private StatementBase parsedStmt;
private Analyzer analyzer;
// Runtime profile tree: overall profile, its summary child, and the planner child.
private RuntimeProfile profile;
private RuntimeProfile summaryProfile;
private RuntimeProfile plannerRuntimeProfile;
private volatile boolean isFinishedProfile = false;
// Shown in the profile summary; switched to "Insert" for INSERT statements.
private String queryType = "Query";
// Execution coordinator; volatile because cancel may come from another thread.
private volatile Coordinator coord = null;
// Set when this statement is forwarded to the master FE.
private MasterOpExecutor masterOpExecutor = null;
private RedirectStatus redirectStatus = null;
private Planner planner;
// True when this executor serves a request proxied from another FE.
private boolean isProxy;
private ShowResultSet proxyResultSet = null;
private Data.PQueryStatistics.Builder statisticsForAuditLog;
// Whether the result was served from the query cache.
private boolean isCached;
private QueryPlannerProfile plannerProfile = new QueryPlannerProfile();
private String stmtName;
// Non-null when executing a PREPARE statement.
private PrepareStmt prepareStmt;
/**
 * Creates an executor for raw SQL text, optionally on behalf of a proxying FE.
 */
public StmtExecutor(ConnectContext context, OriginStatement originStmt, boolean isProxy) {
this.context = context;
this.originStmt = originStmt;
this.serializer = context.getSerializer();
this.isProxy = isProxy;
this.statementContext = new StatementContext(context, originStmt);
this.context.setStatementContext(statementContext);
}
/** Convenience constructor for a locally issued (non-proxy) SQL string. */
public StmtExecutor(ConnectContext context, String stmt) {
this(context, new OriginStatement(stmt, 0), false);
this.stmtName = stmt;
}
/**
 * Creates an executor for an already-parsed statement. A LogicalPlanAdapter
 * (Nereids) carries its own StatementContext, which is reused here.
 */
public StmtExecutor(ConnectContext ctx, StatementBase parsedStmt) {
this.context = ctx;
this.parsedStmt = parsedStmt;
this.originStmt = parsedStmt.getOrigStmt();
this.serializer = context.getSerializer();
this.isProxy = false;
if (parsedStmt instanceof LogicalPlanAdapter) {
this.statementContext = ((LogicalPlanAdapter) parsedStmt).getStatementContext();
this.statementContext.setConnectContext(ctx);
this.statementContext.setOriginStatement(originStmt);
this.statementContext.setParsedStatement(parsedStmt);
} else {
this.statementContext = new StatementContext(ctx, originStmt);
this.statementContext.setParsedStatement(parsedStmt);
}
this.context.setStatementContext(statementContext);
}
/**
 * Encodes one VALUES row as a PDataRow of string columns for streaming insert.
 * Returns null for an empty column list. Only literal (or cast-of-literal)
 * expressions are accepted; anything else raises a UserException.
 */
public static InternalService.PDataRow getRowStringValue(List<Expr> cols) throws UserException {
    if (cols.isEmpty()) {
        return null;
    }
    InternalService.PDataRow.Builder rowBuilder = InternalService.PDataRow.newBuilder();
    for (Expr col : cols) {
        if (!col.isLiteralOrCastExpr()) {
            throw new UserException(
                    "do not support non-literal expr in transactional insert operation: " + col.toSql());
        }
        String value;
        if (col instanceof NullLiteral) {
            // NULL uses the stream-load sentinel rather than an empty string.
            value = NULL_VALUE_FOR_LOAD;
        } else if (col instanceof ArrayLiteral) {
            value = col.getStringValueForArray();
        } else {
            value = col.getStringValue();
        }
        rowBuilder.addColBuilder().setValue(value);
    }
    return rowBuilder.build();
}
/** Injects the coordinator (used by callers that build their own). */
public void setCoord(Coordinator coord) {
this.coord = coord;
}
/** Returns the analyzer created during analyze(), or null before analysis. */
public Analyzer getAnalyzer() {
return analyzer;
}
/**
 * Builds or refreshes the runtime profile tree (Query -> Summary/Execution)
 * and folds in the planner's timing. Safe to call repeatedly; the tree is
 * created once and only the summary is updated afterwards.
 */
private void initProfile(QueryPlannerProfile plannerProfile, boolean waiteBeReport) {
RuntimeProfile queryProfile;
// Without a coordinator there is no BE-side profile; use an empty stand-in.
if (coord == null) {
queryProfile = new RuntimeProfile("Execution Profile " + DebugUtil.printId(context.queryId()));
} else {
queryProfile = coord.getQueryProfile();
}
if (profile == null) {
// First call: assemble the full profile tree.
profile = new RuntimeProfile("Query");
summaryProfile = new RuntimeProfile("Summary");
profile.addChild(summaryProfile);
summaryProfile.addInfoString(ProfileManager.START_TIME, TimeUtils.longToTimeString(context.getStartTime()));
updateSummaryProfile(waiteBeReport);
for (Map.Entry<String, String> entry : getSummaryInfo().entrySet()) {
summaryProfile.addInfoString(entry.getKey(), entry.getValue());
}
summaryProfile.addInfoString(ProfileManager.TRACE_ID, context.getSessionVariable().getTraceId());
plannerRuntimeProfile = new RuntimeProfile("Execution Summary");
summaryProfile.addChild(plannerRuntimeProfile);
profile.addChild(queryProfile);
} else {
// Subsequent calls only refresh end time / total time / state.
updateSummaryProfile(waiteBeReport);
}
plannerProfile.initRuntimeProfile(plannerRuntimeProfile);
queryProfile.getCounterTotalTime().setValue(TimeUtils.getEstimatedTime(plannerProfile.getQueryBeginTime()));
endProfile(waiteBeReport);
}
/** Asks the coordinator to finalize its profile when profiling is enabled. */
private void endProfile(boolean waitProfileDone) {
if (context != null && context.getSessionVariable().enableProfile() && coord != null) {
coord.endProfile(waitProfileDone);
}
}
/**
 * Refreshes the summary section: end time ("N/A" while still running),
 * total elapsed time and the current query state.
 */
private void updateSummaryProfile(boolean waiteBeReport) {
Preconditions.checkNotNull(summaryProfile);
long currentTimestamp = System.currentTimeMillis();
long totalTimeMs = currentTimestamp - context.getStartTime();
summaryProfile.addInfoString(ProfileManager.END_TIME,
waiteBeReport ? TimeUtils.longToTimeString(currentTimestamp) : "N/A");
summaryProfile.addInfoString(ProfileManager.TOTAL_TIME, DebugUtil.getPrettyStringMs(totalTimeMs));
// An OK state before BE reporting means the query is still RUNNING.
summaryProfile.addInfoString(ProfileManager.QUERY_STATE,
!waiteBeReport && context.getState().getStateType().equals(MysqlStateType.OK) ? "RUNNING" :
context.getState().toString());
}
/** Collects the key/value pairs shown in the profile summary, in display order. */
private Map<String, String> getSummaryInfo() {
    Map<String, String> summary = Maps.newLinkedHashMap();
    summary.put(ProfileManager.JOB_ID, "N/A");
    summary.put(ProfileManager.QUERY_ID, DebugUtil.printId(context.queryId()));
    summary.put(ProfileManager.QUERY_TYPE, queryType);
    summary.put(ProfileManager.DORIS_VERSION, Version.DORIS_BUILD_VERSION);
    summary.put(ProfileManager.USER, context.getQualifiedUser());
    summary.put(ProfileManager.DEFAULT_DB, context.getDatabase());
    summary.put(ProfileManager.SQL_STATEMENT, originStmt.originStmt);
    summary.put(ProfileManager.IS_CACHED, isCached ? "Yes" : "No");
    Map<String, Integer> beToInstancesNum =
            coord == null ? Maps.newTreeMap() : coord.getBeToInstancesNum();
    // Total instances across all backends.
    int totalInstances = 0;
    for (Integer num : beToInstancesNum.values()) {
        totalInstances += num;
    }
    summary.put(ProfileManager.TOTAL_INSTANCES_NUM, String.valueOf(totalInstances));
    summary.put(ProfileManager.INSTANCES_NUM_PER_BE, beToInstancesNum.toString());
    summary.put(ProfileManager.PARALLEL_FRAGMENT_EXEC_INSTANCE,
            String.valueOf(context.sessionVariable.parallelExecInstanceNum));
    return summary;
}

/** Copies the summary key/values onto the current tracing span, if recording. */
public void addProfileToSpan() {
    Span span = Span.fromContext(Context.current());
    if (!span.isRecording()) {
        return;
    }
    getSummaryInfo().forEach(span::setAttribute);
}

/** Returns the planner built during analysis. */
public Planner planner() {
    return planner;
}
/**
 * Decides whether this statement must be forwarded to the master FE.
 * The master never forwards; a non-master forwards queries while its local
 * metadata is unreadable, otherwise it follows the statement's redirect policy.
 */
public boolean isForwardToMaster() {
    if (Env.getCurrentEnv().isMaster()) {
        return false;
    }
    // NOTE: the original also re-tested !isMaster() here, which is always
    // true after the early return above — the dead condition is removed.
    if (parsedStmt instanceof QueryStmt && !Env.getCurrentEnv().canRead()) {
        return true;
    }
    // No redirect policy recorded yet means "execute locally".
    return redirectStatus != null && redirectStatus.isForwardToMaster();
}
/** Response packet from the master FE, or null if nothing was forwarded. */
public ByteBuffer getOutputPacket() {
    return masterOpExecutor == null ? null : masterOpExecutor.getOutputPacket();
}

/** Result set stashed for a proxied request (see handleShow). */
public ShowResultSet getProxyResultSet() {
    return proxyResultSet;
}

/** Result set returned by the master FE, or null if nothing was forwarded. */
public ShowResultSet getShowResultSet() {
    return masterOpExecutor == null ? null : masterOpExecutor.getProxyResultSet();
}

/** State reported by the master FE, or UNKNOWN if nothing was forwarded. */
public String getProxyStatus() {
    return masterOpExecutor == null
            ? MysqlStateType.UNKNOWN.name()
            : masterOpExecutor.getProxyStatus();
}
/** True when the parsed statement is a query. */
public boolean isQueryStmt() {
    // instanceof is false for null, so the former explicit null check was redundant.
    return parsedStmt instanceof QueryStmt;
}
/**
 * Used for auditing in ConnectProcessor.
 * <p>
 * TODO: Three StatementBase methods are called during auditing:
 * toDigest and needAuditEncryption when parsedStmt is not a query,
 * and isValuesOrConstantSelect when parsedStmt is an instance of InsertStmt.
 * toDigest: computes the statement fingerprint used to block some queries.
 * needAuditEncryption: when it returns true, the statement is logged via
 * toSql() instead of the original string.
 * isValuesOrConstantSelect: when it returns true, the original string is
 * truncated at 1024 characters.
 *
 * @return the parsed and analyzed statement for the stale planner, or an
 *         unresolved LogicalPlan wrapped in a LogicalPlanAdapter for Nereids.
 */
public StatementBase getParsedStmt() {
return parsedStmt;
}
/**
 * Entry point for executing this statement: allocates a fresh random query id
 * and runs under a tracing span named "execute".
 */
public void execute() throws Exception {
    UUID randomId = UUID.randomUUID();
    TUniqueId queryId = new TUniqueId(randomId.getMostSignificantBits(), randomId.getLeastSignificantBits());
    // The span wraps the whole run so child spans nest beneath it.
    Span span = context.getTracer().spanBuilder("execute").setParent(Context.current()).startSpan();
    try (Scope ignored = span.makeCurrent()) {
        execute(queryId);
    } finally {
        span.end();
    }
}
public void execute(TUniqueId queryId) throws Exception {
context.setStartTime();
plannerProfile.setQueryBeginTime();
context.setStmtId(STMT_ID_GENERATOR.incrementAndGet());
context.setQueryId(queryId);
if (parsedStmt instanceof QueryStmt) {
context.getState().setIsQuery(true);
}
if (parsedStmt instanceof LogicalPlanAdapter) {
context.getState().setNereids(true);
if (parsedStmt.getExplainOptions() == null
&& !(((LogicalPlanAdapter) parsedStmt).getLogicalPlan() instanceof Command)) {
context.getState().setIsQuery(true);
}
}
try {
if (context.isTxnModel() && !(parsedStmt instanceof InsertStmt)
&& !(parsedStmt instanceof TransactionStmt)) {
throw new TException("This is in a transaction, only insert, commit, rollback is acceptable.");
}
analyzeVariablesInStmt();
if (!context.isTxnModel()) {
Span queryAnalysisSpan =
context.getTracer().spanBuilder("query analysis").setParent(Context.current()).startSpan();
try (Scope scope = queryAnalysisSpan.makeCurrent()) {
try {
analyze(context.getSessionVariable().toThrift());
} catch (NereidsException e) {
if (!context.getSessionVariable().enableFallbackToOriginalPlanner) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw e.getException();
}
LOG.warn("fall back to legacy planner, because: {}", e.getMessage(), e);
parsedStmt = null;
context.getState().setNereids(false);
analyze(context.getSessionVariable().toThrift());
}
} catch (Exception e) {
queryAnalysisSpan.recordException(e);
throw e;
} finally {
queryAnalysisSpan.end();
}
if (isForwardToMaster()) {
if (isProxy) {
throw new UserException("The statement has been forwarded to master FE("
+ Env.getCurrentEnv().getSelfNode().first + ") and failed to execute"
+ " because Master FE is not ready. You may need to check FE's status");
}
forwardToMaster();
if (masterOpExecutor != null && masterOpExecutor.getQueryId() != null) {
context.setQueryId(masterOpExecutor.getQueryId());
}
return;
} else {
LOG.debug("no need to transfer to Master. stmt: {}", context.getStmtId());
}
} else {
analyzer = new Analyzer(context.getEnv(), context);
parsedStmt.analyze(analyzer);
}
if (prepareStmt instanceof PrepareStmt) {
handlePrepareStmt();
return;
}
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof LogicalPlanAdapter) {
if (!parsedStmt.isExplain()) {
try {
Env.getCurrentEnv().getSqlBlockRuleMgr().matchSql(
originStmt.originStmt, context.getSqlHash(), context.getQualifiedUser());
} catch (AnalysisException e) {
LOG.warn(e.getMessage());
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
return;
}
List<ScanNode> scanNodeList = planner.getScanNodes();
for (ScanNode scanNode : scanNodeList) {
if (scanNode instanceof OlapScanNode) {
OlapScanNode olapScanNode = (OlapScanNode) scanNode;
Env.getCurrentEnv().getSqlBlockRuleMgr().checkLimitations(
olapScanNode.getSelectedPartitionNum().longValue(),
olapScanNode.getSelectedTabletsNum(),
olapScanNode.getCardinality(),
context.getQualifiedUser());
}
}
}
int retryTime = Config.max_query_retry_time;
for (int i = 0; i < retryTime; i++) {
try {
if (i > 0) {
UUID uuid = UUID.randomUUID();
TUniqueId newQueryId = new TUniqueId(uuid.getMostSignificantBits(),
uuid.getLeastSignificantBits());
AuditLog.getQueryAudit().log("Query {} {} times with new query id: {}",
DebugUtil.printId(queryId), i, DebugUtil.printId(newQueryId));
context.setQueryId(newQueryId);
}
handleQueryStmt();
break;
} catch (RpcException e) {
if (i == retryTime - 1) {
throw e;
}
if (!context.getMysqlChannel().isSend()) {
LOG.warn("retry {} times. stmt: {}", (i + 1), parsedStmt.getOrigStmt().originStmt);
} else {
throw e;
}
} finally {
endProfile(true);
QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
}
}
} else if (parsedStmt instanceof SetStmt) {
handleSetStmt();
} else if (parsedStmt instanceof EnterStmt) {
handleEnterStmt();
} else if (parsedStmt instanceof SwitchStmt) {
handleSwitchStmt();
} else if (parsedStmt instanceof UseStmt) {
handleUseStmt();
} else if (parsedStmt instanceof TransactionStmt) {
handleTransactionStmt();
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
handleCtasStmt();
} else if (parsedStmt instanceof InsertStmt) {
try {
handleInsertStmt();
if (!((InsertStmt) parsedStmt).getQueryStmt().isExplain()) {
queryType = "Insert";
}
} catch (Throwable t) {
LOG.warn("handle insert stmt fail: {}", t.getMessage());
throw t;
}
} else if (parsedStmt instanceof LoadStmt) {
handleLoadStmt();
} else if (parsedStmt instanceof DdlStmt) {
handleDdlStmt();
} else if (parsedStmt instanceof ShowStmt) {
handleShow();
} else if (parsedStmt instanceof KillStmt) {
handleKill();
} else if (parsedStmt instanceof ExportStmt) {
handleExportStmt();
} else if (parsedStmt instanceof UnlockTablesStmt) {
handleUnlockTablesStmt();
} else if (parsedStmt instanceof LockTablesStmt) {
handleLockTablesStmt();
} else if (parsedStmt instanceof UnsupportedStmt) {
handleUnsupportedStmt();
} else {
context.getState().setError(ErrorCode.ERR_NOT_SUPPORTED_YET, "Do not support this query.");
}
} catch (IOException e) {
LOG.warn("execute IOException. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, e.getMessage());
throw e;
} catch (UserException e) {
LOG.warn("execute Exception. {}, {}", context.getQueryIdentifier(),
e.getMessage());
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
} catch (Exception e) {
LOG.warn("execute Exception. {}", context.getQueryIdentifier(), e);
context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
e.getClass().getSimpleName() + ", msg: " + Util.getRootCauseMessage(e));
if (parsedStmt instanceof KillStmt) {
context.getState().setErrType(QueryState.ErrType.ANALYSIS_ERR);
}
} finally {
try {
SessionVariable sessionVariable = context.getSessionVariable();
VariableMgr.revertSessionValue(sessionVariable);
sessionVariable.setIsSingleSetVar(false);
sessionVariable.clearSessionOriginValue();
} catch (DdlException e) {
LOG.warn("failed to revert Session value. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
}
if (!context.isTxnModel() && parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
if (insertStmt.isTransactionBegin() && context.getState().getStateType() == MysqlStateType.ERR) {
try {
String errMsg = Strings.emptyToNull(context.getState().getErrorMessage());
Env.getCurrentGlobalTransactionMgr().abortTransaction(
insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
(errMsg == null ? "unknown reason" : errMsg));
} catch (Exception abortTxnException) {
LOG.warn("errors when abort txn. {}", context.getQueryIdentifier(), abortTxnException);
}
}
}
}
}
/**
 * Applies per-query variable hints embedded in the statement's select list.
 *
 * <p>Hints are installed as single-statement ("single set var") overrides on the
 * session, so they are reverted after this statement finishes executing.</p>
 *
 * @throws DdlException if {@link VariableMgr#setVar} rejects a hint
 */
private void analyzeVariablesInStmt() throws DdlException {
    SessionVariable sessionVariable = context.getSessionVariable();
    // instanceof already rejects null, so no separate null check is needed.
    if (parsedStmt instanceof SelectStmt) {
        SelectStmt selectStmt = (SelectStmt) parsedStmt;
        Map<String, String> optHints = selectStmt.getSelectList().getOptHints();
        if (optHints != null) {
            // Mark overrides as statement-scoped so they can be reverted later.
            sessionVariable.setIsSingleSetVar(true);
            // Iterate entries directly to avoid a second lookup per key.
            for (Map.Entry<String, String> hint : optHints.entrySet()) {
                VariableMgr.setVar(sessionVariable,
                        new SetVar(hint.getKey(), new StringLiteral(hint.getValue())));
            }
        }
    }
}
/**
 * Forwards the current statement to the master FE for execution.
 *
 * @throws Exception if the master-side execution fails
 */
private void forwardToMaster() throws Exception {
    // Only pure query statements are flagged as queries for the master executor.
    final boolean isQueryStmt = parsedStmt instanceof QueryStmt;
    LOG.debug("need to transfer to Master. stmt: {}", context.getStmtId());
    masterOpExecutor = new MasterOpExecutor(originStmt, context, redirectStatus, isQueryStmt);
    masterOpExecutor.execute();
}
@Override
public void writeProfile(boolean isLastWriteProfile) {
    // Profiling disabled for this session: nothing to record.
    if (context.getSessionVariable().enableProfile()) {
        // Serialize writers; once the final profile is flushed, later calls are no-ops.
        synchronized (writeProfileLock) {
            if (!isFinishedProfile) {
                initProfile(plannerProfile, isLastWriteProfile);
                profile.computeTimeInChildProfile();
                ProfileManager.getInstance().pushProfile(profile);
                // Only the last write marks the profile as finished.
                isFinishedProfile = isLastWriteProfile;
            }
        }
    }
}
/**
 * Parses (if needed) and analyzes the current statement, then generates its query plan.
 *
 * <p>Handles EXECUTE of prepared statements, the forward-to-master short circuit,
 * PREPARE wrapping, SHOW-to-SELECT rewriting, and a bounded retry when
 * materialized-view selection fails during planning.</p>
 *
 * @param tQueryOptions session-derived query options handed to the planner
 * @throws UserException if analysis or planning fails
 */
public void analyze(TQueryOptions tQueryOptions) throws UserException {
if (LOG.isDebugEnabled()) {
LOG.debug("begin to analyze stmt: {}, forwarded stmt id: {}",
context.getStmtId(), context.getForwardedStmtId());
}
boolean preparedStmtReanalyzed = false;
PrepareStmtContext preparedStmtCtx = null;
if (parsedStmt instanceof ExecuteStmt) {
// EXECUTE: look up the previously prepared statement context by name.
ExecuteStmt execStmt = (ExecuteStmt) parsedStmt;
preparedStmtCtx = context.getPreparedStmt(execStmt.getName());
if (preparedStmtCtx == null) {
throw new UserException("Could not execute, since `" + execStmt.getName() + "` not exist");
}
// Bind the EXECUTE arguments to the prepared statement's placeholders.
preparedStmtCtx.stmt.asignValues(execStmt.getArgs());
parsedStmt = preparedStmtCtx.stmt.getInnerStmt();
// Reuse the cached planner/analyzer from PREPARE time.
planner = preparedStmtCtx.planner;
analyzer = preparedStmtCtx.analyzer;
Preconditions.checkState(parsedStmt.isAnalyzed());
LOG.debug("already prepared stmt: {}", preparedStmtCtx.stmtString);
if (!preparedStmtCtx.stmt.needReAnalyze()) {
// Cached plan still valid: done.
return;
}
// Something invalidated the cached plan; re-analyze and refresh the cache below.
preparedStmtReanalyzed = true;
preparedStmtCtx.stmt.analyze(analyzer);
}
parse();
// Statements that redirect to the master are analyzed there, not here.
if (isForwardToMaster()) {
return;
}
analyzer = new Analyzer(context.getEnv(), context);
if (parsedStmt instanceof PrepareStmt || context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
// Binary-protocol PREPARE arrives as a plain statement and must be wrapped.
if (context.getCommand() == MysqlCommand.COM_STMT_PREPARE) {
prepareStmt = new PrepareStmt(parsedStmt,
String.valueOf(context.getEnv().getNextStmtId()), true /*binary protocol*/);
} else {
prepareStmt = (PrepareStmt) parsedStmt;
}
prepareStmt.setContext(context);
prepareStmt.analyze(analyzer);
// From here on, work with the statement being prepared.
parsedStmt = prepareStmt.getInnerStmt();
}
if (parsedStmt instanceof ShowStmt) {
// Some SHOW statements are executed as equivalent SELECTs.
SelectStmt selectStmt = ((ShowStmt) parsedStmt).toSelectStmt(analyzer);
if (selectStmt != null) {
setParsedStmt(selectStmt);
}
}
if (parsedStmt instanceof QueryStmt
|| parsedStmt instanceof InsertStmt
|| parsedStmt instanceof CreateTableAsSelectStmt
|| parsedStmt instanceof LogicalPlanAdapter) {
// Collect every table referenced so they can be read-locked during planning.
Map<Long, TableIf> tableMap = Maps.newTreeMap();
QueryStmt queryStmt;
Set<String> parentViewNameSet = Sets.newHashSet();
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof CreateTableAsSelectStmt) {
CreateTableAsSelectStmt parsedStmt = (CreateTableAsSelectStmt) this.parsedStmt;
queryStmt = parsedStmt.getQueryStmt();
queryStmt.getTables(analyzer, false, tableMap, parentViewNameSet);
} else if (parsedStmt instanceof InsertStmt) {
InsertStmt insertStmt = (InsertStmt) parsedStmt;
insertStmt.getTables(analyzer, tableMap, parentViewNameSet);
}
List<TableIf> tables = Lists.newArrayList(tableMap.values());
// At most two passes: a second pass runs only after an MV-rewrite failure.
int analyzeTimes = 2;
for (int i = 1; i <= analyzeTimes; i++) {
MetaLockUtils.readLockTables(tables);
try {
analyzeAndGenerateQueryPlan(tQueryOptions);
break;
} catch (MVSelectFailedException e) {
/*
* If there is MVSelectFailedException after the first planner,
* there will be error mv rewritten in query.
* So, the query should be reanalyzed without mv rewritten and planner again.
* Attention: Only error rewritten tuple is forbidden to mv rewrite in the second time.
*/
if (i == analyzeTimes) {
throw e;
} else {
resetAnalyzerAndStmt();
}
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
// Nereids statements are wrapped so the caller can decide on fallback.
if (parsedStmt instanceof LogicalPlanAdapter) {
throw new NereidsException(new AnalysisException("Unexpected exception: " + e.getMessage(), e));
}
throw new AnalysisException("Unexpected exception: " + e.getMessage());
} finally {
// Always release locks, including on the retry path.
MetaLockUtils.readUnlockTables(tables);
}
}
} else {
// Non-query statements: plain analysis, no table locking or planning.
try {
parsedStmt.analyze(analyzer);
} catch (UserException e) {
throw e;
} catch (Exception e) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
}
if (preparedStmtReanalyzed) {
// Refresh the cached prepared-statement context with the new plan/analyzer.
LOG.debug("update planner and analyzer after prepared statement reanalyzed");
preparedStmtCtx.planner = planner;
preparedStmtCtx.analyzer = analyzer;
Preconditions.checkNotNull(preparedStmtCtx.stmt);
preparedStmtCtx.analyzer.setPrepareStmt(preparedStmtCtx.stmt);
}
}
/**
 * Lazily parses {@code originStmt} into {@code parsedStmt} (no-op when a statement
 * object already exists, e.g. for prepared statements) and records its redirect status.
 *
 * @throws AnalysisException on syntax errors or unexpected parser failures
 * @throws DdlException if applying variable hints fails
 */
private void parse() throws AnalysisException, DdlException {
if (parsedStmt == null) {
SqlScanner input = new SqlScanner(new StringReader(originStmt.originStmt),
context.getSessionVariable().getSqlMode());
SqlParser parser = new SqlParser(input);
try {
// originStmt.idx selects one statement out of a multi-statement packet.
StatementBase parsedStmt = setParsedStmt(SqlParserUtils.getStmt(parser, originStmt.idx));
parsedStmt.setOrigStmt(originStmt);
parsedStmt.setUserInfo(context.getCurrentUserIdentity());
} catch (Error e) {
// Parser-generated code can throw Error (e.g. on pathological input); surface
// it as a user-facing syntax problem instead of killing the thread.
LOG.info("error happened when parsing stmt {}, id: {}", originStmt, context.getStmtId(), e);
throw new AnalysisException("sql parsing error, please check your sql");
} catch (AnalysisException e) {
// Prefer the parser's positioned error message when one is available.
String syntaxError = parser.getErrorMsg(originStmt.originStmt);
LOG.info("analysis exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), syntaxError, e);
if (syntaxError == null) {
throw e;
} else {
throw new AnalysisException(syntaxError, e);
}
} catch (Exception e) {
// Anything else is unexpected; wrap with whatever detail the parser has.
LOG.info("unexpected exception happened when parsing stmt {}, id: {}, error: {}",
originStmt, context.getStmtId(), parser.getErrorMsg(originStmt.originStmt), e);
throw new AnalysisException("Unexpected exception: " + e.getMessage());
}
// Apply statement-level variable hints right after a fresh parse.
analyzeVariablesInStmt();
}
redirectStatus = parsedStmt.getRedirectStatus();
}
/**
 * Analyzes {@code parsedStmt}, applies expression/subquery/policy rewrites (with a
 * single re-analysis when any rewrite changed the tree), then builds the physical
 * plan with either the Nereids or the original planner.
 *
 * @param tQueryOptions query options forwarded to the planner
 * @throws UserException if analysis, rewriting, or planning fails
 */
private void analyzeAndGenerateQueryPlan(TQueryOptions tQueryOptions) throws UserException {
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = null;
if (parsedStmt instanceof QueryStmt) {
queryStmt = (QueryStmt) parsedStmt;
}
if (parsedStmt instanceof InsertStmt) {
queryStmt = (QueryStmt) ((InsertStmt) parsedStmt).getQueryStmt();
}
// Drop a present-but-empty ORDER BY so it does not affect analysis.
if (queryStmt.getOrderByElements() != null && queryStmt.getOrderByElements().isEmpty()) {
queryStmt.removeOrderByElements();
}
}
parsedStmt.analyze(analyzer);
if (parsedStmt instanceof QueryStmt || parsedStmt instanceof InsertStmt) {
ExprRewriter rewriter = analyzer.getExprRewriter();
rewriter.reset();
if (context.getSessionVariable().isEnableFoldConstantByBe()) {
// Constant folding can be delegated to the BE when the session enables it.
parsedStmt.foldConstant(rewriter);
}
// Capture explain options now: reset()/re-analysis below would lose them.
ExplainOptions explainOptions = parsedStmt.getExplainOptions();
boolean reAnalyze = false;
parsedStmt.rewriteExprs(rewriter);
reAnalyze = rewriter.changed();
if (analyzer.containSubquery()) {
// Subquery unnesting replaces the statement wholesale.
parsedStmt = setParsedStmt(StmtRewriter.rewrite(analyzer, parsedStmt));
reAnalyze = true;
}
if (parsedStmt instanceof SelectStmt) {
if (StmtRewriter.rewriteByPolicy(parsedStmt, analyzer)) {
reAnalyze = true;
}
}
if (parsedStmt instanceof SetOperationStmt) {
// Row policies must be applied to every operand of a UNION/INTERSECT/EXCEPT.
List<SetOperationStmt.SetOperand> operands = ((SetOperationStmt) parsedStmt).getOperands();
for (SetOperationStmt.SetOperand operand : operands) {
if (StmtRewriter.rewriteByPolicy(operand.getQueryStmt(), analyzer)) {
reAnalyze = true;
}
}
}
if (parsedStmt instanceof InsertStmt) {
QueryStmt queryStmt = ((InsertStmt) parsedStmt).getQueryStmt();
if (queryStmt != null && StmtRewriter.rewriteByPolicy(queryStmt, analyzer)) {
reAnalyze = true;
}
}
if (reAnalyze) {
// Remember the client-visible result types/labels so the rewritten plan
// still returns exactly what the original statement promised.
List<Type> origResultTypes = Lists.newArrayList();
for (Expr e : parsedStmt.getResultExprs()) {
origResultTypes.add(e.getType());
}
List<String> origColLabels =
Lists.newArrayList(parsedStmt.getColLabels());
// Re-analysis needs a clean analyzer (and a re-analyzed prepare wrapper).
analyzer = new Analyzer(context.getEnv(), context);
if (prepareStmt != null) {
prepareStmt.reset();
prepareStmt.analyze(analyzer);
}
parsedStmt.reset();
parsedStmt.analyze(analyzer);
// Restore the original output contract.
parsedStmt.castResultExprs(origResultTypes);
parsedStmt.setColLabels(origColLabels);
if (LOG.isTraceEnabled()) {
LOG.trace("rewrittenStmt: " + parsedStmt.toSql());
}
if (explainOptions != null) {
parsedStmt.setIsExplain(explainOptions);
}
}
}
plannerProfile.setQueryAnalysisFinishTime();
// Nereids statements get the new planner; everything else the original one.
if (parsedStmt instanceof LogicalPlanAdapter) {
planner = new NereidsPlanner(statementContext);
} else {
planner = new OriginalPlanner(analyzer);
}
if (parsedStmt instanceof QueryStmt
|| parsedStmt instanceof InsertStmt
|| parsedStmt instanceof LogicalPlanAdapter) {
planner.plan(parsedStmt, tQueryOptions);
}
plannerProfile.setQueryPlanFinishTime();
}
/**
 * Discards analysis state so the statement can be analyzed again from scratch
 * (used between the two planning attempts after an MV-selection failure).
 */
private void resetAnalyzerAndStmt() {
    // The old analyzer carries state from the failed pass; start fresh.
    analyzer = new Analyzer(context.getEnv(), context);
    parsedStmt.reset();
    // QueryStmt and InsertStmt are disjoint classes, so else-if is equivalent.
    if (parsedStmt instanceof QueryStmt) {
        ((QueryStmt) parsedStmt).resetSelectList();
    } else if (parsedStmt instanceof InsertStmt) {
        ((InsertStmt) parsedStmt).getQueryStmt().resetSelectList();
    }
}
/**
 * Cancels the running coordinator, if any. Safe to call at any time.
 */
public void cancel() {
    // Snapshot the field: it may be replaced or cleared concurrently.
    final Coordinator current = coord;
    if (current == null) {
        return;
    }
    current.cancel();
}
/**
 * Handles KILL [CONNECTION|QUERY] &lt;id&gt;: kills another connection (or its query)
 * after a permission check, or marks the current connection as killed.
 *
 * @throws DdlException if the target connection does not exist or the user
 *         lacks the privilege to kill it
 */
private void handleKill() throws DdlException {
KillStmt killStmt = (KillStmt) parsedStmt;
int id = killStmt.getConnectionId();
ConnectContext killCtx = context.getConnectScheduler().getContext(id);
if (killCtx == null) {
// reportDdlException throws, so killCtx is non-null past this point.
ErrorReport.reportDdlException(ErrorCode.ERR_NO_SUCH_THREAD, id);
}
if (context == killCtx) {
// Killing yourself: just flag the current context.
context.setKilled();
} else {
// Non-owners need ADMIN privilege to kill someone else's connection.
if (!killCtx.getQualifiedUser().equals(ConnectContext.get().getQualifiedUser())
&& !Env.getCurrentEnv().getAuth().checkGlobalPriv(ConnectContext.get(),
PrivPredicate.ADMIN)) {
ErrorReport.reportDdlException(ErrorCode.ERR_KILL_DENIED_ERROR, id);
}
// isConnectionKill() distinguishes KILL CONNECTION from KILL QUERY.
killCtx.kill(killStmt.isConnectionKill());
}
context.getState().setOk();
}
/**
 * Executes a SET statement; reports ERR_LOCAL_VARIABLE on failure, OK otherwise.
 */
private void handleSetStmt() {
    SetStmt stmt = (SetStmt) parsedStmt;
    try {
        new SetExecutor(context, stmt).execute();
    } catch (DdlException e) {
        context.getState().setError(ErrorCode.ERR_LOCAL_VARIABLE, e.getMessage());
        return;
    }
    context.getState().setOk();
}
/**
 * Streams cached result rows to the MySQL channel.
 *
 * @param channel      the client channel to write packets to
 * @param cacheValues  cached row batches fetched from the query cache
 * @param selectStmt   statement whose column labels/types describe the result set
 * @param isSendFields whether the column-definition packets were already sent
 * @param isEos        true when these are the final rows (sets EOF state)
 * @return whether the column-definition packets have been sent after this call
 * @throws Exception on serialization or channel errors
 */
private boolean sendCachedValues(MysqlChannel channel, List<InternalService.PCacheValue> cacheValues,
SelectStmt selectStmt, boolean isSendFields, boolean isEos)
throws Exception {
RowBatch batch = null;
boolean isSend = isSendFields;
for (InternalService.PCacheValue value : cacheValues) {
// Repackage each cached value as a result batch of raw MySQL row packets.
TResultBatch resultBatch = new TResultBatch();
for (ByteString one : value.getRowsList()) {
resultBatch.addToRows(ByteBuffer.wrap(one.toByteArray()));
}
resultBatch.setPacketSeq(1);
resultBatch.setIsCompressed(false);
batch = new RowBatch();
batch.setBatch(resultBatch);
batch.setEos(true);
// Column metadata must go out exactly once, before the first row.
if (!isSend) {
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
context.updateReturnRows(batch.getBatch().getRows().size());
}
if (isEos) {
// Keep the statistics of the last batch for the audit log.
if (batch != null) {
statisticsForAuditLog = batch.getQueryStatistics() == null
? null : batch.getQueryStatistics().toBuilder();
}
// Even an empty result needs its column metadata before EOF.
if (!isSend) {
sendFields(selectStmt.getColLabels(), exprToType(selectStmt.getResultExprs()));
isSend = true;
}
context.getState().setEof();
}
return isSend;
}
/**
 * Serves a SELECT through the query cache.
 *
 * <p>On a full cache hit the cached rows are returned directly. In partition-cache
 * mode a partial (left-range) hit is sent from cache and the remainder is re-planned
 * and executed; any still-missing part falls through to normal execution via
 * {@code sendResult}.</p>
 *
 * @throws Exception on analysis, planning, or channel errors
 */
private void handleCacheStmt(CacheAnalyzer cacheAnalyzer, MysqlChannel channel, SelectStmt selectStmt)
throws Exception {
InternalService.PFetchCacheResult cacheResult = cacheAnalyzer.getCacheData();
CacheMode mode = cacheAnalyzer.getCacheMode();
SelectStmt newSelectStmt = selectStmt;
boolean isSendFields = false;
if (cacheResult != null) {
isCached = true;
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Full) {
// Everything came from cache; no execution needed.
sendCachedValues(channel, cacheResult.getValuesList(), newSelectStmt, isSendFields, true);
return;
}
if (mode == CacheMode.Partition) {
if (cacheAnalyzer.getHitRange() == Cache.HitRange.Left) {
// Send the cached prefix now; the suffix is computed below.
isSendFields = sendCachedValues(channel, cacheResult.getValuesList(),
newSelectStmt, isSendFields, false);
}
// Plan only the uncached range using the analyzer-rewritten statement.
newSelectStmt = cacheAnalyzer.getRewriteStmt();
newSelectStmt.reset();
analyzer = new Analyzer(context.getEnv(), context);
newSelectStmt.analyze(analyzer);
if (parsedStmt instanceof LogicalPlanAdapter) {
planner = new NereidsPlanner(statementContext);
} else {
planner = new OriginalPlanner(analyzer);
}
planner.plan(newSelectStmt, context.getSessionVariable().toThrift());
}
}
// Execute whatever was not answered from cache (or the whole query on a miss).
sendResult(false, isSendFields, newSelectStmt, channel, cacheAnalyzer, cacheResult);
}
/**
 * Fast path for constant SELECTs (no table refs): when every projection is a
 * literal, the single result row is produced directly on the FE.
 *
 * @param parsedSelectStmt the table-less SELECT to evaluate
 * @return true if the result was computed and sent here; false if any item is
 *         not a literal and normal execution is required
 * @throws IOException on channel errors while sending the result set
 */
private boolean handleSelectRequestInFe(SelectStmt parsedSelectStmt) throws IOException {
    List<SelectListItem> items = parsedSelectStmt.getSelectList().getItems();
    List<Column> columns = new ArrayList<>(items.size());
    ResultSetMetaData metadata = new CommonResultSet.CommonResultSetMetaData(columns);
    List<String> labels = parsedSelectStmt.getColLabels();
    List<String> row = new ArrayList<>();
    for (int idx = 0; idx < items.size(); idx++) {
        Expr expr = items.get(idx).getExpr();
        if (!(expr instanceof LiteralExpr)) {
            // Non-literal projection: this query needs real execution.
            return false;
        }
        columns.add(new Column(labels.get(idx), expr.getType()));
        // Render each literal the same way the BE would.
        if (expr instanceof NullLiteral) {
            row.add(null);
        } else if (expr instanceof FloatLiteral) {
            row.add(LiteralUtils.getStringValue((FloatLiteral) expr));
        } else if (expr instanceof DecimalLiteral) {
            row.add(((DecimalLiteral) expr).getValue().toPlainString());
        } else if (expr instanceof ArrayLiteral) {
            row.add(LiteralUtils.getStringValue((ArrayLiteral) expr));
        } else {
            row.add(expr.getStringValue());
        }
    }
    sendResultSet(new CommonResultSet(metadata, Collections.singletonList(row)));
    return true;
}
/**
 * Executes a query statement end-to-end: registers query detail for monitoring,
 * then dispatches to EXPLAIN output, the FE-only constant fast path, the query
 * cache, the LIMIT 0 shortcut, or full distributed execution.
 *
 * @throws Exception on planning, execution, or channel errors
 */
private void handleQueryStmt() throws Exception {
// Drop any buffered packets from a previous statement on this connection.
context.getMysqlChannel().reset();
Queriable queryStmt = (Queriable) parsedStmt;
QueryDetail queryDetail = new QueryDetail(context.getStartTime(),
DebugUtil.printId(context.queryId()),
context.getStartTime(), -1, -1,
QueryDetail.QueryMemState.RUNNING,
context.getDatabase(),
originStmt.originStmt);
context.setQueryDetail(queryDetail);
QueryDetailQueue.addOrUpdateQueryDetail(queryDetail);
if (queryStmt.isExplain()) {
// EXPLAIN: return the plan text instead of executing.
String explainString = planner.getExplainString(queryStmt.getExplainOptions());
handleExplainStmt(explainString);
return;
}
if (parsedStmt instanceof SelectStmt && ((SelectStmt) parsedStmt).getTableRefs().isEmpty()) {
// Table-less SELECT of literals can be answered on the FE directly.
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (handleSelectRequestInFe(parsedSelectStmt)) {
return;
}
}
MysqlChannel channel = context.getMysqlChannel();
boolean isOutfileQuery = queryStmt.hasOutFileClause();
CacheAnalyzer cacheAnalyzer = new CacheAnalyzer(context, parsedStmt, planner);
// OUTFILE results bypass the query cache.
if (cacheAnalyzer.enableCache() && !isOutfileQuery && queryStmt instanceof SelectStmt) {
handleCacheStmt(cacheAnalyzer, channel, (SelectStmt) queryStmt);
return;
}
if (parsedStmt instanceof SelectStmt) {
SelectStmt parsedSelectStmt = (SelectStmt) parsedStmt;
if (parsedSelectStmt.getLimit() == 0) {
// LIMIT 0: send only column metadata and EOF, skip execution entirely.
LOG.info("ignore handle limit 0 ,sql:{}", parsedSelectStmt.toSql());
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
context.getState().setEof();
return;
}
}
sendResult(isOutfileQuery, false, queryStmt, channel, null, null);
}
/**
 * Runs the planned query via a Coordinator and streams result rows to the client.
 *
 * @param isOutfileQuery true for SELECT ... INTO OUTFILE (different result columns)
 * @param isSendFields   whether column metadata was already sent (e.g. from cache)
 * @param queryStmt      statement providing labels and result expressions
 * @param channel        MySQL channel to write row packets to
 * @param cacheAnalyzer  non-null when results should also be copied into the cache
 * @param cacheResult    partial cache hit whose right range is appended after execution
 * @throws Exception on scheduling, execution, or channel errors
 */
private void sendResult(boolean isOutfileQuery, boolean isSendFields, Queriable queryStmt, MysqlChannel channel,
CacheAnalyzer cacheAnalyzer, InternalService.PFetchCacheResult cacheResult) throws Exception {
RowBatch batch;
coord = new Coordinator(context, analyzer, planner);
// Register so the query is visible/cancellable; unregistered by the caller's finally.
QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
coord.setProfileWriter(this);
// Trace the scheduling phase separately from result fetching.
Span queryScheduleSpan =
context.getTracer().spanBuilder("query schedule").setParent(Context.current()).startSpan();
try (Scope scope = queryScheduleSpan.makeCurrent()) {
coord.exec();
} catch (Exception e) {
queryScheduleSpan.recordException(e);
throw e;
} finally {
queryScheduleSpan.end();
}
plannerProfile.setQueryScheduleFinishTime();
writeProfile(false);
Span fetchResultSpan = context.getTracer().spanBuilder("fetch result").setParent(Context.current()).startSpan();
try (Scope scope = fetchResultSpan.makeCurrent()) {
// Pull batches until the coordinator signals end-of-stream.
while (true) {
plannerProfile.setTempStartTime();
batch = coord.getNext();
plannerProfile.freshFetchResultConsumeTime();
if (batch.getBatch() != null) {
if (cacheAnalyzer != null) {
// Mirror rows into the cache while streaming them to the client.
cacheAnalyzer.copyRowBatch(batch);
}
plannerProfile.setTempStartTime();
// Column metadata goes out once, before the first row.
if (!isSendFields) {
if (!isOutfileQuery) {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
isSendFields = true;
}
for (ByteBuffer row : batch.getBatch().getRows()) {
channel.sendOnePacket(row);
}
plannerProfile.freshWriteResultConsumeTime();
context.updateReturnRows(batch.getBatch().getRows().size());
}
if (batch.isEos()) {
break;
}
}
if (cacheAnalyzer != null) {
// A right-range cache hit is appended after the freshly computed rows.
if (cacheResult != null && cacheAnalyzer.getHitRange() == Cache.HitRange.Right) {
isSendFields =
sendCachedValues(channel, cacheResult.getValuesList(), (SelectStmt) queryStmt, isSendFields,
false);
}
cacheAnalyzer.updateCache();
}
// Empty result set: metadata still has to be sent before EOF.
if (!isSendFields) {
if (!isOutfileQuery) {
sendFields(queryStmt.getColLabels(), exprToType(queryStmt.getResultExprs()));
} else {
sendFields(OutFileClause.RESULT_COL_NAMES, OutFileClause.RESULT_COL_TYPES);
}
}
statisticsForAuditLog = batch.getQueryStatistics() == null ? null : batch.getQueryStatistics().toBuilder();
context.getState().setEof();
plannerProfile.setQueryFetchResultFinishTime();
} catch (Exception e) {
fetchResultSpan.recordException(e);
throw e;
} finally {
fetchResultSpan.end();
}
}
/**
 * Fetches the waiting-transaction status, either locally (on the master FE)
 * or by forwarding the request to the master.
 *
 * @param request the status request identifying the transaction
 * @return the transaction's waiting status
 * @throws Exception if the local lookup or the RPC to the master fails
 */
private TWaitingTxnStatusResult getWaitingTxnStatus(TWaitingTxnStatusRequest request) throws Exception {
    // The master holds authoritative txn state; followers must ask it over RPC.
    if (Env.getCurrentEnv().isMaster()) {
        return Env.getCurrentGlobalTransactionMgr().getWaitingTxnStatus(request);
    }
    return new MasterTxnExecutor(context).getWaitingTxnStatus(request);
}
/**
 * Handles BEGIN / COMMIT / ROLLBACK for the insert-transaction ("txn") model.
 *
 * <p>An empty transaction is simply discarded on COMMIT/ROLLBACK. COMMIT flushes
 * pending rows, commits, and waits until the transaction becomes VISIBLE;
 * ROLLBACK aborts it. In both cases the txn entry is cleared afterwards.</p>
 *
 * @throws Exception on RPC failures, commit/abort errors, or an unexpected statement type
 */
private void handleTransactionStmt() throws Exception {
context.getMysqlChannel().reset();
context.getState().setOk(0, 0, "");
if (context.getTxnEntry() != null && context.getTxnEntry().getRowsInTransaction() == 0
&& (parsedStmt instanceof TransactionCommitStmt || parsedStmt instanceof TransactionRollbackStmt)) {
// Nothing was inserted: committing/rolling back an empty txn just drops it.
context.setTxnEntry(null);
} else if (parsedStmt instanceof TransactionBeginStmt) {
if (context.isTxnModel()) {
// Nested BEGIN is ignored; the existing transaction stays active.
LOG.info("A transaction has already begin");
return;
}
TTxnParams txnParams = new TTxnParams();
txnParams.setNeedTxn(true).setEnablePipelineTxnLoad(Config.enable_pipeline_load)
.setThriftRpcTimeoutMs(5000).setTxnId(-1).setDb("").setTbl("");
// Strict mode tolerates no filtered rows at all.
if (context.getSessionVariable().getEnableInsertStrict()) {
txnParams.setMaxFilterRatio(0);
} else {
txnParams.setMaxFilterRatio(1.0);
}
if (context.getTxnEntry() == null) {
context.setTxnEntry(new TransactionEntry());
}
TransactionEntry txnEntry = context.getTxnEntry();
txnEntry.setTxnConf(txnParams);
// Report label/status back to the client as a JSON-ish string.
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.PREPARE.name());
sb.append("', 'txnId':'").append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} else if (parsedStmt instanceof TransactionCommitStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to commit");
return;
}
TTxnParams txnConf = context.getTxnEntry().getTxnConf();
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
// Flush any buffered rows before committing.
if (context.getTxnEntry().getDataToSend().size() > 0) {
executor.sendData();
}
executor.commitTransaction();
// Block until the transaction leaves COMMITTED and becomes VISIBLE.
TWaitingTxnStatusRequest request = new TWaitingTxnStatusRequest();
request.setDbId(txnConf.getDbId()).setTxnId(txnConf.getTxnId());
request.setLabelIsSet(false);
request.setTxnIdIsSet(true);
TWaitingTxnStatusResult statusResult = getWaitingTxnStatus(request);
TransactionStatus txnStatus = TransactionStatus.valueOf(statusResult.getTxnStatusId());
if (txnStatus == TransactionStatus.COMMITTED) {
// Committed but not yet visible: surface this to the client explicitly.
throw new AnalysisException("transaction commit successfully, BUT data will be visible later.");
} else if (txnStatus != TransactionStatus.VISIBLE) {
String errMsg = "commit failed, rollback.";
if (statusResult.getStatus().isSetErrorMsgs()
&& statusResult.getStatus().getErrorMsgs().size() > 0) {
errMsg = String.join(". ", statusResult.getStatus().getErrorMsgs());
}
throw new AnalysisException(errMsg);
}
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(txnStatus.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
} finally {
// The txn entry is always cleared, whether commit succeeded or not.
context.setTxnEntry(null);
}
} else if (parsedStmt instanceof TransactionRollbackStmt) {
if (!context.isTxnModel()) {
LOG.info("No transaction to rollback");
return;
}
try {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(context.getTxnEntry());
executor.abortTransaction();
StringBuilder sb = new StringBuilder();
sb.append("{'label':'").append(context.getTxnEntry().getLabel()).append("', 'status':'")
.append(TransactionStatus.ABORTED.name()).append("', 'txnId':'")
.append(context.getTxnEntry().getTxnConf().getTxnId()).append("'").append("}");
context.getState().setOk(0, 0, sb.toString());
} catch (Exception e) {
throw new AnalysisException(e.getMessage());
} finally {
context.setTxnEntry(null);
}
} else {
throw new TException("parsedStmt type is not TransactionStmt");
}
}
/**
 * Executes an INSERT ... VALUES inside an explicit transaction, buffering rows
 * into the transaction's stream-load channel.
 *
 * <p>Only VALUES-based inserts into the single table bound to the transaction are
 * allowed; rows are flushed in chunks of {@code MAX_DATA_TO_SEND_FOR_TXN}.</p>
 *
 * @param insertStmt the analyzed INSERT statement
 * @return the number of rows handed to the transaction
 * @throws TException if the insert targets a different table, is not VALUES-based,
 *         or a row's column count mismatches the schema
 */
public int executeForTxn(InsertStmt insertStmt)
throws UserException, TException, InterruptedException, ExecutionException, TimeoutException {
if (context.isTxnIniting()) {
// First insert of the transaction: open the txn on the target db/table.
beginTxn(insertStmt.getDb(), insertStmt.getTbl());
}
// A transaction is bound to exactly one table.
if (!context.getTxnEntry().getTxnConf().getDb().equals(insertStmt.getDb())
|| !context.getTxnEntry().getTxnConf().getTbl().equals(insertStmt.getTbl())) {
throw new TException("Only one table can be inserted in one transaction.");
}
QueryStmt queryStmt = insertStmt.getQueryStmt();
if (!(queryStmt instanceof SelectStmt)) {
throw new TException("queryStmt is not SelectStmt, insert command error");
}
TransactionEntry txnEntry = context.getTxnEntry();
SelectStmt selectStmt = (SelectStmt) queryStmt;
int effectRows = 0;
if (selectStmt.getValueList() != null) {
// Validate every row's arity up front, before sending anything.
Table tbl = txnEntry.getTable();
int schemaSize = tbl.getBaseSchema(false).size();
for (List<Expr> row : selectStmt.getValueList().getRows()) {
if (schemaSize != row.size()) {
throw new TException("Column count doesn't match value count");
}
}
for (List<Expr> row : selectStmt.getValueList().getRows()) {
++effectRows;
InternalService.PDataRow data = getRowStringValue(row);
if (data == null) {
continue;
}
List<InternalService.PDataRow> dataToSend = txnEntry.getDataToSend();
dataToSend.add(data);
// Flush in chunks to bound the buffered row count.
if (dataToSend.size() >= MAX_DATA_TO_SEND_FOR_TXN) {
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
executor.sendData();
}
}
}
txnEntry.setRowsInTransaction(txnEntry.getRowsInTransaction() + effectRows);
return effectRows;
}
/**
 * Begins a streaming-insert transaction on the given table and opens its
 * stream-load channel.
 *
 * <p>On the master FE the transaction is started locally; on a follower it is
 * started via RPC to the master with a locally generated auth-code UUID.</p>
 *
 * @param dbName  target database name
 * @param tblName target table name
 */
private void beginTxn(String dbName, String tblName) throws UserException, TException,
InterruptedException, ExecutionException, TimeoutException {
TransactionEntry txnEntry = context.getTxnEntry();
TTxnParams txnConf = txnEntry.getTxnConf();
// Transaction timeout follows the session's query timeout.
long timeoutSecond = ConnectContext.get().getSessionVariable().getQueryTimeoutS();
TransactionState.LoadJobSourceType sourceType = TransactionState.LoadJobSourceType.INSERT_STREAMING;
Database dbObj = Env.getCurrentInternalCatalog()
.getDbOrException(dbName, s -> new TException("database is invalid for dbName: " + s));
Table tblObj = dbObj.getTableOrException(tblName, s -> new TException("table is invalid: " + s));
txnConf.setDbId(dbObj.getId()).setTbl(tblName).setDb(dbName);
txnEntry.setTable(tblObj);
txnEntry.setDb(dbObj);
String label = txnEntry.getLabel();
if (Env.getCurrentEnv().isMaster()) {
// Master: begin the txn locally and read back its auth code.
long txnId = Env.getCurrentGlobalTransactionMgr().beginTransaction(
txnConf.getDbId(), Lists.newArrayList(tblObj.getId()),
label, new TransactionState.TxnCoordinator(
TransactionState.TxnSourceType.FE, FrontendOptions.getLocalHostAddress()),
sourceType, timeoutSecond);
txnConf.setTxnId(txnId);
String authCodeUuid = Env.getCurrentGlobalTransactionMgr().getTransactionState(
txnConf.getDbId(), txnConf.getTxnId()).getAuthCode();
txnConf.setAuthCodeUuid(authCodeUuid);
} else {
// Follower: generate the auth code here and ask the master to begin the txn.
String authCodeUuid = UUID.randomUUID().toString();
MasterTxnExecutor masterTxnExecutor = new MasterTxnExecutor(context);
TLoadTxnBeginRequest request = new TLoadTxnBeginRequest();
request.setDb(txnConf.getDb()).setTbl(txnConf.getTbl()).setAuthCodeUuid(authCodeUuid)
.setCluster(dbObj.getClusterName()).setLabel(label).setUser("").setUserIp("").setPasswd("");
TLoadTxnBeginResult result = masterTxnExecutor.beginTxn(request);
txnConf.setTxnId(result.getTxnId());
txnConf.setAuthCodeUuid(authCodeUuid);
}
// Open the stream-load channel that subsequent inserts will write to.
TStreamLoadPutRequest request = new TStreamLoadPutRequest();
request.setTxnId(txnConf.getTxnId()).setDb(txnConf.getDb())
.setTbl(txnConf.getTbl())
.setFileType(TFileType.FILE_STREAM).setFormatType(TFileFormatType.FORMAT_CSV_PLAIN)
.setMergeType(TMergeType.APPEND).setThriftRpcTimeoutMs(5000).setLoadId(context.queryId());
InsertStreamTxnExecutor executor = new InsertStreamTxnExecutor(txnEntry);
executor.beginTransaction(request);
}
/**
 * Executes an INSERT statement: runs the planned query through the coordinator,
 * commits (or aborts) the load transaction, and reports the outcome to the client.
 *
 * <p>Inside an explicit transaction (txn model) rows are buffered through the
 * connection's txn entry instead of going through a coordinator here.
 *
 * @throws Exception on analysis, execution, or transaction failures
 */
private void handleInsertStmt() throws Exception {
    // Drop any buffered packets so this statement's response starts clean.
    if (context.getMysqlChannel() != null) {
        context.getMysqlChannel().reset();
    }
    InsertStmt insertStmt = (InsertStmt) parsedStmt;
    if (insertStmt.getQueryStmt().hasOutFileClause()) {
        throw new DdlException("Not support OUTFILE clause in INSERT statement");
    }
    // EXPLAIN INSERT only prints the plan; nothing is executed.
    if (insertStmt.getQueryStmt().isExplain()) {
        ExplainOptions explainOptions = insertStmt.getQueryStmt().getExplainOptions();
        insertStmt.setIsExplain(explainOptions);
        String explainString = planner.getExplainString(explainOptions);
        handleExplainStmt(explainString);
        return;
    }
    long createTime = System.currentTimeMillis();
    Throwable throwable = null;
    long txnId = -1;
    String label = "";
    long loadedRows = 0;
    int filteredRows = 0;
    TransactionStatus txnStatus = TransactionStatus.ABORTED;
    String errMsg = "";
    TableType tblType = insertStmt.getTargetTable().getType();
    if (context.isTxnModel()) {
        // In an explicit transaction only INSERT ... VALUES is supported.
        if (insertStmt.getQueryStmt() instanceof SelectStmt) {
            if (((SelectStmt) insertStmt.getQueryStmt()).getTableRefs().size() > 0) {
                throw new TException("Insert into ** select is not supported in a transaction");
            }
        }
        txnStatus = TransactionStatus.PREPARE;
        loadedRows = executeForTxn(insertStmt);
        label = context.getTxnEntry().getLabel();
        txnId = context.getTxnEntry().getTxnConf().getTxnId();
    } else {
        label = insertStmt.getLabel();
        LOG.info("Do insert [{}] with query id: {}", label, DebugUtil.printId(context.queryId()));
        try {
            coord = new Coordinator(context, analyzer, planner);
            coord.setLoadZeroTolerance(context.getSessionVariable().getEnableInsertStrict());
            coord.setQueryType(TQueryType.LOAD);
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(), coord);
            coord.exec();
            boolean notTimeout = coord.join(context.getSessionVariable().getQueryTimeoutS());
            if (!coord.isDone()) {
                coord.cancel();
                if (notTimeout) {
                    // Finished joining but coordinator never completed: backend trouble.
                    errMsg = coord.getExecStatus().getErrorMsg();
                    ErrorReport.reportDdlException("There exists unhealthy backend. "
                            + errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
                } else {
                    ErrorReport.reportDdlException(ErrorCode.ERR_EXECUTE_TIMEOUT);
                }
            }
            if (!coord.getExecStatus().ok()) {
                errMsg = coord.getExecStatus().getErrorMsg();
                LOG.warn("insert failed: {}", errMsg);
                ErrorReport.reportDdlException(errMsg, ErrorCode.ERR_FAILED_WHEN_INSERT);
            }
            LOG.debug("delta files is {}", coord.getDeltaUrls());
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL) != null) {
                loadedRows = Long.parseLong(coord.getLoadCounters().get(LoadEtlTask.DPP_NORMAL_ALL));
            }
            if (coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL) != null) {
                filteredRows = Integer.parseInt(coord.getLoadCounters().get(LoadEtlTask.DPP_ABNORMAL_ALL));
            }
            // Strict mode rejects the whole load if any row was filtered.
            if (context.getSessionVariable().getEnableInsertStrict()) {
                if (filteredRows > 0) {
                    context.getState().setError(ErrorCode.ERR_FAILED_WHEN_INSERT,
                            "Insert has filtered data in strict mode, tracking_url=" + coord.getTrackingUrl());
                    return;
                }
            }
            // Non-OLAP targets have no load transaction to commit.
            if (tblType != TableType.OLAP && tblType != TableType.MATERIALIZED_VIEW) {
                context.getState().setOk(loadedRows, filteredRows, null);
                return;
            }
            if (Env.getCurrentGlobalTransactionMgr().commitAndPublishTransaction(
                    insertStmt.getDbObj(), Lists.newArrayList(insertStmt.getTargetTable()),
                    insertStmt.getTransactionId(),
                    TabletCommitInfo.fromThrift(coord.getCommitInfos()),
                    context.getSessionVariable().getInsertVisibleTimeoutMs())) {
                txnStatus = TransactionStatus.VISIBLE;
            } else {
                // Committed but not published within the visibility timeout.
                txnStatus = TransactionStatus.COMMITTED;
            }
        } catch (Throwable t) {
            LOG.warn("handle insert stmt fail: {}", label, t);
            try {
                Env.getCurrentGlobalTransactionMgr().abortTransaction(
                        insertStmt.getDbObj().getId(), insertStmt.getTransactionId(),
                        t.getMessage() == null ? "unknown reason" : t.getMessage());
            } catch (Exception abortTxnException) {
                // Abort is best effort; the txn may already be gone or never started.
                LOG.warn("errors when abort txn", abortTxnException);
            }
            if (!Config.using_old_load_usage_pattern) {
                StringBuilder sb = new StringBuilder(t.getMessage());
                // Fix: guard against coord being null when Coordinator construction itself threw.
                if (coord != null && !Strings.isNullOrEmpty(coord.getTrackingUrl())) {
                    sb.append(". url: " + coord.getTrackingUrl());
                }
                context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, sb.toString());
                return;
            }
            /*
             * If config 'using_old_load_usage_pattern' is true.
             * Doris will return a label to user, and user can use this label to check load job's status,
             * which exactly like the old insert stmt usage pattern.
             */
            throwable = t;
        } finally {
            endProfile(true);
            QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
        }
        txnId = insertStmt.getTransactionId();
        try {
            context.getEnv().getLoadManager()
                    .recordFinishedLoadJob(label, txnId, insertStmt.getDb(), insertStmt.getTargetTable().getId(),
                            EtlJobType.INSERT, createTime, throwable == null ? "" : throwable.getMessage(),
                            coord.getTrackingUrl());
        } catch (MetaNotFoundException e) {
            LOG.warn("Record info of insert load with error {}", e.getMessage(), e);
            errMsg = "Record info of insert load with error " + e.getMessage();
        }
    }
    // Build the textual result ({'label':..., 'status':..., ...}) returned to the client.
    StringBuilder sb = new StringBuilder();
    sb.append("{'label':'").append(label).append("', 'status':'").append(txnStatus.name());
    sb.append("', 'txnId':'").append(txnId).append("'");
    if (tblType == TableType.MATERIALIZED_VIEW) {
        // Fix: was "', 'rows':'", which emitted a doubled quote right after the txnId value.
        sb.append(", 'rows':'").append(loadedRows).append("'");
    }
    if (!Strings.isNullOrEmpty(errMsg)) {
        sb.append(", 'err':'").append(errMsg).append("'");
    }
    sb.append("}");
    context.getState().setOk(loadedRows, filteredRows, sb.toString());
    // Record the insert result on the connection context.
    context.setOrUpdateInsertResult(txnId, label, insertStmt.getDb(), insertStmt.getTbl(),
            txnStatus, loadedRows, filteredRows);
    context.updateReturnRows((int) loadedRows);
}
/**
 * Acknowledges a statement that has no server-side effect: clears the channel
 * buffer and replies with a plain OK packet.
 */
private void handleUnsupportedStmt() {
    context.getMysqlChannel().reset();
    context.getState().setOk();
}
/**
 * Handles SWITCH &lt;catalog&gt;: points the session at another catalog.
 * A DdlException from the catalog change is reported through the MySQL error state.
 *
 * @throws AnalysisException declared in the signature but not thrown by this body
 */
private void handleSwitchStmt() throws AnalysisException {
    SwitchStmt stmt = (SwitchStmt) parsedStmt;
    try {
        context.getEnv().changeCatalog(context, stmt.getCatalogName());
        context.getState().setOk();
    } catch (DdlException e) {
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Registers a prepared statement on the connection and, for the binary
 * protocol, sends the statement-prepare OK response back to the client.
 */
private void handlePrepareStmt() throws Exception {
    LOG.debug("add prepared statement {}, isBinaryProtocol {}",
            prepareStmt.getName(), prepareStmt.isBinaryProtocol());
    PrepareStmtContext prepareCtx =
            new PrepareStmtContext(prepareStmt, context, planner, analyzer, prepareStmt.getName());
    context.addPreparedStmt(prepareStmt.getName(), prepareCtx);
    // Text protocol clients expect only the final OK; binary clients need the prepare packet too.
    if (prepareStmt.isBinaryProtocol()) {
        sendStmtPrepareOK();
    }
    context.getState().setOk();
}
/**
 * Handles USE [catalog.]db: optionally switches catalog first, then switches
 * the current database. Failures surface via the MySQL error state.
 *
 * @throws AnalysisException if no cluster is selected for the session
 */
private void handleUseStmt() throws AnalysisException {
    UseStmt useStmt = (UseStmt) parsedStmt;
    try {
        if (Strings.isNullOrEmpty(useStmt.getClusterName())) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_CLUSTER_NO_SELECT_CLUSTER);
        }
        if (useStmt.getCatalogName() != null) {
            context.getEnv().changeCatalog(context, useStmt.getCatalogName());
        }
        context.getEnv().changeDb(context, useStmt.getDatabase());
        context.getState().setOk();
    } catch (DdlException e) {
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Streams a result-set header to the client: column count, one field packet
 * per column, then a terminating EOF packet.
 *
 * @param metaData column descriptions of the result set
 * @throws IOException if writing to the MySQL channel fails
 */
private void sendMetaData(ResultSetMetaData metaData) throws IOException {
    // Column count packet.
    serializer.reset();
    serializer.writeVInt(metaData.getColumnCount());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    // One field packet per column.
    for (Column column : metaData.getColumns()) {
        serializer.reset();
        serializer.writeField(column.getName(), column.getType().getPrimitiveType());
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    // EOF packet closes the header section.
    serializer.reset();
    MysqlEofPacket eof = new MysqlEofPacket(context.getState());
    eof.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Sends the binary-protocol statement-prepare OK response: a header carrying
 * the statement id, column count (always 0 here) and parameter count, followed
 * by one field packet per placeholder when any exist.
 *
 * @throws IOException if writing to the MySQL channel fails
 */
private void sendStmtPrepareOK() throws IOException {
    serializer.reset();
    // Status byte 0 marks an OK response.
    serializer.writeInt1(0);
    // Statement id; names are numeric — parse directly instead of boxing via Integer.valueOf.
    serializer.writeInt4(Integer.parseInt(prepareStmt.getName()));
    int numColumns = 0;
    serializer.writeInt2(numColumns);
    int numParams = prepareStmt.getColLabelsOfPlaceHolders().size();
    serializer.writeInt2(numParams);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    if (numParams > 0) {
        // Describe each placeholder so the client knows the parameter layout.
        sendFields(prepareStmt.getColLabelsOfPlaceHolders(),
                exprToType(prepareStmt.getSlotRefOfPlaceHolders()));
    }
    context.getState().setOk();
}
/**
 * Sends a field-count packet, one field packet per column, and an EOF packet.
 *
 * @param colNames column labels, parallel to {@code types}
 * @param types    primitive type of each column
 * @throws IOException if writing to the MySQL channel fails
 */
private void sendFields(List<String> colNames, List<PrimitiveType> types) throws IOException {
    serializer.reset();
    serializer.writeVInt(colNames.size());
    LOG.debug("sendFields {}", colNames.size());
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    for (int idx = 0; idx < colNames.size(); ++idx) {
        serializer.reset();
        serializer.writeField(colNames.get(idx), types.get(idx));
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    // Terminating EOF packet.
    serializer.reset();
    MysqlEofPacket eof = new MysqlEofPacket(context.getState());
    eof.writeTo(serializer);
    context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
}
/**
 * Writes a complete result set (metadata plus every row) to the client and
 * marks the query state as EOF.
 *
 * @param resultSet rows to send; null cells (or the null sentinel string) are
 *                  encoded as MySQL NULL
 * @throws IOException if writing to the MySQL channel fails
 */
public void sendResultSet(ResultSet resultSet) throws IOException {
    context.updateReturnRows(resultSet.getResultRows().size());
    sendMetaData(resultSet.getMetaData());
    for (List<String> row : resultSet.getResultRows()) {
        serializer.reset();
        for (String cell : row) {
            boolean isNull = cell == null || cell.equals(FeConstants.null_string);
            if (isNull) {
                serializer.writeNull();
            } else {
                serializer.writeLenEncodedString(cell);
            }
        }
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Executes a SHOW statement. When acting as a proxy, the result set is stashed
 * for forwarding instead of being written to the local channel.
 */
private void handleShow() throws IOException, AnalysisException, DdlException {
    ShowExecutor executor = new ShowExecutor(context, (ShowStmt) parsedStmt);
    ShowResultSet resultSet = executor.execute();
    if (resultSet == null) {
        // Some SHOW variants report only through the query state.
        return;
    }
    if (isProxy) {
        proxyResultSet = resultSet;
    } else {
        sendResultSet(resultSet);
    }
}
// UNLOCK TABLES is accepted for client compatibility but is a no-op here.
private void handleUnlockTablesStmt() {
}
// LOCK TABLES is accepted for client compatibility but is a no-op here.
private void handleLockTablesStmt() {
}
/**
 * Sends an EXPLAIN result to the client: a single "Explain String" column with
 * one row per line of the rendered plan.
 *
 * @param result plan text, split on newlines
 * @throws IOException if writing to the MySQL channel fails
 */
private void handleExplainStmt(String result) throws IOException {
    ShowResultSetMetaData metaData = ShowResultSetMetaData.builder()
            .addColumn(new Column("Explain String", ScalarType.createVarchar(20)))
            .build();
    sendMetaData(metaData);
    for (String line : result.split("\n")) {
        serializer.reset();
        serializer.writeLenEncodedString(line);
        context.getMysqlChannel().sendOnePacket(serializer.toByteBuffer());
    }
    context.getState().setEof();
}
/**
 * Dispatches a DDL statement to the DdlExecutor and translates any failure
 * into the connection's MySQL error state. Catch order matters: the most
 * specific exception types are handled first.
 */
private void handleDdlStmt() {
    try {
        DdlExecutor.execute(context.getEnv(), (DdlStmt) parsedStmt);
        context.getState().setOk();
    } catch (QueryStateException e) {
        // The executor prepared a complete query state; adopt it verbatim.
        context.setState(e.getQueryState());
    } catch (UserException e) {
        // Expected user-level failures: debug-log and report the mapped MySQL error.
        LOG.debug("DDL statement({}) process failed.", originStmt.originStmt, e);
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    } catch (Exception e) {
        // Unexpected failure: log loudly and surface a generic error.
        LOG.warn("DDL statement(" + originStmt.originStmt + ") process failed.", e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
}
/**
 * Handles ENTER &lt;cluster&gt;: moves the session into the given cluster and
 * clears the current database selection.
 */
private void handleEnterStmt() {
    final EnterStmt enterStmt = (EnterStmt) parsedStmt;
    try {
        context.getEnv().changeCluster(context, enterStmt.getClusterName());
        // Entering a cluster resets the selected database.
        context.setDatabase("");
        context.getState().setOk();
    } catch (DdlException e) {
        context.getState().setError(e.getMysqlErrorCode(), e.getMessage());
    }
}
/**
 * Handles EXPORT: registers an asynchronous export job with the export
 * manager; the statement returns once the job is queued.
 */
private void handleExportStmt() throws Exception {
    ExportStmt stmt = (ExportStmt) parsedStmt;
    context.getEnv().getExportMgr().addExportJob(stmt);
}
/**
 * Handles CREATE TABLE AS SELECT: first creates the target table, then, if
 * creation succeeded, re-enters execute() with the derived INSERT statement.
 * If the insert fails, the freshly created table is dropped again so a failed
 * CTAS leaves nothing behind.
 */
private void handleCtasStmt() {
    CreateTableAsSelectStmt ctasStmt = (CreateTableAsSelectStmt) this.parsedStmt;
    try {
        // Phase 1: create the table.
        DdlExecutor.execute(context.getEnv(), ctasStmt);
        context.getState().setOk();
    } catch (Exception e) {
        LOG.warn("CTAS create table error, stmt={}", originStmt.originStmt, e);
        context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR, "Unexpected exception: " + e.getMessage());
    }
    // Phase 2 runs only when the creation left the state OK.
    if (MysqlStateType.OK.equals(context.getState().getStateType())) {
        try {
            // Swap in the generated INSERT ... SELECT and execute it as a new statement.
            parsedStmt = ctasStmt.getInsertStmt();
            execute();
        } catch (Exception e) {
            // Roll back the creation by dropping the table we just made.
            LOG.warn("CTAS insert data error, stmt={}", ctasStmt.toSql(), e);
            DropTableStmt dropTableStmt = new DropTableStmt(true, ctasStmt.getCreateTableStmt().getDbTbl(), true);
            try {
                DdlExecutor.execute(context.getEnv(), dropTableStmt);
            } catch (Exception ex) {
                LOG.warn("CTAS drop table error, stmt={}", parsedStmt.toSql(), ex);
                context.getState().setError(ErrorCode.ERR_UNKNOWN_ERROR,
                        "Unexpected exception: " + ex.getMessage());
            }
        }
    }
}
/**
 * Returns the query statistics destined for the audit log, substituting zero
 * for any counter that was never reported so every field is always present.
 */
public Data.PQueryStatistics getQueryStatisticsForAuditLog() {
    // Lazily create the builder if no statistics were ever collected.
    if (statisticsForAuditLog == null) {
        statisticsForAuditLog = Data.PQueryStatistics.newBuilder();
    }
    if (!statisticsForAuditLog.hasScanBytes()) {
        statisticsForAuditLog.setScanBytes(0L);
    }
    if (!statisticsForAuditLog.hasScanRows()) {
        statisticsForAuditLog.setScanRows(0L);
    }
    if (!statisticsForAuditLog.hasReturnedRows()) {
        statisticsForAuditLog.setReturnedRows(0L);
    }
    if (!statisticsForAuditLog.hasCpuMs()) {
        statisticsForAuditLog.setCpuMs(0L);
    }
    return statisticsForAuditLog.build();
}
/**
 * Maps each expression to the primitive type of its resolved type.
 *
 * @param exprs analyzed expressions
 * @return primitive types in the same order as {@code exprs}
 */
private List<PrimitiveType> exprToType(List<Expr> exprs) {
    List<PrimitiveType> types = new ArrayList<>(exprs.size());
    for (Expr expr : exprs) {
        types.add(expr.getType().getPrimitiveType());
    }
    return types;
}
/**
 * Replaces the statement under execution and mirrors it into the statement
 * context, returning the same instance for call chaining.
 */
private StatementBase setParsedStmt(StatementBase stmt) {
    this.parsedStmt = stmt;
    this.statementContext.setParsedStatement(stmt);
    return stmt;
}
/**
 * Runs the already-parsed statement as an internal (non-client) query and
 * collects all result rows in memory.
 *
 * <p>Failures are logged and swallowed: on any error the rows gathered so far
 * (possibly none) are returned rather than an exception being thrown.
 *
 * @return the decoded result rows; never null
 */
public List<ResultRow> executeInternalQuery() {
    try {
        List<ResultRow> resultRows = new ArrayList<>();
        analyzer = new Analyzer(context.getEnv(), context);
        try {
            analyze(context.getSessionVariable().toThrift());
        } catch (UserException e) {
            // Analysis failed: give up early with an empty result.
            LOG.warn("Internal SQL execution failed, SQL: {}", originStmt, e);
            return resultRows;
        }
        planner.getFragments();
        RowBatch batch;
        coord = new Coordinator(context, analyzer, planner);
        try {
            QeProcessorImpl.INSTANCE.registerQuery(context.queryId(),
                    new QeProcessorImpl.QueryInfo(context, originStmt.originStmt, coord));
        } catch (UserException e) {
            // Registration failure is not treated as fatal; just log and continue.
            LOG.warn(e.getMessage(), e);
        }
        coord.setProfileWriter(this);
        // Trace the scheduling phase as its own span.
        Span queryScheduleSpan = context.getTracer()
                .spanBuilder("internal SQL schedule").setParent(Context.current()).startSpan();
        try (Scope scope = queryScheduleSpan.makeCurrent()) {
            coord.exec();
        } catch (Exception e) {
            queryScheduleSpan.recordException(e);
            LOG.warn("Unexpected exception when SQL running", e);
        } finally {
            queryScheduleSpan.end();
        }
        // Trace result fetching as a second span; loop until end-of-stream.
        Span fetchResultSpan = context.getTracer().spanBuilder("fetch internal SQL result")
                .setParent(Context.current()).startSpan();
        try (Scope scope = fetchResultSpan.makeCurrent()) {
            while (true) {
                batch = coord.getNext();
                if (batch == null || batch.isEos()) {
                    return resultRows;
                } else {
                    resultRows.addAll(convertResultBatchToResultRows(batch.getBatch()));
                }
            }
        } catch (Exception e) {
            LOG.warn("Unexpected exception when SQL running", e);
            fetchResultSpan.recordException(e);
            return resultRows;
        } finally {
            fetchResultSpan.end();
        }
    } finally {
        // Always drop the query registration, even on early returns above.
        QeProcessorImpl.INSTANCE.unregisterQuery(context.queryId());
    }
}
/**
 * Decodes a thrift result batch into ResultRow objects, using the parsed
 * statement's column labels and result-expression types as the row schema.
 *
 * @param batch raw rows encoded as length-prefixed strings
 * @return one ResultRow per thrift row
 */
private List<ResultRow> convertResultBatchToResultRows(TResultBatch batch) {
    List<String> columns = parsedStmt.getColLabels();
    List<PrimitiveType> types = new ArrayList<>();
    for (Expr expr : parsedStmt.getResultExprs()) {
        types.add(expr.getType().getPrimitiveType());
    }
    List<ResultRow> resultRows = new ArrayList<>();
    for (ByteBuffer buffer : batch.getRows()) {
        InternalQueryBuffer queryBuffer = new InternalQueryBuffer(buffer.slice());
        List<String> values = new ArrayList<>();
        // Each cell is a length-prefixed string in column order.
        for (int col = 0; col < columns.size(); col++) {
            values.add(queryBuffer.readStringWithLength());
        }
        resultRows.add(new ResultRow(columns, types, values));
    }
    return resultRows;
}
} |
I wonder if you shouldn't just rely exclusively on `agroalSupport.entries` and get rid of `support.getConfiguredNames()` altogether. `support.getConfiguredNames()` is sometimes returning non-agroal datasource names (which you're working around here), and I've noticed before (#37779) that it may not even return the name of all configured Agroal datasources: https://github.com/quarkusio/quarkus/blob/5bbae6fc32ca65188b91d0f86db987fde727332f/extensions/datasource/runtime/src/main/java/io/quarkus/datasource/runtime/DataSourceRecorder.java#L21-L29 | protected void init() {
if (!dataSources.isResolvable()) {
return;
}
DataSourceSupport support = Arc.container().instance(DataSourceSupport.class)
.get();
AgroalDataSourceSupport agroalSupport = Arc.container().instance(AgroalDataSourceSupport.class)
.get();
Set<String> names = support.getConfiguredNames();
Set<String> excludedNames = support.getInactiveOrHealthCheckExcludedNames();
for (String name : names) {
if (excludedNames.contains(name) || !agroalSupport.entries.containsKey(name)) {
continue;
}
DataSource ds = dataSources.get().getDataSource(name);
if (ds != null) {
checkedDataSources.put(name, ds);
}
}
} | Set<String> names = support.getConfiguredNames(); | protected void init() {
if (!dataSources.isResolvable()) {
return;
}
DataSourceSupport support = Arc.container().instance(DataSourceSupport.class)
.get();
AgroalDataSourceSupport agroalSupport = Arc.container().instance(AgroalDataSourceSupport.class)
.get();
Set<String> excludedNames = support.getInactiveOrHealthCheckExcludedNames();
for (String name : agroalSupport.entries.keySet()) {
if (excludedNames.contains(name)) {
continue;
}
DataSource ds = dataSources.get().getDataSource(name);
if (ds != null) {
checkedDataSources.put(name, ds);
}
}
} | class DataSourceHealthCheck implements HealthCheck {
@Inject
Instance<DataSources> dataSources;
private final Map<String, DataSource> checkedDataSources = new HashMap<>();
// NOTE(review): @PostConstruct on a value-returning interface method looks unintended — verify.
@PostConstruct
@Override
public HealthCheckResponse call() {
    // Aggregate one UP/DOWN entry per registered datasource into a single response.
    HealthCheckResponseBuilder builder = HealthCheckResponse.named("Database connections health check").up();
    for (Map.Entry<String, DataSource> dataSource : checkedDataSources.entrySet()) {
        boolean isDefault = DataSourceUtil.isDefault(dataSource.getKey());
        AgroalDataSource ads = (AgroalDataSource) dataSource.getValue();
        String dsName = dataSource.getKey();
        try {
            // NOTE(review): confirm the semantics of isHealthy(false) against the Agroal API docs.
            boolean valid = ads.isHealthy(false);
            if (!valid) {
                String data = isDefault ? "validation check failed for the default DataSource"
                        : "validation check failed for DataSource '" + dataSource.getKey() + "'";
                builder.down().withData(dsName, data);
            } else {
                builder.withData(dsName, "UP");
            }
        } catch (SQLException e) {
            // A failing check marks the response DOWN but keeps probing the remaining datasources.
            String data = isDefault ? "Unable to execute the validation check for the default DataSource: "
                    : "Unable to execute the validation check for DataSource '" + dataSource.getKey() + "': ";
            builder.down().withData(dsName, data + e.getMessage());
        }
    }
    return builder.build();
}
} | class DataSourceHealthCheck implements HealthCheck {
@Inject
Instance<DataSources> dataSources;
private final Map<String, DataSource> checkedDataSources = new HashMap<>();
@PostConstruct
@Override
public HealthCheckResponse call() {
HealthCheckResponseBuilder builder = HealthCheckResponse.named("Database connections health check").up();
for (Map.Entry<String, DataSource> dataSource : checkedDataSources.entrySet()) {
boolean isDefault = DataSourceUtil.isDefault(dataSource.getKey());
AgroalDataSource ads = (AgroalDataSource) dataSource.getValue();
String dsName = dataSource.getKey();
try {
boolean valid = ads.isHealthy(false);
if (!valid) {
String data = isDefault ? "validation check failed for the default DataSource"
: "validation check failed for DataSource '" + dataSource.getKey() + "'";
builder.down().withData(dsName, data);
} else {
builder.withData(dsName, "UP");
}
} catch (SQLException e) {
String data = isDefault ? "Unable to execute the validation check for the default DataSource: "
: "Unable to execute the validation check for DataSource '" + dataSource.getKey() + "': ";
builder.down().withData(dsName, data + e.getMessage());
}
}
return builder.build();
}
} |
@heyams I pushed this to show an option that avoids relying on the (confusing to me at least) `additionalProperties` (here's the full commit I pushed: https://github.com/Azure/azure-sdk-for-java/pull/41106/commits/75f6fe2845fb0e22d7a73afdf1d3beedc3723f2c) | private static void validateSpan(TelemetryItem telemetryItem) throws IOException {
assertThat(telemetryItem.getName()).isEqualTo("RemoteDependency");
assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
assertThat(telemetryItem.getTags()).containsEntry("ai.cloud.role", "unknown_service:java");
assertThat(telemetryItem.getTags())
.hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
assertThat(telemetryItem.getData().getBaseType()).isEqualTo("RemoteDependencyData");
RemoteDependencyData actualData = toRemoteDependencyData(telemetryItem.getData().getBaseData());
assertThat(actualData.getName()).isEqualTo("test");
assertThat(actualData.getProperties())
.containsExactly(entry("color", "red"), entry("name", "apple"));
} | RemoteDependencyData actualData = toRemoteDependencyData(telemetryItem.getData().getBaseData()); | private static void validateSpan(TelemetryItem telemetryItem) {
assertThat(telemetryItem.getName()).isEqualTo("RemoteDependency");
assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
assertThat(telemetryItem.getTags()).containsEntry("ai.cloud.role", "unknown_service:java");
assertThat(telemetryItem.getTags())
.hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
assertThat(telemetryItem.getData().getBaseType()).isEqualTo("RemoteDependencyData");
RemoteDependencyData actualData = toRemoteDependencyData(telemetryItem.getData().getBaseData());
assertThat(actualData.getName()).isEqualTo("test");
assertThat(actualData.getProperties()).containsExactly(entry("color", "red"), entry("name", "apple"));
} | class AzureMonitorExportersEndToEndTest extends MonitorExporterClientTestBase {
private static final String CONNECTION_STRING_ENV =
"InstrumentationKey=00000000-0000-0000-0000-0FEEDDADBEEF;"
+ "IngestionEndpoint=https:
+ "LiveEndpoint=https:
private static final String INSTRUMENTATION_KEY = "00000000-0000-0000-0000-000000000000";
@Test
public void testBuildTraceExporter() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(10);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
HttpPipeline httpPipeline = getHttpPipeline(customValidationPolicy);
OpenTelemetry openTelemetry =
TestUtils.createOpenTelemetrySdk(httpPipeline, getConfiguration());
for (int i = 0; i < 10; i++) {
generateSpan(openTelemetry);
}
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(10);
TelemetryItem spanTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("RemoteDependency"))
.findFirst()
.get();
validateSpan(spanTelemetryItem);
}
@Test
public void testBuildMetricExporter() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetrySdk openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateMetric(openTelemetry);
openTelemetry.close();
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(1);
validateMetric(customValidationPolicy.getActualTelemetryItems().get(0));
}
@Test
public void testBuildLogExporter() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetry openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateLog(openTelemetry);
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(1);
TelemetryItem logTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Message"))
.findFirst()
.get();
validateLog(logTelemetryItem);
}
@Test
public void testBuildTraceMetricLogExportersConsecutively() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(3);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetrySdk openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateSpan(openTelemetry);
generateMetric(openTelemetry);
generateLog(openTelemetry);
openTelemetry.close();
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(3);
TelemetryItem spanTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("RemoteDependency"))
.findFirst()
.get();
TelemetryItem metricTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Metric"))
.findFirst()
.get();
TelemetryItem logTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Message"))
.findFirst()
.get();
validateSpan(spanTelemetryItem);
validateMetric(metricTelemetryItem);
validateLog(logTelemetryItem);
}
@SuppressWarnings("try")
private static void generateSpan(OpenTelemetry openTelemetry) {
Tracer tracer = openTelemetry.getTracer("Sample");
Span span = tracer.spanBuilder("test").startSpan();
try (Scope ignored = span.makeCurrent()) {
span.setAttribute("name", "apple");
span.setAttribute("color", "red");
} finally {
span.end();
}
}
private static void generateMetric(OpenTelemetry openTelemetry) {
Meter meter = openTelemetry.getMeter("Sample");
LongCounter counter = meter.counterBuilder("test").build();
counter.add(
1L,
Attributes.of(
AttributeKey.stringKey("name"), "apple", AttributeKey.stringKey("color"), "red"));
}
private static void generateLog(OpenTelemetry openTelemetry) {
Logger logger = openTelemetry.getLogsBridge().get("Sample");
logger
.logRecordBuilder()
.setBody("test body")
.setAttribute(AttributeKey.stringKey("name"), "apple")
.setAttribute(AttributeKey.stringKey("color"), "red")
.emit();
}
@SuppressWarnings("unchecked")
private static void validateMetric(TelemetryItem telemetryItem) {
assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
assertThat(telemetryItem.getTags()).containsEntry("ai.cloud.role", "unknown_service:java");
assertThat(telemetryItem.getTags())
.hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
assertThat(telemetryItem.getData().getBaseType()).isEqualTo("MetricData");
Map<String, Object> metricData = ((List<Map<String, Object>>) telemetryItem.getData().getBaseData().getAdditionalProperties().get("metrics")).get(0);
assertThat((Double) metricData.get("value")).isEqualTo(1.0);
assertThat((String) metricData.get("name")).isEqualTo("test");
assertThat((Map<String, String>) telemetryItem.getData().getBaseData().getAdditionalProperties().get("properties"))
.containsExactly(entry("color", "red"), entry("name", "apple"));
}
@SuppressWarnings("unchecked")
private static void validateLog(TelemetryItem telemetryItem) {
assertThat(telemetryItem.getName()).isEqualTo("Message");
assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
assertThat(telemetryItem.getTags()).containsEntry("ai.cloud.role", "unknown_service:java");
assertThat(telemetryItem.getTags())
.hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
assertThat(telemetryItem.getData().getBaseType()).isEqualTo("MessageData");
Map<String, Object> messageProperties = telemetryItem.getData().getBaseData().getAdditionalProperties();
assertThat(messageProperties.get("message")).isEqualTo("test body");
assertThat((Map<String, String>) messageProperties.get("properties"))
.containsOnly(
entry("LoggerName", "Sample"),
entry("SourceType", "Logger"),
entry("color", "red"),
entry("name", "apple"));
}
private static Map<String, String> getConfiguration() {
return Collections.singletonMap("APPLICATIONINSIGHTS_CONNECTION_STRING", CONNECTION_STRING_ENV);
}
private static RemoteDependencyData toRemoteDependencyData(MonitorDomain baseData) throws IOException {
return RemoteDependencyData.fromJson(JsonProviders.createReader(baseData.toJsonString()));
}
} | class AzureMonitorExportersEndToEndTest extends MonitorExporterClientTestBase {
private static final String CONNECTION_STRING_ENV =
"InstrumentationKey=00000000-0000-0000-0000-0FEEDDADBEEF;"
+ "IngestionEndpoint=https:
+ "LiveEndpoint=https:
private static final String INSTRUMENTATION_KEY = "00000000-0000-0000-0000-000000000000";
@Test
public void testBuildTraceExporter() throws Exception {
final int numberOfSpans = 10;
CountDownLatch countDownLatch = new CountDownLatch(numberOfSpans);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
HttpPipeline httpPipeline = getHttpPipeline(customValidationPolicy);
OpenTelemetry openTelemetry =
TestUtils.createOpenTelemetrySdk(httpPipeline, getConfiguration());
for (int i = 0; i < numberOfSpans; i++) {
generateSpan(openTelemetry);
}
countDownLatch.await(numberOfSpans, SECONDS);
Thread.sleep(1000);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(numberOfSpans);
TelemetryItem spanTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("RemoteDependency"))
.findFirst()
.get();
validateSpan(spanTelemetryItem);
}
@Test
public void testBuildMetricExporter() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetrySdk openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateMetric(openTelemetry);
openTelemetry.close();
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(1);
validateMetric(customValidationPolicy.getActualTelemetryItems().get(0));
}
@Test
public void testBuildLogExporter() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(1);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetry openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateLog(openTelemetry);
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(1);
TelemetryItem logTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Message"))
.findFirst()
.get();
validateLog(logTelemetryItem);
}
@Test
public void testBuildTraceMetricLogExportersConsecutively() throws Exception {
CountDownLatch countDownLatch = new CountDownLatch(3);
CustomValidationPolicy customValidationPolicy = new CustomValidationPolicy(countDownLatch);
OpenTelemetrySdk openTelemetry =
TestUtils.createOpenTelemetrySdk(
getHttpPipeline(customValidationPolicy), getConfiguration());
generateSpan(openTelemetry);
generateMetric(openTelemetry);
generateLog(openTelemetry);
openTelemetry.close();
countDownLatch.await(10, SECONDS);
assertThat(customValidationPolicy.getUrl())
.isEqualTo(new URL("https:
assertThat(customValidationPolicy.getActualTelemetryItems().size()).isEqualTo(3);
TelemetryItem spanTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("RemoteDependency"))
.findFirst()
.get();
TelemetryItem metricTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Metric"))
.findFirst()
.get();
TelemetryItem logTelemetryItem =
customValidationPolicy.getActualTelemetryItems().stream()
.filter(item -> item.getName().equals("Message"))
.findFirst()
.get();
validateSpan(spanTelemetryItem);
validateMetric(metricTelemetryItem);
validateLog(logTelemetryItem);
}
@SuppressWarnings("try")
private static void generateSpan(OpenTelemetry openTelemetry) {
Tracer tracer = openTelemetry.getTracer("Sample");
Span span = tracer.spanBuilder("test").startSpan();
try (Scope ignored = span.makeCurrent()) {
span.setAttribute("name", "apple");
span.setAttribute("color", "red");
} finally {
span.end();
}
}
private static void generateMetric(OpenTelemetry openTelemetry) {
Meter meter = openTelemetry.getMeter("Sample");
LongCounter counter = meter.counterBuilder("test").build();
counter.add(
1L,
Attributes.of(
AttributeKey.stringKey("name"), "apple", AttributeKey.stringKey("color"), "red"));
}
/** Emits one log record with a body and two string attributes. */
private static void generateLog(OpenTelemetry openTelemetry) {
    openTelemetry
        .getLogsBridge()
        .get("Sample")
        .logRecordBuilder()
        .setBody("test body")
        .setAttribute(AttributeKey.stringKey("name"), "apple")
        .setAttribute(AttributeKey.stringKey("color"), "red")
        .emit();
}
/**
 * Asserts the envelope and payload of the exported metric telemetry item:
 * envelope name, common tags, base type, the single "test" data point with
 * value 1.0, and the custom dimensions copied from the metric attributes.
 */
private static void validateMetric(TelemetryItem telemetryItem) {
    // Consistency fix: assert the envelope name the same way validateLog does.
    assertThat(telemetryItem.getName()).isEqualTo("Metric");
    assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
    assertThat(telemetryItem.getTags()).containsEntry("ai.cloud.role", "unknown_service:java");
    assertThat(telemetryItem.getTags())
        .hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
    assertThat(telemetryItem.getData().getBaseType()).isEqualTo("MetricData");
    MetricsData metricsData = toMetricsData(telemetryItem.getData().getBaseData());
    assertThat(metricsData.getMetrics().size()).isEqualTo(1);
    assertThat(metricsData.getMetrics().get(0).getName()).isEqualTo("test");
    assertThat(metricsData.getMetrics().get(0).getValue()).isEqualTo(1.0);
    assertThat(metricsData.getProperties()).containsExactly(entry("color", "red"), entry("name", "apple"));
}
/**
 * Asserts the envelope and payload of the exported log telemetry item:
 * envelope name, common tags, base type, the log body, and the custom
 * dimensions (logger metadata plus the record attributes).
 */
private static void validateLog(TelemetryItem telemetryItem) {
    assertThat(telemetryItem.getName()).isEqualTo("Message");
    assertThat(telemetryItem.getInstrumentationKey()).isEqualTo(INSTRUMENTATION_KEY);
    Map<String, String> tags = telemetryItem.getTags();
    assertThat(tags).containsEntry("ai.cloud.role", "unknown_service:java");
    assertThat(tags)
        .hasEntrySatisfying("ai.internal.sdkVersion", v -> assertThat(v).contains("otel"));
    assertThat(telemetryItem.getData().getBaseType()).isEqualTo("MessageData");
    MessageData logData = toMessageData(telemetryItem.getData().getBaseData());
    assertThat(logData.getMessage()).isEqualTo("test body");
    assertThat(logData.getProperties())
        .containsOnly(
            entry("LoggerName", "Sample"),
            entry("SourceType", "Logger"),
            entry("color", "red"),
            entry("name", "apple"));
}
// Minimal exporter configuration: just the connection string, supplied via the
// APPLICATIONINSIGHTS_CONNECTION_STRING setting.
private static Map<String, String> getConfiguration() {
    return Collections.singletonMap("APPLICATIONINSIGHTS_CONNECTION_STRING", CONNECTION_STRING_ENV);
}
} |
Since there is only one usage, no need to define a separate variable. ```suggestion switch (nextToken.kind) { ``` | private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
switch (peek(lookahead + 1).kind) {
case IDENTIFIER_TOKEN:
SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
switch (tokenAfterIdentifier) {
case ON_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
case QUESTION_MARK_TOKEN:
return false;
default:
return false;
}
case ON_KEYWORD:
return true;
default:
return false;
}
}
/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>
 * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
 * </code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    STNode listenerKeyword = parseListenerKeyword();
    // An identifier here may be the variable name (type omitted) or a user-defined
    // type name; delegate to the shared const/listener logic to disambiguate.
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode listenerDecl =
            parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
        endContext();
        return listenerDecl;
    }
    // Otherwise a type descriptor must precede the variable name.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse the <code>listener</code> keyword, recovering if it is absent.
 *
 * @return Parsed node
 */
private STNode parseListenerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LISTENER_KEYWORD) {
        recover(nextToken, ParserRuleContext.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
    return consume();
}
/**
 * Parse constant declaration, given the qualifier.
 * <p>
 * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the declaration
 * @return Parsed node
 */
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.CONSTANT_DECL);
    return parseConstDecl(metadata, qualifier, parseConstantKeyword());
}
/**
 * Parse the components that follow the const keyword of a constant declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ANNOTATION_KEYWORD:
            // `const annotation ...` is an annotation declaration, not a constant.
            endContext();
            return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
        case IDENTIFIER_TOKEN:
            // Identifier may be the variable name (type omitted) or a user-defined type.
            STNode constantDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
            endContext();
            return constantDecl;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // Falls through to `<type-desc> <var-name> = <expr> ;` below.
                break;
            }
            recover(peek(), ParserRuleContext.CONST_DECL_TYPE);
            return parseConstDecl(metadata, qualifier, constKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse a const/listener declaration whose type descriptor may be absent,
 * starting at the first identifier after the keyword.
 */
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                           boolean isListener) {
    return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, parseStatementStartIdentifier(),
            isListener);
}
/**
 * Parse the component that follows the first identifier in a const decl. The identifier
 * can be either the type-name (a user defined type) or the var-name where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the constant decl
 * @param keyword Keyword
 * @param typeOrVarName Identifier that follows the const-keyword
 * @param isListener Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                              STNode typeOrVarName, boolean isListener) {
    // A qualified name (`module:Type`) can only be the type; the var-name must follow.
    if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        STNode type = typeOrVarName;
        STNode variableName = parseVariableName();
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
    STNode type;
    STNode variableName;
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            // Two identifiers: the first is the type, the second the var-name.
            type = typeOrVarName;
            variableName = parseVariableName();
            break;
        case EQUAL_TOKEN:
            // Single identifier followed by `=`: the identifier is the var-name; no type.
            variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
            type = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.CONST_DECL_RHS);
            return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
    }
    return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
}
/**
 * Parse the shared `= expression ;` tail of listener and constant declarations
 * and assemble the final declaration node.
 */
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
                                       STNode type, STNode variableName) {
    STNode assignOp = parseAssignOp();
    STNode initExpr = parseExpression();
    STNode semicolon = parseSemicolon();
    return isListener
            ? STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
                    assignOp, initExpr, semicolon)
            : STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
                    assignOp, initExpr, semicolon);
}
/**
 * Parse the <code>const</code> keyword, recovering if it is absent.
 *
 * @return Parsed node
 */
private STNode parseConstantKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONST_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONST_KEYWORD);
        return parseConstantKeyword();
    }
    return consume();
}
/**
 * Parse typeof expression.
 * <p>
 * <code>typeof-expr := typeof expression</code>
 *
 * @param isRhsExpr Whether this expression occurs on the RHS of a statement
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    // The operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, operand);
}
/**
 * Parse the <code>typeof</code> keyword, recovering if it is absent.
 *
 * @return Typeof-keyword node
 */
private STNode parseTypeofKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TYPEOF_KEYWORD) {
        recover(nextToken, ParserRuleContext.TYPEOF_KEYWORD);
        return parseTypeofKeyword();
    }
    return consume();
}
/**
 * Parse optional type descriptor, given the preceding type.
 * <p>
 * <code>optional-type-descriptor := type-descriptor `?`</code>
 * </p>
 *
 * @param typeDescriptorNode Preceding type descriptor
 * @return Parsed node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMark = parseQuestionMark();
    endContext();
    return createOptionalTypeDesc(typeDescriptorNode, questionMark);
}
/**
 * Attach a trailing `?` to a type descriptor, re-balancing union and
 * intersection types so the question mark binds only to the right-most member.
 */
private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {
    if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {
        // Recurse into the right-most member of the union, then re-merge.
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;
        STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);
        typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Recurse into the right-most member of the intersection, then re-merge.
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;
        STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);
        typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        // Base case: validate (e.g. reject `var`) and wrap the simple type.
        typeDescNode = validateForUsageOfVar(typeDescNode);
        typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);
    }
    return typeDescNode;
}
/**
 * Parse unary expression.
 * <p>
 * <code>unary-expr := + expression | - expression | ~ expression | ! expression</code>
 *
 * @param isRhsExpr Whether this expression occurs on the RHS of a statement
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode operator = parseUnaryOperator();
    // The operand binds with unary precedence.
    STNode operand = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createUnaryExpressionNode(operator, operand);
}
/**
 * Parse a unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Parsed node
 */
private STNode parseUnaryOperator() {
    STToken nextToken = peek();
    if (!isUnaryOperator(nextToken.kind)) {
        recover(nextToken, ParserRuleContext.UNARY_OPERATOR);
        return parseUnaryOperator();
    }
    return consume();
}
/**
 * Check whether the given token kind is a unary operator (<code>+ - ~ !</code>).
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    return kind == SyntaxKind.PLUS_TOKEN || kind == SyntaxKind.MINUS_TOKEN ||
            kind == SyntaxKind.NEGATION_TOKEN || kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
 * Parse one array dimension and attach it to the given member type.
 * <p>
 * <code>
 * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]
 * array-member-type-descriptor := type-descriptor
 * array-length := int-literal | constant-reference-expr | inferred-array-length
 * inferred-array-length := *
 * </code>
 * </p>
 *
 * @param memberTypeDesc Member type of the array
 * @return Parsed node
 */
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracket = parseOpenBracket();
    STNode arrayLength = parseArrayLength();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return createArrayTypeDesc(memberTypeDesc, openBracket, arrayLength, closeBracket);
}
/**
 * Create an array type descriptor by appending one dimension to the member type.
 * <p>
 * Validates the array length: only int literals (decimal/hex), name references
 * and the inferred length <code>*</code> are allowed; anything else is attached
 * to the open bracket as invalid-node minutiae with a diagnostic. Nested array
 * types are flattened so all dimensions live in a single node list.
 *
 * @param memberTypeDesc Member type of the array
 * @param openBracketToken Open bracket token
 * @param arrayLengthNode Array length node, or null when absent
 * @param closeBracketToken Close bracket token
 * @return Array type descriptor node
 */
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
                                   STNode closeBracketToken) {
    memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
    if (arrayLengthNode != null) {
        switch (arrayLengthNode.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
                if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // Intentional fall-through: non-integer numeric literals are invalid lengths.
            default:
                openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
                        arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                arrayLengthNode = STNodeFactory.createEmptyNode();
        }
    }
    // Fix: diamond operator instead of the raw-typed `new ArrayList()`.
    List<STNode> arrayDimensions = new ArrayList<>();
    if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {
        // Flatten: hoist the inner array's dimensions into this node's list.
        STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;
        STNode innerArrayDimensions = innerArrayType.dimensions;
        int dimensionCount = innerArrayDimensions.bucketCount();
        for (int i = 0; i < dimensionCount; i++) {
            arrayDimensions.add(innerArrayDimensions.childInBucket(i));
        }
        memberTypeDesc = innerArrayType.memberTypeDesc;
    }
    STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode,
            closeBracketToken);
    arrayDimensions.add(arrayDimension);
    STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions);
    return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);
}
/**
 * Parse array length.
 * <p>
 * <code>
 * array-length :=
 * int-literal
 * | constant-reference-expr
 * | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 * </p>
 *
 * @return Parsed array length, or an empty node when the brackets are empty
 */
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // `[]`: no length given.
            return STNodeFactory.createEmptyNode();
        case IDENTIFIER_TOKEN:
            // Constant reference, possibly module-qualified.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            recover(token, ParserRuleContext.ARRAY_LENGTH);
            return parseArrayLength();
    }
}
/**
 * Parse zero-or-more annotation attachments.
 * <p>
 * <i>Note: The spec defines annotations-list as one-or-more annotations with an
 * optional usage. For the consistency of the tree we model it here as
 * zero-or-more annotations with a non-optional usage.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Parsed node list (possibly empty)
 */
private STNode parseOptionalAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotList = new ArrayList<>();
    while (peek().kind == SyntaxKind.AT_TOKEN) {
        annotList.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotList);
}
/**
 * Parse an annotation list containing at least one annotation.
 *
 * @return Annotation list
 */
private STNode parseAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotList = new ArrayList<>();
    // The first annotation is mandatory; further ones are parsed while `@` follows.
    do {
        annotList.add(parseAnnotation());
    } while (peek().kind == SyntaxKind.AT_TOKEN);
    endContext();
    return STNodeFactory.createNodeList(annotList);
}
/**
 * Parse a single annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Parsed node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    STNode annotReference;
    if (!isPredeclaredIdentifier(peek().kind)) {
        // No usable tag reference after the `@`: insert a missing identifier.
        annotReference = STNodeFactory.createSimpleNameReferenceNode(
                STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
    } else {
        annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
    }
    // The mapping-constructor annot-value is optional.
    STNode annotValue = peek().kind == SyntaxKind.OPEN_BRACE_TOKEN
            ? parseMappingConstructorExpr()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
 * Parse the <code>@</code> token, recovering if it is absent.
 *
 * @return Parsed node
 */
private STNode parseAtToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.AT_TOKEN) {
        recover(nextToken, ParserRuleContext.AT);
        return parseAtToken();
    }
    return consume();
}
/**
 * Parse metadata: an optional documentation string followed by an annotation list.
 * <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @return Parsed node, or an empty node when no metadata is present
 */
private STNode parseMetaData() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.DOCUMENTATION_STRING) {
        STNode docString = parseMarkdownDocumentation();
        return createMetadata(docString, parseOptionalAnnotations());
    }
    if (nextTokenKind == SyntaxKind.AT_TOKEN) {
        return createMetadata(STNodeFactory.createEmptyNode(), parseOptionalAnnotations());
    }
    // Neither a doc string nor annotations: no metadata node at all.
    return STNodeFactory.createEmptyNode();
}
/**
 * Create a metadata node, or an empty node when there is neither a doc string
 * nor an annotation list.
 *
 * @return A metadata node
 */
private STNode createMetadata(STNode docString, STNode annotations) {
    if (annotations != null || docString != null) {
        return STNodeFactory.createMetadataNode(docString, annotations);
    }
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse type test expression.
 * <code>type-test-expr := expression (is | !is) type-descriptor</code>
 *
 * @param lhsExpr Preceding expression of the is/!is expression
 * @return Type test expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode keyword = parseIsOrNotIsKeyword();
    STNode typeDesc = parseTypeDescriptorInExpression(isInConditionalExpr);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, keyword, typeDesc);
}
/**
 * Parse the <code>is</code> keyword or the <code>!is</code> keyword,
 * recovering if neither is present.
 *
 * @return is-keyword or not-is-keyword node
 */
private STNode parseIsOrNotIsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IS_KEYWORD && nextToken.kind != SyntaxKind.NOT_IS_KEYWORD) {
        recover(nextToken, ParserRuleContext.IS_KEYWORD);
        return parseIsOrNotIsKeyword();
    }
    return consume();
}
/**
 * Parse a local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations
 * @return Local type definition statement node
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDesc,
            semicolonToken);
}
/**
 * Parse a statement that consists solely of an action or expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseExpressionStatement(STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    return getExpressionAsStatement(parseActionOrExpressionInLhs(annots));
}
/**
 * Parse a statement that starts with an expression.
 *
 * @return Statement node
 */
private STNode parseStatementStartWithExpr(STNode annots) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    return parseStatementStartWithExprRhs(parseActionOrExpressionInLhs(annots));
}
/**
 * Parse the component followed by the expression, at the beginning of a statement.
 *
 * @param expression Action or expression in LHS
 * @return Statement node
 */
private STNode parseStatementStartWithExprRhs(STNode expression) {
    SyntaxKind nextTokenKind = peek().kind;
    // Actions stand alone; a semicolon means the expression itself is the statement.
    if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
        return getExpressionAsStatement(expression);
    }
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case IDENTIFIER_TOKEN:
            // Deliberate fall-through: an identifier here is handled via recovery below.
        default:
            if (isCompoundAssignment(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            // Pick the recovery context based on whether the expression could
            // legally be an expression statement on its own.
            ParserRuleContext context;
            if (isPossibleExpressionStatement(expression)) {
                context = ParserRuleContext.EXPR_STMT_RHS;
            } else {
                context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
            }
            recover(peek(), context);
            return parseStatementStartWithExprRhs(expression);
    }
}
/**
 * Check whether the given expression/action could legally stand alone as an
 * expression statement (call expressions, check expressions, and actions).
 */
private boolean isPossibleExpressionStatement(STNode expression) {
    SyntaxKind kind = expression.kind;
    return kind == SyntaxKind.METHOD_CALL || kind == SyntaxKind.FUNCTION_CALL ||
            kind == SyntaxKind.CHECK_EXPRESSION || kind == SyntaxKind.REMOTE_METHOD_CALL_ACTION ||
            kind == SyntaxKind.CHECK_ACTION || kind == SyntaxKind.BRACED_ACTION ||
            kind == SyntaxKind.START_ACTION || kind == SyntaxKind.TRAP_ACTION ||
            kind == SyntaxKind.FLUSH_ACTION || kind == SyntaxKind.ASYNC_SEND_ACTION ||
            kind == SyntaxKind.SYNC_SEND_ACTION || kind == SyntaxKind.RECEIVE_ACTION ||
            kind == SyntaxKind.WAIT_ACTION || kind == SyntaxKind.QUERY_ACTION ||
            kind == SyntaxKind.COMMIT_ACTION;
}
// Wrap a parsed expression/action as a statement. Calls and checks become
// CALL_STATEMENTs, actions become ACTION_STATEMENTs; anything else is an
// invalid expression statement and gets an error diagnostic attached.
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
            return parseCallStatement(expression);
        case CHECK_EXPRESSION:
            return parseCheckStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
        case CLIENT_RESOURCE_ACCESS_ACTION:
            return parseActionStatement(expression);
        default:
            // Not a valid stand-alone statement: still consume the semicolon and
            // build a node, but mark it invalid with a diagnostic.
            STNode semicolon = parseSemicolon();
            endContext();
            expression = getExpression(expression);
            STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                    expression, semicolon);
            exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
            return exprStmt;
    }
}
// Convert an indexed-expression that turned out to be a type (e.g. `T[5]`) into
// an array type descriptor, validating the length expression the same way
// createArrayTypeDesc does.
private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
    STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
    STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
    if (lengthExprs.isEmpty()) {
        // `T[]`: absent/inferred length.
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                indexedExpr.closeBracket);
    }
    STNode lengthExpr = lengthExprs.get(0);
    switch (lengthExpr.kind) {
        case SIMPLE_NAME_REFERENCE:
            STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
            if (nameRef.name.isMissing()) {
                // Recovery artifact: treat a missing name as no length at all.
                return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                        indexedExpr.closeBracket);
            }
            break;
        case ASTERISK_LITERAL:
        case QUALIFIED_NAME_REFERENCE:
            break;
        case NUMERIC_LITERAL:
            SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
            if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                break;
            }
            // Intentional fall-through: non-integer literals are invalid lengths.
        default:
            STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                    indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
            indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
            lengthExpr = STNodeFactory.createEmptyNode();
    }
    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
}
/**
 * <p>
 * Parse call statement, given the call expression.
 * </p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    // Thin alias: shares the `;` + wrap tail with check statements.
    return parseCallStatementOrCheckStatement(expression);
}
/**
 * <p>
 * Parse checking statement.
 * </p>
 * <code>
 * checking-stmt := checking-expr ;
 * <br/>
 * checking-expr := checking-keyword expr ;
 * </code>
 *
 * @param expression Checking expression associated with the checking statement
 * @return Checking statement node
 */
private STNode parseCheckStatement(STNode expression) {
    // Thin alias: shares the `;` + wrap tail with call statements.
    return parseCallStatementOrCheckStatement(expression);
}
// Shared tail for call-stmt and checking-stmt: consume the semicolon and wrap
// the expression into a CALL_STATEMENT node.
private STNode parseCallStatementOrCheckStatement(STNode expression) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolonToken);
}
// Consume the terminating semicolon and wrap the action into an
// ACTION_STATEMENT node.
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
 * Parse client resource access action, given the starting expression.
 * <br/><br/>
 * <code>
 * client-resource-access-action := expression "->" "/" [resource-access-path] ["." method-name] ["(" arg-list ")"]
 * </code>
 *
 * @param expression Expression
 * @param rightArrow Right arrow token
 * @param slashToken Slash token
 * @return Parsed node
 */
private STNode parseClientResourceAccessAction(STNode expression, STNode rightArrow, STNode slashToken,
        boolean isRhsExpr, boolean isInMatchGuard) {
    startContext(ParserRuleContext.CLIENT_RESOURCE_ACCESS_ACTION);
    STNode resourceAccessPath = parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
    STNode resourceAccessMethodDot = parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
    STNode resourceAccessMethodName = STNodeFactory.createEmptyNode();
    if (resourceAccessMethodDot != null) {
        // A dot was consumed, so a method name must follow it.
        resourceAccessMethodName = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
    }
    STNode resourceMethodCallArgList = parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
    endContext();
    return STNodeFactory.createClientResourceAccessActionNode(expression, rightArrow, slashToken,
            resourceAccessPath, resourceAccessMethodDot, resourceAccessMethodName, resourceMethodCallArgList);
}
/**
 * Parse the optional resource-access-path of a client resource access action.
 *
 * @return Node list of path segments (possibly empty)
 */
private STNode parseOptionalResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
    STNode resourceAccessPath = STNodeFactory.createEmptyNodeList();
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACKET_TOKEN:
            resourceAccessPath = parseResourceAccessPath(isRhsExpr, isInMatchGuard);
            break;
        case DOT_TOKEN:
        case OPEN_PAREN_TOKEN:
            // Path absent: continues straight to the method name or arg list.
            break;
        default:
            if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                break;
            }
            recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_PATH);
            return parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
    }
    return resourceAccessPath;
}
/**
 * Parse the optional dot that precedes the resource access method name.
 *
 * @return Dot token, or an empty node when absent
 */
private STNode parseOptionalResourceAccessMethodDot(boolean isRhsExpr, boolean isInMatchGuard) {
    STNode dotToken = STNodeFactory.createEmptyNode();
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DOT_TOKEN:
            dotToken = consume();
            break;
        case OPEN_PAREN_TOKEN:
            // No method name: continues straight to the arg list.
            break;
        default:
            if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                break;
            }
            recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_METHOD);
            return parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
    }
    return dotToken;
}
/**
 * Parse the optional trailing argument list of a client resource access action.
 *
 * @return Parenthesized arg list, or an empty node when absent
 */
private STNode parseOptionalResourceAccessActionArgList(boolean isRhsExpr, boolean isInMatchGuard) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        return parseParenthesizedArgList();
    }
    if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
        // No arg list present.
        return STNodeFactory.createEmptyNode();
    }
    recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_ACTION_ARG_LIST);
    return parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
}
/**
 * Parse resource access path.
 * <br/><br/>
 * <code>
 * resource-access-path :=
 * resource-access-segments ["/" resource-access-rest-segment]
 * | resource-access-rest-segment
 * <br/><br/>
 * resource-access-segments := resource-access-segment ("/" resource-access-segment)*
 * <br/><br/>
 * resource-access-segment := resource-path-segment-name | computed-resource-access-segment
 * <br/><br/>
 * resource-path-segment-name := identifier
 * </code>
 * @return Node list of segments and separating slashes
 */
private STNode parseResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
    List<STNode> pathSegmentList = new ArrayList<>();
    STNode pathSegment = parseResourceAccessSegment();
    pathSegmentList.add(pathSegment);
    STNode leadingSlash;
    STNode previousPathSegmentNode = pathSegment;
    while (!isEndOfResourceAccessPathSegments(peek(), isRhsExpr, isInMatchGuard)) {
        leadingSlash = parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard);
        if (leadingSlash == null) {
            // Recovery decided the path ends here.
            break;
        }
        pathSegment = parseResourceAccessSegment();
        if (previousPathSegmentNode.kind == SyntaxKind.RESOURCE_ACCESS_REST_SEGMENT) {
            // A rest segment must be last: attach further nodes as invalid minutiae.
            updateLastNodeInListWithInvalidNode(pathSegmentList, leadingSlash, null);
            updateLastNodeInListWithInvalidNode(pathSegmentList, pathSegment,
                    DiagnosticErrorCode.RESOURCE_ACCESS_SEGMENT_IS_NOT_ALLOWED_AFTER_REST_SEGMENT);
        } else {
            pathSegmentList.add(leadingSlash);
            pathSegmentList.add(pathSegment);
            previousPathSegmentNode = pathSegment;
        }
    }
    return STNodeFactory.createNodeList(pathSegmentList);
}
/**
 * Parse a single resource access segment: an identifier, or a bracketed
 * computed/rest segment.
 *
 * @return Parsed node
 */
private STNode parseResourceAccessSegment() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseComputedOrResourceAccessRestSegment(consume());
    }
    recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_PATH_SEGMENT);
    return parseResourceAccessSegment();
}
/**
 * Parse computed resource segment or resource access rest segment.
 * <code>
 * <br/>
 * computed-resource-access-segment := "[" expression "]"
 * <br/>
 * resource-access-rest-segment := "[" "..." expression "]"
 * </code>
 * @param openBracket Open bracket token
 * @return Parsed node
 */
private STNode parseComputedOrResourceAccessRestSegment(STNode openBracket) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            // `[... expr]`: rest segment.
            STNode ellipsisToken = consume();
            STNode expression = parseExpression();
            STNode closeBracketToken = parseCloseBracket();
            return STNodeFactory.createResourceAccessRestSegmentNode(openBracket, ellipsisToken,
                    expression, closeBracketToken);
        default:
            if (isValidExprStart(nextToken.kind)) {
                // `[expr]`: computed segment. Note: reuses the locals declared in
                // the case above (legal — switch cases share one scope).
                expression = parseExpression();
                closeBracketToken = parseCloseBracket();
                return STNodeFactory.createComputedResourceAccessSegmentNode(openBracket, expression,
                        closeBracketToken);
            }
            recover(nextToken, ParserRuleContext.COMPUTED_SEGMENT_OR_REST_SEGMENT);
            return parseComputedOrResourceAccessRestSegment(openBracket);
    }
}
/**
 * Parse the separator that follows a resource access segment.
 *
 * @return The slash token, or null when the path ends here
 */
private STNode parseResourceAccessSegmentRhs(boolean isRhsExpr, boolean isInMatchGuard) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.SLASH_TOKEN) {
        return consume();
    }
    if (isEndOfResourceAccessPathSegments(nextToken, isRhsExpr, isInMatchGuard)) {
        return null;
    }
    recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_SEGMENT_RHS);
    return parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard);
}
// A dot (method-call part) or open paren (arg list) terminates the path
// segments, as does anything that ends the enclosing action/expression.
private boolean isEndOfResourceAccessPathSegments(STToken nextToken,
                                                  boolean isRhsExpr, boolean isInMatchGuard) {
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.DOT_TOKEN || kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        return true;
    }
    return isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard);
}
// Parse the `->` token and dispatch to remote-method-call /
// client-resource-access / async-send parsing.
private STNode parseRemoteMethodCallOrClientResourceAccessOrAsyncSendAction(STNode expression, boolean isRhsExpr,
                                                                            boolean isInMatchGuard) {
    return parseClientResourceAccessOrAsyncSendActionRhs(expression, parseRightArrow(), isRhsExpr, isInMatchGuard);
}
/**
 * Parses what follows <code>expression -></code>: disambiguates between an async
 * send action, a remote method call, and a client resource access action by
 * looking at the next token(s).
 *
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param isRhsExpr Whether this is a RHS expression
 * @param isInMatchGuard Whether this is inside a match guard
 * @return Parsed node
 */
private STNode parseClientResourceAccessOrAsyncSendActionRhs(STNode expression, STNode rightArrow,
                                                             boolean isRhsExpr, boolean isInMatchGuard) {
    STNode name;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FUNCTION_KEYWORD:
            // "-> function" is the default-worker send: always an async send action.
            STNode functionKeyword = consume();
            name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            return parseAsyncSendAction(expression, rightArrow, name);
        case CONTINUE_KEYWORD:
        case COMMIT_KEYWORD:
            // These keywords can act as a name reference here.
            name = getKeywordAsSimpleNameRef();
            break;
        case SLASH_TOKEN:
            // "-> /" starts a client resource access action path.
            STNode slashToken = consume();
            return parseClientResourceAccessAction(expression, rightArrow, slashToken, isRhsExpr, isInMatchGuard);
        default:
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // NOTE(review): variable name says next-next-next but holds the
                // next-next token's kind; behavior is as intended (one lookahead past the identifier).
                SyntaxKind nextNextNextTokenKind = getNextNextToken().kind;
                // Identifier followed by "(" (method call) or an expression terminator
                // (async send) is acceptable as the name.
                if (nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        isEndOfActionOrExpression(getNextNextToken(), isRhsExpr, isInMatchGuard) ||
                        nextToken.isMissing()) {
                    name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                    break;
                }
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.REMOTE_OR_RESOURCE_CALL_OR_ASYNC_SEND_RHS);
            if (solution.action == Action.KEEP) {
                // Recovery decided the current token is usable as the name.
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            }
            return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard);
    }
    return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
/**
 * Parses the end of a remote call or async send: an open-paren makes it a
 * remote method call; a statement/clause terminator makes it an async send.
 *
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param name Method/peer-worker name
 * @return Parsed node
 */
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // "name(" -> remote method call with an arg list.
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        // Any token that can legally follow an action means this was an async send.
        case SEMICOLON_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case COMMA_TOKEN:
        case FROM_KEYWORD:
        case JOIN_KEYWORD:
        case ON_KEYWORD:
        case LET_KEYWORD:
        case WHERE_KEYWORD:
        case ORDER_KEYWORD:
        case LIMIT_KEYWORD:
        case SELECT_KEYWORD:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            if (isGroupOrCollectKeyword(nextToken)) {
                return parseAsyncSendAction(expression, rightArrow, name);
            }
            recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);
            return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
}
/**
 * Creates an async send action node: <code>expression -> peer-worker</code>.
 *
 * @param expression LHS expression
 * @param rightArrow Right-arrow token
 * @param peerWorker Peer worker name
 * @return Async send action node
 */
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}
/**
 * Parse remote method call action.
 * <p>
 * <code>
 * remote-method-call-action := expression -> method-name ( arg-list )
 * <br/>
 * async-send-action := expression -> peer-worker ;
 * </code>
 *
 * @param expression LHS expression
 * @param rightArrow right arrow token
 * @param name remote method name
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    STNode openParenToken = parseArgListOpenParenthesis();
    STNode arguments = parseArgsList();
    STNode closeParenToken = parseArgListCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,
            closeParenToken);
}
/**
 * Parse right arrow (<code>-></code>) token.
 *
 * @return Parsed node
 */
private STNode parseRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_ARROW_TOKEN) {
        // Recover and retry until a right-arrow token is available.
        recover(nextToken, ParserRuleContext.RIGHT_ARROW);
        return parseRightArrow();
    }
    return consume();
}
/**
 * Parse map type descriptor.
 * <code>map-type-descriptor := `map` type-parameter</code>
 *
 * @param mapKeyword Already-consumed <code>map</code> keyword
 * @return Parsed node
 */
private STNode parseMapTypeDescriptor(STNode mapKeyword) {
    return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, parseTypeParameter());
}
/**
 * Parse parameterized type descriptor.
 * parameterized-type-descriptor := `typedesc` [type-parameter]
 * <br/> | `future` [type-parameter]
 * <br/> | `xml` [type-parameter]
 * <br/> | `error` [type-parameter]
 *
 * @param keywordToken Already-consumed type keyword
 * @return Parsed node
 */
private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
    // The type parameter is optional; it is present only when a '<' follows the keyword.
    STNode typeParamNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    SyntaxKind descKind = getParameterizedTypeDescKind(keywordToken);
    return STNodeFactory.createParameterizedTypeDescriptorNode(descKind, keywordToken, typeParamNode);
}
/**
 * Maps a parameterized-type keyword to its type-descriptor syntax kind.
 * Any unexpected keyword is treated as an error type descriptor.
 *
 * @param keywordToken Type keyword token
 * @return Corresponding type-descriptor kind
 */
private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
    SyntaxKind keywordKind = keywordToken.kind;
    if (keywordKind == SyntaxKind.TYPEDESC_KEYWORD) {
        return SyntaxKind.TYPEDESC_TYPE_DESC;
    } else if (keywordKind == SyntaxKind.FUTURE_KEYWORD) {
        return SyntaxKind.FUTURE_TYPE_DESC;
    } else if (keywordKind == SyntaxKind.XML_KEYWORD) {
        return SyntaxKind.XML_TYPE_DESC;
    } else {
        // ERROR_KEYWORD and everything else fall back to the error type desc.
        return SyntaxKind.ERROR_TYPE_DESC;
    }
}
/**
 * Parse <code> > </code> token.
 * <p>
 * Note: the previous javadoc incorrectly documented this as parsing the
 * <code>&lt;</code> token; the method consumes a {@code GT_TOKEN}.
 *
 * @return Parsed node
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.GT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.GT);
        return parseGTToken();
    }
}
/**
 * Parse <code> < </code> token.
 * <p>
 * Note: the previous javadoc incorrectly documented this as parsing the
 * <code>&gt;</code> token; the method consumes an {@code LT_TOKEN}.
 *
 * @return Parsed node
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.LT_TOKEN) {
        return consume();
    } else {
        recover(nextToken, ParserRuleContext.LT);
        return parseLTToken();
    }
}
/**
 * Parse nil literal. Here nil literal is only referred to <code>( )</code>.
 *
 * @return Nil literal node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse annotation declaration, given the qualifier.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotKeyword = parseAnnotationKeyword();
    STNode annotationDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotKeyword);
    endContext();
    return annotationDecl;
}
/**
 * Parse annotation keyword.
 *
 * @return Parsed node
 */
private STNode parseAnnotationKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.ANNOTATION_KEYWORD) {
        recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
        return parseAnnotationKeyword();
    }
    return consume();
}
/**
 * Parse the components that follows after the annotation keyword of a annotation declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Ambiguous: the identifier may be the type-name or the annot-tag.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // Unambiguous type start: fall through to parse the type desc.
                break;
            }
            recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
            return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Annotation tag token
 */
private STNode parseAnnotationTag() {
    STToken token = peek();
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(token, ParserRuleContext.ANNOTATION_TAG);
        return parseAnnotationTag();
    }
    return consume();
}
/**
 * Parses an annotation declaration whose first token is an identifier, which
 * may be either an optional type-name or the annot-tag itself.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name must be the type; the annot-tag follows.
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
        // The identifier starts a (possibly complex) type desc; the annot-tag follows it.
        STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    // Otherwise the identifier itself was the annot-tag (no type desc).
    STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
}
/**
 * Parse the component that follows the first identifier in an annotation decl. The identifier
 * can be either the type-name (a user defined type) or the annot-tag, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken nextToken = peek();
    STNode typeDesc;
    STNode annotTag;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Another identifier follows: the first one was the type, this is the tag.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // Declaration ends or attach points begin: the identifier was the tag.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);
            return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parses the optional <code>on attach-points</code> part and the terminating
 * semicolon of an annotation declaration, then assembles the declaration node.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDesc Optional type descriptor
 * @param annotTag Annotation tag
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // No attach points.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNodeList();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            // "on" with no attach points is an error; attach the diagnostic to the keyword.
            onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
 * Parse annotation attach points.
 * <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 * type
 * | class
 * | [object|service remote] function
 * | parameter
 * | return
 * | service
 * | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 * annotation
 * | external
 * | var
 * | const
 * | listener
 * | worker
 * </code>
 *
 * @return Parsed node list of attach points (and their separating commas)
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        // Empty list: caller reports the missing-attach-point diagnostic.
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            // Semicolon reached: list ends.
            break;
        }
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // EOF while expecting an attach point: synthesize a missing one with a diagnostic.
            STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);
            STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);
            attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);
            attachPoint = SyntaxErrors.addDiagnostic(attachPoint,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    // If the last attach point ended with a missing token and an identifier sits
    // on the same line, treat that identifier as an invalid stray token.
    if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&
            !this.tokenReader.head().hasTrailingNewline()) {
        STToken nextNonVirtualToken = this.tokenReader.read();
        updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
 * Parse annotation attach point end.
 *
 * @return Comma token if another attach point follows, or <code>null</code>
 *         when the list is terminated by a semicolon
 */
private STNode parseAttachPointEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) {
        // null tells the caller the attach-point list has ended.
        return null;
    }
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    recover(nextToken, ParserRuleContext.ATTACH_POINT_END);
    return parseAttachPointEnd();
}
/**
 * Checks whether the given token kind terminates an annotation attach-point list.
 *
 * @param tokenKind Token kind to check
 * @return <code>true</code> for EOF or semicolon, <code>false</code> otherwise
 */
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse annotation attach point.
 *
 * @return Parsed node, or <code>null</code> on EOF
 */
private STNode parseAnnotationAttachPoint() {
    switch (peek().kind) {
        case EOF_TOKEN:
            // null signals the caller to synthesize a missing attach point.
            return null;
        // source-only attach points; "source" itself also starts a dual attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // dual attach points without a leading "source" keyword.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
        case CLASS_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT);
            return parseAnnotationAttachPoint();
    }
}
/**
 * Parse source keyword.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.SOURCE_KEYWORD) {
        recover(token, ParserRuleContext.SOURCE_KEYWORD);
        return parseSourceKeyword();
    }
    return consume();
}
/**
 * Parse attach point ident given the source keyword.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := type | class | [object|service remote] function | parameter
 * | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    switch (peek().kind) {
        // source-only attach points: a single ident completes the attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        // dual attach points: a second ident may follow.
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
            return parseAttachPointIdent(sourceKeyword);
    }
}
/**
 * Parse dual-attach-point ident.
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // object function | object field
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // resource function
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // record field
            secondIdent = parseFieldIdent();
            break;
        case SERVICE_KEYWORD:
            // service | service remote function — handled separately.
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
        // Single-ident attach points.
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
    STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
}
/**
 * Parse remote ident.
 *
 * @return Parsed node
 */
private STNode parseRemoteIdent() {
    STToken token = peek();
    if (token.kind != SyntaxKind.REMOTE_KEYWORD) {
        recover(token, ParserRuleContext.REMOTE_IDENT);
        return parseRemoteIdent();
    }
    return consume();
}
/**
 * Parse service attach point.
 * <code>service-attach-point := service | service remote function</code>
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent Already-consumed <code>service</code> ident
 * @return Parsed node
 */
private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
    STNode identList;
    STToken token = peek();
    switch (token.kind) {
        case REMOTE_KEYWORD:
            // "service remote function"
            STNode secondIdent = parseRemoteIdent();
            STNode thirdIdent = parseFunctionIdent();
            identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        case COMMA_TOKEN:
        case SEMICOLON_TOKEN:
            // Plain "service" attach point.
            identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        default:
            recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
    }
}
/**
 * Parse the idents that are supported after object-ident.
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    STToken token = peek();
    // Only "object function" and "object field" are valid.
    if (token.kind == SyntaxKind.FUNCTION_KEYWORD || token.kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return parseIdentAfterObjectIdent();
}
/**
 * Parse function ident.
 *
 * @return Parsed node
 */
private STNode parseFunctionIdent() {
    STToken token = peek();
    if (token.kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(token, ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
    return consume();
}
/**
 * Parse field ident.
 *
 * @return Parsed node
 */
private STNode parseFieldIdent() {
    STToken token = peek();
    if (token.kind != SyntaxKind.FIELD_KEYWORD) {
        recover(token, ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
    return consume();
}
/**
 * Parse XML namespace declaration.
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param isModuleVar Whether this is a module-level declaration
 * @return Parsed node
 */
private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseSimpleConstExpr();
    // Skip invalid URI expressions, attaching each as invalid minutiae with a diagnostic.
    while (!isValidXMLNameSpaceURI(namespaceUri)) {
        xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
        namespaceUri = parseSimpleConstExpr();
    }
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    endContext();
    return xmlnsDecl;
}
/**
 * Parse xmlns keyword.
 *
 * @return Parsed node
 */
private STNode parseXMLNSKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.XMLNS_KEYWORD) {
        recover(token, ParserRuleContext.XMLNS_KEYWORD);
        return parseXMLNSKeyword();
    }
    return consume();
}
/**
 * Checks whether the given expression is a valid XML namespace URI:
 * a string literal or a (qualified) name reference.
 *
 * @param expr Expression to check
 * @return <code>true</code> if valid, <code>false</code> otherwise
 */
private boolean isValidXMLNameSpaceURI(STNode expr) {
    SyntaxKind kind = expr.kind;
    return kind == SyntaxKind.STRING_LITERAL
            || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
}
/**
 * Parses a simple constant expression within its own parser context.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseSimpleConstExprInternal();
    endContext();
    return constExpr;
}
/**
 * Parse simple constants expr.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExprInternal() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        // Basic literals.
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Signed numeric literal.
            return parseSignedIntOrFloat();
        case OPEN_PAREN_TOKEN:
            // "( )" nil literal.
            return parseNilLiteral();
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                // Possibly-qualified constant reference.
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return parseSimpleConstExprInternal();
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @param isModuleVar Whether this is a module-level declaration
 * @return Parsed node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case AS_KEYWORD:
            // Optional "as prefix" clause.
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            break;
        default:
            recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
            return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    }
    STNode semicolon = parseSemicolon();
    // Module-level and statement-level declarations use different node kinds.
    if (isModuleVar) {
        return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
 * Parse XML namespace prefix.
 * <p>
 * <code>xml-namespace-prefix := identifier</code>
 * <p>
 * Note: the previous javadoc said "Parse import prefix", which described a
 * different production; this method parses the prefix of an xmlns declaration.
 *
 * @return Parsed node
 */
private STNode parseNamespacePrefix() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return consume();
    } else {
        recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
        return parseNamespacePrefix();
    }
}
/**
 * Parse named worker declaration.
 * <p>
 * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
 * </code>
 *
 * @param annots Annotations attached to the worker decl
 * @param qualifiers Preceding transactional keyword in a list
 * @return Parsed node
 */
private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    // Only "transactional" is valid here; other qualifiers are rejected with diagnostics.
    STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
    STNode workerKeyword = parseWorkerKeyword();
    STNode workerName = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode workerBody = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,
            returnTypeDesc, workerBody);
}
/**
 * Extracts the (optional) <code>transactional</code> keyword from a qualifier list,
 * attaching diagnostics to duplicates and to qualifiers that are not allowed here.
 *
 * @param qualifierList Qualifiers preceding the worker declaration
 * @return The transactional keyword, or an empty node if absent
 */
private STNode getTransactionalKeyword(List<STNode> qualifierList) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Repeated "transactional": flag as duplicate on the previous valid one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last qualifier and invalid: attach it to the next real token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Invalid qualifier in the middle: attach it to the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode transactionalKeyword;
    if (validatedList.isEmpty()) {
        transactionalKeyword = STNodeFactory.createEmptyNode();
    } else {
        transactionalKeyword = validatedList.get(0);
    }
    return transactionalKeyword;
}
/**
 * Parses an optional return type descriptor:
 * <code>return-type-descriptor := returns [annots] type-descriptor</code>.
 *
 * @return Return type descriptor node, or an empty node when no
 *         <code>returns</code> keyword is present
 */
private STNode parseReturnTypeDescriptor() {
    STToken token = peek();
    if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annot = parseOptionalAnnotations();
    STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}
/**
 * Parse worker keyword.
 *
 * @return Parsed node
 */
private STNode parseWorkerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WORKER_KEYWORD) {
        recover(peek(), ParserRuleContext.WORKER_KEYWORD);
        return parseWorkerKeyword();
    }
    return consume();
}
/**
 * Parse worker name.
 * <p>
 * <code>worker-name := identifier</code>
 *
 * @return Parsed node
 */
private STNode parseWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.WORKER_NAME);
        return parseWorkerName();
    }
    return consume();
}
/**
 * Parse lock statement.
 * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
 *
 * @return Lock statement node
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode blockStmt = parseBlockNode();
    endContext();
    // The optional on-fail clause lies outside the lock-stmt context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKeyword, blockStmt, onFailClause);
}
/**
 * Parse lock-keyword.
 *
 * @return lock-keyword node
 */
private STNode parseLockKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.LOCK_KEYWORD) {
        recover(token, ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
    return consume();
}
/**
 * Parse union type descriptor.
 * union-type-descriptor := type-descriptor | type-descriptor
 *
 * @param leftTypeDesc Type desc on the LHS of the union type desc.
 * @param context Current context.
 * @param isTypedBindingPattern Whether this is within a typed binding pattern
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                        boolean isTypedBindingPattern) {
    // Consume the '|' and parse the RHS at union precedence.
    STNode pipeToken = consume();
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.UNION);
    return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
 * Creates a union type descriptor after validating lhs and rhs types.
 * <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when types are unknown.
 * Use the merge helper that honours precedence instead.</i>
 *
 * @param leftTypeDesc lhs type
 * @param pipeToken pipe token
 * @param rightTypeDesc rhs type
 * @return a UnionTypeDescriptorNode
 */
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    // Reject a bare "var" on either side of the union.
    leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
    rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
 * Parse pipe token.
 *
 * @return parsed pipe token node
 */
private STNode parsePipeToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.PIPE_TOKEN) {
        recover(token, ParserRuleContext.PIPE);
        return parsePipeToken();
    }
    return consume();
}
/**
 * Checks whether a token of the given kind can start a type descriptor,
 * using one extra token of lookahead for disambiguation.
 *
 * @param nodeKind Kind of the token to check
 * @return <code>true</code> if it can start a type descriptor, <code>false</code> otherwise
 */
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    return isTypeStartingToken(nodeKind, getNextNextToken());
}
/**
 * Checks whether a token of the given kind can start a type descriptor.
 *
 * @param nextTokenKind Kind of the token to check
 * @param nextNextToken Following token, used for singleton-type disambiguation
 * @return <code>true</code> if it can start a type descriptor, <code>false</code> otherwise
 */
private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN:
        case MAP_KEYWORD:
        case STREAM_KEYWORD:
        case TABLE_KEYWORD:
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case DISTINCT_KEYWORD:
        case ISOLATED_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case TRANSACTION_KEYWORD:
            return true;
        default:
            // Parameterized types (typedesc/future/xml/error), singleton types
            // (literals), and simple built-in types also start a type desc.
            if (isParameterizedTypeToken(nextTokenKind)) {
                return true;
            }
            if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {
                return true;
            }
            return isSimpleType(nextTokenKind);
    }
}
/**
 * Check if the token kind is a type descriptor in terminal expression.
 * <p>
 * simple-type-in-expr :=
 * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
 */
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
    // "var" and "readonly" are simple types but are not valid in expression position.
    if (nodeKind == SyntaxKind.VAR_KEYWORD || nodeKind == SyntaxKind.READONLY_KEYWORD) {
        return false;
    }
    return isSimpleType(nodeKind);
}
/**
 * Checks whether the given token kind is a simple (built-in) type keyword,
 * including <code>var</code> and <code>readonly</code>.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> for a simple type keyword, <code>false</code> otherwise
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case VAR_KEYWORD:
        case READONLY_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the given token kind is a predeclared module prefix
 * (a keyword that can appear before <code>:</code> in a qualified identifier).
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> for a predeclared prefix, <code>false</code> otherwise
 */
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case BOOLEAN_KEYWORD:
        case DECIMAL_KEYWORD:
        case ERROR_KEYWORD:
        case FLOAT_KEYWORD:
        case FUNCTION_KEYWORD:
        case FUTURE_KEYWORD:
        case INT_KEYWORD:
        case MAP_KEYWORD:
        case OBJECT_KEYWORD:
        case STREAM_KEYWORD:
        case STRING_KEYWORD:
        case TABLE_KEYWORD:
        case TRANSACTION_KEYWORD:
        case TYPEDESC_KEYWORD:
        case XML_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the next token is a predeclared prefix that is actually used
 * as a qualified-identifier prefix, i.e. it is followed by a colon.
 *
 * @param nodeKind Kind of the next token
 * @return <code>true</code> if this starts a qualified identifier, <code>false</code> otherwise
 */
private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
    return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
}
/**
 * Maps a built-in type keyword to its type-descriptor syntax kind.
 *
 * @param typeKeyword Built-in type keyword kind
 * @return Corresponding type-descriptor kind; {@code TYPE_REFERENCE} only on
 *         an unexpected keyword (guarded by an assertion)
 */
private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        case READONLY_KEYWORD:
            return SyntaxKind.READONLY_TYPE_DESC;
        default:
            // Should be unreachable for valid callers.
            assert false : typeKeyword + " is not a built-in type";
            return SyntaxKind.TYPE_REFERENCE;
    }
}
/**
 * Parse fork-keyword.
 *
 * @return Fork-keyword node
 */
private STNode parseForkKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.FORK_KEYWORD) {
        recover(token, ParserRuleContext.FORK_KEYWORD);
        return parseForkKeyword();
    }
    return consume();
}
/**
 * Parse fork statement.
 * <code>fork-stmt := fork { named-worker-decl+ }</code>
 *
 * @return Fork statement
 */
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        // Statements already rejected by validateStatement carry their own diagnostics.
        if (validateStatement(stmt)) {
            continue;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                // Non-worker statements are attached as invalid minutiae: to the
                // open brace if no worker was seen yet, otherwise to the last worker.
                if (workers.isEmpty()) {
                    openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                } else {
                    updateLastNodeInListWithInvalidNode(workers, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                }
        }
    }
    STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
    STNode closeBrace = parseCloseBrace();
    endContext();
    STNode forkStmt =
            STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    // A fork statement must contain at least one named worker.
    if (isNodeListEmpty(namedWorkerDeclarations)) {
        return SyntaxErrors.addDiagnostic(forkStmt,
                DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
    }
    return forkStmt;
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Whether this is a RHS expression or not
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
STNode trapKeyword = parseTrapKeyword();
STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
if (isAction(expr)) {
return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
}
return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TRAP_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TRAP_KEYWORD);
return parseTrapKeyword();
}
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ list-members ] ]
* <br/>
* list-members := list-member (, list-member)*
* <br/>
* list-member := expression | spread-member
* <br/>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
startContext(ParserRuleContext.LIST_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode listMembers = parseListMembers();
STNode closeBracket = parseCloseBracket();
endContext();
return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);
}
/**
* Parse optional list member list.
*
* @return Parsed node
*/
private STNode parseListMembers() {
List<STNode> listMembers = new ArrayList<>();
if (isEndOfListConstructor(peek().kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode listMember = parseListMember();
listMembers.add(listMember);
return parseListMembers(listMembers);
}
    /**
     * Continue parsing list-constructor members after the first member.
     * Alternates between member-end separators (commas) and members until the closing
     * bracket/EOF is reached or the separator parser signals the end by returning null.
     *
     * @param listMembers Already-parsed members; separators and members are appended in order
     * @return Node list containing members interleaved with their separators
     */
    private STNode parseListMembers(List<STNode> listMembers) {
        STNode listConstructorMemberEnd;
        while (!isEndOfListConstructor(peek().kind)) {
            listConstructorMemberEnd = parseListConstructorMemberEnd();
            if (listConstructorMemberEnd == null) {
                // Separator parser saw the close bracket: the member list is complete.
                break;
            }
            listMembers.add(listConstructorMemberEnd);
            STNode listMember = parseListMember();
            listMembers.add(listMember);
        }
        return STNodeFactory.createNodeList(listMembers);
    }
/**
* Parse list member.
* <p>
* <code>
* list-member := expression | spread-member
* </code>
*
* @return Parsed node
*/
private STNode parseListMember() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
return parseSpreadMember();
} else {
return parseExpression();
}
}
/**
* Parse spread member.
* <p>
* <code>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseSpreadMember() {
STNode ellipsis = parseEllipsis();
STNode expr = parseExpression();
return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
}
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACKET_TOKEN:
return true;
default:
return false;
}
}
private STNode parseListConstructorMemberEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return consume();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
return parseListConstructorMemberEnd();
}
}
    /**
     * Parse foreach statement.
     * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
     *
     * @return foreach statement
     */
    private STNode parseForEachStatement() {
        startContext(ParserRuleContext.FOREACH_STMT);
        STNode forEachKeyword = parseForEachKeyword();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
        STNode inKeyword = parseInKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        STNode blockStatement = parseBlockNode();
        // The foreach context ends before the optional on-fail clause, which is parsed in the
        // enclosing context.
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
                blockStatement, onFailClause);
    }
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FOREACH_KEYWORD);
return parseForEachKeyword();
}
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IN_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.IN_KEYWORD);
return parseInKeyword();
}
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
startContext(ParserRuleContext.TYPE_CAST);
STNode ltToken = parseLTToken();
return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
}
    /**
     * Parse the remainder of a type-cast expression, given the already-consumed {@code <} token.
     * The TYPE_CAST context opened by the caller is ended here, right after the {@code >} token.
     *
     * @param ltToken Already-parsed {@code <} token
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions
     * @param isInConditionalExpr Whether this is inside a conditional expression
     * @return Type cast expression node
     */
    private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                     boolean isInConditionalExpr) {
        STNode typeCastParam = parseTypeCastParam();
        STNode gtToken = parseGTToken();
        endContext();
        STNode expression =
                parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
        return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
    }
    /**
     * Parse the content between the angle brackets of a type cast.
     * <p>
     * <code>type-cast-param := [annots] type-descriptor | annots</code>
     * <p>
     * When annotations are present, the type descriptor is optional; otherwise a type
     * descriptor is required.
     *
     * @return Type-cast-param node
     */
    private STNode parseTypeCastParam() {
        STNode annot;
        STNode type;
        STToken token = peek();
        switch (token.kind) {
            case AT_TOKEN:
                // Annotations first; a type descriptor may or may not follow them.
                annot = parseOptionalAnnotations();
                token = peek();
                if (isTypeStartingToken(token.kind)) {
                    type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                } else {
                    type = STNodeFactory.createEmptyNode();
                }
                break;
            default:
                // No annotations: the type descriptor is mandatory.
                annot = STNodeFactory.createEmptyNode();
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
                break;
        }
        return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
    }
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode rowList = parseRowList();
STNode closeBracket = parseCloseBracket();
return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TABLE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TABLE_KEYWORD);
return parseTableKeyword();
}
}
    /**
     * Parse table rows.
     * <p>
     * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
     *
     * @return Node list of mapping constructors interleaved with comma separators
     */
    private STNode parseRowList() {
        STToken nextToken = peek();
        if (isEndOfTableRowList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }
        List<STNode> mappings = new ArrayList<>();
        STNode mapExpr = parseMappingConstructorExpr();
        mappings.add(mapExpr);
        nextToken = peek();
        STNode rowEnd;
        while (!isEndOfTableRowList(nextToken.kind)) {
            rowEnd = parseTableRowEnd();
            if (rowEnd == null) {
                // Row-end parser saw the close bracket / EOF: the row list is complete.
                break;
            }
            mappings.add(rowEnd);
            mapExpr = parseMappingConstructorExpr();
            mappings.add(mapExpr);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(mappings);
    }
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACKET_TOKEN:
return true;
case COMMA_TOKEN:
case OPEN_BRACE_TOKEN:
return false;
default:
return isEndOfMappingConstructor(tokenKind);
}
}
private STNode parseTableRowEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.TABLE_ROW_END);
return parseTableRowEnd();
}
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode keyKeyword = parseKeyKeyword();
STNode openParen = parseOpenParenthesis();
STNode fieldNames = parseFieldNames();
STNode closeParen = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}
    /**
     * Parse the <code>key</code> keyword. Since <code>key</code> is a contextual keyword, an
     * identifier token whose text is "key" is also accepted and converted into a keyword token.
     *
     * @return Key-keyword node
     */
    private STNode parseKeyKeyword() {
        STToken token = peek();
        if (token.kind == SyntaxKind.KEY_KEYWORD) {
            return consume();
        }
        if (isKeyKeyword(token)) {
            // An identifier spelled "key": re-create it as a KEY_KEYWORD token.
            return getKeyKeyword(consume());
        }
        recover(token, ParserRuleContext.KEY_KEYWORD);
        return parseKeyKeyword();
    }
static boolean isKeyKeyword(STToken token) {
return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
}
    /**
     * Re-create an identifier token as a <code>key</code> keyword token, preserving its
     * minutiae and diagnostics.
     *
     * @param token Identifier token spelled "key"
     * @return Equivalent KEY_KEYWORD token
     */
    private STNode getKeyKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }
    /**
     * Re-create a token as an underscore keyword token, preserving its minutiae and diagnostics.
     *
     * @param token Token to convert
     * @return Equivalent UNDERSCORE_KEYWORD token
     */
    private STToken getUnderscoreKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
                token.trailingMinutiae(), token.diagnostics());
    }
    /**
     * Parse field names.
     * <p>
     * <code>field-name-list := [ field-name (, field-name)* ]</code>
     *
     * @return Node list of field names interleaved with comma separators (possibly empty)
     */
    private STNode parseFieldNames() {
        STToken nextToken = peek();
        if (isEndOfFieldNamesList(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }
        List<STNode> fieldNames = new ArrayList<>();
        STNode fieldName = parseVariableName();
        fieldNames.add(fieldName);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfFieldNamesList(nextToken.kind)) {
            // Each subsequent field name must be preceded by a comma.
            leadingComma = parseComma();
            fieldNames.add(leadingComma);
            fieldName = parseVariableName();
            fieldNames.add(fieldName);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(fieldNames);
    }
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case IDENTIFIER_TOKEN:
return false;
default:
return true;
}
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ERROR_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ERROR_KEYWORD);
return parseErrorKeyword();
}
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* <br/>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
STNode streamTypeParamsNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.LT_TOKEN) {
streamTypeParamsNode = parseStreamTypeParamsNode();
} else {
streamTypeParamsNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}
    /**
     * Parse stream type params node.
     * <p>
     * stream-type-parameters := < type-descriptor [, type-descriptor]>
     * </p>
     *
     * @return Parsed stream type params node
     */
    private STNode parseStreamTypeParamsNode() {
        STNode ltToken = parseLTToken();
        // The stream-type-desc context covers only the type descriptors, not the `<` token.
        startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
        STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        endContext();
        return streamTypedesc;
    }
    /**
     * Parse the remainder of a stream type-parameter list after the first type descriptor:
     * either an optional <code>, type-descriptor</code> followed by <code>&gt;</code>, or just
     * the closing <code>&gt;</code>.
     *
     * @param ltToken Already-parsed {@code <} token
     * @param leftTypeDescNode Already-parsed first type descriptor
     * @return Stream type params node
     */
    private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
        STNode commaToken, rightTypeDescNode, gtToken;
        switch (peek().kind) {
            case COMMA_TOKEN:
                // Second (error) type descriptor present.
                commaToken = parseComma();
                rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
                break;
            case GT_TOKEN:
                // Single type parameter: no comma, no second descriptor.
                commaToken = STNodeFactory.createEmptyNode();
                rightTypeDescNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
                return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
        }
        gtToken = parseGTToken();
        return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
                gtToken);
    }
    /**
     * Parse let expression.
     * <p>
     * <code>
     * let-expr := let let-var-decl [, let-var-decl]* in expression
     * </code>
     *
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param isInConditionalExpr Whether this is inside a conditional expression
     * @return Parsed node
     */
    private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode letKeyword = parseLetKeyword();
        STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr, false);
        STNode inKeyword = parseInKeyword();
        // At least one let-var-decl is required; attach a diagnostic to the keyword if none.
        letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
                DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
        // allow-actions is `false` since this is an expression context.
        STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false,
                isInConditionalExpr);
        return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
    }
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LET_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LET_KEYWORD);
return parseLetKeyword();
}
}
    /**
     * Parse let variable declarations.
     * <p>
     * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
     *
     * @param context Let-var-decl context (let-expression vs let-clause)
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions in the initializer expressions
     * @return Node list of declarations interleaved with comma separators (possibly empty)
     */
    private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        startContext(context);
        List<STNode> varDecls = new ArrayList<>();
        STToken nextToken = peek();
        // End detection needs one token of extra lookahead to distinguish a type start.
        if (isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        STNode varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
        varDecls.add(varDec);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndOfLetVarDeclarations(nextToken, getNextNextToken())) {
            leadingComma = parseComma();
            varDecls.add(leadingComma);
            varDec = parseLetVarDecl(context, isRhsExpr, allowActions);
            varDecls.add(varDec);
            nextToken = peek();
        }
        endContext();
        return STNodeFactory.createNodeList(varDecls);
    }
    /**
     * Check whether the let-var-decl list ends at the given token.
     * Commas and annotations continue the list; {@code in} (and group/collect keywords) end
     * it; otherwise the list continues only if a type descriptor can start here.
     *
     * @param nextToken Current token
     * @param nextNextToken One-token lookahead, used by the type-start check
     * @return {@code true} if the declaration list ends here
     */
    static boolean isEndOfLetVarDeclarations(STToken nextToken, STToken nextNextToken) {
        SyntaxKind tokenKind = nextToken.kind;
        switch (tokenKind) {
            case COMMA_TOKEN:
            case AT_TOKEN:
                return false;
            case IN_KEYWORD:
                return true;
            default:
                if (isGroupOrCollectKeyword(nextToken)) {
                    return true;
                }
                return !isTypeStartingToken(tokenKind, nextNextToken);
        }
    }
    /**
     * Parse let variable declaration.
     * <p>
     * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
     *
     * @param context Let-var-decl context; a let-clause (query) uses QUERY precedence for the
     *                initializer while a let-expression uses ANON_FUNC_OR_LET
     * @param isRhsExpr Whether this is a RHS expression or not
     * @param allowActions Allow actions in a let-clause initializer
     * @return Parsed node
     */
    private STNode parseLetVarDecl(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
        STNode annot = parseOptionalAnnotations();
        STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
        STNode assign = parseAssignOp();
        // Precedence (and action-allowance) of the initializer depends on whether this decl
        // belongs to a query let-clause or a let-expression.
        STNode expression = context == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL ?
                parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions) :
                parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
        return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
    }
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
STNode type = STNodeFactory.createEmptyNode();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
private STNode parseTemplateContent() {
List<STNode> items = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
items.add(contentItem);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
switch (kind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
return false;
}
}
private STNode parseTemplateItem() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return parseInterpolation();
}
return consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
STNode type = parseStringKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.STRING_KEYWORD);
return parseStringKeyword();
}
}
    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // No backtick string at all: produce an empty template with a diagnostic instead of
            // attempting to lex XML content.
            return createMissingTemplateExpressionNode(xmlKeyword, SyntaxKind.XML_TEMPLATE_EXPRESSION);
        }
        STNode content = parseTemplateContentAsXML();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XML_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.XML_KEYWORD);
return parseXMLKeyword();
}
}
    /**
     * Parse the content of the template string as XML. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
     * Interpolations are replaced by the placeholder text {@code "${}"} in the string fed to
     * the XML lexer, and the actual interpolation nodes are queued for the XML parser to
     * splice back in.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Interpolation: stand-in placeholder in the text, real node kept aside.
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        CharReader charReader = CharReader.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }
    /**
     * Parse regular expression constructor.
     * <p>
     * <code>regexp-constructor-expr := re BacktickString</code>
     *
     * @return Regular expression template expression
     */
    private STNode parseRegExpTemplateExpression() {
        // The caller has already verified the `re` keyword; consume it directly.
        STNode reKeyword = consume();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        if (startingBackTick.isMissing()) {
            // No backtick string: produce an empty template with a diagnostic.
            return createMissingTemplateExpressionNode(reKeyword, SyntaxKind.REGEX_TEMPLATE_EXPRESSION);
        }
        STNode content = parseTemplateContentAsRegExp();
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.REGEX_TEMPLATE_EXPRESSION, reKeyword,
                startingBackTick, content, endingBackTick);
    }
private STNode createMissingTemplateExpressionNode(STNode reKeyword, SyntaxKind kind) {
STNode startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode content = STAbstractNodeFactory.createEmptyNodeList();
STNode templateExpr =
STNodeFactory.createTemplateExpressionNode(kind, reKeyword, startingBackTick, content, endingBackTick);
templateExpr = SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);
return templateExpr;
}
    /**
     * Parse the content of the template string as regular expression. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as regular expression.
     * Interpolations are replaced by the placeholder text {@code "${}"} in the string fed to
     * the regexp lexer, and the actual interpolation nodes are queued for the regexp parser.
     *
     * @return Template expression node
     */
    private STNode parseTemplateContentAsRegExp() {
        // Switch the main token reader into REGEXP mode while collecting the raw content.
        this.tokenReader.startMode(ParserMode.REGEXP);
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder regExpStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                regExpStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Interpolation: stand-in placeholder in the text, real node kept aside.
                regExpStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        this.tokenReader.endMode();
        CharReader charReader = CharReader.from(regExpStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new RegExpLexer(charReader));
        RegExpParser regExpParser = new RegExpParser(tokenReader, expressions);
        return regExpParser.parse();
    }
    /**
     * Parse interpolation of a back-tick string.
     * <p>
     * <code>
     * interpolation := ${ expression }
     * </code>
     *
     * @return Interpolation node
     */
    private STNode parseInterpolation() {
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();
        // Consume any leftover tokens before the closing brace and attach them to the
        // expression as invalid-token minutiae.
        while (!isEndOfInterpolation()) {
            STToken nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }
    /**
     * Check whether the current token ends an interpolation.
     * A close brace only ends the interpolation when the lexer has left interpolation mode
     * (i.e. the brace is not part of a nested braced construct inside the interpolation).
     *
     * @return {@code true} if the interpolation ends at the current token
     */
    private boolean isEndOfInterpolation() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
                return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                        currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
        }
    }
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
STToken token = peek();
if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
return parseInterpolationStart();
}
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
STToken token = peek();
if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
return consume();
} else {
recover(token, ctx);
return parseBacktickToken(ctx);
}
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
STNode rowTypeParameterNode = parseRowTypeParameter();
STNode keyConstraintNode;
STToken nextToken = peek();
if (isKeyKeyword(nextToken)) {
STNode keyKeywordToken = getKeyKeyword(consume());
keyConstraintNode = parseKeyConstraint(keyKeywordToken);
} else {
keyConstraintNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
startContext(ParserRuleContext.ROW_TYPE_PARAM);
STNode rowTypeParameterNode = parseTypeParameter();
endContext();
return rowTypeParameterNode;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
STNode ltToken = parseLTToken();
STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
STNode gtToken = parseGTToken();
return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
switch (peek().kind) {
case OPEN_PAREN_TOKEN:
return parseKeySpecifier(keyKeywordToken);
case LT_TOKEN:
return parseKeyTypeConstraint(keyKeywordToken);
default:
recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);
return parseKeyConstraint(keyKeywordToken);
}
}
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode openParenToken = parseOpenParenthesis();
STNode fieldNamesNode = parseFieldNames();
STNode closeParenToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
STNode typeParameterNode = parseTypeParameter();
return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}
    /**
     * Parse function type descriptor.
     * <p>
     * <code>
     * function-type-descriptor := function-quals function function-signature
     * <br/> | [isolated] function
     * <br/>
     * function-quals := (transactional | isolated)*
     * </code>
     *
     * @param qualifiers Preceding type descriptor qualifiers
     * @return Function type descriptor node
     */
    private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
        startContext(ParserRuleContext.FUNC_TYPE_DESC);
        STNode functionKeyword = parseFunctionKeyword();
        boolean hasFuncSignature = false;
        STNode signature = STNodeFactory.createEmptyNode();
        // A signature is parsed when one syntactically follows, or when `transactional` forces
        // the full function-type form (the signature-less form allows only `isolated`).
        if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {
            signature = parseFuncSignature(true);
            hasFuncSignature = true;
        }
        // Validate qualifiers; invalid ones are attached as invalid-node minutiae, possibly to
        // the function keyword itself.
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);
        STNode qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
    }
private STNode getLastNodeInList(List<STNode> nodeList) {
return nodeList.get(nodeList.size() - 1);
}
    /**
     * Validate the qualifiers of a function type descriptor. Duplicates and qualifiers that
     * are not allowed for the parsed form are attached as invalid-node minutiae — either to
     * the next qualifier in the original list or, for the last one, to the function keyword.
     *
     * @param qualifierList Raw qualifier tokens, in source order
     * @param functionKeyword Function keyword; may be cloned with invalid leading minutiae
     * @param hasFuncSignature Whether a signature was parsed (full form allows transactional)
     * @return Two-element array: validated qualifier node list and (possibly updated) keyword
     */
    private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword,
                                                boolean hasFuncSignature) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Repeated qualifier: attach to the previous validated one as a duplicate.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
                validatedList.add(qualifier);
            } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
                // `isolated` is valid even for the signature-less form.
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last qualifier is invalid: attach it to the function keyword.
                functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            } else {
                // Invalid qualifier in the middle: attach it to the following qualifier.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        STNode nodeList = STNodeFactory.createNodeList(validatedList);
        return new STNode[]{ nodeList, functionKeyword };
    }
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
switch (tokenKind) {
case ISOLATED_KEYWORD:
case TRANSACTIONAL_KEYWORD:
return true;
default:
return false;
}
}
    /**
     * Parse explicit anonymous function expression.
     * <p>
     * <code>explicit-anonymous-function-expr :=
     * [annots] (isolated| transactional) function function-signature anon-func-body</code>
     *
     * @param annots Annotations.
     * @param qualifiers Function qualifiers
     * @param isRhsExpr Is expression in rhs context
     * @return Anonymous function expression node
     */
    private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
        startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
        STNode funcKeyword = parseFunctionKeyword();
        // An anonymous function always has a signature, so validate qualifiers accordingly.
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);
        STNode qualifierList = nodes[0];
        funcKeyword = nodes[1];
        STNode funcSignature = parseFuncSignature(false);
        // parseAnonFuncBody ends the ANON_FUNC_EXPRESSION context.
        STNode funcBody = parseAnonFuncBody(isRhsExpr);
        return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
                funcSignature, funcBody);
    }
    /**
     * Parse anonymous function body.
     * <p>
     * <code>anon-func-body := block-function-body | expr-function-body</code>
     * <p>
     * Also ends the context opened by the caller; note the endContext placement differs per
     * branch (before vs after the body parse).
     *
     * @param isRhsExpr Is expression in rhs context
     * @return Anon function body node
     */
    private STNode parseAnonFuncBody(boolean isRhsExpr) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
            case EOF_TOKEN:
                STNode body = parseFunctionBodyBlock(true);
                endContext();
                return body;
            case RIGHT_DOUBLE_ARROW_TOKEN:
                endContext();
                return parseExpressionFuncBody(true, isRhsExpr);
            default:
                recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
                return parseAnonFuncBody(isRhsExpr);
        }
    }
/**
* Parse expression function body.
* <p>
* <code>expr-function-body := => expression</code>
*
* @param isAnon Is anonymous function.
* @param isRhsExpr Is expression in rhs context
* @return Expression function body node
*/
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
STNode rightDoubleArrow = parseDoubleRightArrow();
STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);
STNode semiColon;
if (isAnon) {
semiColon = STNodeFactory.createEmptyNode();
} else {
semiColon = parseSemicolon();
}
return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
* Parse '=>' token.
*
* @return Double right arrow token
*/
private STNode parseDoubleRightArrow() {
STToken token = peek();
if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
return parseDoubleRightArrow();
}
}
    /**
     * Parse the `=> expression` tail of an implicit anonymous function, normalizing the
     * already-parsed parameter node into a valid anon-func parameter form first.
     *
     * @param params Previously parsed node standing in for the parameter list
     * @param isRhsExpr Is expression in rhs context
     * @return Implicit anonymous function expression node
     */
    private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                // Already in a valid parameter form; nothing to normalize.
                break;
            case BRACED_EXPRESSION:
                // `(x)` was parsed as a braced expression; reinterpret it as a one-param list.
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            case NIL_LITERAL:
                // `()` was parsed as a nil literal; reinterpret it as an empty parameter list.
                STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;
                params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,
                        STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);
                break;
            default:
                // Anything else is invalid: attach it as invalid-node minutiae to a
                // synthetic missing identifier and continue with that as the parameter.
                STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                        DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
                params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }
/**
* Create a new anon-func-param node from a braced expression.
*
* @param bracedExpression Braced expression
* @return Anon-func param node
*/
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
List<STNode> paramList = new ArrayList<>();
STNode innerExpression = bracedExpression.expression;
STNode openParen = bracedExpression.openParen;
if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
paramList.add(innerExpression);
} else {
openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
}
return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}
    /**
     * Parse implicit anon function expression.
     *
     * @param openParen Open parenthesis token
     * @param firstParam First parameter
     * @param isRhsExpr Is expression in rhs context
     * @return Implicit anon function expression node
     */
    private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
        List<STNode> paramList = new ArrayList<>();
        paramList.add(firstParam);
        STToken nextToken = peek();
        STNode paramEnd;
        STNode param;
        // Parameters alternate with separators in the list: param, comma, param, ...
        while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
            paramEnd = parseImplicitAnonFuncParamEnd();
            if (paramEnd == null) {
                // Close paren reached: parameter list ends here.
                break;
            }
            paramList.add(paramEnd);
            param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
            param = STNodeFactory.createSimpleNameReferenceNode(param);
            paramList.add(param);
            nextToken = peek();
        }
        STNode params = STNodeFactory.createNodeList(paramList);
        STNode closeParen = parseCloseParenthesis();
        endContext();
        STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        // Delegate to the single-node overload to parse the `=> expression` tail.
        return parseImplicitAnonFunc(inferedParams, isRhsExpr);
    }
private STNode parseImplicitAnonFuncParamEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
return parseImplicitAnonFuncParamEnd();
}
}
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case SEMICOLON_TOKEN:
case RETURNS_KEYWORD:
case TYPE_KEYWORD:
case LISTENER_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case DO_KEYWORD:
case OPEN_BRACE_TOKEN:
case RIGHT_DOUBLE_ARROW_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse tuple type descriptor.
* <p>
* <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
* <br/><br/>
* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
* | [ tuple-rest-descriptor ]
* <br/><br/>
* member-type-descriptor := [annots] type-descriptor
* tuple-rest-descriptor := type-descriptor ...
* </code>
*
* @return
*/
private STNode parseTupleTypeDesc() {
STNode openBracket = parseOpenBracket();
startContext(ParserRuleContext.TUPLE_MEMBERS);
STNode memberTypeDesc = parseTupleMemberTypeDescList();
STNode closeBracket = parseCloseBracket();
endContext();
openBracket = cloneWithDiagnosticIfListEmpty(memberTypeDesc, openBracket,
DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
return STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDesc, closeBracket);
}
/**
* Parse tuple member type descriptors.
*
* @return Parsed node
*/
private STNode parseTupleMemberTypeDescList() {
List<STNode> typeDescList = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfTypeList(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode typeDesc = parseTupleMember();
return parseTupleTypeMembers(typeDesc, typeDescList);
}
    /**
     * Parse the remaining tuple members, given the first member already parsed.
     * A rest descriptor (`T...`) must be last; any members that follow one are
     * invalidated and attached to it as minutiae.
     *
     * @param firstMember Most recently parsed member
     * @param memberList Accumulator for members and separators
     * @return Node list of tuple members
     */
    private STNode parseTupleTypeMembers(STNode firstMember, List<STNode> memberList) {
        STNode tupleMemberRhs;
        while (!isEndOfTypeList(peek().kind)) {
            if (firstMember.kind == SyntaxKind.REST_TYPE) {
                // Rest descriptor must be the last member: swallow anything after it.
                firstMember = invalidateTypeDescAfterRestDesc(firstMember);
                break;
            }
            tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                // Close bracket reached.
                break;
            }
            memberList.add(firstMember);
            memberList.add(tupleMemberRhs);
            firstMember = parseTupleMember();
        }
        // The last parsed member is added here, after the loop.
        memberList.add(firstMember);
        return STNodeFactory.createNodeList(memberList);
    }
private STNode parseTupleMember() {
STNode annot = parseOptionalAnnotations();
STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
return createMemberOrRestNode(annot, typeDesc);
}
    /**
     * Wrap a parsed tuple-member type descriptor as either a plain member node or,
     * when followed by `...`, a rest descriptor node. Annotations are not allowed on
     * a rest descriptor, so any present are invalidated onto the type descriptor.
     *
     * @param annot Optional annotation list parsed before the type descriptor
     * @param typeDesc Parsed type descriptor
     * @return Member type descriptor or rest descriptor node
     */
    private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
        // Non-null only when an ellipsis follows, i.e. this member is a rest descriptor.
        STNode tupleMemberRhs = parseTypeDescInTupleRhs();
        if (tupleMemberRhs != null) {
            if (!((STNodeList) annot).isEmpty()) {
                // `@a T...` is invalid: keep the annots in the tree as invalid minutiae.
                typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                        DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
            }
            return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
        }
        return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
    }
    /**
     * Consume and invalidate any tuple members appearing after a rest descriptor,
     * attaching them (and their separators) to the rest descriptor as trailing
     * invalid-node minutiae so no input is lost.
     *
     * @param restDescriptor The rest descriptor that must be the last member
     * @return Rest descriptor with trailing invalid members attached
     */
    private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
        while (!isEndOfTypeList(peek().kind)) {
            STNode tupleMemberRhs = parseTupleMemberRhs();
            if (tupleMemberRhs == null) {
                break;
            }
            // Separator carries no extra diagnostic; the member itself gets the error code.
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
            restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(),
                    DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
        }
        return restDescriptor;
    }
private STNode parseTupleMemberRhs() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
return parseTupleMemberRhs();
}
}
private STNode parseTypeDescInTupleRhs() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
case CLOSE_BRACKET_TOKEN:
return null;
case ELLIPSIS_TOKEN:
return parseEllipsis();
default:
recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
return parseTypeDescInTupleRhs();
}
}
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case CLOSE_BRACKET_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case EOF_TOKEN:
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse table constructor or query expression.
* <p>
* <code>
* table-constructor-or-query-expr := table-constructor-expr | query-expr
* <br/>
* table-constructor-expr := table [key-specifier] [ [row-list] ]
* <br/>
* query-expr := [query-construct-type] query-pipeline select-clause
* [query-construct-type] query-pipeline select-clause on-conflict-clause?
* <br/>
* query-construct-type := table key-specifier | stream | map
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr, boolean allowActions) {
startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
STNode tableOrQueryExpr = parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
endContext();
return tableOrQueryExpr;
}
    /**
     * Dispatch on the leading keyword to parse either a table constructor or a
     * query expression (with an optional construct type).
     *
     * @param isRhsExpr Whether the expression occurs in an rhs context
     * @param allowActions Whether actions are allowed inside the expression
     * @return Table constructor or query expression node
     */
    private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr, boolean allowActions) {
        STNode queryConstructType;
        switch (peek().kind) {
            case FROM_KEYWORD:
                // Bare query expression with no construct type.
                queryConstructType = STNodeFactory.createEmptyNode();
                return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
            case TABLE_KEYWORD:
                // Could still be either a table constructor or a `table`-typed query.
                STNode tableKeyword = parseTableKeyword();
                return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
            case STREAM_KEYWORD:
            case MAP_KEYWORD:
                // `stream`/`map` construct types take no key specifier.
                STNode streamOrMapKeyword = consume();
                STNode keySpecifier = STNodeFactory.createEmptyNode();
                queryConstructType = parseQueryConstructType(streamOrMapKeyword, keySpecifier);
                return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
            default:
                recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
                return parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
        }
    }
    /**
     * Continue parsing after a `table` keyword: decide between a table constructor
     * (open bracket follows) and a query with a `table key(...)` construct type.
     *
     * @param tableKeyword Already-consumed `table` keyword
     * @param isRhsExpr Whether the expression occurs in an rhs context
     * @param allowActions Whether actions are allowed inside the expression
     * @return Table constructor or query expression node
     */
    private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr, boolean allowActions) {
        STNode keySpecifier;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // `table [...]`: table constructor with no key specifier.
                keySpecifier = STNodeFactory.createEmptyNode();
                return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
            case KEY_KEYWORD:
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
            case IDENTIFIER_TOKEN:
                // `key` is a contextual keyword; it may arrive as a plain identifier.
                if (isKeyKeyword(nextToken)) {
                    keySpecifier = parseKeySpecifier();
                    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
                }
                break;
            default:
                break;
        }
        // Fall-through from the non-returning branches: recover and retry.
        recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
        return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
    }
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr,
boolean allowActions) {
switch (peek().kind) {
case FROM_KEYWORD:
return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr, allowActions);
case OPEN_BRACKET_TOKEN:
return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
default:
recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
}
}
/**
* Parse query construct type.
* <p>
* <code>query-construct-type := table key-specifier | stream | map</code>
*
* @return Parsed node
*/
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}
    /**
     * Parse query action or expression.
     * <p>
     * <code>
     * query-expr-rhs := query-pipeline select-clause
     * query-pipeline select-clause on-conflict-clause?
     * <br/>
     * query-pipeline := from-clause intermediate-clause*
     * </code>
     *
     * @param queryConstructType queryConstructType that precedes this rhs
     * @param isRhsExpr Is expression in rhs context
     * @param allowActions Whether actions are allowed inside the expression
     * @return Parsed node
     */
    private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr, boolean allowActions) {
        switchContext(ParserRuleContext.QUERY_EXPRESSION);
        STNode fromClause = parseFromClause(isRhsExpr, allowActions);

        List<STNode> clauses = new ArrayList<>();
        STNode intermediateClause;
        STNode selectClause = null;
        STNode collectClause = null;
        // Accumulate intermediate clauses. A select/collect clause normally terminates
        // the pipeline; anything parsed after one is attached to it as invalid minutiae.
        while (!isEndOfIntermediateClause(peek().kind)) {
            intermediateClause = parseIntermediateClause(isRhsExpr, allowActions);
            if (intermediateClause == null) {
                break;
            }

            // If there are more clauses after select or collect clause they are add as invalid nodes
            if (selectClause != null) {
                selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                        DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
                continue;
            } else if (collectClause != null) {
                collectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(collectClause, intermediateClause,
                        DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_COLLECT_CLAUSE);
                continue;
            }

            if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
                selectClause = intermediateClause;
            } else if (intermediateClause.kind == SyntaxKind.COLLECT_CLAUSE) {
                collectClause = intermediateClause;
            } else {
                clauses.add(intermediateClause);
                continue;
            }

            // Break the loop for nested queries as remaining clauses belong to the parent,
            // or when the next token cannot start another intermediate clause.
            if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek())) {
                break;
            }
        }

        if (peek().kind == SyntaxKind.DO_KEYWORD) {
            // `... do { ... }`: this is a query action, not a query expression.
            STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
            STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
            return parseQueryAction(queryConstructType, queryPipeline, selectClause);
        }

        if (selectClause == null && collectClause == null) {
            // A query expression requires a select (or collect) clause: synthesize a
            // missing one and attach the diagnostic to the last clause (or the from-clause).
            STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
            STNode expr = STNodeFactory
                    .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);

            // Now we need to attach the diagnostic to the last intermediate clause.
            // If there are no intermediate clauses, then attach to the from clause.
            if (clauses.isEmpty()) {
                fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            } else {
                int lastIndex = clauses.size() - 1;
                STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                        DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
                clauses.set(lastIndex, intClauseWithDiagnostic);
            }
        }

        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        STNode onConflictClause = parseOnConflictClause(isRhsExpr);
        return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline,
                selectClause == null ? collectClause : selectClause, onConflictClause);
    }
    /**
     * Check whether currently parsing query expr is a nested query expression.
     *
     * @return <code>true</code> if currently parsing query-expr is a nested query-expr. <code>false</code> otherwise.
     */
    private boolean isNestedQueryExpr() {
        // More than one QUERY_EXPRESSION context on the stack means we are inside a
        // query expression that is itself nested within another one.
        return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1;
    }
private boolean isValidIntermediateQueryStart(STToken token) {
SyntaxKind syntaxKind = token.kind;
switch (syntaxKind) {
case FROM_KEYWORD:
case WHERE_KEYWORD:
case LET_KEYWORD:
case SELECT_KEYWORD:
case JOIN_KEYWORD:
case OUTER_KEYWORD:
case ORDER_KEYWORD:
case BY_KEYWORD:
case ASCENDING_KEYWORD:
case DESCENDING_KEYWORD:
case LIMIT_KEYWORD:
return true;
case IDENTIFIER_TOKEN:
return isGroupOrCollectKeyword(token);
default:
return false;
}
}
private static boolean isGroupOrCollectKeyword(STToken nextToken) {
if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN || !(nextToken instanceof STIdentifierToken)) {
return false;
}
String tokenText = ((STIdentifierToken) nextToken).text;
return tokenText.equals(SyntaxKind.COLLECT_KEYWORD.stringValue())
|| tokenText.equals(SyntaxKind.GROUP_KEYWORD.stringValue());
}
    /**
     * Parse an intermediate clause.
     * <p>
     * <code>
     * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
     * </code>
     *
     * @param isRhsExpr Is expression in rhs context
     * @param allowActions Whether actions are allowed inside the expression
     * @return Parsed node, or <code>null</code> when the pipeline ends here
     */
    private STNode parseIntermediateClause(boolean isRhsExpr, boolean allowActions) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case FROM_KEYWORD:
                return parseFromClause(isRhsExpr, allowActions);
            case WHERE_KEYWORD:
                return parseWhereClause(isRhsExpr);
            case LET_KEYWORD:
                return parseLetClause(isRhsExpr, allowActions);
            case SELECT_KEYWORD:
                return parseSelectClause(isRhsExpr, allowActions);
            case JOIN_KEYWORD:
            case OUTER_KEYWORD:
                return parseJoinClause(isRhsExpr);
            case ORDER_KEYWORD:
            case ASCENDING_KEYWORD:
            case DESCENDING_KEYWORD:
                return parseOrderByClause(isRhsExpr);
            case LIMIT_KEYWORD:
                return parseLimitClause(isRhsExpr);
            case DO_KEYWORD:
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
            case CONFLICT_KEYWORD:
                // End of the intermediate-clause list; caller handles what follows.
                return null;
            default:
                // `collect` and `group` are contextual keywords: they arrive as plain
                // identifier tokens and are recognized by their text.
                if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN && nextToken instanceof STIdentifierToken) {
                    if (((STIdentifierToken) nextToken).text.equals(SyntaxKind.COLLECT_KEYWORD.stringValue())) {
                        return parseCollectClause(isRhsExpr);
                    }
                    if (((STIdentifierToken) nextToken).text.equals(SyntaxKind.GROUP_KEYWORD.stringValue())) {
                        return parseGroupByClause(isRhsExpr);
                    }
                }
                recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
                return parseIntermediateClause(isRhsExpr, allowActions);
        }
    }
private STNode parseCollectClause(boolean isRhsExpr) {
startContext(ParserRuleContext.COLLECT_CLAUSE);
STToken nextToken = consume();
STNode collectKeyword = STNodeFactory.createToken(SyntaxKind.COLLECT_KEYWORD,
nextToken.leadingMinutiae(), nextToken.trailingMinutiae());
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
endContext();
return STNodeFactory.createCollectClauseNode(collectKeyword, expression);
}
/**
* Parse join-keyword.
*
* @return Join-keyword node
*/
private STNode parseJoinKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.JOIN_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.JOIN_KEYWORD);
return parseJoinKeyword();
}
}
/**
* Parse equals keyword.
*
* @return Parsed node
*/
private STNode parseEqualsKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.EQUALS_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.EQUALS_KEYWORD);
return parseEqualsKeyword();
}
}
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
switch (tokenKind) {
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case OPEN_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RESOURCE_KEYWORD:
case LISTENER_KEYWORD:
case DOCUMENTATION_STRING:
case PRIVATE_KEYWORD:
case RETURNS_KEYWORD:
case SERVICE_KEYWORD:
case TYPE_KEYWORD:
case CONST_KEYWORD:
case FINAL_KEYWORD:
case DO_KEYWORD:
case ON_KEYWORD:
case CONFLICT_KEYWORD:
return true;
default:
return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
}
}
/**
* Parse from clause.
* <p>
* <code>from-clause := from typed-binding-pattern in expression</code>
*
* @return Parsed node
*/
private STNode parseFromClause(boolean isRhsExpr, boolean allowActions) {
STNode fromKeyword = parseFromKeyword();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
return STNodeFactory.createFromClauseNode(fromKeyword, typedBindingPattern, inKeyword, expression);
}
/**
* Parse from-keyword.
*
* @return From-keyword node
*/
private STNode parseFromKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FROM_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FROM_KEYWORD);
return parseFromKeyword();
}
}
/**
* Parse where clause.
* <p>
* <code>where-clause := where expression</code>
*
* @return Parsed node
*/
private STNode parseWhereClause(boolean isRhsExpr) {
STNode whereKeyword = parseWhereKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createWhereClauseNode(whereKeyword, expression);
}
/**
* Parse where-keyword.
*
* @return Where-keyword node
*/
private STNode parseWhereKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WHERE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.WHERE_KEYWORD);
return parseWhereKeyword();
}
}
/**
* Parse limit-keyword.
*
* @return limit-keyword node
*/
private STNode parseLimitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LIMIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LIMIT_KEYWORD);
return parseLimitKeyword();
}
}
/**
* Parse let clause.
* <p>
* <code>let-clause := let let-var-decl [, let-var-decl]* </code>
*
* @return Parsed node
*/
private STNode parseLetClause(boolean isRhsExpr, boolean allowActions) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr,
allowActions);
letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
return STNodeFactory.createLetClauseNode(letKeyword, letVarDeclarations);
}
/**
* Parse group by clause.
* <code>group-by-clause := group by grouping-key-list</code>
*
* @return Parsed node
*/
private STNode parseGroupByClause(boolean isRhsExpr) {
startContext(ParserRuleContext.GROUP_BY_CLAUSE);
STToken nextToken = consume();
STNode groupKeyword = STNodeFactory.createToken(SyntaxKind.GROUP_KEYWORD,
nextToken.leadingMinutiae(), nextToken.trailingMinutiae());
STNode byKeyword = parseByKeyword();
STNode groupingKeys = parseGroupingKeyList(isRhsExpr);
byKeyword = cloneWithDiagnosticIfListEmpty(groupingKeys, byKeyword,
DiagnosticErrorCode.ERROR_MISSING_GROUPING_KEY);
endContext();
return STNodeFactory.createGroupByClauseNode(groupKeyword, byKeyword, groupingKeys);
}
/**
* Parse order-keyword.
*
* @return Order-keyword node
*/
private STNode parseOrderKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ORDER_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ORDER_KEYWORD);
return parseOrderKeyword();
}
}
/**
* Parse by-keyword.
*
* @return By-keyword node
*/
private STNode parseByKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BY_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BY_KEYWORD);
return parseByKeyword();
}
}
/**
* Parse order by clause.
* <p>
* <code>order-by-clause := order by order-key-list
* </code>
*
* @return Parsed node
*/
private STNode parseOrderByClause(boolean isRhsExpr) {
STNode orderKeyword = parseOrderKeyword();
STNode byKeyword = parseByKeyword();
STNode orderKeys = parseOrderKeyList(isRhsExpr);
byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);
}
/**
* Parse grouping key.
* <code>grouping-key-list := grouping-key ["," grouping-key]*</code>
*
* @return Parsed node
*/
private STNode parseGroupingKeyList(boolean isRhsExpr) {
List<STNode> groupingKeys = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfGroupByKeyListElement(nextToken)) {
return STNodeFactory.createEmptyNodeList();
}
STNode groupingKey = parseGroupingKey(isRhsExpr);
groupingKeys.add(groupingKey);
nextToken = peek();
STNode groupingKeyListMemberEnd;
while (!isEndOfGroupByKeyListElement(nextToken)) {
groupingKeyListMemberEnd = parseGroupingKeyListMemberEnd();
if (groupingKeyListMemberEnd == null) {
break;
}
groupingKeys.add(groupingKeyListMemberEnd);
groupingKey = parseGroupingKey(isRhsExpr);
groupingKeys.add(groupingKey);
nextToken = peek();
}
return STNodeFactory.createNodeList(groupingKeys);
}
/**
* Parse order key.
* <p>
* <code>order-key-list := order-key [, order-key]*</code>
*
* @return Parsed node
*/
private STNode parseOrderKeyList(boolean isRhsExpr) {
startContext(ParserRuleContext.ORDER_KEY_LIST);
List<STNode> orderKeys = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfOrderKeys(nextToken)) {
endContext();
return STNodeFactory.createEmptyNodeList();
}
STNode orderKey = parseOrderKey(isRhsExpr);
orderKeys.add(orderKey);
nextToken = peek();
STNode orderKeyListMemberEnd;
while (!isEndOfOrderKeys(nextToken)) {
orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
if (orderKeyListMemberEnd == null) {
break;
}
orderKeys.add(orderKeyListMemberEnd);
orderKey = parseOrderKey(isRhsExpr);
orderKeys.add(orderKey);
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(orderKeys);
}
private boolean isEndOfGroupByKeyListElement(STToken nextToken) {
SyntaxKind tokenKind = nextToken.kind;
switch (tokenKind) {
case COMMA_TOKEN:
return false;
case EOF_TOKEN:
return true;
default:
return isQueryClauseStartToken(nextToken);
}
}
private boolean isEndOfOrderKeys(STToken nextToken) {
SyntaxKind tokenKind = nextToken.kind;
switch (tokenKind) {
case COMMA_TOKEN:
case ASCENDING_KEYWORD:
case DESCENDING_KEYWORD:
return false;
case SEMICOLON_TOKEN:
case EOF_TOKEN:
return true;
default:
return isQueryClauseStartToken(nextToken);
}
}
private boolean isQueryClauseStartToken(STToken nextToken) {
SyntaxKind tokenKind = nextToken.kind;
switch (tokenKind) {
case SELECT_KEYWORD:
case LET_KEYWORD:
case WHERE_KEYWORD:
case OUTER_KEYWORD:
case JOIN_KEYWORD:
case ORDER_KEYWORD:
case DO_KEYWORD:
case FROM_KEYWORD:
case LIMIT_KEYWORD:
return true;
case IDENTIFIER_TOKEN:
return isGroupOrCollectKeyword(nextToken);
default:
return false;
}
}
private STNode parseGroupingKeyListMemberEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return consume();
case EOF_TOKEN:
return null;
default:
if (isQueryClauseStartToken(nextToken)) {
return null;
}
recover(peek(), ParserRuleContext.GROUPING_KEY_LIST_ELEMENT_END);
return parseGroupingKeyListMemberEnd();
}
}
private STNode parseOrderKeyListMemberEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return parseComma();
case EOF_TOKEN:
return null;
default:
if (isQueryClauseStartToken(nextToken)) {
return null;
}
recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);
return parseOrderKeyListMemberEnd();
}
}
private STNode parseGroupingKeyVariableDeclaration(boolean isRhsExpr) {
STNode groupingKeyElementTypeDesc =
parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER_IN_GROUPING_KEY);
startContext(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
STNode groupingKeySimpleBP = createCaptureOrWildcardBP(parseVariableName());
endContext();
STNode equalsToken = parseAssignOp();
STNode groupingKeyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createGroupingKeyVarDeclarationNode(groupingKeyElementTypeDesc, groupingKeySimpleBP,
equalsToken, groupingKeyExpression);
}
/**
* Parse grouping key.
* <code>grouping-key := variable-name | inferable-type-descriptor variable-name "=" expression</code>
*
* @return Parsed node
*/
private STNode parseGroupingKey(boolean isRhsExpr) {
STToken nextToken = peek();
SyntaxKind nextTokenKind = nextToken.kind;
if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && !isPossibleGroupingKeyVarDeclaration()) {
return STNodeFactory.createSimpleNameReferenceNode(parseVariableName());
} else if (isTypeStartingToken(nextTokenKind, nextToken)) {
return parseGroupingKeyVariableDeclaration(isRhsExpr);
}
recover(nextToken, ParserRuleContext.GROUPING_KEY_LIST_ELEMENT);
return parseGroupingKey(isRhsExpr);
}
private boolean isPossibleGroupingKeyVarDeclaration() {
SyntaxKind nextNextTokenKind = getNextNextToken().kind;
return nextNextTokenKind == SyntaxKind.EQUAL_TOKEN ||
nextNextTokenKind == SyntaxKind.IDENTIFIER_TOKEN && peek(3).kind == SyntaxKind.EQUAL_TOKEN;
}
/**
* Parse order key.
* <p>
* <code>order-key := expression (ascending | descending)?</code>
*
* @return Parsed node
*/
private STNode parseOrderKey(boolean isRhsExpr) {
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
STNode orderDirection;
STToken nextToken = peek();
switch (nextToken.kind) {
case ASCENDING_KEYWORD:
case DESCENDING_KEYWORD:
orderDirection = consume();
break;
default:
orderDirection = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createOrderKeyNode(expression, orderDirection);
}
/**
* Parse select clause.
* <p>
* <code>select-clause := select expression</code>
*
* @return Parsed node
*/
private STNode parseSelectClause(boolean isRhsExpr, boolean allowActions) {
startContext(ParserRuleContext.SELECT_CLAUSE);
STNode selectKeyword = parseSelectKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
endContext();
return STNodeFactory.createSelectClauseNode(selectKeyword, expression);
}
/**
* Parse select-keyword.
*
* @return Select-keyword node
*/
private STNode parseSelectKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.SELECT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.SELECT_KEYWORD);
return parseSelectKeyword();
}
}
/**
* Parse on-conflict clause.
* <p>
* <code>
* onConflictClause := on conflict expression
* </code>
*
* @return On conflict clause node
*/
private STNode parseOnConflictClause(boolean isRhsExpr) {
STToken nextToken = peek();
if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
return STNodeFactory.createEmptyNode();
}
startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
STNode onKeyword = parseOnKeyword();
STNode conflictKeyword = parseConflictKeyword();
endContext();
STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, expr);
}
/**
* Parse conflict keyword.
*
* @return Conflict keyword node
*/
private STNode parseConflictKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONFLICT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.CONFLICT_KEYWORD);
return parseConflictKeyword();
}
}
/**
* Parse limit clause.
* <p>
* <code>limitClause := limit expression</code>
*
* @return Limit expression node
*/
private STNode parseLimitClause(boolean isRhsExpr) {
STNode limitKeyword = parseLimitKeyword();
STNode expr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createLimitClauseNode(limitKeyword, expr);
}
/**
* Parse join clause.
* <p>
* <code>
* join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
* <br/>
* join-var-decl := join (typeName | var) bindingPattern
* <br/>
* outer-join-var-decl := outer join var binding-pattern
* </code>
*
* @return Join clause
*/
private STNode parseJoinClause(boolean isRhsExpr) {
startContext(ParserRuleContext.JOIN_CLAUSE);
STNode outerKeyword;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.OUTER_KEYWORD) {
outerKeyword = consume();
} else {
outerKeyword = STNodeFactory.createEmptyNode();
}
STNode joinKeyword = parseJoinKeyword();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
STNode inKeyword = parseInKeyword();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
endContext();
STNode onCondition = parseOnClause(isRhsExpr);
return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, typedBindingPattern, inKeyword, expression,
onCondition);
}
/**
* Parse on clause.
* <p>
* <code>on clause := `on` expression `equals` expression</code>
*
* @return On clause node
*/
private STNode parseOnClause(boolean isRhsExpr) {
STToken nextToken = peek();
if (isQueryClauseStartToken(nextToken)) {
return createMissingOnClauseNode();
}
startContext(ParserRuleContext.ON_CLAUSE);
STNode onKeyword = parseOnKeyword();
STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
STNode equalsKeyword = parseEqualsKeyword();
endContext();
STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}
private STNode createMissingOnClauseNode() {
    // Fabricate `on <missing> equals <missing>` with a diagnostic on each missing token.
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode lhsExpression = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode rhsExpression = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}
/**
* Parse start action.
* <p>
* <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
*
* @return Start action node
*/
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            // Already a valid expression form for a start-action; keep as-is.
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case FIELD_ACCESS:
        case ASYNC_SEND_ACTION:
            // Close-but-invalid forms: turn them into a call by adding missing parens.
            expr = generateValidExprForStartAction(expr);
            break;
        default:
            // Any other expression is invalid here: attach it to the `start` keyword as
            // invalid-node minutiae and substitute an all-missing function call.
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
                    STNodeFactory.createEmptyNodeList(), closeParenToken);
            break;
    }
    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
/**
 * Converts an expression that is *almost* a valid start-action operand into a call
 * form by attaching missing `(` `)` tokens (each carrying a diagnostic).
 * field-access becomes a method-call, async-send becomes a remote-method-call,
 * anything else becomes a function-call on the original expression.
 */
private STNode generateValidExprForStartAction(STNode expr) {
    STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
    STNode arguments = STNodeFactory.createEmptyNodeList();
    STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
    switch (expr.kind) {
        case FIELD_ACCESS:
            // `a.b` -> `a.b()`
            STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
            return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
                    fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
                    closeParenToken);
        case ASYNC_SEND_ACTION:
            // `a -> w` -> `a -> w()`
            STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
            return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
                    asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
                    closeParenToken);
        default:
            // Simple/qualified name references: `f` -> `f()`
            return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
    }
}
/**
* Parse start keyword.
*
* @return Start keyword node
*/
private STNode parseStartKeyword() {
    // Keep recovering until a `start` keyword can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.START_KEYWORD) {
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse flush action.
* <p>
* <code>flush-action := flush [peer-worker]</code>
*
* @return flush action node
*/
private STNode parseFlushAction() {
    // flush-action := `flush` [peer-worker]
    STNode flushKeyword = parseFlushKeyword();
    STNode workerName = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, workerName);
}
/**
* Parse flush keyword.
*
* @return flush keyword node
*/
private STNode parseFlushKeyword() {
    // Keep recovering until a `flush` keyword can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | function</code>
*
* @return peer worker name node
*/
private STNode parseOptionalPeerWorkerName() {
    // peer-worker := worker-name | `function`; absent peer-worker yields an empty node.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    return STNodeFactory.createEmptyNode();
}
/**
* Parse intersection type descriptor.
* <p>
* intersection-type-descriptor := type-descriptor & type-descriptor
* </p>
*
* @return Parsed node
*/
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    // Caller guarantees the next token is `&`.
    STNode ampersandToken = consume();
    STNode rhsTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.INTERSECTION);
    // Merging honors type precedence/associativity; do not build the node directly here.
    return mergeTypesWithIntersection(leftTypeDesc, ampersandToken, rhsTypeDesc);
}
/**
* Creates an intersection type descriptor after validating lhs and rhs types.
* <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when types are unknown.
 * <br/>
 * Call {@link #mergeTypesWithIntersection} instead.</i>
 *
* @param leftTypeDesc lhs type
* @param bitwiseAndToken bitwise-and token
* @param rightTypeDesc rhs type
* @return an IntersectionTypeDescriptorNode
*/
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    // Validate both operands for illegal `var` usage (lhs first), then build the node.
    return STNodeFactory.createIntersectionTypeDescriptorNode(validateForUsageOfVar(leftTypeDesc),
            bitwiseAndToken, validateForUsageOfVar(rightTypeDesc));
}
/**
* Parse singleton type descriptor.
* <p>
* singleton-type-descriptor := simple-const-expr
* simple-const-expr :=
* nil-literal
* | boolean-literal
* | [Sign] int-literal
* | [Sign] floating-point-literal
* | string-literal
* | constant-reference-expr
* </p>
*/
private STNode parseSingletonTypeDesc() {
    // singleton-type-descriptor := simple-const-expr
    return STNodeFactory.createSingletonTypeDescriptorNode(parseSimpleConstExpr());
}
private STNode parseSignedIntOrFloat() {
    // Sign must be consumed first, then the numeric literal it applies to.
    STNode operator = parseUnaryOperator();
    SyntaxKind nextTokenKind = peek().kind;
    STNode literal;
    if (nextTokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || nextTokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || nextTokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN) {
        literal = parseBasicLiteral();
    } else {
        // Decimal int is the recovery target for anything else.
        STNode digits = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
        literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, digits);
    }
    return STNodeFactory.createUnaryExpressionNode(operator, literal);
}
private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
    // A sign only starts a singleton type when a numeric literal follows it.
    if (tokenKind == SyntaxKind.PLUS_TOKEN || tokenKind == SyntaxKind.MINUS_TOKEN) {
        return isIntOrFloat(nextNextToken);
    }
    return tokenKind == SyntaxKind.STRING_LITERAL_TOKEN
            || tokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN
            || tokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || tokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || tokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN
            || tokenKind == SyntaxKind.TRUE_KEYWORD
            || tokenKind == SyntaxKind.FALSE_KEYWORD
            || tokenKind == SyntaxKind.NULL_KEYWORD;
}
static boolean isIntOrFloat(STToken token) {
    // True for any int or floating-point literal token (decimal or hex).
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN;
}
/**
* Check whether the parser reached to a valid expression start.
*
* @param nextTokenKind Kind of the next immediate token.
* @param nextTokenIndex Index to the next token.
* @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
*/
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    // Pure lookahead predicate: only peeks, never consumes.
    nextTokenIndex++;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal can be a union/intersection type member; look past `|`/`&`.
            SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
            if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
                nextTokenIndex++;
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            }
            return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||
                    nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                    nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                    isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case IDENTIFIER_TOKEN:
            // An identifier starts an expression only if an expression-rhs follows it.
            return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
        case BASE16_KEYWORD:
        case BASE64_KEYWORD:
            // Unambiguous expression starters.
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign: valid iff what follows is itself a valid expression start.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
        case TABLE_KEYWORD:
        case MAP_KEYWORD:
            // Only as a query construct type: `table from ...` / `map from ...`.
            return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
        case STREAM_KEYWORD:
            STToken nextNextToken = peek(nextTokenIndex);
            return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                    nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                    nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
        case ERROR_KEYWORD:
            // Only an error constructor call: `error(...)`.
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
        case XML_KEYWORD:
        case STRING_KEYWORD:
        case RE_KEYWORD:
            // Only as template-expression starters: `xml`/`string`/`re` followed by a backtick.
            return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
        default:
            // NOTE(review): start/flush/wait begin actions, not expressions, so they
            // intentionally fall through to false here.
            return false;
    }
}
/**
* Parse sync send action.
* <p>
* <code>sync-send-action := expression ->> peer-worker</code>
*
* @param expression LHS expression of the sync send action
* @return Sync send action node
*/
private STNode parseSyncSendAction(STNode expression) {
    // sync-send-action := expression ->> peer-worker
    STNode syncSendToken = parseSyncSendToken();
    STNode workerName = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, workerName);
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | function</code>
*
* @return peer worker name node
*/
private STNode parsePeerWorkerName() {
    // Keep recovering until a worker name (identifier) or `function` is found.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN && nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(nextToken, ParserRuleContext.PEER_WORKER_NAME);
        nextToken = peek();
    }
    return STNodeFactory.createSimpleNameReferenceNode(consume());
}
/**
* Parse sync send token.
* <p>
* <code>sync-send-token := ->> </code>
*
* @return sync send token
*/
private STNode parseSyncSendToken() {
    // Keep recovering until a `->>` token can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.SYNC_SEND_TOKEN) {
        recover(nextToken, ParserRuleContext.SYNC_SEND_TOKEN);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse receive action.
* <p>
* <code>receive-action := single-receive-action | multiple-receive-action</code>
*
* @return Receive action
*/
private STNode parseReceiveAction() {
    // receive-action := `<-` (peer-worker | { receive-field (, receive-field)* })
    STNode leftArrowToken = parseLeftArrowToken();
    STNode workers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrowToken, workers);
}
private STNode parseReceiveWorkers() {
    // Single receive: worker name or `function`. Multiple receive: `{ ... }`.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.FUNCTION_KEYWORD || nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parsePeerWorkerName();
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultipleReceiveWorkers();
    }
    recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
    return parseReceiveWorkers();
}
/**
* Parse multiple worker receivers.
* <p>
* <code>{ receive-field (, receive-field)* }</code>
*
* @return Multiple worker receiver node
*/
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseReceiveFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty field list is an error; attach the diagnostic to the open brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
    return STNodeFactory.createReceiveFieldsNode(openBraceToken, fields, closeBraceToken);
}
private STNode parseReceiveFields() {
    // Parses a comma-separated list: receive-field (, receive-field)*.
    // Separators are stored in the same list, interleaved with the fields.
    List<STNode> receiveFields = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfReceiveFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode receiveField = parseReceiveField();
    receiveFields.add(receiveField);
    nextToken = peek();
    STNode recieveFieldEnd;
    while (!isEndOfReceiveFields(nextToken.kind)) {
        recieveFieldEnd = parseReceiveFieldEnd();
        if (recieveFieldEnd == null) {
            // No separator found: the list ends here.
            break;
        }
        receiveFields.add(recieveFieldEnd);
        receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(receiveFields);
}
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    // The receive-field list ends at `}` or end of input.
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
private STNode parseReceiveFieldEnd() {
    // Returns the separating comma, or null when the list is finished.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
    return parseReceiveFieldEnd();
}
/**
* Parse receive field.
* <p>
* <code>receive-field := peer-worker | field-name : peer-worker</code>
*
* @return Receiver field node
*/
private STNode parseReceiveField() {
    // receive-field := peer-worker | field-name : peer-worker
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldOrWorkerName = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
        return createQualifiedReceiveField(fieldOrWorkerName);
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD);
    return parseReceiveField();
}
private STNode createQualifiedReceiveField(STNode identifier) {
    // Plain worker name unless a `:` follows, in which case it is `field-name : peer-worker`.
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode workerName = parsePeerWorkerName();
    return createQualifiedNameReferenceNode(identifier, colonToken, workerName);
}
/**
* Parse left arrow (<-) token.
*
* @return left arrow token
*/
private STNode parseLeftArrowToken() {
    // Keep recovering until a `<-` token can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.LEFT_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.LEFT_ARROW_TOKEN);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse signed right shift token (>>).
* This method should only be called by seeing a `DOUBLE_GT_TOKEN` or
* by seeing a `GT_TOKEN` followed by a `GT_TOKEN`
*
* @return Parsed node
*/
private STNode parseSignedRightShiftToken() {
    STNode firstToken = consume();
    if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
        // Lexer already produced `>>` as one token.
        return firstToken;
    }
    // Two adjacent `>` tokens: merge them into a single `>>` token, keeping the
    // leading minutiae of the first and the trailing minutiae of the second.
    STToken endLGToken = consume();
    STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),
            endLGToken.trailingMinutiae());
    // `> >` with whitespace in between is not a valid shift operator.
    if (hasTrailingMinutiae(firstToken)) {
        doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
    }
    return doubleGTToken;
}
/**
* Parse unsigned right shift token (>>>).
* This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or
* by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s
*
* @return Parsed node
*/
private STNode parseUnsignedRightShiftToken() {
    STNode firstToken = consume();
    if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
        // Lexer already produced `>>>` as one token.
        return firstToken;
    }
    // Three adjacent `>` tokens: merge into one `>>>` token, keeping the leading
    // minutiae of the first and the trailing minutiae of the last.
    STNode middleGTToken = consume();
    STNode endLGToken = consume();
    STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
            firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());
    // Whitespace between any pair of `>` tokens makes the operator invalid.
    boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);
    boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
    if (validOpenGTToken && validMiddleGTToken) {
        return unsignedRightShiftToken;
    }
    unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
            DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
    return unsignedRightShiftToken;
}
/**
* Parse wait action.
* <p>
* <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
*
* @return Wait action node
*/
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // `wait {` is a multiple-wait-action; everything else is single or alternate.
    if (peek().kind != SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseSingleOrAlternateWaitAction(waitKeyword);
    }
    return parseMultiWaitAction(waitKeyword);
}
/**
* Parse wait keyword.
*
* @return wait keyword
*/
private STNode parseWaitKeyword() {
    // Keep recovering until a `wait` keyword can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.WAIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.WAIT_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse single or alternate wait actions.
* <p>
* <code>
* alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
* <br/>
* wait-future-expr := expression but not mapping-constructor-expr
* </code>
*
* @param waitKeyword wait keyword
* @return Single or alternate wait action node
*/
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        // No wait-future-expr at all: fabricate a missing identifier with a diagnostic.
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }
    // Parse `expr (| expr)*`, collecting exprs and pipe separators interleaved.
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);
    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            break;
        }
        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }
    endContext();
    // NOTE(review): only the first wait-future-expr is kept; alternates (`| expr`)
    // are parsed but dropped here — confirm this is intentional (alternate wait
    // support) and not a lost-node bug.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    // The alternate-wait list ends at `}`, `;`, `{`, or end of input.
    // A `|` explicitly continues the list.
    return nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN
            || nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN;
}
private STNode parseWaitFutureExpr() {
    STNode futureExpr = parseActionOrExpression();
    // Mapping constructors and actions are not valid wait-future-exprs; flag them.
    if (futureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        futureExpr = SyntaxErrors.addDiagnostic(futureExpr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    } else if (isAction(futureExpr)) {
        futureExpr = SyntaxErrors.addDiagnostic(futureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
    }
    return futureExpr;
}
private STNode parseWaitFutureExprEnd() {
    // Returns the separating `|`, or null when the alternate list is finished.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
    return parseWaitFutureExprEnd();
}
/**
* Parse multiple wait action.
* <p>
* <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
*
* @param waitKeyword Wait keyword
* @return Multiple wait action node
*/
private STNode parseMultiWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseWaitFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty wait-field list is an error; attach the diagnostic to the open brace.
    openBraceToken = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
    STNode fieldsList = STNodeFactory.createWaitFieldsListNode(openBraceToken, fields, closeBraceToken);
    return STNodeFactory.createWaitActionNode(waitKeyword, fieldsList);
}
private STNode parseWaitFields() {
    // Parses a comma-separated list: wait-field (, wait-field)*.
    // Separators are stored in the same list, interleaved with the fields.
    List<STNode> waitFields = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfWaitFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode waitField = parseWaitField();
    waitFields.add(waitField);
    nextToken = peek();
    STNode waitFieldEnd;
    while (!isEndOfWaitFields(nextToken.kind)) {
        waitFieldEnd = parseWaitFieldEnd();
        if (waitFieldEnd == null) {
            // No separator found: the list ends here.
            break;
        }
        waitFields.add(waitFieldEnd);
        waitField = parseWaitField();
        waitFields.add(waitField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(waitFields);
}
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
    // The wait-field list ends at `}` or end of input.
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
private STNode parseWaitFieldEnd() {
    // Returns the separating comma, or null when the list is finished.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FIELD_END);
    return parseWaitFieldEnd();
}
/**
* Parse wait field.
* <p>
* <code>wait-field := variable-name | field-name : wait-future-expr</code>
*
* @return Receiver field node
*/
private STNode parseWaitField() {
    // wait-field := variable-name | field-name : wait-future-expr
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
        STNode fieldNameRef = STNodeFactory.createSimpleNameReferenceNode(fieldName);
        return createQualifiedWaitField(fieldNameRef);
    }
    recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
    return parseWaitField();
}
private STNode createQualifiedWaitField(STNode identifier) {
    // Plain variable name unless a `:` follows, in which case it is `field-name : wait-future-expr`.
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode futureExpr = parseWaitFutureExpr();
    return STNodeFactory.createWaitFieldNode(identifier, colonToken, futureExpr);
}
/**
* Parse annot access expression.
* <p>
* <code>
* annot-access-expr := expression .@ annot-tag-reference
* <br/>
* annot-tag-reference := qualified-identifier | identifier
* </code>
*
* @param lhsExpr Preceding expression of the annot access access
* @return Parsed node
*/
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    // annot-access-expr := expression .@ annot-tag-reference
    STNode annotChainingToken = parseAnnotChainingToken();
    STNode annotTagRef = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotChainingToken, annotTagRef);
}
/**
* Parse annot-chaining-token.
*
* @return Parsed node
*/
private STNode parseAnnotChainingToken() {
    // Keep recovering until a `.@` token can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.ANNOT_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.ANNOT_CHAINING_TOKEN);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse field access identifier.
* <p>
* <code>field-access-identifier := qualified-identifier | identifier</code>
*
* @return Parsed node
*/
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    // field-access-identifier := qualified-identifier | identifier
    if (isPredeclaredIdentifier(peek().kind)) {
        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }
    // No identifier at all: fabricate a missing one and continue as a qualified identifier.
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    return parseQualifiedIdentifier(missingIdentifier, isInConditionalExpr);
}
/**
* Parse query action.
* <p>
* <code>query-action := query-pipeline do-clause
* <br/>
* do-clause := do block-stmt
* </code>
*
* @param queryConstructType Query construct type. This is only for validation
* @param queryPipeline Query pipeline
* @param selectClause Select clause if any This is only for validation.
* @return Query action node
*/
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
    // A query action must not have a construct type (`table`/`stream`/...) nor a
    // select clause; attach each invalid node to the pipeline with a diagnostic.
    if (queryConstructType != null) {
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    if (selectClause != null) {
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }
    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKeyword = parseDoKeyword();
    STNode doBlock = parseBlockNode();
    endContext();
    return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, doBlock);
}
/**
* Parse 'do' keyword.
*
* @return do keyword node
*/
private STNode parseDoKeyword() {
    // Keep recovering until a `do` keyword can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.DO_KEYWORD) {
        recover(nextToken, ParserRuleContext.DO_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse optional field access or xml optional attribute access expression.
* <p>
* <code>
* optional-field-access-expr := expression ?. field-name
* <br/>
* xml-optional-attribute-access-expr := expression ?. xml-attribute-name
* <br/>
* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
* <br/>
* xml-qualified-name := xml-namespace-prefix : identifier
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
* @param lhsExpr Preceding expression of the optional access
* @return Parsed node
*/
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    // optional-field-access-expr := expression ?. field-name
    STNode chainingToken = parseOptionalChainingToken();
    STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, chainingToken, fieldName);
}
/**
* Parse optional chaining token.
*
* @return parsed node
*/
private STNode parseOptionalChainingToken() {
    // Keep recovering until a `?.` token can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse conditional expression.
* <p>
* <code>conditional-expr := expression ? expression : expression</code>
*
* @param lhsExpr Preceding expression of the question mark
* @param isInConditionalExpr whether calling from a conditional-expr
* @return Parsed node
*/
private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
    STNode questionMark = parseQuestionMark();
    STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        // No explicit `:` follows. The `middle : end` part may have been lexed as a
        // qualified-name-reference (`foo:bar`); split that back into a middle-expr
        // and an end-expr using its colon.
        if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
            // Nested conditional: try splitting inside its middle expression first.
            STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
            STNode innerMiddleExpr = innerConditionalExpr.middleExpression;
            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
            if (rightMostQNameRef != null) {
                middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                        innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                endContext();
                return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                        innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
            }
            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
            if (leftMostQNameRef != null) {
                middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                        innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                endContext();
                return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                        innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
            }
        }
        // Non-nested case: split the right-most (preferred) or left-most qualified
        // name reference found in the middle expression.
        STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
        if (rightMostQNameRef != null) {
            endContext();
            return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
        }
        STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
        if (leftMostQNameRef != null) {
            endContext();
            return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
        }
    }
    // A `:` is present (or no split applies): parse the end expression normally.
    return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
}
private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                   STNode rightMostQualifiedNameRef) {
    // Split `prefix:name`: `prefix` stays inside the middle expression and
    // `name` becomes the end expression of the conditional.
    STQualifiedNameReferenceNode qnameRef = (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
    STNode prefixRef = ConditionalExprResolver.getSimpleNameRefNode(qnameRef.modulePrefix);
    STNode newMiddleExpr = middleExpr.replace(rightMostQualifiedNameRef, prefixRef);
    STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qnameRef.identifier);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, newMiddleExpr, qnameRef.colon,
            endExpr);
}
private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                  STNode leftMostQualifiedNameRef) {
    // Split `prefix:name`: `prefix` becomes the middle expression and `name`
    // replaces the reference inside what becomes the end expression.
    STQualifiedNameReferenceNode qnameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
    STNode nameRef = STNodeFactory.createSimpleNameReferenceNode(qnameRef.identifier);
    STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, nameRef);
    STNode newMiddleExpr = ConditionalExprResolver.getSimpleNameRefNode(qnameRef.modulePrefix);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, newMiddleExpr, qnameRef.colon,
            endExpr);
}
private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                       boolean isInConditionalExpr) {
    // Parse `: end-expr`; the colon closes the CONDITIONAL_EXPRESSION context.
    STNode colonToken = parseColon();
    endContext();
    STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, isInConditionalExpr);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colonToken, endExpr);
}
/**
* Parse enum declaration.
* <p>
* module-enum-decl :=
* metadata
* [public] enum identifier { enum-member (, enum-member)* } [;]
* enum-member := metadata identifier [= const-expr]
* </p>
*
 * @param metadata metadata attached to the enum declaration, or empty
 * @param qualifier visibility qualifier (e.g. {@code public}), or empty
* @return Parsed enum node.
*/
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    // module-enum-decl := metadata [public] enum identifier { enum-member (, enum-member)* } [;]
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKeyword = parseEnumKeyword();
    STNode enumName = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBrace = parseOpenBrace();
    STNode memberList = parseEnumMemberList();
    STNode closeBrace = parseCloseBrace();
    STNode semicolonToken = parseOptionalSemicolon();
    endContext();
    // An empty member list is an error; attach the diagnostic to the open brace.
    openBrace = cloneWithDiagnosticIfListEmpty(memberList, openBrace,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeyword, enumName,
            openBrace, memberList, closeBrace, semicolonToken);
}
/**
* Parse 'enum' keyword.
*
* @return enum keyword node
*/
private STNode parseEnumKeyword() {
    // Keep recovering until an `enum` keyword can be consumed.
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.ENUM_KEYWORD) {
        recover(nextToken, ParserRuleContext.ENUM_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
* Parse enum member list.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return enum member list node.
*/
/**
 * Parse enum member list: {@code enum-member (, enum-member)*}, with separators
 * interleaved in the returned node list. May be empty for {@code enum Name { }}.
 *
 * @return enum member list node
 */
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);
    // Empty member list: `enum Name { }`.
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // Fix: this early return previously leaked the ENUM_MEMBER_LIST context
        // (no endContext()), unbalancing the context stack for later recovery.
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();
    STNode enumMemberRhs;
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            // No separator: the list ends here.
            break;
        }
        // A member is added only once its trailing separator is known.
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }
    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}
/**
* Parse enum member.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return Parsed enum member node.
*/
private STNode parseEnumMember() {
    // enum-member := metadata identifier [= const-expr]
    SyntaxKind nextTokenKind = peek().kind;
    STNode metadata;
    if (nextTokenKind == SyntaxKind.DOCUMENTATION_STRING || nextTokenKind == SyntaxKind.AT_TOKEN) {
        metadata = parseMetaData();
    } else {
        metadata = STNodeFactory.createEmptyNode();
    }
    STNode memberName = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, memberName);
}
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
STNode equalToken, constExprNode;
switch (peek().kind) {
case EQUAL_TOKEN:
equalToken = parseAssignOp();
constExprNode = parseExpression();
break;
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
equalToken = STNodeFactory.createEmptyNode();
constExprNode = STNodeFactory.createEmptyNode();
break;
default:
recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
return parseEnumMemberRhs(metadata, identifierNode);
}
return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
}
private STNode parseEnumMemberEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
return parseEnumMemberEnd();
}
}
    /**
     * Parse a statement that begins with an already-consumed `transaction` keyword. This is
     * either a transaction statement (`transaction { ... }`) or a variable declaration whose
     * type descriptor starts with `transaction:` as a predeclared prefix.
     *
     * @param annots             Annotations preceding the statement
     * @param qualifiers         Qualifiers preceding the statement
     * @param transactionKeyword Consumed `transaction` keyword token
     * @return Transaction statement or variable-declaration statement node
     */
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                // `transaction {` — must be a transaction statement; annots/qualifiers are invalid here.
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                // `transaction:Identifier` — a qualified type reference; parse as a var-decl.
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // Intentional fall-through: a colon NOT followed by an identifier needs recovery.
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
                // If recovery kept the current token, or decided to insert a colon, treat the
                // keyword as a qualified type reference and continue as a var-decl.
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // Otherwise retry the decision with the repaired token stream.
                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }
/**
* Parse transaction statement.
* <p>
* <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
*
* @return Transaction statement node
*/
private STNode parseTransactionStatement(STNode transactionKeyword) {
startContext(ParserRuleContext.TRANSACTION_STMT);
STNode blockStmt = parseBlockNode();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
}
/**
* Parse commit action.
* <p>
* <code>commit-action := "commit"</code>
*
* @return Commit action node
*/
private STNode parseCommitAction() {
STNode commitKeyword = parseCommitKeyword();
return STNodeFactory.createCommitActionNode(commitKeyword);
}
/**
* Parse commit keyword.
*
* @return parsed node
*/
private STNode parseCommitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.COMMIT_KEYWORD);
return parseCommitKeyword();
}
}
/**
* Parse retry statement.
* <p>
* <code>
* retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
* <br/>
* retry-spec := [type-parameter] [ `(` arg-list `)` ]
* </code>
*
* @return Retry statement node
*/
private STNode parseRetryStatement() {
startContext(ParserRuleContext.RETRY_STMT);
STNode retryKeyword = parseRetryKeyword();
STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
return retryStmt;
}
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
STToken nextToken = peek();
switch (nextToken.kind) {
case LT_TOKEN:
STNode typeParam = parseTypeParameter();
return parseRetryTypeParamRhs(retryKeyword, typeParam);
case OPEN_PAREN_TOKEN:
case OPEN_BRACE_TOKEN:
case TRANSACTION_KEYWORD:
typeParam = STNodeFactory.createEmptyNode();
return parseRetryTypeParamRhs(retryKeyword, typeParam);
default:
recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
return parseRetryKeywordRhs(retryKeyword);
}
}
    /**
     * Parse the rest of a retry statement after the optional type parameter: an optional
     * parenthesized arg-list, the retry body, and an optional on-fail clause.
     *
     * @param retryKeyword Consumed `retry` keyword token
     * @param typeParam    Type parameter, or an empty node when absent
     * @return Retry statement node
     */
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No arg-list; the retry body starts immediately.
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }
        STNode blockStmt = parseRetryBody();
        // Closes the RETRY_STMT context opened in parseRetryStatement().
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }
private STNode parseRetryBody() {
switch (peek().kind) {
case OPEN_BRACE_TOKEN:
return parseBlockNode();
case TRANSACTION_KEYWORD:
return parseTransactionStatement(consume());
default:
recover(peek(), ParserRuleContext.RETRY_BODY);
return parseRetryBody();
}
}
/**
* Parse optional on fail clause.
*
* @return Parsed node
*/
private STNode parseOptionalOnFailClause() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
return parseOnFailClause();
}
if (isEndOfRegularCompoundStmt(nextToken.kind)) {
return STNodeFactory.createEmptyNode();
}
recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
return parseOptionalOnFailClause();
}
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
switch (nodeKind) {
case CLOSE_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case AT_TOKEN:
case EOF_TOKEN:
return true;
default:
return isStatementStartingToken(nodeKind);
}
}
    /**
     * Check whether the given token kind can start a statement. Used while deciding
     * whether a regular compound statement has ended.
     *
     * @param nodeKind Token kind to check
     * @return {@code true} if the token kind can begin a statement
     */
    private boolean isStatementStartingToken(SyntaxKind nodeKind) {
        switch (nodeKind) {
            case FINAL_KEYWORD:
            case IF_KEYWORD:
            case WHILE_KEYWORD:
            case DO_KEYWORD:
            case PANIC_KEYWORD:
            case CONTINUE_KEYWORD:
            case BREAK_KEYWORD:
            case RETURN_KEYWORD:
            case LOCK_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case FORK_KEYWORD:
            case FOREACH_KEYWORD:
            case XMLNS_KEYWORD:
            case TRANSACTION_KEYWORD:
            case RETRY_KEYWORD:
            case ROLLBACK_KEYWORD:
            case MATCH_KEYWORD:
            case FAIL_KEYWORD:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case TRAP_KEYWORD:
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case WAIT_KEYWORD:
            case COMMIT_KEYWORD:
            case WORKER_KEYWORD:
            case TYPE_KEYWORD:
            case CONST_KEYWORD:
                return true;
            default:
                // Local var-decls start with a type descriptor; expression statements
                // start with anything that can start an expression.
                if (isTypeStartingToken(nodeKind)) {
                    return true;
                }
                if (isValidExpressionStart(nodeKind, 1)) {
                    return true;
                }
                return false;
        }
    }
/**
* Parse on fail clause.
* <p>
* <code>
* on-fail-clause := on fail [typed-binding-pattern] statement-block
* </code>
*
* @return On fail clause node
*/
private STNode parseOnFailClause() {
startContext(ParserRuleContext.ON_FAIL_CLAUSE);
STNode onKeyword = parseOnKeyword();
STNode failKeyword = parseFailKeyword();
STToken token = peek();
STNode typeDescriptor = STNodeFactory.createEmptyNode();
STNode identifier = STNodeFactory.createEmptyNode();
if (token.kind != SyntaxKind.OPEN_BRACE_TOKEN) {
typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
TypePrecedence.DEFAULT);
identifier = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
}
STNode blockStatement = parseBlockNode();
endContext();
return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
blockStatement);
}
/**
* Parse retry keyword.
*
* @return parsed node
*/
private STNode parseRetryKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.RETRY_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.RETRY_KEYWORD);
return parseRetryKeyword();
}
}
/**
* Parse transaction statement.
* <p>
* <code>rollback-stmt := "rollback" [expression] ";"</code>
*
* @return Rollback statement node
*/
private STNode parseRollbackStatement() {
startContext(ParserRuleContext.ROLLBACK_STMT);
STNode rollbackKeyword = parseRollbackKeyword();
STNode expression;
if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
expression = STNodeFactory.createEmptyNode();
} else {
expression = parseExpression();
}
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
* Parse rollback keyword.
*
* @return Rollback keyword node
*/
private STNode parseRollbackKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
return parseRollbackKeyword();
}
}
/**
* Parse transactional expression.
* <p>
* <code>transactional-expr := "transactional"</code>
*
* @return Transactional expression node
*/
private STNode parseTransactionalExpression() {
STNode transactionalKeyword = parseTransactionalKeyword();
return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
}
/**
* Parse transactional keyword.
*
* @return Transactional keyword node
*/
private STNode parseTransactionalKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
return parseTransactionalKeyword();
}
}
/**
* Parse base16 literal.
* <p>
* <code>
* byte-array-literal := Base16Literal | Base64Literal
* <br/>
* Base16Literal := base16 WS ` HexGroup* WS `
* <br/>
* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
* </code>
*
* @return parsed node
*/
private STNode parseByteArrayLiteral() {
STNode type;
if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
type = parseBase16Keyword();
} else {
type = parseBase64Keyword();
}
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
if (startingBackTick.isMissing()) {
startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode content = STNodeFactory.createEmptyNode();
STNode byteArrayLiteral =
STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
byteArrayLiteral =
SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
return byteArrayLiteral;
}
STNode content = parseByteArrayContent();
return parseByteArrayLiteral(type, startingBackTick, content);
}
    /**
     * Validate the parsed backtick content and assemble the byte array literal node.
     * Invalid content is not kept as content: it is attached to the starting backtick
     * as invalid-node minutiae with an appropriate diagnostic.
     *
     * @param typeKeyword keyword token, possible values are `base16` and `base64`
     * @param startingBackTick starting backtick token
     * @param byteArrayContent byte array literal content to be validated
     * @return parsed byte array literal node
     */
    private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
        STNode content = STNodeFactory.createEmptyNode();
        STNode newStartingBackTick = startingBackTick;
        STNodeList items = (STNodeList) byteArrayContent;
        if (items.size() == 1) {
            // Exactly one item: validate it against the declared base16/base64 form.
            STNode item = items.get(0);
            if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
                // e.g. an interpolation — not allowed inside a byte array literal.
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else {
                content = item;
            }
        } else if (items.size() > 1) {
            // Multiple items (interpolations split the content): attach every item to the
            // starting backtick as invalid minutiae, then add a single diagnostic.
            STNode clonedStartingBackTick = startingBackTick;
            for (int index = 0; index < items.size(); index++) {
                STNode item = items.get(index);
                clonedStartingBackTick =
                        SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
            }
            newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        }
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
    }
/**
* Parse <code>base16</code> keyword.
*
* @return base16 keyword node
*/
private STNode parseBase16Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE16_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE16_KEYWORD);
return parseBase16Keyword();
}
}
/**
* Parse <code>base64</code> keyword.
*
* @return base64 keyword node
*/
private STNode parseBase64Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE64_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE64_KEYWORD);
return parseBase64Keyword();
}
}
/**
* Validate and parse byte array literal content.
* An error is reported, if the content is invalid.
*
* @return parsed node
*/
private STNode parseByteArrayContent() {
STToken nextToken = peek();
List<STNode> items = new ArrayList<>();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode content = parseTemplateItem();
items.add(content);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
/**
* Validate base16 literal content.
* <p>
* <code>
* Base16Literal := base16 WS ` HexGroup* WS `
* <br/>
* HexGroup := WS HexDigit WS HexDigit
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase16LiteralContent(String content) {
char[] charArray = content.toCharArray();
int hexDigitCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
default:
if (isHexDigit(c)) {
hexDigitCount++;
} else {
return false;
}
break;
}
}
return hexDigitCount % 2 == 0;
}
/**
* Validate base64 literal content.
* <p>
* <code>
* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
* <br/>
* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
* <br/>
* PaddedBase64Group :=
* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
* <br/>
* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
* <br/>
* PaddingChar := =
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase64LiteralContent(String content) {
char[] charArray = content.toCharArray();
int base64CharCount = 0;
int paddingCharCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
case LexerTerminals.EQUAL:
paddingCharCount++;
break;
default:
if (isBase64Char(c)) {
if (paddingCharCount == 0) {
base64CharCount++;
} else {
return false;
}
} else {
return false;
}
break;
}
}
if (paddingCharCount > 2) {
return false;
} else if (paddingCharCount == 0) {
return base64CharCount % 4 == 0;
} else {
return base64CharCount % 4 == 4 - paddingCharCount;
}
}
/**
* <p>
* Check whether a given char is a base64 char.
* </p>
* <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
*
* @param c character to check
* @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
*/
static boolean isBase64Char(int c) {
if ('a' <= c && c <= 'z') {
return true;
}
if ('A' <= c && c <= 'Z') {
return true;
}
if (c == '+' || c == '/') {
return true;
}
return isDigit(c);
}
static boolean isHexDigit(int c) {
if ('a' <= c && c <= 'f') {
return true;
}
if ('A' <= c && c <= 'F') {
return true;
}
return isDigit(c);
}
static boolean isDigit(int c) {
return ('0' <= c && c <= '9');
}
/**
* Parse xml filter expression.
* <p>
* <code>xml-filter-expr := expression .< xml-name-pattern ></code>
*
* @param lhsExpr Preceding expression of .< token
* @return Parsed node
*/
private STNode parseXMLFilterExpression(STNode lhsExpr) {
STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
}
/**
* Parse xml filter expression rhs.
* <p>
* <code>filer-expression-rhs := .< xml-name-pattern ></code>
*
* @return Parsed node
*/
private STNode parseXMLFilterExpressionRhs() {
STNode dotLTToken = parseDotLTToken();
return parseXMLNamePatternChain(dotLTToken);
}
/**
* Parse xml name pattern chain.
* <p>
* <code>
* xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
* <br/>
* filer-expression-rhs := .< xml-name-pattern >
* <br/>
* xml-element-children-step := /< xml-name-pattern >
* <br/>
* xml-element-descendants-step := /**\/<xml-name-pattern >
* </code>
*
* @param startToken Preceding token of xml name pattern
* @return Parsed node
*/
private STNode parseXMLNamePatternChain(STNode startToken) {
startContext(ParserRuleContext.XML_NAME_PATTERN);
STNode xmlNamePattern = parseXMLNamePattern();
STNode gtToken = parseGTToken();
endContext();
startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
}
/**
* Parse <code> .< </code> token.
*
* @return Parsed node
*/
private STNode parseDotLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
return parseDotLTToken();
}
}
/**
* Parse xml name pattern.
* <p>
* <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
*
* @return Parsed node
*/
private STNode parseXMLNamePattern() {
List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfXMLNamePattern(nextToken.kind)) {
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
STNode separator;
while (!isEndOfXMLNamePattern(peek().kind)) {
separator = parseXMLNamePatternSeparator();
if (separator == null) {
break;
}
xmlAtomicNamePatternList.add(separator);
xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
}
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
switch (tokenKind) {
case GT_TOKEN:
case EOF_TOKEN:
return true;
case IDENTIFIER_TOKEN:
case ASTERISK_TOKEN:
case COLON_TOKEN:
default:
return false;
}
}
private STNode parseXMLNamePatternSeparator() {
STToken token = peek();
switch (token.kind) {
case PIPE_TOKEN:
return consume();
case GT_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
return parseXMLNamePatternSeparator();
}
}
/**
* Parse xml atomic name pattern.
* <p>
* <code>
* xml-atomic-name-pattern :=
* *
* | identifier
* | xml-namespace-prefix : identifier
* | xml-namespace-prefix : *
* </code>
*
* @return Parsed node
*/
private STNode parseXMLAtomicNamePattern() {
startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
endContext();
return atomicNamePattern;
}
private STNode parseXMLAtomicNamePatternBody() {
STToken token = peek();
STNode identifier;
switch (token.kind) {
case ASTERISK_TOKEN:
return consume();
case IDENTIFIER_TOKEN:
identifier = consume();
break;
default:
recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
return parseXMLAtomicNamePatternBody();
}
return parseXMLAtomicNameIdentifier(identifier);
}
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
STToken token = peek();
if (token.kind == SyntaxKind.COLON_TOKEN) {
STNode colon = consume();
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
STToken endToken = consume();
return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
}
}
return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
* Parse xml step expression.
* <p>
* <code>xml-step-expr := expression xml-step-start</code>
*
* @param lhsExpr Preceding expression of /*, /<, or /**\/< token
* @return Parsed node
*/
private STNode parseXMLStepExpression(STNode lhsExpr) {
STNode xmlStepStart = parseXMLStepStart();
return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
}
/**
* Parse xml filter expression rhs.
* <p>
* <code>
* xml-step-start :=
* xml-all-children-step
* | xml-element-children-step
* | xml-element-descendants-step
* <br/>
* xml-all-children-step := /*
* </code>
*
* @return Parsed node
*/
private STNode parseXMLStepStart() {
STToken token = peek();
STNode startToken;
switch (token.kind) {
case SLASH_ASTERISK_TOKEN:
return consume();
case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
startToken = parseDoubleSlashDoubleAsteriskLTToken();
break;
case SLASH_LT_TOKEN:
default:
startToken = parseSlashLTToken();
break;
}
return parseXMLNamePatternChain(startToken);
}
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseSlashLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
return parseSlashLTToken();
}
}
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
return parseDoubleSlashDoubleAsteriskLTToken();
}
}
/**
* Parse match statement.
* <p>
* <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
*
* @return Match statement
*/
private STNode parseMatchStatement() {
startContext(ParserRuleContext.MATCH_STMT);
STNode matchKeyword = parseMatchKeyword();
STNode actionOrExpr = parseActionOrExpression();
startContext(ParserRuleContext.MATCH_BODY);
STNode openBrace = parseOpenBrace();
List<STNode> matchClausesList = new ArrayList<>();
while (!isEndOfMatchClauses(peek().kind)) {
STNode clause = parseMatchClause();
matchClausesList.add(clause);
}
STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
if (isNodeListEmpty(matchClauses)) {
openBrace = SyntaxErrors.addDiagnostic(openBrace,
DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
}
STNode closeBrace = parseCloseBrace();
endContext();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
onFailClause);
}
/**
* Parse match keyword.
*
* @return Match keyword node
*/
private STNode parseMatchKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
return parseMatchKeyword();
}
}
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case TYPE_KEYWORD:
return true;
default:
return isEndOfStatements();
}
}
    /**
     * Parse a single match match clause.
     * <p>
     * <code>
     * match-clause := match-pattern-list [match-guard] => block-stmt
     * <br/>
     * match-guard := if expression
     * </code>
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();
        if (isNodeListEmpty(matchPatterns)) {
            // No pattern was parsed: substitute a missing-identifier constant pattern and
            // attach the diagnostic to the nearest real node (the guard if one exists,
            // otherwise the `=>` token).
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);
            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }
        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }
/**
* Parse match guard.
* <p>
* <code>match-guard := if expression</code>
*
* @return Match guard
*/
private STNode parseMatchGuard() {
STToken nextToken = peek();
switch (nextToken.kind) {
case IF_KEYWORD:
STNode ifKeyword = parseIfKeyword();
STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
case RIGHT_DOUBLE_ARROW_TOKEN:
return STNodeFactory.createEmptyNode();
default:
recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
return parseMatchGuard();
}
}
/**
* Parse match patterns list.
* <p>
* <code>match-pattern-list := match-pattern (| match-pattern)*</code>
*
* @return Match patterns list
*/
private STNode parseMatchPatternList() {
startContext(ParserRuleContext.MATCH_PATTERN);
List<STNode> matchClauses = new ArrayList<>();
while (!isEndOfMatchPattern(peek().kind)) {
STNode clause = parseMatchPattern();
if (clause == null) {
break;
}
matchClauses.add(clause);
STNode seperator = parseMatchPatternListMemberRhs();
if (seperator == null) {
break;
}
matchClauses.add(seperator);
}
endContext();
return STNodeFactory.createNodeList(matchClauses);
}
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case PIPE_TOKEN:
case IF_KEYWORD:
case RIGHT_DOUBLE_ARROW_TOKEN:
return true;
default:
return false;
}
}
    /**
     * Parse match pattern.
     * <p>
     * <code>
     * match-pattern := var binding-pattern
     * | wildcard-match-pattern
     * | const-pattern
     * | list-match-pattern
     * | mapping-match-pattern
     * | error-match-pattern
     * </code>
     *
     * @return Match pattern
     */
    private STNode parseMatchPattern() {
        STToken nextToken = peek();
        // An identifier (possibly a predeclared prefix) may start either an error match
        // pattern (via a type reference) or a constant pattern; disambiguate after parsing it.
        if (isPredeclaredIdentifier(nextToken.kind)) {
            STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
        }
        switch (nextToken.kind) {
            // Literals and signed numeric literals form constant patterns.
            case OPEN_PAREN_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case PLUS_TOKEN:
            case MINUS_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
                return parseSimpleConstExpr();
            case VAR_KEYWORD:
                return parseVarTypedBindingPattern();
            case OPEN_BRACKET_TOKEN:
                return parseListMatchPattern();
            case OPEN_BRACE_TOKEN:
                return parseMappingMatchPattern();
            case ERROR_KEYWORD:
                return parseErrorMatchPattern();
            default:
                recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
                return parseMatchPattern();
        }
    }
private STNode parseMatchPatternListMemberRhs() {
STToken nextToken = peek();
switch (nextToken.kind) {
case PIPE_TOKEN:
return parsePipeToken();
case IF_KEYWORD:
case RIGHT_DOUBLE_ARROW_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
return parseMatchPatternListMemberRhs();
}
}
/**
* Parse var typed binding pattern.
* <p>
* <code>var binding-pattern</code>
* </p>
*
* @return Parsed typed binding pattern node
*/
private STNode parseVarTypedBindingPattern() {
STNode varKeyword = parseVarKeyword();
STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
}
/**
* Parse var keyword.
*
* @return Var keyword node
*/
private STNode parseVarKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.VAR_KEYWORD);
return parseVarKeyword();
}
}
    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     * match-pattern (, match-pattern)* [, rest-match-pattern]
     * | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;
        // Collect members until the close bracket; a rest-match-pattern must be last.
        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                // Anything after the rest pattern is invalid; handled in the loop below.
                isEndOfFields = true;
                break;
            }
            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }
        // Attach any members (and separators) that follow the rest-match-pattern to the
        // last valid node as invalid-node minutiae with a diagnostic.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }
            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }
        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();
        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }
public boolean isEndOfListMatchPattern() {
switch (peek().kind) {
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseListMatchPatternMember() {
STNode nextToken = peek();
switch (nextToken.kind) {
case ELLIPSIS_TOKEN:
return parseRestMatchPattern();
default:
return parseMatchPattern();
}
}
/**
* Parse rest match pattern.
* <p>
* <code>
* rest-match-pattern := ... var variable-name
* </code>
* </p>
*
* @return Parsed rest match pattern node
*/
private STNode parseRestMatchPattern() {
startContext(ParserRuleContext.REST_MATCH_PATTERN);
STNode ellipsisToken = parseEllipsis();
STNode varKeywordToken = parseVarKeyword();
STNode variableName = parseVariableName();
endContext();
STSimpleNameReferenceNode simpleNameReferenceNode =
(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
}
private STNode parseListMatchPatternMemberRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
return parseListMatchPatternMemberRhs();
}
}
/**
* Parse mapping match pattern.
* <p>
* mapping-match-pattern := { field-match-patterns }
* <br/>
* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
* | [ rest-match-pattern ]
* <br/>
* field-match-pattern := field-name : match-pattern
* <br/>
* rest-match-pattern := ... var variable-name
* </p>
*
* @return Parsed Node.
*/
private STNode parseMappingMatchPattern() {
startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
STNode openBraceToken = parseOpenBrace();
STNode fieldMatchPatterns = parseFieldMatchPatternList();
STNode closeBraceToken = parseCloseBrace();
endContext();
return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
}
private STNode parseFieldMatchPatternList() {
List<STNode> fieldMatchPatterns = new ArrayList<>();
STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
if (fieldMatchPatternMember == null) {
return STNodeFactory.createEmptyNodeList();
}
fieldMatchPatterns.add(fieldMatchPatternMember);
if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
return STNodeFactory.createNodeList(fieldMatchPatterns);
}
return parseFieldMatchPatternList(fieldMatchPatterns);
}
/**
 * Parse the remaining field-match-patterns, given the already parsed first member.
 *
 * @param fieldMatchPatterns List containing the first member (and its separators, as parsing proceeds)
 * @return A node list of field/rest match patterns
 */
private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            // No separator: pattern list has ended.
            break;
        }
        fieldMatchPatterns.add(fieldMatchPatternRhs);
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // Trailing comma with no member: synthesize a missing member with a diagnostic.
            fieldMatchPatternMember = createMissingFieldMatchPattern();
        }
        fieldMatchPatterns.add(fieldMatchPatternMember);
        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // rest-match-pattern must be the last member.
            invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
            break;
        }
    }
    return STNodeFactory.createNodeList(fieldMatchPatterns);
}
/**
 * Create a `field-name : match-pattern` node with all parts missing, carrying a
 * missing-field-match-pattern-member diagnostic. Used when a comma is not followed by a member.
 *
 * @return Missing field match pattern node with diagnostic
 */
private STNode createMissingFieldMatchPattern() {
    STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
    STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
    STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
    fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,
            DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
    return fieldMatchPatternMember;
}
/**
 * Parse and invalidate all field match pattern members after a rest-match-pattern.
 * The extra members are attached to the last valid node as invalid-node minutiae,
 * each with an appropriate diagnostic.
 *
 * @param fieldMatchPatterns field-match-patterns list parsed so far (last entry is the rest-match-pattern)
 */
private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            break;
        }
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // Only a stray separator: attach it as an invalid token.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
        } else {
            // Separator plus member: attach both; the diagnostic goes on the member.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        }
    }
}
/**
 * Parse one member of a mapping match pattern: either a field-match-pattern or a
 * rest-match-pattern.
 *
 * @return Parsed member node, or {@code null} when the member list has ended
 */
private STNode parseFieldMatchPatternMember() {
    STToken token = peek();
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseFieldMatchPattern();
    }
    if (token.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    if (token.kind == SyntaxKind.CLOSE_BRACE_TOKEN || token.kind == SyntaxKind.EOF_TOKEN) {
        // End of the mapping match pattern.
        return null;
    }
    // Unexpected token: run error recovery and retry.
    recover(token, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
    return parseFieldMatchPatternMember();
}
/**
 * Parse field match pattern.
 * <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldNameNode = parseVariableName();
    STNode colonToken = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}
/**
 * Check whether the next token ends a mapping match pattern.
 *
 * @return {@code true} if the next token is a close brace or EOF
 */
public boolean isEndOfMappingMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the token that separates or ends a field-match-pattern member.
 *
 * @return A comma token, or {@code null} if the field pattern list has ended
 */
private STNode parseFieldMatchPatternRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN || nextToken.kind == SyntaxKind.EOF_TOKEN) {
        // End of the mapping match pattern.
        return null;
    }
    // Unexpected token: run error recovery and retry.
    recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
    return parseFieldMatchPatternRhs();
}
/**
 * Parse an error-match-pattern or a constant-pattern, given an already parsed
 * type-reference/const-expression. An open paren right after it means this is an
 * error match pattern whose `error` keyword is missing.
 *
 * @param typeRefOrConstExpr Already parsed type reference or constant expression
 * @return Parsed node
 */
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // `T (...)`: treat as an error match pattern with a missing `error` keyword.
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
            return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
        default:
            if (isMatchPatternEnd(peek().kind)) {
                // Plain const-pattern.
                return typeRefOrConstExpr;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
}
/**
 * Check whether a token kind terminates a match pattern.
 *
 * @param tokenKind Kind of the token to check
 * @return {@code true} if the token can follow a complete match pattern
 */
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN ||
            tokenKind == SyntaxKind.COMMA_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN ||
            tokenKind == SyntaxKind.PIPE_TOKEN ||
            tokenKind == SyntaxKind.IF_KEYWORD ||
            tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse error match pattern.
 * <p>
 * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
 * error-arg-list-match-pattern :=
 * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
 * | [error-field-match-patterns]
 * error-message-match-pattern := simple-match-pattern
 * error-cause-match-pattern := simple-match-pattern | error-match-pattern
 * simple-match-pattern :=
 * wildcard-match-pattern
 * | const-pattern
 * | var variable-name
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * </p>
 *
 * @return Parsed error match pattern node.
 */
private STNode parseErrorMatchPattern() {
    startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
    // Caller has already verified the `error` keyword is next.
    STNode errorKeyword = consume();
    return parseErrorMatchPattern(errorKeyword);
}
/**
 * Parse the rest of an error match pattern, after the `error` keyword: an optional
 * error-type-reference followed by the parenthesized arg list.
 *
 * @param errorKeyword Already consumed `error` keyword
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No error-type-reference: `error(...)`.
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorMatchPattern(errorKeyword);
    }
    return parseErrorMatchPattern(errorKeyword, typeRef);
}
/**
 * Parse the parenthesized arg list of an error match pattern and assemble the node.
 * Ends the ERROR_MATCH_PATTERN context opened by the caller.
 *
 * @param errorKeyword `error` keyword (possibly a missing token)
 * @param typeRef Optional error-type-reference (empty node if absent)
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParenthesisToken = parseOpenParenthesis();
    STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
    STNode closeParenthesisToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
            argListMatchPatternNode, closeParenthesisToken);
}
/**
 * Parse the arg list of an error match pattern: an optional message match pattern,
 * an optional cause match pattern, then the error-field match patterns. Args that are
 * invalid in their position are attached as invalid-node minutiae with diagnostics.
 *
 * @return Node list of arg match patterns (including separator commas)
 */
private STNode parseErrorArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    if (isEndOfErrorFieldMatchPatterns()) {
        // Empty arg list: `error()`.
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
    startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
    STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
    endContext();
    if (isSimpleMatchPattern(firstArg.kind)) {
        // First arg is the error-message match pattern; a second positional arg (the cause)
        // may follow.
        argListMatchPatterns.add(firstArg);
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
        if (argEnd != null) {
            STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
            if (isValidSecondArgMatchPattern(secondArg.kind)) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(secondArg);
            } else {
                // Invalid cause pattern: attach separator and arg to the previous node.
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        // No message pattern: only named-arg or rest patterns are allowed as the first arg.
        if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
        } else {
            argListMatchPatterns.add(firstArg);
        }
    }
    parseErrorFieldMatchPatterns(argListMatchPatterns);
    return STNodeFactory.createNodeList(argListMatchPatterns);
}
/**
 * Check whether a kind is a simple match pattern (wildcard, const-pattern, or
 * `var variable-name`), as permitted for error message/cause positions.
 *
 * @param matchPatternKind Kind to check
 * @return {@code true} if the kind is a simple match pattern
 */
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
    return matchPatternKind == SyntaxKind.IDENTIFIER_TOKEN ||
            matchPatternKind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            matchPatternKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            matchPatternKind == SyntaxKind.NUMERIC_LITERAL ||
            matchPatternKind == SyntaxKind.STRING_LITERAL ||
            matchPatternKind == SyntaxKind.NULL_LITERAL ||
            matchPatternKind == SyntaxKind.NIL_LITERAL ||
            matchPatternKind == SyntaxKind.BOOLEAN_LITERAL ||
            matchPatternKind == SyntaxKind.TYPED_BINDING_PATTERN ||
            matchPatternKind == SyntaxKind.UNARY_EXPRESSION;
}
/**
 * Check whether a kind is valid as the second arg of an error-arg-list match pattern:
 * a simple match pattern (the error cause), a nested error match pattern, or the start
 * of the error-field section (named-arg / rest match pattern).
 *
 * @param syntaxKind Kind to check
 * @return {@code true} if valid as the second argument
 */
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case ERROR_MATCH_PATTERN:
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            return true;
        default:
            // Idiom fix: return the predicate result directly instead of
            // `if (p) { return true; } return false;`.
            return isSimpleMatchPattern(syntaxKind);
    }
}
/**
 * Parse error field match patterns.
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 *
 * @param argListMatchPatterns List of already parsed arg match patterns; valid field
 *                             patterns (and their separators) are appended to it
 */
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
    // Track the last accepted arg kind so that ordering violations
    // (e.g. anything after a rest-match-pattern) can be reported.
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
    while (!isEndOfErrorFieldMatchPatterns()) {
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
        if (argEnd == null) {
            // null marks the end of args.
            break;
        }
        STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(argEnd);
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListMatchPatterns.size() == 0) {
            // Nothing valid parsed yet: attach invalid nodes to the next token instead.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }
    }
}
// Error-field match patterns terminate exactly where error-field binding patterns do,
// so delegate to the binding-pattern end check.
private boolean isEndOfErrorFieldMatchPatterns() {
    return isEndOfErrorFieldBindingPatterns();
}
/**
 * Parse the token that separates or ends an error arg-list match pattern member.
 *
 * @param currentCtx Recovery context to use on an unexpected token
 * @return A comma token, or {@code null} if the arg list has ended
 */
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // End of the arg list.
        return null;
    }
    // Unexpected token: run error recovery and retry.
    recover(nextToken, currentCtx);
    return parseErrorArgListMatchPatternEnd(currentCtx);
}
/**
 * Parse one arg of an error arg-list match pattern: a named-arg, rest, or ordinary
 * match pattern, or a `var variable-name` typed binding pattern.
 *
 * @param context Recovery context to use on an unexpected token
 * @return Parsed arg match pattern node
 */
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Could be `arg-name = pattern` or a plain const/var-ref pattern.
        return parseNamedArgOrSimpleMatchPattern();
    }
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        case VAR_KEYWORD:
            // `var name`: build a typed binding pattern directly.
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            // Separator with no arg after it: synthesize a missing pattern.
            return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}
/**
 * Parse a named-arg match pattern or a simple const/var-ref match pattern. A plain
 * identifier followed by `=` is the arg-name of a named-arg match pattern; otherwise
 * the parsed reference is itself the pattern.
 *
 * @return Parsed node
 */
private STNode parseNamedArgOrSimpleMatchPattern() {
    STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
    // A qualified name can never be an arg-name, so it is always a const-pattern.
    if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) {
        return constRefExpr;
    }
    return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);
}
/**
 * Parses a named arg match pattern, given the already parsed arg-name.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @param identifier Already parsed arg-name identifier
 * @return Parsed named arg match pattern node
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}
/**
 * Validate the ordering of error-field match patterns: only named-arg and rest patterns
 * are allowed in the field section, and nothing may follow a rest pattern.
 *
 * @param prevArgKind Kind of the previously accepted arg
 * @param currentArgKind Kind of the current arg
 * @return A diagnostic code describing the violation, or {@code null} if the order is valid
 */
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
            currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
        // Positional patterns are not allowed in the field section.
        return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
    // Nothing may follow a rest-match-pattern.
    return prevArgKind == SyntaxKind.REST_MATCH_PATTERN
            ? DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG
            : null;
}
/**
 * Parse markdown documentation.
 * Consumes consecutive documentation-string tokens and flattens the parsed lines of
 * each into a single markdown documentation node.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> docLines = new ArrayList<>();
    while (peek().kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        appendParsedDocumentationLines(docLines, parseDocumentationString(documentationString));
    }
    return STNodeFactory.createMarkdownDocumentationNode(STNodeFactory.createNodeList(docLines));
}
/**
 * Parse documentation string.
 * Re-lexes the documentation token's text with a dedicated documentation lexer,
 * preserving the token's leading trivia and diagnostics.
 *
 * @param documentationStringToken Documentation-string token to re-parse
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    List<STNode> leadingTriviaList = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>((documentationStringToken.diagnostics()));
    CharReader charReader = CharReader.from(documentationStringToken.text());
    DocumentationLexer documentationLexer = new DocumentationLexer(charReader, leadingTriviaList, diagnostics);
    AbstractTokenReader tokenReader = new TokenReader(documentationLexer);
    DocumentationParser documentationParser = new DocumentationParser(tokenReader);
    return documentationParser.parse();
}
/**
 * Collect the children of a leading-minutiae node into a list.
 *
 * @param leadingMinutiaeNode Leading minutiae of a token
 * @return List of the individual trivia nodes
 */
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> triviaList = new ArrayList<>(count);
    for (int index = 0; index < count; index++) {
        triviaList.add(leadingMinutiaeNode.childInBucket(index));
    }
    return triviaList;
}
/**
 * Append every child of a parsed documentation-lines node to the accumulating list.
 *
 * @param markdownDocLineList Accumulator for markdown documentation lines
 * @param parsedDocLines Parsed documentation lines node whose children are appended
 */
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    for (int index = 0, count = parsedDocLines.bucketCount(); index < count; index++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(index));
    }
}
/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @param qualifiers Preceding type-descriptor qualifiers
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    // AMBIGUOUS_STMT is switched to VAR_DECL_STMT or an expression context once resolved.
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}
/**
 * Parse the remainder of an ambiguous statement, once the leading construct has been
 * resolved to either a typed-binding-pattern (variable declaration) or an expression.
 *
 * @param annots Annotations
 * @param typedBindingPatternOrExpr Resolved typed-binding-pattern or expression
 * @return Statement node
 */
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        List<STNode> varDeclQualifiers = new ArrayList<>();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
    }
    // Expression statement: continue parsing the expression rhs before the statement rhs.
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));
    return parseStatementStartWithExprRhs(expr);
}
// Convenience overload: parse a typed-binding-pattern or expression with no
// preceding type-descriptor qualifiers.
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseTypedBindingPatternOrExpr(typeDescQualifiers, allowAssignment);
}
/**
 * Parse a construct that is ambiguous between a typed-binding-pattern and an expression,
 * dispatching on the first token.
 *
 * @param qualifiers Preceding type-descriptor qualifiers (invalidated where a qualifier
 *                   cannot apply)
 * @param allowAssignment Whether this position is a valid lvalue
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            // Anon function expression vs function-type-desc; qualifiers may apply here.
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // Literals may still start a singleton type-desc, so stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 * valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // Could be a union/intersection type-desc or a binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `x | = ...`: treat the lhs as complete.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // Rhs resolved as a typed BP: merge the two type-descs around the operator.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T | U = ...` with no var name: recover with a missing variable name.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                    pipeOrAndToken, rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isExpression(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/`?` indicates a type-desc position.
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Could be a member-access expression or an array-type binding pattern.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            STToken token = peek();
            SyntaxKind typeOrExprKind = typeOrExpr.kind;
            if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                    typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);
            } else {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);
            }
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
/**
 * Build a typed-binding-pattern `T|U <missing-name>` from two type-desc halves, for
 * recovery when a union/intersection type is followed directly by `=`.
 *
 * @param lhsType Left type (or expression convertible to a type-desc)
 * @param separatorToken `|` or `&` token
 * @param rhsType Right type (or expression convertible to a type-desc)
 * @return Typed binding pattern with a missing variable name token
 */
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    lhsType = getTypeDescFromExpr(lhsType);
    rhsType = getTypeDescFromExpr(rhsType);
    STNode newTypeDesc = mergeTypes(lhsType, separatorToken, rhsType);
    STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(identifier);
    return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, captureBP);
}
// An ambiguous node has been resolved as a type-desc: finish parsing any complex
// type-desc parts, then the binding pattern that follows it.
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}
// Resolve a construct starting with `(` either to a type-desc (then continue as a
// typed binding pattern) or to an expression (then continue with the ambiguous rhs).
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }
    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}
// Whether the kind is unambiguously a type descriptor.
// NOTE(review): relies on SyntaxKind declaration order — all type-desc kinds are assumed
// to be declared contiguously between RECORD_TYPE_DESC and FUTURE_TYPE_DESC.
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0;
}
// Whether the kind is unambiguously an expression. Name references are excluded since
// they may also denote types. Relies on SyntaxKind declaration order: expression kinds
// are assumed contiguous between BINARY_EXPRESSION and ERROR_CONSTRUCTOR.
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0;
}
// Whether the kind is unambiguously an action. Relies on SyntaxKind declaration order:
// action kinds are assumed contiguous between REMOTE_METHOD_CALL_ACTION and
// CLIENT_RESOURCE_ACCESS_ACTION.
private boolean isDefiniteAction(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 &&
            kind.compareTo(SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION) <= 0;
}
/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()` — resolved by what follows (nil literal vs anon-func params).
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // Still ambiguous between a braced expression and anon-func params.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // Resolved as a parenthesized type-desc.
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}
/**
 * Parse type-desc or expression. This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    // Delegate with an empty qualifier list.
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseTypeDescOrExpr(typeDescQualifiers);
}
/**
 * Parse a type-desc or an expression (no binding patterns), dispatching on the first token.
 *
 * @param qualifiers Preceding type-descriptor qualifiers (invalidated where a qualifier
 *                   cannot apply)
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal may still become a singleton type-desc; stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
/**
 * Check whether a kind denotes an expression: one of the literal kinds, or any kind in
 * the contiguous expression range of {@code SyntaxKind}.
 *
 * @param kind Kind to check
 * @return {@code true} if the kind is an expression kind
 */
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN ||
            kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    // Relies on SyntaxKind declaration order for the expression range.
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0;
}
/**
 * Parse statement that starts with an empty parenthesis. Empty parenthesis can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // `() => ...`: an implicit anonymous function with no parameters.
        STNode params = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParam =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(anonFuncParam, false);
    }
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
// Resolve `function ...` either to an anon-func expression/action (returned as-is) or
// to a function-type-desc, in which case the following binding pattern is parsed too.
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // Has a signature: still ambiguous; resolve by the token after the signature.
        funcSignature = parseFuncSignature(true);
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);
        qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // No signature: can only be a function-type-desc.
    funcSignature = STNodeFactory.createEmptyNode();
    STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);
    qualifierList = nodes[0];
    functionKeyword = nodes[1];
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
/**
 * Resolve the anon-func-expr vs function-type-desc ambiguity using the token after the
 * already parsed signature: a `{` or `=>` means a function body follows (anon func);
 * anything else means this was a function-type-desc.
 *
 * @param qualifierList Function qualifiers
 * @param functionKeyword `function` keyword
 * @param funcSignature Already parsed function signature
 * @return Anon-func-expr or function-type-desc (with any following type-desc parts)
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            // The signature was parsed permissively; re-validate it as anon-func params.
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // fall through: an identifier after the signature means a function-type-desc
            // followed by a binding pattern.
        default:
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
/**
 * Parse the component after an ambiguous type-desc-or-expression node, resolving the
 * ambiguity where the following token permits.
 *
 * @param typeOrExpr Ambiguous node parsed so far
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // Union/intersection type-desc or binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        pipeOrAndToken, rhsTypeDescOrExpr);
            }
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A following identifier/`?` resolves this as a type-desc.
            typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr),
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Member access vs array type-desc.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // Rest descriptor: `T...`.
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
/**
 * Check whether a node is ambiguous between a type-desc and an expression: name
 * references, literals, bracketed lists, and `|`/`&` combinations and member-access
 * forms thereof.
 *
 * @param node Node to check
 * @return {@code true} if the node could still be either a type-desc or an expression
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            SyntaxKind operatorKind = binaryExpr.operator.kind;
            // Bug fix: the original condition `operatorKind != PIPE_TOKEN ||
            // operatorKind == BITWISE_AND_TOKEN` made the BITWISE_AND clause unreachable,
            // rejecting `&` combinations. Only `|` and `&` double as type-desc operators
            // (union/intersection) — mirroring how PIPE_TOKEN and BITWISE_AND_TOKEN are
            // handled as a pair in parseTypedBindingPatternOrExprRhs.
            if (operatorKind != SyntaxKind.PIPE_TOKEN && operatorKind != SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // Member access is ambiguous with an array/tuple type-desc when both the
            // container and every key are themselves ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
private boolean isAllBasicLiterals(STNode node) {
switch (node.kind) {
case NIL_LITERAL:
case NULL_LITERAL:
case NUMERIC_LITERAL:
case STRING_LITERAL:
case BOOLEAN_LITERAL:
return true;
case BINARY_EXPRESSION:
STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
return false;
}
return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
case BRACED_EXPRESSION:
return isAmbiguous(((STBracedExpressionNode) node).expression);
case BRACKETED_LIST:
STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
for (STNode member : list.members) {
if (member.kind == SyntaxKind.COMMA_TOKEN) {
continue;
}
if (!isAllBasicLiterals(member)) {
return false;
}
}
return true;
case UNARY_EXPRESSION:
STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
return false;
}
return isNumericLiteral(unaryExpr.expression);
default:
return false;
}
}
private boolean isNumericLiteral(STNode node) {
switch (node.kind) {
case NUMERIC_LITERAL:
return true;
default:
return false;
}
}
/**
* Parse binding-patterns.
* <p>
* <code>
* binding-pattern := capture-binding-pattern
* | wildcard-binding-pattern
* | list-binding-pattern
* | mapping-binding-pattern
* | functional-binding-pattern
* <br/><br/>
* <p>
* capture-binding-pattern := variable-name
* variable-name := identifier
* <br/><br/>
* <p>
* wildcard-binding-pattern := _
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* <p>
* mapping-binding-pattern := { field-binding-patterns }
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/>
* field-binding-pattern := field-name : binding-pattern | variable-name
* <br/>
* rest-binding-pattern := ... variable-name
* <p>
* <br/><br/>
* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
* <br/>
* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
* | other-arg-binding-patterns
* <br/>
* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
* <br/>
* positional-arg-binding-pattern := binding-pattern
* <br/>
* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
* | [rest-binding-pattern]
* <br/>
* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
* <br/>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return binding-pattern node
*/
private STNode parseBindingPattern() {
switch (peek().kind) {
case OPEN_BRACKET_TOKEN:
return parseListBindingPattern();
case IDENTIFIER_TOKEN:
return parseBindingPatternStartsWithIdentifier();
case OPEN_BRACE_TOKEN:
return parseMappingBindingPattern();
case ERROR_KEYWORD:
return parseErrorBindingPattern();
default:
recover(peek(), ParserRuleContext.BINDING_PATTERN);
return parseBindingPattern();
}
}
    /**
     * Parse a binding pattern that starts with an identifier. This can end up as a
     * capture/wildcard binding pattern, or an error binding pattern whose
     * <code>error</code> keyword is missing (identifier followed by an open-paren).
     *
     * @return Parsed binding pattern node
     */
    private STNode parseBindingPatternStartsWithIdentifier() {
        STNode argNameOrBindingPattern =
                parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        STToken secondToken = peek();
        if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
            // `name(` — treat as a functional (error) binding pattern with the `error`
            // keyword missing; insert a missing keyword token and continue.
            startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
        }
        if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
            // Not a simple name (i.e. a qualified name came back) — invalid here.
            // Replace with a missing identifier and keep the bad node as invalid minutiae.
            STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
            return STNodeFactory.createCaptureBindingPatternNode(identifier);
        }
        return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
    }
private STNode createCaptureOrWildcardBP(STNode varName) {
STNode bindingPattern;
if (isWildcardBP(varName)) {
bindingPattern = getWildcardBindingPattern(varName);
} else {
bindingPattern = STNodeFactory.createCaptureBindingPatternNode(varName);
}
return bindingPattern;
}
/**
* Parse list-binding-patterns.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return list-binding-pattern node
*/
private STNode parseListBindingPattern() {
startContext(ParserRuleContext.LIST_BINDING_PATTERN);
STNode openBracket = parseOpenBracket();
List<STNode> bindingPatternsList = new ArrayList<>();
STNode listBindingPattern = parseListBindingPattern(openBracket, bindingPatternsList);
endContext();
return listBindingPattern;
}
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
STNode closeBracket = parseCloseBracket();
STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
STNode listBindingPatternMember = parseListBindingPatternMember();
bindingPatternsList.add(listBindingPatternMember);
STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
return listBindingPattern;
}
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
STNode member = firstMember;
STToken token = peek();
STNode listBindingPatternRhs = null;
while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
listBindingPatternRhs = parseListBindingPatternMemberRhs();
if (listBindingPatternRhs == null) {
break;
}
bindingPatterns.add(listBindingPatternRhs);
member = parseListBindingPatternMember();
bindingPatterns.add(member);
token = peek();
}
STNode closeBracket = parseCloseBracket();
STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
private STNode parseListBindingPatternMemberRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
return parseListBindingPatternMemberRhs();
}
}
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse list-binding-pattern member.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return List binding pattern member
*/
private STNode parseListBindingPatternMember() {
switch (peek().kind) {
case ELLIPSIS_TOKEN:
return parseRestBindingPattern();
case OPEN_BRACKET_TOKEN:
case IDENTIFIER_TOKEN:
case OPEN_BRACE_TOKEN:
case ERROR_KEYWORD:
return parseBindingPattern();
default:
recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
return parseListBindingPatternMember();
}
}
/**
* Parse rest binding pattern.
* <p>
* <code>
* rest-binding-pattern := ... variable-name
* </code>
*
* @return Rest binding pattern node
*/
private STNode parseRestBindingPattern() {
startContext(ParserRuleContext.REST_BINDING_PATTERN);
STNode ellipsis = parseEllipsis();
STNode varName = parseVariableName();
endContext();
STSimpleNameReferenceNode simpleNameReferenceNode =
(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
return STNodeFactory.createRestBindingPatternNode(ellipsis, simpleNameReferenceNode);
}
/**
* Parse Typed-binding-pattern.
* <p>
* <code>
* typed-binding-pattern := inferable-type-descriptor binding-pattern
* <br/><br/>
* inferable-type-descriptor := type-descriptor | var
* </code>
*
* @return Typed binding pattern node
*/
private STNode parseTypedBindingPattern(ParserRuleContext context) {
List<STNode> typeDescQualifiers = new ArrayList<>();
return parseTypedBindingPattern(typeDescQualifiers, context);
}
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
STNode typeDesc = parseTypeDescriptor(qualifiers,
ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);
STNode typeBindingPattern = parseTypedBindingPatternTypeRhs(typeDesc, context);
return typeBindingPattern;
}
    /**
     * Parse mapping-binding-patterns.
     * <p>
     * <code>
     * mapping-binding-pattern := { field-binding-patterns }
     * <br/><br/>
     * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
     * | [ rest-binding-pattern ]
     * <br/><br/>
     * field-binding-pattern := field-name : binding-pattern | variable-name
     * </code>
     *
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern() {
        startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
        STNode openBrace = parseOpenBrace();
        STToken token = peek();
        if (isEndOfMappingBindingPattern(token.kind)) {
            // Empty mapping binding pattern: `{}`
            STNode closeBrace = parseCloseBrace();
            STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
            endContext();
            return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
        }
        List<STNode> bindingPatterns = new ArrayList<>();
        STNode prevMember = parseMappingBindingPatternMember();
        // A rest-binding-pattern is appended last by the continuation method below,
        // so it is deliberately not added here.
        if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(prevMember);
        }
        return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
    }
    /**
     * Parse the remaining members of a mapping-binding-pattern after the first member.
     *
     * @param openBrace Already-parsed open brace
     * @param bindingPatterns Collected members and separators so far
     * @param prevMember The most recently parsed member
     * @return mapping-binding-pattern node
     */
    private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
        STToken token = peek();
        STNode mappingBindingPatternRhs = null;
        // A rest-binding-pattern must be the last member, hence the loop guard.
        while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
            mappingBindingPatternRhs = parseMappingBindingPatternEnd();
            if (mappingBindingPatternRhs == null) {
                // Reached the closing brace.
                break;
            }
            bindingPatterns.add(mappingBindingPatternRhs);
            prevMember = parseMappingBindingPatternMember();
            if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
                // Added after the loop, so it always lands last in the list.
                break;
            }
            bindingPatterns.add(prevMember);
            token = peek();
        }
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            bindingPatterns.add(prevMember);
        }
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }
/**
* Parse mapping-binding-pattern entry.
* <p>
* <code>
* mapping-binding-pattern := { field-binding-patterns }
* <br/><br/>
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* field-binding-pattern := field-name : binding-pattern
* | variable-name
* </code>
*
* @return mapping-binding-pattern node
*/
private STNode parseMappingBindingPatternMember() {
STToken token = peek();
switch (token.kind) {
case ELLIPSIS_TOKEN:
return parseRestBindingPattern();
default:
return parseFieldBindingPattern();
}
}
private STNode parseMappingBindingPatternEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
return parseMappingBindingPatternEnd();
}
}
/**
* Parse field-binding-pattern.
* <code>field-binding-pattern := field-name : binding-pattern | varname</code>
*
* @return field-binding-pattern node
*/
private STNode parseFieldBindingPattern() {
STToken nextToken = peek();
switch (nextToken.kind) {
case IDENTIFIER_TOKEN:
STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
return parseFieldBindingPattern(simpleNameReference);
default:
recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
return parseFieldBindingPattern();
}
}
private STNode parseFieldBindingPattern(STNode simpleNameReference) {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
case COLON_TOKEN:
STNode colon = parseColon();
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
default:
recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);
return parseFieldBindingPattern(simpleNameReference);
}
}
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || isEndOfModuleLevelNode(1);
}
    /**
     * Disambiguate content starting with the <code>error</code> keyword: decide between an
     * error binding pattern and an error type descriptor, by looking ahead past the keyword.
     *
     * @param annots Annotations attached to the statement (may be empty)
     * @return Parsed node
     */
    private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
        STToken nextNextToken = peek(2);
        switch (nextNextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // `error(` — definitely a binding pattern.
                return parseAsErrorBindingPattern();
            case LT_TOKEN:
                // `error<` — type parameter, so a type descriptor.
                return parseAsErrorTypeDesc(annots);
            case IDENTIFIER_TOKEN:
                SyntaxKind nextNextNextTokenKind = peek(3).kind;
                if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                        nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                    // `error id:` or `error id(` — binding pattern with a type reference.
                    return parseAsErrorBindingPattern();
                }
                // Intentional fall-through: `error id` not followed by `:` or `(`
                // is treated as a type descriptor.
            default:
                return parseAsErrorTypeDesc(annots);
        }
    }
private STNode parseAsErrorBindingPattern() {
startContext(ParserRuleContext.ASSIGNMENT_STMT);
return parseAssignmentStmtRhs(parseErrorBindingPattern());
}
private STNode parseAsErrorTypeDesc(STNode annots) {
STNode finalKeyword = STNodeFactory.createEmptyNode();
return parseVariableDecl(getAnnotations(annots), finalKeyword);
}
    /**
     * Parse error binding pattern node.
     * <p>
     * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
     * <br/><br/>
     * error-arg-list-binding-pattern :=
     * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
     * | [error-field-binding-patterns]
     * <br/><br/>
     * error-message-binding-pattern := simple-binding-pattern
     * <br/><br/>
     * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
     * <br/><br/>
     * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
     * <br/><br/>
     * error-field-binding-patterns :=
     * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
     * | rest-binding-pattern
     * <br/><br/>
     * named-arg-binding-pattern := arg-name = binding-pattern
     *
     * @return Error binding pattern node.
     */
    private STNode parseErrorBindingPattern() {
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        // The context opened here is closed by the overload that parses the arg list.
        STNode errorKeyword = parseErrorKeyword();
        return parseErrorBindingPattern(errorKeyword);
    }
    /**
     * Parse the portion of an error binding pattern that follows the <code>error</code>
     * keyword: the optional error-type-reference and the parenthesized arg list.
     *
     * @param errorKeyword Already-parsed <code>error</code> keyword
     * @return Error binding pattern node
     */
    private STNode parseErrorBindingPattern(STNode errorKeyword) {
        STToken nextToken = peek();
        STNode typeRef;
        switch (nextToken.kind) {
            case OPEN_PAREN_TOKEN:
                // No error-type-reference present.
                typeRef = STNodeFactory.createEmptyNode();
                break;
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    typeRef = parseTypeReference();
                    break;
                }
                // Unexpected token: recover and retry from the same position.
                recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
                return parseErrorBindingPattern(errorKeyword);
        }
        return parseErrorBindingPattern(errorKeyword, typeRef);
    }
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
STNode openParenthesis = parseOpenParenthesis();
STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
STNode closeParenthesis = parseCloseParenthesis();
endContext();
return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
argListBindingPatterns, closeParenthesis);
}
/**
* Parse error arg list binding pattern.
* <p>
* <code>
* error-arg-list-binding-pattern :=
* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
* | [error-field-binding-patterns]
* <br/><br/>
* <p>
* error-message-binding-pattern := simple-binding-pattern
* <br/><br/>
* <p>
* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
* <br/><br/>
* <p>
* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
* <br/><br/>
* <p>
* error-field-binding-patterns :=
* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
* | rest-binding-pattern
* <br/><br/>
* <p>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return Error arg list binding patterns.
*/
private STNode parseErrorArgListBindingPatterns() {
List<STNode> argListBindingPatterns = new ArrayList<>();
if (isEndOfErrorFieldBindingPatterns()) {
return STNodeFactory.createNodeList(argListBindingPatterns);
}
return parseErrorArgListBindingPatterns(argListBindingPatterns);
}
    /**
     * Parse a non-empty error arg list, dispatching on the kind of the first arg:
     * a simple binding pattern is the error-message; an error-binding-pattern is a
     * cause with the message missing; named/rest args mean both message and cause
     * are absent.
     *
     * @param argListBindingPatterns List to collect the parsed args and separators
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
        STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
        if (firstArg == null) {
            // Arg list turned out to be empty after all.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        switch (firstArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                // First arg is the error-message-binding-pattern.
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
            case ERROR_BINDING_PATTERN:
                // First arg is a cause; the mandatory message is missing. Synthesize a
                // missing capture binding pattern and comma before the cause.
                STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
                missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                        DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
                STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
                argListBindingPatterns.add(missingErrorMsgBP);
                argListBindingPatterns.add(missingComma);
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                // Only field args present; message and cause are absent (allowed).
                argListBindingPatterns.add(firstArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
            default:
                // Invalid first arg (e.g. list/mapping BP): drop it and retry.
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBindingPatterns(argListBindingPatterns);
        }
    }
    /**
     * Parse the error arg list after the error-message-binding-pattern has been
     * consumed; the next arg (if any) may be the cause or a field arg.
     *
     * @param argListBindingPatterns List holding the args parsed so far
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Reached the closing paren: only the message was present.
            return STNodeFactory.createNodeList(argListBindingPatterns);
        }
        STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
        // Non-first args never return null (close paren is handled by recovery).
        assert secondArg != null;
        switch (secondArg.kind) {
            case CAPTURE_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
            case ERROR_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case NAMED_ARG_BINDING_PATTERN:
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(secondArg);
                return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
            default:
                // Invalid second arg: attach both the separator and the arg as
                // invalid minutiae to the previous node, then retry.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
                return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        }
    }
    /**
     * Parse the remaining error-field-binding-patterns, after the message and cause
     * (if any) have been consumed. Only named-arg and rest binding patterns are
     * valid from here on; anything else is attached as invalid minutiae.
     *
     * @param argListBindingPatterns List holding the args parsed so far
     * @param lastValidArgKind Kind of the last successfully accepted arg
     * @return Error arg list binding patterns node list
     */
    private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                              SyntaxKind lastValidArgKind) {
        while (!isEndOfErrorFieldBindingPatterns()) {
            STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
            if (argEnd == null) {
                // Reached the closing paren.
                break;
            }
            STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
            assert currentArg != null;
            DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListBindingPatterns.add(argEnd);
                argListBindingPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListBindingPatterns.size() == 0) {
                // Nothing valid accepted yet: attach invalid nodes to the next token.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                // Attach invalid separator and arg to the last accepted node.
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
            }
        }
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
private boolean isEndOfErrorFieldBindingPatterns() {
SyntaxKind nextTokenKind = peek().kind;
switch (nextTokenKind) {
case CLOSE_PAREN_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
switch (peek().kind) {
case COMMA_TOKEN:
return consume();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), currentCtx);
return parseErrorArgsBindingPatternEnd(currentCtx);
}
}
    /**
     * Parse a single arg of an error-arg-list-binding-pattern.
     *
     * @param context Context used for error recovery
     * @param isFirstArg Whether this is the first arg after the open-paren
     * @return Parsed arg node, or <code>null</code> when the first arg is absent (empty list)
     */
    private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
        switch (peek().kind) {
            case ELLIPSIS_TOKEN:
                return parseRestBindingPattern();
            case IDENTIFIER_TOKEN:
                // The identifier may be an arg-name of a named-arg (`name = bp`)
                // or a simple binding pattern on its own.
                STNode argNameOrSimpleBindingPattern = consume();
                return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
            case OPEN_BRACKET_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                return parseBindingPattern();
            case CLOSE_PAREN_TOKEN:
                if (isFirstArg) {
                    // Empty arg list: `error()`.
                    return null;
                }
                // Intentional fall-through: a close-paren after a comma is invalid; recover.
            default:
                recover(peek(), context);
                return parseErrorArgListBindingPattern(context, isFirstArg);
        }
    }
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
STToken secondToken = peek();
switch (secondToken.kind) {
case EQUAL_TOKEN:
STNode equal = consume();
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
equal, bindingPattern);
case COMMA_TOKEN:
case CLOSE_PAREN_TOKEN:
default:
return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
}
}
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
SyntaxKind currentArgKind) {
switch (currentArgKind) {
case NAMED_ARG_BINDING_PATTERN:
case REST_BINDING_PATTERN:
if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
}
return null;
case CAPTURE_BINDING_PATTERN:
case WILDCARD_BINDING_PATTERN:
case ERROR_BINDING_PATTERN:
case LIST_BINDING_PATTERN:
case MAPPING_BINDING_PATTERN:
default:
return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
}
}
/*
* This parses Typed binding patterns and deals with ambiguity between types,
* and binding patterns. An example is 'T[a]'.
* The ambiguity lies in between:
* 1) Array Type
* 2) List binding pattern
* 3) Member access expression.
*/
    /**
     * Parse the component after the type-desc, of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context Parsing context in which this typed-binding-pattern occurs
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
        // isRoot = true: a missing binding pattern here is an error and triggers recovery.
        return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
    }
    /**
     * Parse the component after the type-desc of a typed-binding-pattern.
     *
     * @param typeDesc Starting type-desc of the typed-binding-pattern
     * @param context Parsing context
     * @param isRoot Whether this is the outermost typed-binding-pattern (non-root
     *               invocations may return the bare type-desc at a closing token)
     * @return Typed-binding pattern
     */
    private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                STNode bindingPattern = parseBindingPattern();
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case OPEN_BRACKET_TOKEN:
                // `T[` — ambiguous between array type-desc and list binding pattern;
                // resolved by the dedicated method below.
                STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
                assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
                return typedBindingPattern;
            case CLOSE_PAREN_TOKEN:
            case COMMA_TOKEN:
            case CLOSE_BRACKET_TOKEN:
            case CLOSE_BRACE_TOKEN:
                if (!isRoot) {
                    return typeDesc;
                }
                // Intentional fall-through: at the root, a closing token means the
                // binding pattern is missing; recover below.
            default:
                recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
                return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
        }
    }
    /**
     * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
     *
     * @param typeDescOrExpr Type desc or the expression at the start
     * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
     * @param allowAssignment Whether an assignment RHS is permitted after the bracketed part
     * @param context Parsing context
     * @return Parsed node
     */
    private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                          boolean allowAssignment, ParserRuleContext context) {
        startContext(ParserRuleContext.BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        if (isBracketedListEnd(peek().kind)) {
            // `T[]` — must be an array type-desc.
            return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
        }
        STNode member = parseBracketedListMember(isTypedBindingPattern);
        SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
        switch (currentNodeType) {
            case ARRAY_TYPE_DESC:
                STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
                return typedBindingPattern;
            case LIST_BINDING_PATTERN:
                STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            case INDEXED_EXPRESSION:
                return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
            case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
                // Still ambiguous after one member: fall through to the rhs handler below.
                break;
            case NONE:
            default:
                // If a comma follows, there is more than one member, so this must be
                // a list binding pattern. Note: `bindingPattern`/`typeDesc` reuse the
                // variables declared in the LIST_BINDING_PATTERN case above.
                STNode memberEnd = parseBracketedListMemberEnd();
                if (memberEnd != null) {
                    List<STNode> memberList = new ArrayList<>();
                    memberList.add(getBindingPattern(member, true));
                    memberList.add(memberEnd);
                    bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
                }
        }
        STNode closeBracket = parseCloseBracket();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
STNode closeBracket = parseCloseBracket();
endContext();
STNode keyExpr = STNodeFactory.createNodeList(member);
STNode memberAccessExpr =
STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACKET_TOKEN:
return true;
default:
return false;
}
}
    /**
     * Parse a member of an ambiguous bracketed list. This member could be:
     * 1) Array length
     * 2) Key expression of a member-access-expr
     * 3) A member-binding pattern of a list-binding-pattern.
     *
     * @param isTypedBindingPattern Is this in a definite typed-binding pattern
     * @return Parsed member node
     */
    private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
            case STRING_LITERAL_TOKEN:
                // Could be an array length (`T[5]`, `T[*]`) or a key expression.
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                return STNodeFactory.createEmptyNode();
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
            case ELLIPSIS_TOKEN:
            case OPEN_BRACKET_TOKEN:
                return parseStatementStartBracketedListMember();
            case IDENTIFIER_TOKEN:
                if (isTypedBindingPattern) {
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                // Intentional fall-through to the expression path below.
                break;
            default:
                if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                        isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                    // Falls through to the expression path below.
                    break;
                }
                ParserRuleContext recoverContext =
                        isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                                : ParserRuleContext.BRACKETED_LIST_MEMBER;
                recover(peek(), recoverContext);
                return parseBracketedListMember(isTypedBindingPattern);
        }
        // Shared tail: parse as an expression, but turn `_` into a wildcard BP.
        STNode expr = parseExpression();
        if (isWildcardBP(expr)) {
            return getWildcardBindingPattern(expr);
        }
        return expr;
    }
    /**
     * Treat the current node as an array, and parse the remainder of the binding pattern.
     *
     * @param typeDesc Type-desc
     * @param openBracket Open bracket
     * @param member Member (array length, or empty)
     * @param context Parsing context
     * @return Parsed node
     */
    private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
        typeDesc = getTypeDescFromExpr(typeDesc);
        // Re-interpret the ambiguous BRACKETED_LIST context as a type-desc context,
        // then open an ARRAY_TYPE_DESCRIPTOR context just for the close bracket.
        switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
        startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
        STNode closeBracket = parseCloseBracket();
        // Close the ARRAY_TYPE_DESCRIPTOR context and the (switched) outer context.
        endContext();
        endContext();
        return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
                context);
    }
private STNode parseBracketedListMemberEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
return parseBracketedListMemberEnd();
}
}
    /**
     * We reach here to break ambiguity of T[a]. This could be:
     * 1) Array Type Desc
     * 2) Member access on LHS
     * 3) Typed-binding-pattern
     *
     * @param typeDescOrExpr Type name or the expr that precede the open-bracket.
     * @param openBracket Open bracket
     * @param member Member
     * @param closeBracket Close bracket
     * @param isTypedBindingPattern Is this is a typed-binding-pattern.
     * @param allowAssignment Whether an assignment is allowed to follow (member-access LHS)
     * @param context Parsing context
     * @return Specific node that matches to T[a], after solving ambiguity.
     */
    private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                             STNode closeBracket, boolean isTypedBindingPattern,
                                                             boolean allowAssignment, ParserRuleContext context) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
            case OPEN_BRACE_TOKEN:
            case ERROR_KEYWORD:
                // A binding pattern follows: `T[a]` was an array type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            case OPEN_BRACKET_TOKEN:
                // Another `[` follows: array-of-array type-desc, or chained member access.
                if (isTypedBindingPattern) {
                    typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                    arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                    return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
                }
                STNode keyExpr = getKeyExpr(member);
                STNode expr =
                        STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
                return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
            case QUESTION_MARK_TOKEN:
                // `T[a]?` — optional array type-desc.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                        ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
                return parseTypedBindingPatternTypeRhs(typeDesc, context);
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                // `T[a] |` / `T[a] &` — still ambiguous between union/intersection
                // type-desc and binary expression; handled separately.
                return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                        isTypedBindingPattern);
            case IN_KEYWORD:
                // `in` only resolves the ambiguity inside foreach/from/join clauses.
                if (context != ParserRuleContext.FOREACH_STMT &&
                        context != ParserRuleContext.FROM_CLAUSE &&
                        context != ParserRuleContext.JOIN_CLAUSE) {
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case EQUAL_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // `=` is not valid in these contexts; recover below.
                    break;
                }
                if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                    return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
                }
                // `expr[a] = ...` — member access as an assignment LHS.
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            case SEMICOLON_TOKEN:
                if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                    // Foreach/from must have an `in` clause; recover below.
                    break;
                }
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            case CLOSE_BRACE_TOKEN:
            case COMMA_TOKEN:
                if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                    keyExpr = getKeyExpr(member);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                // Intentional fall-through to the default (expression-rhs) check.
            default:
                if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                    keyExpr = getKeyExpr(member);
                    typeDescOrExpr = getExpression(typeDescOrExpr);
                    return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                            closeBracket);
                }
                break;
        }
        ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
        if (isTypedBindingPattern) {
            recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
        }
        recover(peek(), recoveryCtx);
        return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
                isTypedBindingPattern, allowAssignment, context);
    }
private STNode getKeyExpr(STNode member) {
if (member == null) {
STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
return STNodeFactory.createNodeList(missingVarRef);
}
return STNodeFactory.createNodeList(member);
}
    /**
     * Builds a typed-binding-pattern from an ambiguous {@code T[member]} prefix.
     * A numeric or {@code *} member means the brackets were an array-length suffix
     * (so the variable name is still missing); otherwise the bracketed part is
     * re-interpreted as a list-binding-pattern.
     */
    private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                             STNode closeBracket) {
        STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
        if (!isEmpty(member)) {
            SyntaxKind memberKind = member.kind;
            if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
                // e.g. `T[5]` / `T[*]`: brackets are an array dimension; synthesize the
                // missing variable name so a typed-binding-pattern can still be produced.
                STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                        DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
                STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
                return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
            }
            // Otherwise the member becomes the single element of a list-binding-pattern.
            STNode bindingPattern = getBindingPattern(member, true);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
        STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
        STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
        return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
    }
/**
* Parse a union or intersection type-desc/binary-expression that involves ambiguous
* bracketed list in lhs.
* <p>
* e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
* <p>
* Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
* is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
* if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
* the type-desc, and <code>[b]</code> becomes the binding pattern.
*
* @param typeDescOrExpr Type desc or the expression
* @param openBracket Open bracket
* @param member Member
* @param closeBracket Close bracket
* @return Parsed node
*/
    private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                          STNode closeBracket, boolean isTypedBindingPattern) {
        STNode pipeOrAndToken = parseUnionOrIntersectionToken();
        STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
        if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
            // Rhs resolved to a binding pattern, so the whole lhs (including `[member]`)
            // must be a type-desc: merge it with the rhs type, keep the rhs binding pattern.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
            STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
            STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
            return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
        }
        if (isTypedBindingPattern) {
            // Caller already committed to a typed-binding-pattern but no binding pattern
            // followed: recover with a missing variable name.
            STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
            lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
            return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
        }
        // Everything is an expression: lhs becomes a member-access, `|`/`&` a binary operator.
        STNode keyExpr = getExpression(member);
        STNode containerExpr = getExpression(typeDescOrExpr);
        STNode lhsExpr =
                STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
        return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
                typedBindingPatternOrExpr);
    }
/**
* Merges two types separated by <code>|</code> or <code>&</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param pipeOrAndToken pipe or bitwise-and token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
if (pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN) {
return mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
} else {
return mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
}
}
/**
* Merges two types separated by <code>|</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param pipeToken pipe token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);
} else {
return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
}
}
/**
* Merges two types separated by <code>&</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param bitwiseAndToken bitwise-and token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
    private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
        // `&` binds tighter than `|`, so when the lhs is a union only its right-most member
        // participates in the intersection; the rest of the union is preserved around it.
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
                // (A|B) & (C&D)  ->  A | (B&C&D)
                rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
            } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
                // (A|B) & (C|D)  ->  A | (B&C) | D
                rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                        bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
                return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                        lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            } else {
                // (A|B) & C  ->  A | (B&C)
                rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);
                return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
            }
        }
        if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // A & (B|C)  ->  (A&B) | C
            STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // A & (B&C)  ->  A&B&C, keeping left associativity
            STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
            return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
        } else {
            return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
        }
    }
    /**
     * Inserts {@code typeDesc |} in front of the left-most member of a union type-desc,
     * preserving left associativity of the result.
     */
    private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                                  STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Recurse down the left spine to reach the left-most non-union member.
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }
    /**
     * Intersects {@code typeDesc} with the left-most member of a union type-desc
     * (descending through nested unions/intersections on the left spine), since
     * {@code &} binds tighter than {@code |}.
     */
    private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                         STUnionTypeDescriptorNode unionTypeDesc) {
        STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STUnionTypeDescriptorNode) leftTypeDesc));
        }
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // Left-most member is already an intersection: extend it instead of nesting.
            return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
    }
    /**
     * Inserts {@code typeDesc &} in front of the left-most member of an intersection
     * type-desc, preserving left associativity of the result.
     */
    private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,
                                                                STNode bitwiseAndToken,
                                                                STIntersectionTypeDescriptorNode intersectionTypeDesc) {
        STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;
        if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // Recurse down the left spine to the left-most non-intersection member.
            return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                    replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                            (STIntersectionTypeDescriptorNode) leftTypeDesc));
        }
        leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
    }
    /**
     * Applies the {@code [member]} array suffix to the right-most simple type within
     * {@code lhsTypeDesc}: the array dimension binds tighter than {@code |} and {@code &},
     * so {@code T|R[]} must become {@code T|(R[])}.
     */
    private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
        if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
            STNode middleTypeDesc =
                    getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
            lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            // Base case: a non-union/non-intersection type takes the dimension directly.
            lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
        }
        return lhsTypeDesc;
    }
/**
* Parse union (|) or intersection (&) type operator.
*
* @return pipe or bitwise and token
*/
private STNode parseUnionOrIntersectionToken() {
STToken token = peek();
if (token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
return parseUnionOrIntersectionToken();
}
}
/**
* Infer the type of the ambiguous bracketed list, based on the type of the member.
*
* @param memberNode Member node
* @return Inferred type of the bracketed list
*/
    private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
        if (isEmpty(memberNode)) {
            // `[]` carries no information; still ambiguous.
            return SyntaxKind.NONE;
        }
        if (isDefiniteTypeDesc(memberNode.kind)) {
            return SyntaxKind.TUPLE_TYPE_DESC;
        }
        switch (memberNode.kind) {
            case ASTERISK_LITERAL:
                // `[*]` is the inferred-array-length form.
                return SyntaxKind.ARRAY_TYPE_DESC;
            case CAPTURE_BINDING_PATTERN:
            case LIST_BINDING_PATTERN:
            case REST_BINDING_PATTERN:
            case MAPPING_BINDING_PATTERN:
            case WILDCARD_BINDING_PATTERN:
                return SyntaxKind.LIST_BINDING_PATTERN;
            case QUALIFIED_NAME_REFERENCE: // a qualified-name can only be a type here
            case REST_TYPE:
                return SyntaxKind.TUPLE_TYPE_DESC;
            case NUMERIC_LITERAL: // member access or array type
                if (isTypedBindingPattern) {
                    return SyntaxKind.ARRAY_TYPE_DESC;
                }
                return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
            case SIMPLE_NAME_REFERENCE: // member access or binding pattern
            case BRACKETED_LIST: // member access or binding pattern
            case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
                return SyntaxKind.NONE;
            case ERROR_CONSTRUCTOR:
                if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
            default:
                // Any other expression rules out a binding pattern unless the caller has
                // already committed to a typed-binding-pattern.
                if (isTypedBindingPattern) {
                    return SyntaxKind.NONE;
                }
                return SyntaxKind.INDEXED_EXPRESSION;
        }
    }
/*
* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
* The ambiguity lies in between:
* 1) Assignment that starts with list binding pattern
* 2) Var-decl statement that starts with tuple type
* 3) Statement that starts with list constructor, such as sync-send, etc.
*/
/**
* Parse any statement that starts with an open-bracket.
*
* @param annots Annotations attached to the statement.
* @return Parsed node
*/
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
        // Statement-root variant: open the assignment-or-var-decl context before delegating.
        startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
        return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
    }
private STNode parseMemberBracketedList() {
STNode annots = STNodeFactory.createEmptyNodeList();
return parseStatementStartsWithOpenBracket(annots, false, false);
}
/**
* The bracketed list at the start of a statement can be one of the following.
* 1) List binding pattern
* 2) Tuple type
* 3) List constructor
*
* @param isRoot Is this the root of the list
* @return Parsed node
*/
    private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
        startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
        STNode openBracket = parseOpenBracket();
        List<STNode> memberList = new ArrayList<>();
        // Keep parsing members until one of them disambiguates the whole list; each
        // `parseAs...` call takes over and finishes the construct.
        while (!isBracketedListEnd(peek().kind)) {
            STNode member = parseStatementStartBracketedListMember();
            SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
            switch (currentNodeType) {
                case TUPLE_TYPE_DESC:
                    // Member is a definite type-desc: finish it (e.g. trailing `[]`/`?`) and
                    // continue as a tuple type.
                    member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                    member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case MEMBER_TYPE_DESC:
                case REST_TYPE:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case LIST_BINDING_PATTERN:
                    return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case LIST_BP_OR_LIST_CONSTRUCTOR:
                    return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                    return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
                case NONE:
                default:
                    // Still ambiguous: buffer the member and keep going.
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
        }
        // Reached the close bracket with the list still ambiguous; decide from what follows.
        STNode closeBracket = parseCloseBracket();
        STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
                isRoot, possibleMappingField);
        return bracketedList;
    }
/**
* Parse a member of a list-binding-pattern, tuple-type-desc, or
* list-constructor-expr, when the parent is ambiguous.
*
* @return Parsed node
*/
private STNode parseStatementStartBracketedListMember() {
List<STNode> typeDescQualifiers = new ArrayList<>();
return parseStatementStartBracketedListMember(typeDescQualifiers);
}
    /**
     * Parses one member of an ambiguous bracketed list, keeping the result as ambiguous
     * as the lookahead allows. Type-desc qualifiers are parsed up front and reported as
     * invalid for member kinds that cannot take them.
     */
    private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
        parseTypeDescQualifiers(qualifiers);
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseMemberBracketedList();
            case IDENTIFIER_TOKEN:
                reportInvalidQualifierList(qualifiers);
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (isWildcardBP(identifier)) {
                    // `_` is a wildcard binding pattern.
                    STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                    return getWildcardBindingPattern(varName);
                }
                nextToken = peek();
                if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    // `T...` is a rest descriptor.
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                    // e.g. `T?`, `T|...`: must be a type-desc.
                    return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                }
                // Otherwise continue as an expression (still possibly a type reference).
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
            case OPEN_BRACE_TOKEN:
                reportInvalidQualifierList(qualifiers);
                // mapping-binding-pattern vs mapping-constructor: itself ambiguous.
                return parseMappingBindingPatterOrMappingConstructor();
            case ERROR_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorBindingPatternOrErrorConstructor();
                }
                // Plain `error` is the error type-desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case ELLIPSIS_TOKEN:
                reportInvalidQualifierList(qualifiers);
                return parseRestBindingOrSpreadMember();
            case XML_KEYWORD:
            case STRING_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // A following backtick means a template expression; otherwise a type-desc.
                if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                reportInvalidQualifierList(qualifiers);
                // A following `<` means a parameterized type-desc; otherwise an expression.
                if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr(qualifiers);
            case FUNCTION_KEYWORD:
                return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            case AT_TOKEN:
                return parseTupleMember();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    reportInvalidQualifierList(qualifiers);
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
                return parseStatementStartBracketedListMember(qualifiers);
        }
    }
private STNode parseRestBindingOrSpreadMember() {
STNode ellipsis = parseEllipsis();
STNode expr = parseExpression();
if (expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
return STNodeFactory.createRestBindingPatternNode(ellipsis, expr);
} else {
return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
}
}
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
STNode member, boolean isRoot) {
memberList.add(member);
STNode memberEnd = parseBracketedListMemberEnd();
STNode tupleTypeDescOrListCons;
if (memberEnd == null) {
STNode closeBracket = parseCloseBracket();
tupleTypeDescOrListCons =
parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
} else {
memberList.add(memberEnd);
tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
}
return tupleTypeDescOrListCons;
}
/**
* Parse tuple type desc or list constructor.
*
* @return Parsed node
*/
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
startContext(ParserRuleContext.BRACKETED_LIST);
STNode openBracket = parseOpenBracket();
List<STNode> memberList = new ArrayList<>();
return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
}
    private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                       boolean isRoot) {
        STToken nextToken = peek();
        // Same disambiguation loop as the statement-start variant, but restricted to the
        // tuple-type vs list-constructor alternatives.
        while (!isBracketedListEnd(nextToken.kind)) {
            STNode member = parseTupleTypeDescOrListConstructorMember(annots);
            SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
            switch (currentNodeType) {
                case LIST_CONSTRUCTOR:
                    return parseAsListConstructor(openBracket, memberList, member, isRoot);
                case REST_TYPE:
                case MEMBER_TYPE_DESC:
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC:
                    // Definite type-desc: finish any trailing type suffix and commit to tuple.
                    member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                    member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                    return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
                case TUPLE_TYPE_DESC_OR_LIST_CONST:
                default:
                    // Still ambiguous: buffer and continue.
                    memberList.add(member);
                    break;
            }
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd == null) {
                break;
            }
            memberList.add(memberEnd);
            nextToken = peek();
        }
        STNode closeBracket = parseCloseBracket();
        return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    }
    /**
     * Parses one member when the enclosing bracketed list is known to be either a
     * tuple-type-desc or a list-constructor (binding patterns already ruled out).
     */
    private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case OPEN_BRACKET_TOKEN:
                // Nested list: recursively ambiguous.
                return parseTupleTypeDescOrListConstructor(annots);
            case IDENTIFIER_TOKEN:
                STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                    // `T...` rest descriptor commits to a tuple type.
                    STNode ellipsis = parseEllipsis();
                    return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
                }
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
            case OPEN_BRACE_TOKEN:
                // No binding patterns possible here, so `{` must start a mapping constructor.
                return parseMappingConstructorExpr();
            case ERROR_KEYWORD:
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    return parseErrorConstructorExpr(false);
                }
                // Plain `error` is the error type-desc.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case XML_KEYWORD:
            case STRING_KEYWORD:
                // A following backtick means a template expression; otherwise a type-desc.
                if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                    return parseExpression(false);
                }
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            case TABLE_KEYWORD:
            case STREAM_KEYWORD:
                // A following `<` means a parameterized type-desc; otherwise an expression.
                if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                return parseExpression(false);
            case OPEN_PAREN_TOKEN:
                return parseTypeDescOrExpr();
            case AT_TOKEN:
                return parseTupleMember();
            default:
                if (isValidExpressionStart(nextToken.kind, 1)) {
                    return parseExpression(false);
                }
                if (isTypeStartingToken(nextToken.kind)) {
                    return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
                }
                recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);
                return parseTupleTypeDescOrListConstructorMember(annots);
        }
    }
    // Tuple-type/list-constructor members are classified exactly like statement-start
    // bracketed-list members, so simply delegate.
    private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
        return getStmtStartBracketedListType(memberNode);
    }
    /**
     * Resolves a closed, still-ambiguous bracketed list into a tuple-type-desc or a
     * list-constructor-expr, based on the token following the close bracket.
     */
    private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
                                                          boolean isRoot) {
        STNode tupleTypeOrListConst;
        switch (peek().kind) {
            case COMMA_TOKEN: // [a, b, c],
            case CLOSE_BRACE_TOKEN: // [a, b, c]}
            case CLOSE_BRACKET_TOKEN:
            case PIPE_TOKEN:
            case BITWISE_AND_TOKEN:
                if (!isRoot) {
                    // Inside another ambiguous construct: propagate the ambiguity upward.
                    endContext();
                    return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
                            closeBracket);
                }
                // else: deliberate fall-through — at the root these tokens are resolved
                // by the default branch below.
            default:
                if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                        (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                    // An expression continues: this was a list constructor.
                    members = getExpressionList(members, false);
                    STNode memberExpressions = STNodeFactory.createNodeList(members);
                    tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                            memberExpressions, closeBracket);
                    break;
                }
                // Otherwise this was a tuple type; also consume any trailing type suffix.
                STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));
                STNode tupleTypeDesc =
                        STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
                tupleTypeOrListConst =
                        parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
        }
        endContext();
        if (!isRoot) {
            return tupleTypeOrListConst;
        }
        STNode annots = STNodeFactory.createEmptyNodeList();
        return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
} | switch (nextTokenKind) { | private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
        // Decides whether the `service` keyword starts a service declaration, as opposed
        // to a variable declaration whose type starts with `service`.
        switch (peek(lookahead + 1).kind) {
            case IDENTIFIER_TOKEN:
                SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
                switch (tokenAfterIdentifier) {
                    case ON_KEYWORD: // service foo on ...
                    case OPEN_BRACE_TOKEN: // service foo { ...
                        return true;
                    case EQUAL_TOKEN: // service foo = ...
                    case SEMICOLON_TOKEN: // service foo;
                    case QUESTION_MARK_TOKEN: // service foo? ...
                        // `service`-typed var-decl forms, kept explicit for readability.
                        return false;
                    default:
                        // Not provable either way; assume a var-decl.
                        return false;
                }
            case ON_KEYWORD:
                // Service decl without a name: service on ...
                return true;
            default:
                return false;
        }
    }
/**
* Parse listener declaration, given the qualifier.
* <p>
* <code>
* listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
* </code>
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
    private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.LISTENER_DECL);
        STNode listenerKeyword = parseListenerKeyword();
        if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
            // The identifier may be either the (optional) type name or the variable name;
            // delegate to the shared const/listener disambiguation path.
            STNode listenerDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
            endContext();
            return listenerDecl;
        }
        // Non-identifier start: an explicit type-descriptor is present.
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }
/**
* Parse listener keyword.
*
* @return Parsed node
*/
private STNode parseListenerKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LISTENER_KEYWORD);
return parseListenerKeyword();
}
}
/**
* Parse constant declaration, given the qualifier.
* <p>
* <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @return Parsed node
*/
    private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
        // Context is closed by parseConstDecl (or the annotation-decl path inside it).
        startContext(ParserRuleContext.CONSTANT_DECL);
        STNode constKeyword = parseConstantKeyword();
        return parseConstDecl(metadata, qualifier, constKeyword);
    }
/**
* Parse the components that follows after the const keyword of a constant declaration.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the constant decl
* @param constKeyword Const keyword
* @return Parsed node
*/
    private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ANNOTATION_KEYWORD:
                // `const annotation ...` is an annotation declaration, not a constant.
                endContext();
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Identifier may be the type name or the variable name; disambiguate later.
                STNode constantDecl =
                        parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
                endContext();
                return constantDecl;
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.CONST_DECL_TYPE);
                return parseConstDecl(metadata, qualifier, constKeyword);
        }
        // Explicit (non-identifier) type-descriptor path: `const T name = expr;`.
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
boolean isListener) {
STNode varNameOrTypeName = parseStatementStartIdentifier();
return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
}
/**
* Parse the component that follows the first identifier in a const decl. The identifier
* can be either the type-name (a user defined type) or the var-name there the type-name
* is not present.
*
* @param qualifier Qualifier that precedes the constant decl
* @param keyword Keyword
* @param typeOrVarName Identifier that follows the const-keywoord
* @return Parsed node
*/
    private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                                  STNode typeOrVarName, boolean isListener) {
        if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // A qualified name can only be a type reference, never the variable name.
            STNode type = typeOrVarName;
            STNode variableName = parseVariableName();
            return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
        }
        STNode type;
        STNode variableName;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                // Two identifiers: first was the type, this one is the variable name.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // Single identifier followed by `=`: the type was omitted.
                variableName = ((STSimpleNameReferenceNode) typeOrVarName).name; // variableName is a token
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.CONST_DECL_RHS);
                return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
        }
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
STNode type, STNode variableName) {
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
if (isListener) {
return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
/**
* Parse const keyword.
*
* @return Parsed node
*/
private STNode parseConstantKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONST_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.CONST_KEYWORD);
return parseConstantKeyword();
}
}
/**
* Parse typeof expression.
* <p>
* <code>
* typeof-expr := typeof expression
* </code>
*
* @param isRhsExpr
* @return Typeof expression node
*/
    private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
        STNode typeofKeyword = parseTypeofKeyword();
        // The operand binds at unary precedence: `typeof a + b` is `(typeof a) + b`.
        STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
        return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
    }
/**
* Parse typeof-keyword.
*
* @return Typeof-keyword node
*/
private STNode parseTypeofKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TYPEOF_KEYWORD);
return parseTypeofKeyword();
}
}
/**
* Parse optional type descriptor given the type.
* <p>
* <code>optional-type-descriptor := type-descriptor `?`</code>
* </p>
*
* @param typeDescriptorNode Preceding type descriptor
* @return Parsed node
*/
    private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
        startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
        STNode questionMarkToken = parseQuestionMark();
        endContext();
        // Attachment of `?` to unions/intersections is handled by createOptionalTypeDesc.
        return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);
    }
    /**
     * Attaches a {@code ?} suffix to the right-most simple type within the given
     * type-desc: the suffix binds tighter than {@code |} and {@code &}, so
     * {@code T|R?} must become {@code T|(R?)}.
     */
    private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {
        if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            // Base case: `var?` is rejected here before wrapping.
            typeDescNode = validateForUsageOfVar(typeDescNode);
            typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);
        }
        return typeDescNode;
    }
/**
* Parse unary expression.
* <p>
* <code>
* unary-expr := + expression | - expression | ~ expression | ! expression
* </code>
*
* @param isRhsExpr
* @return Unary expression node
*/
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode unaryOperator = parseUnaryOperator();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}
/**
* Parse unary operator.
* <code>UnaryOperator := + | - | ~ | !</code>
*
* @return Parsed node
*/
private STNode parseUnaryOperator() {
    // Retry via error recovery until a unary operator token can be consumed.
    while (true) {
        STToken token = peek();
        if (isUnaryOperator(token.kind)) {
            return consume();
        }
        recover(token, ParserRuleContext.UNARY_OPERATOR);
    }
}
/**
* Check whether the given token kind is a unary operator.
*
* @param kind STToken kind
* @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
*/
private boolean isUnaryOperator(SyntaxKind kind) {
    // The four unary operators: + - ~ !
    return kind == SyntaxKind.PLUS_TOKEN
            || kind == SyntaxKind.MINUS_TOKEN
            || kind == SyntaxKind.NEGATION_TOKEN
            || kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
* Parse array type descriptor.
* <p>
* <code>
* array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]
* array-member-type-descriptor := type-descriptor
* array-length :=
* int-literal
* | constant-reference-expr
* | inferred-array-length
* inferred-array-length := *
* </code>
* </p>
*
* @param memberTypeDesc
* @return Parsed Node
*/
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
    // Parse `[ [array-length] ]` and combine it with the member type.
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracket = parseOpenBracket();
    STNode arrayLength = parseArrayLength();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return createArrayTypeDesc(memberTypeDesc, openBracket, arrayLength, closeBracket);
}
/**
 * Create an array type descriptor from a member type and one dimension's brackets,
 * validating the array length and flattening nested array types into a single
 * multi-dimensional array type descriptor.
 *
 * @param memberTypeDesc    Member type descriptor
 * @param openBracketToken  Open bracket token of this dimension
 * @param arrayLengthNode   Array length node, or {@code null} when absent
 * @param closeBracketToken Close bracket token of this dimension
 * @return Array type descriptor node
 */
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
                                   STNode closeBracketToken) {
    memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
    if (arrayLengthNode != null) {
        switch (arrayLengthNode.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                // Valid length forms: `*`, or a constant reference.
                break;
            case NUMERIC_LITERAL:
                // Only integer literals are valid lengths.
                SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
                if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // Intentional fall through: non-integer numeric literal is invalid.
            default:
                // Invalid length: attach it to the open bracket as invalid minutiae
                // with a diagnostic, and treat the dimension as having no length.
                openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
                        arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                arrayLengthNode = STNodeFactory.createEmptyNode();
        }
    }
    // Fix: use the diamond operator instead of a raw ArrayList (unchecked raw type).
    List<STNode> arrayDimensions = new ArrayList<>();
    if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {
        // Flatten `T[a][b]`: hoist the inner array's dimensions so the result is a
        // single array type descriptor over the innermost member type.
        STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;
        STNode innerArrayDimensions = innerArrayType.dimensions;
        int dimensionCount = innerArrayDimensions.bucketCount();
        for (int i = 0; i < dimensionCount; i++) {
            arrayDimensions.add(innerArrayDimensions.childInBucket(i));
        }
        memberTypeDesc = innerArrayType.memberTypeDesc;
    }
    STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode,
            closeBracketToken);
    arrayDimensions.add(arrayDimension);
    STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions);
    return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);
}
/**
* Parse array length.
* <p>
* <code>
* array-length :=
* int-literal
* | constant-reference-expr
* | inferred-array-length
* constant-reference-expr := variable-reference-expr
* </code>
* </p>
*
* @return Parsed array length
*/
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        // `*` (inferred length) and integer literals are parsed as basic literals.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
            return parseBasicLiteral();
        // `]` immediately after `[` means no length was given.
        case CLOSE_BRACKET_TOKEN:
            return STNodeFactory.createEmptyNode();
        // An identifier is a constant-reference-expr (possibly module-qualified).
        case IDENTIFIER_TOKEN:
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            // Anything else: recover and retry.
            recover(token, ParserRuleContext.ARRAY_LENGTH);
            return parseArrayLength();
    }
}
/**
* Parse annotations.
* <p>
 * <i>Note: In the Ballerina language specification, the
* annotations-list is specified as one-or-more annotations. And the usage is marked as
* optional annotations-list. However, for the consistency of the tree, here we make the
* annotation-list as zero-or-more annotations, and the usage is not-optional.</i>
* <p>
* <code>annots := annotation*</code>
*
* @return Parsed node
*/
private STNode parseOptionalAnnotations() {
    // Collect zero or more annotations; each one begins with `@`.
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    while (peek().kind == SyntaxKind.AT_TOKEN) {
        annotations.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
* Parse annotation list with at least one annotation.
*
* @return Annotation list
*/
private STNode parseAnnotations() {
    // At least one annotation is required, hence the do-while.
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    do {
        annotations.add(parseAnnotation());
    } while (peek().kind == SyntaxKind.AT_TOKEN);
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
* Parse annotation attachment.
* <p>
* <code>annotation := @ annot-tag-reference annot-value</code>
*
* @return Parsed node
*/
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    STNode annotReference;
    if (isPredeclaredIdentifier(peek().kind)) {
        annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
    } else {
        // No usable identifier follows `@`: fabricate a missing identifier so the
        // tree still has an annotation reference node.
        annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);
    }
    STNode annotValue;
    // The annot-value (a mapping constructor) is optional.
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        annotValue = parseMappingConstructorExpr();
    } else {
        annotValue = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
* Parse '@' token.
*
* @return Parsed node
*/
private STNode parseAtToken() {
    // Retry via error recovery until an `@` token can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.AT_TOKEN) {
            return consume();
        }
        recover(token, ParserRuleContext.AT);
    }
}
/**
* Parse metadata. Meta data consist of optional doc string and
* an annotations list.
* <p>
* <code>metadata := [DocumentationString] annots</code>
*
* @return Parse node
*/
private STNode parseMetaData() {
    STNode docString;
    STNode annotations;
    switch (peek().kind) {
        // Doc string first, then any annotations.
        case DOCUMENTATION_STRING:
            docString = parseMarkdownDocumentation();
            annotations = parseOptionalAnnotations();
            break;
        // Annotations only, no doc string.
        case AT_TOKEN:
            docString = STNodeFactory.createEmptyNode();
            annotations = parseOptionalAnnotations();
            break;
        default:
            // No metadata present at all.
            return STNodeFactory.createEmptyNode();
    }
    return createMetadata(docString, annotations);
}
/**
* Create metadata node.
*
* @return A metadata node
*/
private STNode createMetadata(STNode docString, STNode annotations) {
    // Only build a metadata node when at least one of the parts exists.
    if (docString != null || annotations != null) {
        return STNodeFactory.createMetadataNode(docString, annotations);
    }
    return STNodeFactory.createEmptyNode();
}
/**
* Parse type test expression.
* <code>
* type-test-expr := expression (is | !is) type-descriptor
* </code>
*
* @param lhsExpr Preceding expression of the is expression
* @return Is expression node
*/
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    // `expr is T` or `expr !is T`.
    STNode keyword = parseIsOrNotIsKeyword();
    STNode typeDesc = parseTypeDescriptorInExpression(isInConditionalExpr);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, keyword, typeDesc);
}
/**
* Parse `is` keyword or `!is` keyword.
*
* @return is-keyword or not-is-keyword node
*/
private STNode parseIsOrNotIsKeyword() {
    // Retry via error recovery until `is` or `!is` can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.IS_KEYWORD || token.kind == SyntaxKind.NOT_IS_KEYWORD) {
            return consume();
        }
        recover(token, ParserRuleContext.IS_KEYWORD);
    }
}
/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @return Local type definition statement node
*/
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    // `[annots] type identifier type-descriptor ;` inside a statement context.
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}
/**
* Parse statement which is only consists of an action or expression.
*
* @param annots Annotations
* @return Statement node
*/
private STNode parseExpressionStatement(STNode annots) {
    // Parse the action/expression, then wrap it as a statement.
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    return getExpressionAsStatement(parseActionOrExpressionInLhs(annots));
}
/**
* Parse statements that starts with an expression.
*
* @return Statement node
*/
private STNode parseStatementStartWithExpr(STNode annots) {
    // The statement kind is ambiguous until we see what follows the expression.
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    return parseStatementStartWithExprRhs(parseActionOrExpressionInLhs(annots));
}
/**
* Parse the component followed by the expression, at the beginning of a statement.
*
* @param expression Action or expression in LHS
* @return Statement node
*/
private STNode parseStatementStartWithExprRhs(STNode expression) {
    SyntaxKind nextTokenKind = peek().kind;
    // An action, or an expression directly followed by `;`, is already a full statement.
    if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
        return getExpressionAsStatement(expression);
    }
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            // `expr = ...` : this was really an assignment statement.
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case IDENTIFIER_TOKEN:
            // Intentional fall through: an identifier here is unexpected and is
            // handled by the recovery path below.
        default:
            if (isCompoundAssignment(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            // Choose the recovery context based on whether the expression could
            // legitimately stand alone as a statement.
            ParserRuleContext context;
            if (isPossibleExpressionStatement(expression)) {
                context = ParserRuleContext.EXPR_STMT_RHS;
            } else {
                context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
            }
            recover(peek(), context);
            return parseStatementStartWithExprRhs(expression);
    }
}
/**
 * Check whether the given expression/action kind is one that may legitimately
 * form an expression statement on its own (calls, checks, and actions).
 *
 * @param expression Parsed expression or action
 * @return <code>true</code> if it can stand alone as a statement
 */
private boolean isPossibleExpressionStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return true;
        default:
            return false;
    }
}
/**
 * Convert an already-parsed expression or action into the corresponding
 * statement node, consuming the trailing semicolon.
 *
 * @param expression Parsed expression or action
 * @return Statement node
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
            return parseCallStatement(expression);
        case CHECK_EXPRESSION:
            return parseCheckStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
        case CLIENT_RESOURCE_ACCESS_ACTION:
            return parseActionStatement(expression);
        default:
            // Not a valid expression statement. Still consume the semicolon and
            // build the node, but attach an invalid-expression-statement diagnostic.
            STNode semicolon = parseSemicolon();
            endContext();
            expression = getExpression(expression);
            STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                    expression, semicolon);
            exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
            return exprStmt;
    }
}
/**
 * Re-interpret an indexed expression (`expr[...]`) as an array type descriptor,
 * validating the index expression as an array length.
 *
 * @param indexedExpr Indexed expression to convert
 * @return Array type descriptor node
 */
private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
    STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
    STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
    // `T[]` — no length expression at all.
    if (lengthExprs.isEmpty()) {
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                indexedExpr.closeBracket);
    }
    STNode lengthExpr = lengthExprs.get(0);
    switch (lengthExpr.kind) {
        case SIMPLE_NAME_REFERENCE:
            // A missing name means the length was recovered-in; treat as absent.
            STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
            if (nameRef.name.isMissing()) {
                return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                        indexedExpr.closeBracket);
            }
            break;
        case ASTERISK_LITERAL:
        case QUALIFIED_NAME_REFERENCE:
            break;
        case NUMERIC_LITERAL:
            // Only integer literals are valid lengths.
            SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
            if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                break;
            }
            // Intentional fall through: non-integer numeric literal is invalid.
        default:
            // Invalid length: attach it to the open bracket as invalid minutiae
            // with a diagnostic and drop the length from the type descriptor.
            STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                    indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
            indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
            lengthExpr = STNodeFactory.createEmptyNode();
    }
    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
}
/**
* <p>
* Parse call statement, given the call expression.
* </p>
* <code>
* call-stmt := call-expr ;
* <br/>
* call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
* </code>
*
* @param expression Call expression associated with the call statement
* @return Call statement node
*/
private STNode parseCallStatement(STNode expression) {
    // Call statements and check statements share the same trailing-semicolon handling.
    return parseCallStatementOrCheckStatement(expression);
}
/**
* <p>
* Parse checking statement.
* </p>
* <code>
* checking-stmt := checking-expr ;
* <br/>
* checking-expr := checking-keyword expr ;
* </code>
*
* @param expression Checking expression associated with the checking statement
* @return Checking statement node
*/
private STNode parseCheckStatement(STNode expression) {
    // Delegates to the shared call/check statement builder.
    return parseCallStatementOrCheckStatement(expression);
}
/**
 * Finish a call/check statement by consuming the trailing semicolon and
 * wrapping the expression as a {@code CALL_STATEMENT}.
 */
private STNode parseCallStatementOrCheckStatement(STNode expression) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolonToken);
}
/**
 * Finish an action statement by consuming the trailing semicolon and wrapping
 * the action as an {@code ACTION_STATEMENT}.
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
* Parse client resource access action, given the starting expression.
* <br/><br/>
* <code>
* client-resource-access-action := expression "->" "/" [resource-access-path] ["." method-name] ["(" arg-list ")"]
* </code>
*
* @param expression Expression
* @param rightArrow Right arrow token
* @param slashToken Slash token
* @return Parsed node
*/
private STNode parseClientResourceAccessAction(STNode expression, STNode rightArrow, STNode slashToken,
                                               boolean isRhsExpr, boolean isInMatchGuard) {
    startContext(ParserRuleContext.CLIENT_RESOURCE_ACCESS_ACTION);
    // All trailing parts are optional; each helper returns an empty node/list when absent.
    STNode resourceAccessPath = parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
    STNode resourceAccessMethodDot = parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
    STNode resourceAccessMethodName = STNodeFactory.createEmptyNode();
    // A method name is only parsed when the preceding `.` is present.
    if (resourceAccessMethodDot != null) {
        resourceAccessMethodName = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
    }
    STNode resourceMethodCallArgList = parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
    endContext();
    return STNodeFactory.createClientResourceAccessActionNode(expression, rightArrow, slashToken,
            resourceAccessPath, resourceAccessMethodDot, resourceAccessMethodName, resourceMethodCallArgList);
}
private STNode parseOptionalResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
    // Default: empty path (the path is optional).
    STNode resourceAccessPath = STNodeFactory.createEmptyNodeList();
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACKET_TOKEN:
            resourceAccessPath = parseResourceAccessPath(isRhsExpr, isInMatchGuard);
            break;
        case DOT_TOKEN:
        case OPEN_PAREN_TOKEN:
            // Path omitted; the next component (method dot or arg list) follows.
            break;
        default:
            if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                break;
            }
            recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_PATH);
            return parseOptionalResourceAccessPath(isRhsExpr, isInMatchGuard);
    }
    return resourceAccessPath;
}
private STNode parseOptionalResourceAccessMethodDot(boolean isRhsExpr, boolean isInMatchGuard) {
    // Empty node means no `.method-name` part follows.
    STNode dotToken = STNodeFactory.createEmptyNode();
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DOT_TOKEN:
            dotToken = consume();
            break;
        case OPEN_PAREN_TOKEN:
            // Method dot omitted; arg list follows directly.
            break;
        default:
            if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                break;
            }
            recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_METHOD);
            return parseOptionalResourceAccessMethodDot(isRhsExpr, isInMatchGuard);
    }
    return dotToken;
}
private STNode parseOptionalResourceAccessActionArgList(boolean isRhsExpr, boolean isInMatchGuard) {
    // Empty node means no `( arg-list )` part follows.
    STNode argList = STNodeFactory.createEmptyNode();
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            argList = parseParenthesizedArgList();
            break;
        default:
            if (isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard)) {
                break;
            }
            recover(nextToken, ParserRuleContext.OPTIONAL_RESOURCE_ACCESS_ACTION_ARG_LIST);
            return parseOptionalResourceAccessActionArgList(isRhsExpr, isInMatchGuard);
    }
    return argList;
}
/**
* Parse resource access path.
* <br/><br/>
* <code>
* resource-access-path :=
* resource-access-segments ["/" resource-access-rest-segment]
* | resource-access-rest-segment
* <br/><br/>
* resource-access-segments := resource-access-segment ("/" resource-access-segment ")*
* <br/><br/>
* resource-access-segment := resource-path-segment-name | computed-resource-access-segment
* <br/><br/>
* resource-path-segment-name := identifier
* </code>
 * @return Parsed resource access path node list
*/
private STNode parseResourceAccessPath(boolean isRhsExpr, boolean isInMatchGuard) {
    List<STNode> pathSegmentList = new ArrayList<>();
    STNode pathSegment = parseResourceAccessSegment();
    pathSegmentList.add(pathSegment);
    STNode leadingSlash;
    STNode previousPathSegmentNode = pathSegment;
    while (!isEndOfResourceAccessPathSegments(peek(), isRhsExpr, isInMatchGuard)) {
        leadingSlash = parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard);
        // null slash means the segment list ended during recovery.
        if (leadingSlash == null) {
            break;
        }
        pathSegment = parseResourceAccessSegment();
        if (previousPathSegmentNode.kind == SyntaxKind.RESOURCE_ACCESS_REST_SEGMENT) {
            // A rest segment (`[... expr]`) must be last: fold any further slash and
            // segment into the previous node as invalid minutiae. The slash itself
            // carries no diagnostic; the error is reported on the segment.
            updateLastNodeInListWithInvalidNode(pathSegmentList, leadingSlash, null);
            updateLastNodeInListWithInvalidNode(pathSegmentList, pathSegment,
                    DiagnosticErrorCode.RESOURCE_ACCESS_SEGMENT_IS_NOT_ALLOWED_AFTER_REST_SEGMENT);
        } else {
            pathSegmentList.add(leadingSlash);
            pathSegmentList.add(pathSegment);
            previousPathSegmentNode = pathSegment;
        }
    }
    return STNodeFactory.createNodeList(pathSegmentList);
}
/**
 * Parse a single resource access segment: either a path-segment name
 * (identifier) or a computed/rest segment starting with {@code [}.
 */
private STNode parseResourceAccessSegment() {
    while (true) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                return consume();
            case OPEN_BRACKET_TOKEN:
                return parseComputedOrResourceAccessRestSegment(consume());
            default:
                // Recover and retry until one of the valid starters appears.
                recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_PATH_SEGMENT);
        }
    }
}
/**
* Parse computed resource segment or resource access rest segment.
* <code>
* <br/>
* computed-resource-access-segment := "[" expression "]"
* <br/>
* resource-access-rest-segment := "[" "..." expression "]"
* </code>
* @param openBracket Open bracket token
* @return Parsed node
*/
private STNode parseComputedOrResourceAccessRestSegment(STNode openBracket) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            // `[... expr]` — rest segment.
            STNode ellipsisToken = consume();
            STNode expression = parseExpression();
            STNode closeBracketToken = parseCloseBracket();
            return STNodeFactory.createResourceAccessRestSegmentNode(openBracket, ellipsisToken,
                    expression, closeBracketToken);
        default:
            if (isValidExprStart(nextToken.kind)) {
                // `[expr]` — computed segment. Note: reuses the variables declared
                // in the case above (they share the switch-block scope).
                expression = parseExpression();
                closeBracketToken = parseCloseBracket();
                return STNodeFactory.createComputedResourceAccessSegmentNode(openBracket, expression,
                        closeBracketToken);
            }
            recover(nextToken, ParserRuleContext.COMPUTED_SEGMENT_OR_REST_SEGMENT);
            return parseComputedOrResourceAccessRestSegment(openBracket);
    }
}
/**
* Parse resource access segment end.
*
* @return Parsed node
*/
private STNode parseResourceAccessSegmentRhs(boolean isRhsExpr, boolean isInMatchGuard) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SLASH_TOKEN:
            return consume();
        default:
            // Returning null signals the caller that the path segment list ended.
            if (isEndOfResourceAccessPathSegments(nextToken, isRhsExpr, isInMatchGuard)) {
                return null;
            }
            recover(nextToken, ParserRuleContext.RESOURCE_ACCESS_SEGMENT_RHS);
            return parseResourceAccessSegmentRhs(isRhsExpr, isInMatchGuard);
    }
}
private boolean isEndOfResourceAccessPathSegments(STToken nextToken, boolean isRhsExpr, boolean isInMatchGuard) {
    // `.` starts the method-name part and `(` starts the arg list; both end the path.
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.DOT_TOKEN || kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        return true;
    }
    return isEndOfActionOrExpression(nextToken, isRhsExpr, isInMatchGuard);
}
/**
 * Parse the `->` and dispatch to the remote-call / client-resource-access /
 * async-send disambiguation.
 */
private STNode parseRemoteMethodCallOrClientResourceAccessOrAsyncSendAction(STNode expression, boolean isRhsExpr,
                                                                            boolean isInMatchGuard) {
    STNode rightArrowToken = parseRightArrow();
    return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrowToken, isRhsExpr, isInMatchGuard);
}
/**
 * Disambiguate what follows `expr ->`: a client resource access action (starts
 * with `/`), a remote method call, or an async send to a peer worker.
 */
private STNode parseClientResourceAccessOrAsyncSendActionRhs(STNode expression, STNode rightArrow,
                                                             boolean isRhsExpr, boolean isInMatchGuard) {
    STNode name;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FUNCTION_KEYWORD:
            // `-> function` denotes the default worker: always an async send.
            STNode functionKeyword = consume();
            name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            return parseAsyncSendAction(expression, rightArrow, name);
        case CONTINUE_KEYWORD:
        case COMMIT_KEYWORD:
            // These keywords can act as peer-worker names.
            name = getKeywordAsSimpleNameRef();
            break;
        case SLASH_TOKEN:
            STNode slashToken = consume();
            return parseClientResourceAccessAction(expression, rightArrow, slashToken, isRhsExpr, isInMatchGuard);
        default:
            if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // An identifier is accepted as the method/worker name when a `(`
                // follows, when the action/expression ends here, or when the token
                // is a recovery-inserted missing token.
                STToken nextNextToken = getNextNextToken();
                if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                        isEndOfActionOrExpression(nextNextToken, isRhsExpr, isInMatchGuard) ||
                        nextToken.isMissing()) {
                    name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                    break;
                }
            }
            STToken token = peek();
            Solution solution = recover(token, ParserRuleContext.REMOTE_OR_RESOURCE_CALL_OR_ASYNC_SEND_RHS);
            // KEEP means the recovery decided the current token is usable as the name.
            if (solution.action == Action.KEEP) {
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            }
            return parseClientResourceAccessOrAsyncSendActionRhs(expression, rightArrow, isRhsExpr, isInMatchGuard);
    }
    return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
/**
 * After `expr -> name`, decide between a remote method call (a `(` follows)
 * and an async send (a statement/clause terminator follows).
 */
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        // Any token that can legally follow an action ends an async send.
        case SEMICOLON_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case COMMA_TOKEN:
        case FROM_KEYWORD:
        case JOIN_KEYWORD:
        case ON_KEYWORD:
        case LET_KEYWORD:
        case WHERE_KEYWORD:
        case ORDER_KEYWORD:
        case LIMIT_KEYWORD:
        case SELECT_KEYWORD:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            // `group`/`collect` query-clause keywords also end an async send.
            if (isGroupOrCollectKeyword(nextToken)) {
                return parseAsyncSendAction(expression, rightArrow, name);
            }
            recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);
            return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
}
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    // `expression -> peer-worker` — all parts already parsed; just build the node.
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}
/**
* Parse remote method call action.
* <p>
* <code>
* remote-method-call-action := expression -> method-name ( arg-list )
* <br/>
* async-send-action := expression -> peer-worker ;
* </code>
*
* @param expression LHS expression
* @param rightArrow right arrow token
* @param name remote method name
 * @return Remote method call action node
*/
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    // `expression -> method-name ( arg-list )` — parse the parenthesized args.
    STNode openParen = parseArgListOpenParenthesis();
    STNode args = parseArgsList();
    STNode closeParen = parseArgListCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParen, args,
            closeParen);
}
/**
* Parse right arrow (<code>-></code>) token.
*
* @return Parsed node
*/
private STNode parseRightArrow() {
    // Retry via error recovery until a `->` token can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
            return consume();
        }
        recover(token, ParserRuleContext.RIGHT_ARROW);
    }
}
/**
* Parse map type descriptor.
* map-type-descriptor := `map` type-parameter
*
* @return Parsed node
*/
private STNode parseMapTypeDescriptor(STNode mapKeyword) {
    // `map` keyword already consumed; parse its mandatory type parameter.
    return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, parseTypeParameter());
}
/**
* Parse parameterized type descriptor.
* parameterized-type-descriptor := `typedesc` [type-parameter]
* <br/> | `future` [type-parameter]
* <br/> | `xml` [type-parameter]
* <br/> | `error` [type-parameter]
*
* @return Parsed node
*/
private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
    // The type parameter is optional: only parse it when `<` follows the keyword.
    STNode typeParamNode = peek().kind == SyntaxKind.LT_TOKEN
            ? parseTypeParameter()
            : STNodeFactory.createEmptyNode();
    SyntaxKind typeDescKind = getParameterizedTypeDescKind(keywordToken);
    return STNodeFactory.createParameterizedTypeDescriptorNode(typeDescKind, keywordToken, typeParamNode);
}
/**
 * Map a parameterized-type keyword token to the syntax kind of the resulting
 * type descriptor. Anything other than typedesc/future/xml maps to error.
 */
private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
    if (keywordToken.kind == SyntaxKind.TYPEDESC_KEYWORD) {
        return SyntaxKind.TYPEDESC_TYPE_DESC;
    }
    if (keywordToken.kind == SyntaxKind.FUTURE_KEYWORD) {
        return SyntaxKind.FUTURE_TYPE_DESC;
    }
    if (keywordToken.kind == SyntaxKind.XML_KEYWORD) {
        return SyntaxKind.XML_TYPE_DESC;
    }
    // ERROR_KEYWORD and any other keyword fall back to the error type descriptor.
    return SyntaxKind.ERROR_TYPE_DESC;
}
/**
 * Parse <code> > </code> token.
*
* @return Parsed node
*/
private STNode parseGTToken() {
    // Retry via error recovery until a `>` token can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.GT_TOKEN) {
            return consume();
        }
        recover(token, ParserRuleContext.GT);
    }
}
/**
 * Parse <code> < </code> token.
*
* @return Parsed node
*/
private STNode parseLTToken() {
    // Retry via error recovery until a `<` token can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.LT_TOKEN) {
            return consume();
        }
        recover(token, ParserRuleContext.LT);
    }
}
/**
 * Parse nil literal. Here the nil literal refers only to <code>( )</code>.
*
* @return Parsed node
*/
private STNode parseNilLiteral() {
    // Nil literal is the empty parentheses pair `( )`.
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
* Parse annotation declaration, given the qualifier.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @param constKeyword Const keyword
* @return Parsed node
*/
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    // Consume `annotation`, then parse the rest of the declaration.
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode annotationDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return annotationDecl;
}
/**
* Parse annotation keyword.
*
* @return Parsed node
*/
private STNode parseAnnotationKeyword() {
    // Retry via error recovery until the `annotation` keyword can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
            return consume();
        }
        recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
    }
}
/**
* Parse the components that follows after the annotation keyword of a annotation declaration.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the constant decl
* @param constKeyword Const keyword
* @param annotationKeyword
* @return Parsed node
*/
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // An identifier is ambiguous: it may be the (optional) type or the annot-tag.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                // Unambiguous type start: fall through to parse `type annot-tag`.
                break;
            }
            recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
            return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
* Parse annotation tag.
* <p>
* <code>annot-tag := identifier</code>
*
* @return
*/
private STNode parseAnnotationTag() {
    // Retry via error recovery until an identifier (the annot-tag) can be consumed.
    while (true) {
        STToken token = peek();
        if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
            return consume();
        }
        recover(peek(), ParserRuleContext.ANNOTATION_TAG);
    }
}
/**
 * Parse an annotation declaration whose first token is an identifier, which may
 * be either a (possibly qualified) type name or the annot-tag itself.
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
    // A qualified name (`mod:Type`) cannot be an annot-tag, so it must be the type.
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    STToken nextToken = peek();
    // Another identifier, or a type-continuation token (`|`, `&`, `[`, `?`, ...),
    // means the first identifier was a type: finish the type then read the tag.
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
        STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    // Otherwise the identifier itself was the annot-tag; decide the rest from context.
    STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
}
/**
* Parse the component that follows the first identifier in an annotation decl. The identifier
* can be either the type-name (a user defined type) or the annot-tag, where the type-name
* is not present.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the annotation decl
* @param constKeyword Const keyword
* @param annotationKeyword Annotation keyword
* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
* @return Parsed node
*/
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken nextToken = peek();
    STNode typeDesc;
    STNode annotTag;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Two identifiers: the first was the type, the second is the annot-tag.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // Declaration ends / attach points begin: the identifier was the annot-tag.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);
            return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // No `on ...` clause: empty keyword and attach-point list.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNodeList();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            // `on` with nothing after it: report the missing attach point on the keyword.
            onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
 * Parse annotation attach points.
 * <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 * type
 * | class
 * | [object|service remote] function
 * | parameter
 * | return
 * | service
 * | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 * annotation
 * | external
 * | var
 * | const
 * | listener
 * | worker
 * </code>
 *
 * @return Parsed node
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first attach point, then alternate separator / attach point until the list ends.
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        if (leadingComma == null) {
            // Semicolon reached: the list is complete.
            break;
        }
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        if (attachPoint == null) {
            // parseAnnotationAttachPoint() returns null only at EOF. Fabricate a missing
            // `type` ident so the tree stays well-formed, and flag the attach point as missing.
            STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);
            STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);
            attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);
            attachPoint = SyntaxErrors.addDiagnostic(attachPoint,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    // If the last attach point ends in a missing (recovered) token and an identifier follows on
    // the same line, consume that identifier as invalid trivia of the last attach point.
    if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&
            !this.tokenReader.head().hasTrailingNewline()) {
        STToken nextNonVirtualToken = this.tokenReader.read();
        updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
 * Parse the token that ends or separates annotation attach points.
 *
 * @return The separating comma, or <code>null</code> when the list is terminated by a semicolon
 */
private STNode parseAttachPointEnd() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.SEMICOLON_TOKEN) {
        // End of the attach-point list; the semicolon is consumed by the caller.
        return null;
    }
    if (nextKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    recover(peek(), ParserRuleContext.ATTACH_POINT_END);
    return parseAttachPointEnd();
}
/**
 * Check whether the given token terminates an annotation attach-point list.
 *
 * @param tokenKind Kind of the token to check
 * @return <code>true</code> for EOF or semicolon; <code>false</code> otherwise
 */
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse a single annotation attach point.
 * <p>
 * Source-only attach points start with the <code>source</code> keyword; dual attach points
 * start directly with their first ident. At EOF, <code>null</code> is returned so the caller
 * can synthesize a missing attach point.
 *
 * @return Parsed node, or <code>null</code> at EOF
 */
private STNode parseAnnotationAttachPoint() {
    switch (peek().kind) {
        case EOF_TOKEN:
            return null;
        // These are source-only idents; they may only follow a `source` keyword, so seeing
        // one here means a `source` keyword is expected (recovered if missing).
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // Dual attach points start directly with their first ident; no `source` keyword.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
        case CLASS_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT);
            return parseAnnotationAttachPoint();
    }
}
/**
 * Parse the <code>source</code> keyword, recovering if it is not the next token.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    if (peek().kind != SyntaxKind.SOURCE_KEYWORD) {
        recover(peek(), ParserRuleContext.SOURCE_KEYWORD);
        return parseSourceKeyword();
    }
    return consume();
}
/**
 * Parse the attach point ident given the preceding <code>source</code> keyword.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := type | class | [object|service remote] function | parameter
 * | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    switch (peek().kind) {
        // Source-only idents are always a single token.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        // Dual idents may be followed by a second ident (e.g. `object function`).
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
            return parseAttachPointIdent(sourceKeyword);
    }
}
/**
 * Parse dual-attach-point ident.
 * <p>
 * Depending on the first ident, a second (or third) ident may follow:
 * <code>object function|field</code>, <code>resource function</code>,
 * <code>record field</code>, <code>service [remote function]</code>. All other first
 * idents form a single-ident attach point.
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            secondIdent = parseFieldIdent();
            break;
        case SERVICE_KEYWORD:
            // `service` may stand alone or continue as `service remote function`.
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            // Single-ident attach point.
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
    STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
}
/**
 * Parse the <code>remote</code> ident of a service attach point.
 *
 * @return Parsed node
 */
private STNode parseRemoteIdent() {
    if (peek().kind != SyntaxKind.REMOTE_KEYWORD) {
        recover(peek(), ParserRuleContext.REMOTE_IDENT);
        return parseRemoteIdent();
    }
    return consume();
}
/**
 * Parse service attach point.
 * <code>service-attach-point := service | service remote function</code>
 *
 * @param sourceKeyword Source keyword (possibly empty)
 * @param firstIdent The already-consumed <code>service</code> ident
 * @return Parsed node
 */
private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
    STNode identList;
    STToken token = peek();
    switch (token.kind) {
        case REMOTE_KEYWORD:
            // `service remote function` — three idents.
            STNode secondIdent = parseRemoteIdent();
            STNode thirdIdent = parseFunctionIdent();
            identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        case COMMA_TOKEN:
        case SEMICOLON_TOKEN:
            // Bare `service` attach point.
            identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        default:
            recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
    }
}
/**
 * Parse the ident that may follow an <code>object</code> ident: <code>function</code>
 * or <code>field</code>.
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.FUNCTION_KEYWORD || nextKind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    recover(peek(), ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return parseIdentAfterObjectIdent();
}
/**
 * Parse the <code>function</code> ident of an attach point.
 *
 * @return Parsed node
 */
private STNode parseFunctionIdent() {
    if (peek().kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(peek(), ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
    return consume();
}
/**
 * Parse the <code>field</code> ident of an attach point.
 *
 * @return Parsed node
 */
private STNode parseFieldIdent() {
    if (peek().kind != SyntaxKind.FIELD_KEYWORD) {
        recover(peek(), ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
    return consume();
}
/**
 * Parse XML namespace declaration.
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param isModuleVar whether this declaration appears at module level
 * @return Parsed node
 */
private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseSimpleConstExpr();
    // Keep discarding invalid URI expressions (attached as trailing trivia of the `xmlns`
    // keyword with a diagnostic) until a valid namespace URI is parsed.
    while (!isValidXMLNameSpaceURI(namespaceUri)) {
        xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
        namespaceUri = parseSimpleConstExpr();
    }
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    endContext();
    return xmlnsDecl;
}
/**
 * Parse the <code>xmlns</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseXMLNSKeyword() {
    if (peek().kind != SyntaxKind.XMLNS_KEYWORD) {
        recover(peek(), ParserRuleContext.XMLNS_KEYWORD);
        return parseXMLNSKeyword();
    }
    return consume();
}
/**
 * Check whether the given expression is a valid XML namespace URI: a string literal
 * or a (qualified) name reference.
 *
 * @param expr expression to check
 * @return <code>true</code> if the expression can serve as a namespace URI
 */
private boolean isValidXMLNameSpaceURI(STNode expr) {
    return expr.kind == SyntaxKind.STRING_LITERAL
            || expr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
}
/**
 * Parse a simple constant expression, wrapped in the constant-expression context.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseSimpleConstExprInternal();
    endContext();
    return constExpr;
}
/**
 * Parse simple constants expr.
 * <p>
 * Handles basic literals, signed numeric literals, the nil literal <code>()</code>,
 * and (qualified) identifier references.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExprInternal() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Unary sign followed by a numeric literal.
            return parseSignedIntOrFloat();
        case OPEN_PAREN_TOKEN:
            return parseNilLiteral();
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return parseSimpleConstExprInternal();
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration:
 * the optional <code>as prefix</code> clause and the terminating semicolon.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @param isModuleVar whether this declaration appears at module level
 * @return Parsed node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case AS_KEYWORD:
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            // No prefix clause; keep the empty nodes.
            break;
        default:
            recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
            return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    }
    STNode semicolon = parseSemicolon();
    // Module-level and local declarations have distinct node types.
    if (isModuleVar) {
        return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
 * Parse the XML namespace prefix (an identifier) of an xmlns declaration.
 *
 * @return Parsed node
 */
private STNode parseNamespacePrefix() {
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
        return parseNamespacePrefix();
    }
    return consume();
}
/**
 * Parse named worker declaration.
 * <p>
 * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
 * </code>
 *
 * @param annots Annotations attached to the worker decl
 * @param qualifiers Preceding transactional keyword in a list
 * @return Parsed node
 */
private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    // Only `transactional` is a valid qualifier here; others are attached as invalid nodes.
    STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
    STNode workerKeyword = parseWorkerKeyword();
    STNode workerName = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode workerBody = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,
            returnTypeDesc, workerBody);
}
/**
 * Validate the qualifier list of a named worker declaration and extract the
 * <code>transactional</code> keyword, if present.
 * <p>
 * Duplicates and disallowed qualifiers are not dropped silently: they are attached as
 * invalid-node minutiae (to the previous valid qualifier, to the next qualifier in the
 * list, or to the next token) with an appropriate diagnostic.
 *
 * @param qualifierList qualifiers that preceded the worker declaration
 * @return the transactional keyword, or an empty node when none was given
 */
private STNode getTransactionalKeyword(List<STNode> qualifierList) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate of an already-accepted qualifier.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last qualifier and invalid: attach it to the next real token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Invalid but not last: attach it to the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode transactionalKeyword;
    if (validatedList.isEmpty()) {
        transactionalKeyword = STNodeFactory.createEmptyNode();
    } else {
        transactionalKeyword = validatedList.get(0);
    }
    return transactionalKeyword;
}
/**
 * Parse an optional return type descriptor:
 * <code>returns [annots] type-descriptor</code>. Absence is an empty node.
 *
 * @return Parsed node, or an empty node when no `returns` keyword is present
 */
private STNode parseReturnTypeDescriptor() {
    if (peek().kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annotations = parseOptionalAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annotations, typeDesc);
}
/**
 * Parse the <code>worker</code> keyword.
 *
 * @return Parsed node
 */
private STNode parseWorkerKeyword() {
    if (peek().kind != SyntaxKind.WORKER_KEYWORD) {
        recover(peek(), ParserRuleContext.WORKER_KEYWORD);
        return parseWorkerKeyword();
    }
    return consume();
}
/**
 * Parse worker name.
 * <p>
 * <code>worker-name := identifier</code>
 *
 * @return Parsed node
 */
private STNode parseWorkerName() {
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.WORKER_NAME);
        return parseWorkerName();
    }
    return consume();
}
/**
 * Parse lock statement.
 * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
 *
 * @return Lock statement
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode lockBody = parseBlockNode();
    endContext();
    // The optional on-fail clause sits outside the lock-statement context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKeyword, lockBody, onFailClause);
}
/**
 * Parse the <code>lock</code> keyword.
 *
 * @return lock-keyword node
 */
private STNode parseLockKeyword() {
    if (peek().kind != SyntaxKind.LOCK_KEYWORD) {
        recover(peek(), ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
    return consume();
}
/**
 * Parse union type descriptor.
 * <code>union-type-descriptor := type-descriptor | type-descriptor</code>
 *
 * @param leftTypeDesc Type desc in the LHS os the union type desc.
 * @param context Current context.
 * @param isTypedBindingPattern whether this is parsed within a typed binding pattern
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                        boolean isTypedBindingPattern) {
    // The `|` token is already verified by the caller.
    STNode pipe = consume();
    STNode rhsTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.UNION);
    return mergeTypesWithUnion(leftTypeDesc, pipe, rhsTypeDesc);
}
/**
 * Create a union type descriptor node after validating both operand types.
 * <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when the operand types are unknown;
 * use the precedence-aware merge helpers instead.</i>
 *
 * @param leftTypeDesc lhs type
 * @param pipeToken pipe token
 * @param rightTypeDesc rhs type
 * @return a UnionTypeDescriptorNode
 */
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    // `var` is not allowed as a union member; validation happens left-to-right.
    return STNodeFactory.createUnionTypeDescriptorNode(validateForUsageOfVar(leftTypeDesc), pipeToken,
            validateForUsageOfVar(rightTypeDesc));
}
/**
 * Parse the pipe (<code>|</code>) token.
 *
 * @return parsed pipe token node
 */
private STNode parsePipeToken() {
    if (peek().kind != SyntaxKind.PIPE_TOKEN) {
        recover(peek(), ParserRuleContext.PIPE);
        return parsePipeToken();
    }
    return consume();
}
// Convenience overload: checks against the token after the next one, which is needed to
// disambiguate singleton type descriptors.
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    return isTypeStartingToken(nodeKind, getNextNextToken());
}
/**
 * Check whether a token can start a type descriptor.
 *
 * @param nextTokenKind kind of the candidate token
 * @param nextNextToken the token after it, used to detect singleton type descriptors
 * @return <code>true</code> if a type descriptor can start here
 */
private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN: // nil type descriptor '()'
        case MAP_KEYWORD: // map type desc
        case STREAM_KEYWORD: // stream type desc
        case TABLE_KEYWORD: // table type
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case DISTINCT_KEYWORD:
        case ISOLATED_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case TRANSACTION_KEYWORD:
            return true;
        default:
            // Parameterized types (e.g. typedesc, future), singleton types (literals),
            // and the built-in simple types also start a type descriptor.
            if (isParameterizedTypeToken(nextTokenKind)) {
                return true;
            }
            if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {
                return true;
            }
            return isSimpleType(nextTokenKind);
    }
}
/**
 * Check if the token kind is a type descriptor in terminal expression.
 * <p>
 * simple-type-in-expr :=
 * boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
 */
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
    // `var` and `readonly` are simple types but are not allowed in expression position.
    if (nodeKind == SyntaxKind.VAR_KEYWORD || nodeKind == SyntaxKind.READONLY_KEYWORD) {
        return false;
    }
    return isSimpleType(nodeKind);
}
/**
 * Check whether the given token kind denotes one of the simple built-in types
 * (including <code>var</code> and <code>readonly</code>).
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for a simple type keyword
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    return nodeKind == SyntaxKind.INT_KEYWORD
            || nodeKind == SyntaxKind.FLOAT_KEYWORD
            || nodeKind == SyntaxKind.DECIMAL_KEYWORD
            || nodeKind == SyntaxKind.BOOLEAN_KEYWORD
            || nodeKind == SyntaxKind.STRING_KEYWORD
            || nodeKind == SyntaxKind.BYTE_KEYWORD
            || nodeKind == SyntaxKind.JSON_KEYWORD
            || nodeKind == SyntaxKind.HANDLE_KEYWORD
            || nodeKind == SyntaxKind.ANY_KEYWORD
            || nodeKind == SyntaxKind.ANYDATA_KEYWORD
            || nodeKind == SyntaxKind.NEVER_KEYWORD
            || nodeKind == SyntaxKind.VAR_KEYWORD
            || nodeKind == SyntaxKind.READONLY_KEYWORD;
}
/**
 * Check whether the given token kind is a predeclared module prefix
 * (e.g. <code>int:</code>, <code>xml:</code>).
 *
 * @param nodeKind token kind to check
 * @return <code>true</code> for a predeclared prefix keyword
 */
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
    return nodeKind == SyntaxKind.BOOLEAN_KEYWORD
            || nodeKind == SyntaxKind.DECIMAL_KEYWORD
            || nodeKind == SyntaxKind.ERROR_KEYWORD
            || nodeKind == SyntaxKind.FLOAT_KEYWORD
            || nodeKind == SyntaxKind.FUNCTION_KEYWORD
            || nodeKind == SyntaxKind.FUTURE_KEYWORD
            || nodeKind == SyntaxKind.INT_KEYWORD
            || nodeKind == SyntaxKind.MAP_KEYWORD
            || nodeKind == SyntaxKind.OBJECT_KEYWORD
            || nodeKind == SyntaxKind.STREAM_KEYWORD
            || nodeKind == SyntaxKind.STRING_KEYWORD
            || nodeKind == SyntaxKind.TABLE_KEYWORD
            || nodeKind == SyntaxKind.TRANSACTION_KEYWORD
            || nodeKind == SyntaxKind.TYPEDESC_KEYWORD
            || nodeKind == SyntaxKind.XML_KEYWORD;
}
// A predeclared prefix only forms a qualified identifier when it is immediately followed
// by a colon (e.g. `int:MAX_VALUE`).
private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
    return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
}
/**
 * Map a built-in type keyword to the syntax kind of its type-descriptor node.
 *
 * @param typeKeyword keyword token kind; must be one of the built-in type keywords
 * @return the corresponding type-descriptor syntax kind
 */
private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        case READONLY_KEYWORD:
            return SyntaxKind.READONLY_TYPE_DESC;
        default:
            // Unreachable by contract; fall back to a type reference under -ea.
            assert false : typeKeyword + " is not a built-in type";
            return SyntaxKind.TYPE_REFERENCE;
    }
}
/**
 * Parse the <code>fork</code> keyword.
 *
 * @return Fork-keyword node
 */
private STNode parseForkKeyword() {
    if (peek().kind != SyntaxKind.FORK_KEYWORD) {
        recover(peek(), ParserRuleContext.FORK_KEYWORD);
        return parseForkKeyword();
    }
    return consume();
}
/**
 * Parse fork statement.
 * <code>fork-stmt := fork { named-worker-decl+ }</code>
 * <p>
 * Only named worker declarations are allowed in the body; anything else is attached as
 * invalid-node minutiae. An empty body yields a missing-worker diagnostic on the statement.
 *
 * @return Fork statement
 */
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        // validateStatement() returning true means the statement was already handled
        // (e.g. consumed as invalid) and must not be added.
        if (validateStatement(stmt)) {
            continue;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                // Non-worker statement: attach it as invalid trivia to the open brace
                // (if no worker yet) or to the last worker parsed.
                if (workers.isEmpty()) {
                    openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                } else {
                    updateLastNodeInListWithInvalidNode(workers, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                }
        }
    }
    STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
    STNode closeBrace = parseCloseBrace();
    endContext();
    STNode forkStmt =
            STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    if (isNodeListEmpty(namedWorkerDeclarations)) {
        // A fork statement requires at least one named worker.
        return SyntaxErrors.addDiagnostic(forkStmt,
                DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
    }
    return forkStmt;
}
/**
 * Parse trap expression.
 * <p>
 * <code>
 * trap-expr := trap expression
 * </code>
 *
 * @param isRhsExpr Whether this is a RHS expression or not
 * @param allowActions Allow actions
 * @param isInConditionalExpr Whether this is within a conditional expression
 * @return Trap expression node
 */
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    STNode trapKeyword = parseTrapKeyword();
    STNode trappedExpr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
    // A trapped action makes the whole node an action; otherwise it is an expression.
    SyntaxKind nodeKind = isAction(trappedExpr) ? SyntaxKind.TRAP_ACTION : SyntaxKind.TRAP_EXPRESSION;
    return STNodeFactory.createTrapExpressionNode(nodeKind, trapKeyword, trappedExpr);
}
/**
 * Parse the <code>trap</code> keyword.
 *
 * @return Trap-keyword node
 */
private STNode parseTrapKeyword() {
    if (peek().kind != SyntaxKind.TRAP_KEYWORD) {
        recover(peek(), ParserRuleContext.TRAP_KEYWORD);
        return parseTrapKeyword();
    }
    return consume();
}
/**
 * Parse list constructor expression.
 * <p>
 * <code>
 * list-constructor-expr := [ [ list-members ] ]
 * <br/>
 * list-members := list-member (, list-member)*
 * <br/>
 * list-member := expression | spread-member
 * <br/>
 * spread-member := ... expression
 * </code>
 *
 * @return Parsed node
 */
private STNode parseListConstructorExpr() {
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode memberList = parseListMembers();
    STNode closeBracket = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracket, memberList, closeBracket);
}
/**
 * Parse an optional list-member list. An immediately-closing bracket yields an empty list.
 *
 * @return Parsed node
 */
private STNode parseListMembers() {
    if (isEndOfListConstructor(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> listMembers = new ArrayList<>();
    listMembers.add(parseListMember());
    // Remaining members (with their separators) are parsed by the accumulating overload.
    return parseListMembers(listMembers);
}
/**
 * Continue parsing list members into the given accumulator until the constructor ends.
 *
 * @param listMembers already-parsed members (and separators)
 * @return Parsed node list
 */
private STNode parseListMembers(List<STNode> listMembers) {
    while (!isEndOfListConstructor(peek().kind)) {
        STNode memberEnd = parseListConstructorMemberEnd();
        if (memberEnd == null) {
            // Close bracket reached.
            break;
        }
        listMembers.add(memberEnd);
        listMembers.add(parseListMember());
    }
    return STNodeFactory.createNodeList(listMembers);
}
/**
 * Parse list member.
 * <p>
 * <code>
 * list-member := expression | spread-member
 * </code>
 *
 * @return Parsed node
 */
private STNode parseListMember() {
    // An ellipsis introduces a spread member; anything else is a plain expression.
    return peek().kind == SyntaxKind.ELLIPSIS_TOKEN ? parseSpreadMember() : parseExpression();
}
/**
 * Parse spread member.
 * <p>
 * <code>
 * spread-member := ... expression
 * </code>
 *
 * @return Parsed node
 */
private STNode parseSpreadMember() {
    STNode ellipsisToken = parseEllipsis();
    STNode spreadExpr = parseExpression();
    return STNodeFactory.createSpreadMemberNode(ellipsisToken, spreadExpr);
}
/**
 * Check whether the given token ends a list constructor.
 *
 * @param tokenKind token kind to check
 * @return <code>true</code> for EOF or a close bracket
 */
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
/**
 * Parse the separator between list-constructor members.
 *
 * @return the comma, or <code>null</code> when the close bracket is reached
 */
private STNode parseListConstructorMemberEnd() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
    return parseListConstructorMemberEnd();
}
/**
 * Parse foreach statement.
 * <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
 *
 * @return foreach statement
 */
private STNode parseForEachStatement() {
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode forEachKeyword = parseForEachKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
    STNode inKeyword = parseInKeyword();
    STNode iterable = parseActionOrExpression();
    STNode body = parseBlockNode();
    endContext();
    // The optional on-fail clause sits outside the foreach context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createForEachStatementNode(forEachKeyword, bindingPattern, inKeyword, iterable,
            body, onFailClause);
}
/**
 * Parse the <code>foreach</code> keyword.
 *
 * @return ForEach-keyword node
 */
private STNode parseForEachKeyword() {
    if (peek().kind != SyntaxKind.FOREACH_KEYWORD) {
        recover(peek(), ParserRuleContext.FOREACH_KEYWORD);
        return parseForEachKeyword();
    }
    return consume();
}
/**
 * Parse the <code>in</code> keyword.
 *
 * @return In-keyword node
 */
private STNode parseInKeyword() {
    if (peek().kind != SyntaxKind.IN_KEYWORD) {
        recover(peek(), ParserRuleContext.IN_KEYWORD);
        return parseInKeyword();
    }
    return consume();
}
/**
 * Parse type cast expression.
 * <p>
 * <code>
 * type-cast-expr := < type-cast-param > expression
 * <br/>
 * type-cast-param := [annots] type-descriptor | annots
 * </code>
 *
 * @param isRhsExpr Whether this is a RHS expression or not
 * @param allowActions Allow actions
 * @param isInConditionalExpr Whether this is within a conditional expression
 * @return Parsed node
 */
private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    startContext(ParserRuleContext.TYPE_CAST);
    STNode ltAngleBracket = parseLTToken();
    // The remainder (cast param, `>`, and the operand) is parsed by the overload.
    return parseTypeCastExpr(ltAngleBracket, isRhsExpr, allowActions, isInConditionalExpr);
}
/**
 * Parse the remainder of a type cast expression once the <code><</code> token is consumed.
 *
 * @param ltToken the already-consumed <code><</code> token
 * @param isRhsExpr Whether this is a RHS expression or not
 * @param allowActions Allow actions
 * @param isInConditionalExpr Whether this is within a conditional expression
 * @return Parsed node
 */
private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                 boolean isInConditionalExpr) {
    STNode castParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    // The type-cast context ends before parsing the operand expression.
    endContext();
    STNode operand =
            parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, castParam, gtToken, operand);
}
/**
 * Parse the parameter of a type cast: annotations, a type descriptor, or both.
 * <code>type-cast-param := [annots] type-descriptor | annots</code>
 *
 * @return Parsed type-cast-param node
 */
private STNode parseTypeCastParam() {
    STNode annot;
    STNode type;
    STToken token = peek();
    switch (token.kind) {
        case AT_TOKEN:
            // Annotations first; the type descriptor after them is optional.
            annot = parseOptionalAnnotations();
            token = peek();
            if (isTypeStartingToken(token.kind)) {
                type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            } else {
                type = STNodeFactory.createEmptyNode();
            }
            break;
        default:
            // No annotations: the type descriptor is mandatory.
            annot = STNodeFactory.createEmptyNode();
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
            break;
    }
    return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}
/**
 * Parse table constructor expression.
 * <p>
 * <code>
 * table-constructor-expr-rhs := [ [row-list] ]
 * </code>
 *
 * @param tableKeyword tableKeyword that precedes this rhs
 * @param keySpecifier keySpecifier that precedes this rhs
 * @return Parsed node
 */
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracket = parseOpenBracket();
    STNode rows = parseRowList();
    STNode closeBracket = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rows,
            closeBracket);
}
/**
 * Parse the <code>table</code> keyword.
 *
 * @return Table-keyword node
 */
private STNode parseTableKeyword() {
    if (peek().kind != SyntaxKind.TABLE_KEYWORD) {
        recover(peek(), ParserRuleContext.TABLE_KEYWORD);
        return parseTableKeyword();
    }
    return consume();
}
/**
 * Parse table rows.
 * <p>
 * <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseRowList() {
    if (isEndOfTableRowList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> rows = new ArrayList<>();
    rows.add(parseMappingConstructorExpr());
    while (!isEndOfTableRowList(peek().kind)) {
        STNode rowEnd = parseTableRowEnd();
        if (rowEnd == null) {
            // Close bracket (or EOF) reached.
            break;
        }
        rows.add(rowEnd);
        rows.add(parseMappingConstructorExpr());
    }
    return STNodeFactory.createNodeList(rows);
}
/**
 * Check whether the given token ends the row list of a table constructor.
 *
 * @param tokenKind token kind to check
 * @return <code>true</code> when the row list ends here
 */
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    if (tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return true;
    }
    // A comma or an open brace continues the row list.
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return false;
    }
    return isEndOfMappingConstructor(tokenKind);
}
/**
 * Parse the separator between table rows.
 *
 * @return the comma, or <code>null</code> at a close bracket or EOF
 */
private STNode parseTableRowEnd() {
    SyntaxKind nextKind = peek().kind;
    if (nextKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.TABLE_ROW_END);
    return parseTableRowEnd();
}
/**
 * Parse key specifier.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @return Parsed node
 */
private STNode parseKeySpecifier() {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeyword = parseKeyKeyword();
    STNode openParenToken = parseOpenParenthesis();
    STNode fieldNameList = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeyword, openParenToken, fieldNameList, closeParenToken);
}
/**
 * Parse the <code>key</code> keyword. Since <code>key</code> is not a reserved word,
 * it may arrive as an identifier token and is then re-branded as a keyword.
 *
 * @return Key-keyword node
 */
private STNode parseKeyKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case KEY_KEYWORD:
            return consume();
        case IDENTIFIER_TOKEN:
            if (isKeyKeyword(nextToken)) {
                // `key` lexed as an identifier: convert it to a keyword token.
                return getKeyKeyword(consume());
            }
            break;
        default:
            break;
    }
    recover(nextToken, ParserRuleContext.KEY_KEYWORD);
    return parseKeyKeyword();
}
/**
 * Check whether the given token is the soft keyword <code>key</code> lexed as an identifier.
 *
 * @param token token to check
 * @return <code>true</code> when the token is an identifier spelling "key"
 */
static boolean isKeyKeyword(STToken token) {
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        return false;
    }
    return LexerTerminals.KEY.equals(token.text());
}
// Re-brand an identifier token spelling "key" as a KEY_KEYWORD token, preserving its
// minutiae and diagnostics.
private STNode getKeyKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
            token.diagnostics());
}
// Re-brand a token as an UNDERSCORE_KEYWORD token, preserving its minutiae and diagnostics.
private STToken getUnderscoreKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
            token.trailingMinutiae(), token.diagnostics());
}
/**
 * Parse field names.
 * <p>
 * <code>field-name-list := [ field-name (, field-name)* ]</code>
 *
 * @return Parsed node
 */
private STNode parseFieldNames() {
    if (isEndOfFieldNamesList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> fieldNames = new ArrayList<>();
    fieldNames.add(parseVariableName());
    // Separators are kept in the list alongside the names.
    while (!isEndOfFieldNamesList(peek().kind)) {
        fieldNames.add(parseComma());
        fieldNames.add(parseVariableName());
    }
    return STNodeFactory.createNodeList(fieldNames);
}
/**
 * Check whether the given token ends a field-name list: anything other than a comma
 * or an identifier terminates the list.
 *
 * @param tokenKind token kind to check
 * @return <code>true</code> when the list ends here
 */
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    return tokenKind != SyntaxKind.COMMA_TOKEN && tokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
/**
 * Parse the <code>error</code> keyword.
 *
 * @return Parsed error-keyword node
 */
private STNode parseErrorKeyword() {
    if (peek().kind != SyntaxKind.ERROR_KEYWORD) {
        recover(peek(), ParserRuleContext.ERROR_KEYWORD);
        return parseErrorKeyword();
    }
    return consume();
}
/**
 * Parse stream type descriptor.
 * <p>
 * stream-type-descriptor := stream [stream-type-parameters]
 * <br/>
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @param streamKeywordToken the already-consumed stream keyword
 * @return Parsed stream type descriptor node
 */
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
    // The type-parameter list is optional; it only exists when a `<` follows.
    STNode typeParams = peek().kind == SyntaxKind.LT_TOKEN
            ? parseStreamTypeParamsNode()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, typeParams);
}
/**
 * Parse stream type params node.
 * <p>
 * stream-type-parameters := < type-descriptor [, type-descriptor]>
 * </p>
 *
 * @return Parsed stream type params node
 */
private STNode parseStreamTypeParamsNode() {
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode firstTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    // The second (optional) type parameter and the `>` are parsed by the overload.
    STNode typeParams = parseStreamTypeParamsNode(ltToken, firstTypeDesc);
    endContext();
    return typeParams;
}
/**
 * Parse the remainder of a stream type parameter list, given the already parsed
 * <code>&lt;</code> token and the first type descriptor.
 *
 * @param ltToken          Parsed <code>&lt;</code> token
 * @param leftTypeDescNode First type descriptor in the parameter list
 * @return Parsed stream type params node
 */
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken;
    STNode rightTypeDescNode;
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        // A second type descriptor follows the comma.
        commaToken = parseComma();
        rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    } else if (nextTokenKind == SyntaxKind.GT_TOKEN) {
        // Single-parameter form: no comma, no second type.
        commaToken = STNodeFactory.createEmptyNode();
        rightTypeDescNode = STNodeFactory.createEmptyNode();
    } else {
        recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
        return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    }
    STNode gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}
/**
 * Parse let expression.
 * <p>
 * <code>
 * let-expr := let let-var-decl [, let-var-decl]* in expression
 * </code>
 *
 * @param isRhsExpr           Whether this expression occurs on the RHS of a statement
 * @param isInConditionalExpr Whether this occurs within a conditional expression
 * @return Parsed node
 */
private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDecls = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr, false);
    STNode inKeyword = parseInKeyword();
    // At least one let-var-decl is mandatory; flag the keyword when none was parsed.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    STNode exprInLet = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false,
            isInConditionalExpr);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDecls, inKeyword, exprInLet);
}
/**
 * Parse let-keyword.
 *
 * @return Let-keyword node
 */
private STNode parseLetKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LET_KEYWORD) {
        // Run error recovery and retry until the keyword can be consumed.
        recover(nextToken, ParserRuleContext.LET_KEYWORD);
        return parseLetKeyword();
    }
    return consume();
}
/**
 * Parse let variable declarations.
 * <p>
 * <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
 *
 * @param context      Context in which the declarations occur (let-expr or let-clause)
 * @param isRhsExpr    Whether the enclosing expression is on the RHS
 * @param allowActions Whether actions are allowed in initializer expressions
 * @return Node list of let variable declarations (may be empty)
 */
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
    startContext(context);
    if (isEndOfLetVarDeclarations(peek(), getNextNextToken())) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // First declaration, then alternating comma/declaration pairs.
    List<STNode> declarations = new ArrayList<>();
    declarations.add(parseLetVarDecl(context, isRhsExpr, allowActions));
    while (!isEndOfLetVarDeclarations(peek(), getNextNextToken())) {
        declarations.add(parseComma());
        declarations.add(parseLetVarDecl(context, isRhsExpr, allowActions));
    }
    endContext();
    return STNodeFactory.createNodeList(declarations);
}
/**
 * Check whether the let-var-decl list ends at the given token.
 * Commas and annotations continue the list; <code>in</code> always ends it;
 * otherwise the list ends unless a new typed binding pattern can start here.
 *
 * @param nextToken     Next token
 * @param nextNextToken Token after next
 * @return <code>true</code> if the declaration list ends here
 */
static boolean isEndOfLetVarDeclarations(STToken nextToken, STToken nextNextToken) {
    SyntaxKind tokenKind = nextToken.kind;
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.AT_TOKEN) {
        return false;
    }
    if (tokenKind == SyntaxKind.IN_KEYWORD) {
        return true;
    }
    return isGroupOrCollectKeyword(nextToken) || !isTypeStartingToken(tokenKind, nextNextToken);
}
/**
 * Parse let variable declaration.
 * <p>
 * <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
 *
 * @param context      Context of the declaration (let-expr or let-clause)
 * @param isRhsExpr    Whether the enclosing expression is on the RHS
 * @param allowActions Whether actions are allowed in the initializer
 * @return Parsed node
 */
private STNode parseLetVarDecl(ParserRuleContext context, boolean isRhsExpr, boolean allowActions) {
    STNode annot = parseOptionalAnnotations();
    STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
    STNode assign = parseAssignOp();
    // Let-clause initializers use QUERY precedence; let-expression ones use ANON_FUNC_OR_LET.
    STNode initExpr;
    if (context == ParserRuleContext.LET_CLAUSE_LET_VAR_DECL) {
        initExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
    } else {
        initExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
    }
    return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, initExpr);
}
/**
 * Parse raw backtick string template expression.
 * <p>
 * <code>BacktickString := `expression`</code>
 *
 * @return Template expression node
 */
private STNode parseTemplateExpression() {
    STNode emptyType = STNodeFactory.createEmptyNode();
    // NOTE(review): the ending backtick is parsed under TEMPLATE_START as well — confirm
    // whether TEMPLATE_END was intended for the closing token.
    STNode openBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContent();
    STNode closeBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, emptyType, openBacktick,
            content, closeBacktick);
}
/**
 * Parse the content of a backtick template as a list of template items
 * (literal template-string tokens and interpolations).
 *
 * @return Node list of template items
 */
private STNode parseTemplateContent() {
    List<STNode> contentItems = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        contentItems.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(contentItems);
}
/**
 * Check whether backtick template content ends at the given token kind.
 * Content ends at EOF or at the closing backtick.
 *
 * @param kind Kind of the next token
 * @return <code>true</code> if the content ends here
 */
private boolean isEndOfBacktickContent(SyntaxKind kind) {
    return kind == SyntaxKind.EOF_TOKEN || kind == SyntaxKind.BACKTICK_TOKEN;
}
/**
 * Parse a single template item: an interpolation if one starts here,
 * otherwise the next literal token.
 *
 * @return Template item node
 */
private STNode parseTemplateItem() {
    return peek().kind == SyntaxKind.INTERPOLATION_START_TOKEN ? parseInterpolation() : consume();
}
/**
 * Parse string template expression.
 * <p>
 * <code>string-template-expr := string ` expression `</code>
 *
 * @return String template expression node
 */
private STNode parseStringTemplateExpression() {
    STNode stringKeyword = parseStringKeyword();
    STNode openBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContent();
    STNode closeBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, stringKeyword,
            openBacktick, content, closeBacktick);
}
/**
 * Parse <code>string</code> keyword.
 *
 * @return string keyword node
 */
private STNode parseStringKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STRING_KEYWORD) {
        // Run error recovery and retry until the keyword can be consumed.
        recover(nextToken, ParserRuleContext.STRING_KEYWORD);
        return parseStringKeyword();
    }
    return consume();
}
/**
 * Parse XML template expression.
 * <p>
 * <code>xml-template-expr := xml BacktickString</code>
 *
 * @return XML template expression
 */
private STNode parseXMLTemplateExpression() {
    STNode xmlKeyword = parseXMLKeyword();
    STNode openBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (openBacktick.isMissing()) {
        // No backtick string at all: synthesize an empty template with a diagnostic.
        return createMissingTemplateExpressionNode(xmlKeyword, SyntaxKind.XML_TEMPLATE_EXPRESSION);
    }
    STNode xmlContent = parseTemplateContentAsXML();
    STNode closeBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
            openBacktick, xmlContent, closeBacktick);
}
/**
 * Parse <code>xml</code> keyword.
 *
 * @return xml keyword node
 */
private STNode parseXMLKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XML_KEYWORD) {
        // Run error recovery and retry until the keyword can be consumed.
        recover(nextToken, ParserRuleContext.XML_KEYWORD);
        return parseXMLKeyword();
    }
    return consume();
}
/**
 * Parse the content of the template string as XML. The items are first read the
 * same way as a raw backtick template; literal pieces are concatenated with an
 * <code>${}</code> placeholder substituted for each interpolation, and the
 * resulting string is handed to the XML parser along with the interpolations.
 *
 * @return XML node
 */
private STNode parseTemplateContentAsXML() {
    ArrayDeque<STNode> interpolations = new ArrayDeque<>();
    StringBuilder rawContent = new StringBuilder();
    while (!isEndOfBacktickContent(peek().kind)) {
        STNode item = parseTemplateItem();
        if (item.kind == SyntaxKind.TEMPLATE_STRING) {
            rawContent.append(((STToken) item).text());
        } else {
            // Interpolation: keep a placeholder in the text, and queue the expression
            // for the XML parser to splice back in.
            rawContent.append("${}");
            interpolations.add(item);
        }
    }
    CharReader charReader = CharReader.from(rawContent.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, interpolations);
    return xmlParser.parse();
}
/**
 * Parse regular expression constructor.
 * <p>
 * <code>regexp-constructor-expr := re BacktickString</code>
 *
 * @return Regular expression template expression
 */
private STNode parseRegExpTemplateExpression() {
    // The `re` keyword has already been validated by the caller.
    STNode reKeyword = consume();
    STNode openBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (openBacktick.isMissing()) {
        // No backtick string at all: synthesize an empty template with a diagnostic.
        return createMissingTemplateExpressionNode(reKeyword, SyntaxKind.REGEX_TEMPLATE_EXPRESSION);
    }
    STNode regExpContent = parseTemplateContentAsRegExp();
    STNode closeBacktick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.REGEX_TEMPLATE_EXPRESSION, reKeyword,
            openBacktick, regExpContent, closeBacktick);
}
/**
 * Create a template expression whose backtick string is entirely missing,
 * attaching a missing-backtick-string diagnostic to it.
 *
 * @param keyword Leading keyword of the template (e.g. xml, re)
 * @param kind    Kind of template expression to create
 * @return Template expression node with missing backtick tokens
 */
private STNode createMissingTemplateExpressionNode(STNode keyword, SyntaxKind kind) {
    STNode emptyContent = STAbstractNodeFactory.createEmptyNodeList();
    STNode missingOpenBacktick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode missingCloseBacktick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode templateExpr = STNodeFactory.createTemplateExpressionNode(kind, keyword, missingOpenBacktick,
            emptyContent, missingCloseBacktick);
    return SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);
}
/**
 * Parse the content of the template string as a regular expression. The items
 * are read in REGEXP lexer mode the same way as a raw backtick template;
 * literal pieces are concatenated with an <code>${}</code> placeholder
 * substituted for each interpolation, and the resulting string is handed to
 * the regexp parser along with the interpolations.
 *
 * @return Template expression node
 */
private STNode parseTemplateContentAsRegExp() {
    // Literal regexp text must be tokenized in REGEXP mode.
    this.tokenReader.startMode(ParserMode.REGEXP);
    ArrayDeque<STNode> interpolations = new ArrayDeque<>();
    StringBuilder rawContent = new StringBuilder();
    while (!isEndOfBacktickContent(peek().kind)) {
        STNode item = parseTemplateItem();
        if (item.kind == SyntaxKind.TEMPLATE_STRING) {
            rawContent.append(((STToken) item).text());
        } else {
            // Interpolation: keep a placeholder in the text, and queue the expression
            // for the regexp parser to splice back in.
            rawContent.append("${}");
            interpolations.add(item);
        }
    }
    this.tokenReader.endMode();
    CharReader charReader = CharReader.from(rawContent.toString());
    AbstractTokenReader tokenReader = new TokenReader(new RegExpLexer(charReader));
    RegExpParser regExpParser = new RegExpParser(tokenReader, interpolations);
    return regExpParser.parse();
}
/**
 * Parse interpolation of a back-tick string.
 * <p>
 * <code>
 * interpolation := ${ expression }
 * </code>
 *
 * @return Interpolation node
 */
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();
    // Attach any stray tokens before the closing brace to the expression as invalid minutiae.
    while (!isEndOfInterpolation()) {
        STToken strayToken = consume();
        expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, strayToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, strayToken.text());
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}
/**
 * Check whether the current interpolation ends at the next token. It ends at
 * EOF, at the closing backtick, or at a close brace when the lexer is no
 * longer in an interpolation mode.
 *
 * @return <code>true</code> if the interpolation ends here
 */
private boolean isEndOfInterpolation() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.BACKTICK_TOKEN) {
        return true;
    }
    // A '}' terminates the interpolation only once the lexer has left interpolation modes.
    ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION
            && currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
}
/**
 * Parse interpolation start token.
 * <p>
 * <code>interpolation-start := ${</code>
 *
 * @return Interpolation start token
 */
private STNode parseInterpolationStart() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.INTERPOLATION_START_TOKEN) {
        // Run error recovery and retry until the token can be consumed.
        recover(nextToken, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
    return consume();
}
/**
 * Parse back-tick token.
 *
 * @param ctx Context to recover in (template start or end)
 * @return Back-tick token
 */
private STNode parseBacktickToken(ParserRuleContext ctx) {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BACKTICK_TOKEN) {
        // Run error recovery and retry until the token can be consumed.
        recover(nextToken, ctx);
        return parseBacktickToken(ctx);
    }
    return consume();
}
/**
 * Parse table type descriptor.
 * <p>
 * table-type-descriptor := table row-type-parameter [key-constraint]
 * row-type-parameter := type-parameter
 * key-constraint := key-specifier | key-type-constraint
 * key-specifier := key ( [ field-name (, field-name)* ] )
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @param tableKeywordToken Already-parsed <code>table</code> keyword
 * @return Parsed table type desc node.
 */
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
    STNode rowTypeParam = parseRowTypeParameter();
    STNode keyConstraint;
    if (isKeyKeyword(peek())) {
        // Optional key constraint follows.
        STNode keyKeyword = getKeyKeyword(consume());
        keyConstraint = parseKeyConstraint(keyKeyword);
    } else {
        keyConstraint = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParam, keyConstraint);
}
/**
 * Parse row type parameter node.
 * <p>
 * row-type-parameter := type-parameter
 * </p>
 *
 * @return Parsed node.
 */
private STNode parseRowTypeParameter() {
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParam = parseTypeParameter();
    endContext();
    return rowTypeParam;
}
/**
 * Parse type parameter node.
 * <p>
 * type-parameter := &lt; type-descriptor &gt;
 * </p>
 *
 * @return Parsed node
 */
private STNode parseTypeParameter() {
    STNode ltToken = parseLTToken();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode gtToken = parseGTToken();
    return STNodeFactory.createTypeParameterNode(ltToken, typeDesc, gtToken);
}
/**
 * Parse key constraint.
 * <p>
 * key-constraint := key-specifier | key-type-constraint
 * </p>
 *
 * @param keyKeywordToken Already-parsed <code>key</code> keyword
 * @return Parsed node.
 */
private STNode parseKeyConstraint(STNode keyKeywordToken) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // key ( field-names )
        return parseKeySpecifier(keyKeywordToken);
    }
    if (nextTokenKind == SyntaxKind.LT_TOKEN) {
        // key < type >
        return parseKeyTypeConstraint(keyKeywordToken);
    }
    recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);
    return parseKeyConstraint(keyKeywordToken);
}
/**
 * Parse key specifier given parsed key keyword token.
 * <p>
 * <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
 *
 * @param keyKeywordToken Already-parsed <code>key</code> keyword
 * @return Parsed node
 */
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParen = parseOpenParenthesis();
    STNode fieldNames = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParen, fieldNames, closeParen);
}
/**
 * Parse key type constraint.
 * <p>
 * key-type-constraint := key type-parameter
 * </p>
 *
 * @param keyKeywordToken Already-parsed <code>key</code> keyword
 * @return Parsed node
 */
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, parseTypeParameter());
}
/**
 * Parse function type descriptor.
 * <p>
 * <code>
 * function-type-descriptor := function-quals function function-signature
 * <br/> | [isolated] function
 * <br/>
 * function-quals := (transactional | isolated)*
 * </code>
 *
 * @param qualifiers Preceding type descriptor qualifiers
 * @return Function type descriptor node
 */
private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode functionKeyword = parseFunctionKeyword();
    STNode signature;
    boolean hasFuncSignature;
    // A signature is parsed when '(' follows, or when `transactional` forces the full form.
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||
            isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {
        signature = parseFuncSignature(true);
        hasFuncSignature = true;
    } else {
        signature = STNodeFactory.createEmptyNode();
        hasFuncSignature = false;
    }
    // Validate qualifiers; this may also move invalid ones onto the function keyword.
    STNode[] validated = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);
    endContext();
    return STNodeFactory.createFunctionTypeDescriptorNode(validated[0], validated[1], signature);
}
/**
 * Get the last node of the given list.
 *
 * @param nodeList Non-empty list of nodes
 * @return Last node in the list
 */
private STNode getLastNodeInList(List<STNode> nodeList) {
    int lastIndex = nodeList.size() - 1;
    return nodeList.get(lastIndex);
}
/**
 * Validate the qualifiers preceding a function type descriptor and build the final qualifier list.
 * <p>
 * Accepted qualifiers are kept in the returned list. Duplicates and qualifiers that are not
 * allowed for the parsed form are not dropped: they are attached as invalid-node minutiae to a
 * neighboring node (the previously accepted qualifier, the next qualifier, or the
 * <code>function</code> keyword itself) together with a diagnostic.
 *
 * @param qualifierList    Qualifiers parsed before the <code>function</code> keyword
 * @param functionKeyword  Parsed <code>function</code> keyword
 * @param hasFuncSignature Whether an explicit function signature was parsed
 * @return Two-element array: [0] validated qualifier node-list, [1] possibly-updated
 *         <code>function</code> keyword
 */
private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword,
                                            boolean hasFuncSignature) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate of an already-accepted qualifier: attach to the last accepted one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
            // With a signature, both `isolated` and `transactional` are valid.
            validatedList.add(qualifier);
        } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
            // Without a signature, only `isolated` is valid.
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Invalid qualifier in last position: attach it to the function keyword.
            functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        } else {
            // Invalid qualifier in the middle: attach it to the following qualifier.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode nodeList = STNodeFactory.createNodeList(validatedList);
    return new STNode[]{ nodeList, functionKeyword };
}
/**
 * Check whether the given token kind is a qualifier valid on a full
 * (signature-bearing) function type descriptor.
 *
 * @param tokenKind Kind of the qualifier token
 * @return <code>true</code> for <code>isolated</code> or <code>transactional</code>
 */
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.ISOLATED_KEYWORD || tokenKind == SyntaxKind.TRANSACTIONAL_KEYWORD;
}
/**
 * Parse explicit anonymous function expression.
 * <p>
 * <code>explicit-anonymous-function-expr :=
 * [annots] (isolated| transactional) function function-signature anon-func-body</code>
 *
 * @param annots     Annotations.
 * @param qualifiers Function qualifiers
 * @param isRhsExpr  Is expression in rhs context
 * @return Anonymous function expression node
 */
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode funcKeyword = parseFunctionKeyword();
    // Validate qualifiers; invalid ones are attached to the function keyword.
    STNode[] validated = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);
    STNode qualifierList = validated[0];
    funcKeyword = validated[1];
    STNode funcSignature = parseFuncSignature(false);
    STNode funcBody = parseAnonFuncBody(isRhsExpr);
    return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
            funcSignature, funcBody);
}
/**
 * Parse anonymous function body.
 * <p>
 * <code>anon-func-body := block-function-body | expr-function-body</code>
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Anon function body node
 */
private STNode parseAnonFuncBody(boolean isRhsExpr) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        STNode blockBody = parseFunctionBodyBlock(true);
        endContext();
        return blockBody;
    }
    if (nextTokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        endContext();
        return parseExpressionFuncBody(true, isRhsExpr);
    }
    recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
    return parseAnonFuncBody(isRhsExpr);
}
/**
 * Parse expression function body.
 * <p>
 * <code>expr-function-body := =&gt; expression</code>
 *
 * @param isAnon    Is anonymous function.
 * @param isRhsExpr Is expression in rhs context
 * @return Expression function body node
 */
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode bodyExpr = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);
    // Only named functions terminate the expression body with a semicolon.
    STNode semiColon = isAnon ? STNodeFactory.createEmptyNode() : parseSemicolon();
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, bodyExpr, semiColon);
}
/**
 * Parse '=&gt;' token.
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // Run error recovery and retry until the token can be consumed.
        recover(nextToken, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
    return consume();
}
/**
 * Parse an implicit anonymous function expression (<code>params => expr</code>) given the
 * already-parsed parameter part, normalizing that parameter part first.
 * <p>
 * Accepted parameter shapes: a single name reference, an already-built infer-param list, a braced
 * expression (unwrapped into a one-param list), or a nil literal (converted into an empty param
 * list). Any other shape is invalid: a synthetic missing identifier takes its place and the
 * original node is attached as invalid minutiae with a diagnostic.
 *
 * @param params    Parsed parameter part preceding the <code>=&gt;</code>
 * @param isRhsExpr Whether the expression occurs in an RHS context
 * @return Implicit anonymous function expression node
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            // Already in an acceptable shape; use as-is.
            break;
        case BRACED_EXPRESSION:
            // (x) => ... : unwrap the braced expression into a parameter list.
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        case NIL_LITERAL:
            // () => ... : reuse the nil literal's parens around an empty param list.
            STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;
            params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,
                    STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);
            break;
        default:
            // Invalid parameter shape: substitute a missing identifier and keep the
            // original node as invalid minutiae.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.REMOTE_CALL_ACTION, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}
/**
 * Create a new anon-func-param node from a braced expression.
 *
 * @param bracedExpression Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
    STNode innerExpr = bracedExpression.expression;
    STNode openParen = bracedExpression.openParen;
    List<STNode> paramList = new ArrayList<>();
    if (innerExpr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        paramList.add(innerExpr);
    } else {
        // Anything other than a bare identifier cannot be an inferred parameter:
        // attach it to the open paren as invalid minutiae.
        openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpr,
                DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
    }
    return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
            STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}
/**
 * Parse implicit anon function expression.
 *
 * @param openParen  Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr  Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);
    // Collect the remaining comma-separated inferred parameters.
    while (!isEndOfAnonFuncParametersList(peek().kind)) {
        STNode paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            break;
        }
        paramList.add(paramEnd);
        STNode nextParam = STNodeFactory.createSimpleNameReferenceNode(
                parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM));
        paramList.add(nextParam);
    }
    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    endContext();
    STNode inferredParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferredParams, isRhsExpr);
}
/**
 * Parse the separator after an inferred anon-func parameter: a comma continues
 * the list, a close paren ends it.
 *
 * @return Comma token, or <code>null</code> when the parameter list ends
 */
private STNode parseImplicitAnonFuncParamEnd() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
    return parseImplicitAnonFuncParamEnd();
}
/**
 * Check whether the inferred anon-func parameter list ends at the given token kind.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if the parameter list ends here
 */
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || tokenKind == SyntaxKind.SEMICOLON_TOKEN || tokenKind == SyntaxKind.RETURNS_KEYWORD
            || tokenKind == SyntaxKind.TYPE_KEYWORD || tokenKind == SyntaxKind.LISTENER_KEYWORD
            || tokenKind == SyntaxKind.IF_KEYWORD || tokenKind == SyntaxKind.WHILE_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN
            || tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN;
}
/**
 * Parse tuple type descriptor.
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 * | [ tuple-rest-descriptor ]
 * <br/><br/>
 * member-type-descriptor := [annots] type-descriptor
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TUPLE_MEMBERS);
    STNode members = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // An empty member list gets a missing-type-desc diagnostic on the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(members, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, members, closeBracket);
}
/**
 * Parse tuple member type descriptors.
 *
 * @return Parsed node
 */
private STNode parseTupleMemberTypeDescList() {
    if (isEndOfTypeList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode firstMember = parseTupleMember();
    return parseTupleTypeMembers(firstMember, new ArrayList<>());
}
/**
 * Parse the remaining tuple members, given the first member already parsed.
 * <p>
 * Members are collected as alternating member/comma nodes. Once a rest descriptor
 * (<code>T...</code>) has been seen, any further members are invalid: they are attached to the
 * rest descriptor as invalid minutiae and the loop ends.
 *
 * @param firstMember First (already parsed) tuple member
 * @param memberList  List to accumulate members into
 * @return Node list of tuple members
 */
private STNode parseTupleTypeMembers(STNode firstMember, List<STNode> memberList) {
    STNode tupleMemberRhs;
    while (!isEndOfTypeList(peek().kind)) {
        if (firstMember.kind == SyntaxKind.REST_TYPE) {
            // Rest descriptor must be last: consume and invalidate whatever follows.
            firstMember = invalidateTypeDescAfterRestDesc(firstMember);
            break;
        }
        tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            // Close bracket reached.
            break;
        }
        memberList.add(firstMember);
        memberList.add(tupleMemberRhs);
        firstMember = parseTupleMember();
    }
    // The member held in `firstMember` (possibly carrying invalid trailing nodes) is added last.
    memberList.add(firstMember);
    return STNodeFactory.createNodeList(memberList);
}
/**
 * Parse a single tuple member: optional annotations, a type descriptor, and an
 * optional trailing ellipsis turning it into a rest descriptor.
 *
 * @return Member-type-descriptor or rest-descriptor node
 */
private STNode parseTupleMember() {
    STNode memberAnnots = parseOptionalAnnotations();
    STNode memberTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return createMemberOrRestNode(memberAnnots, memberTypeDesc);
}
/**
 * Wrap an annotated type descriptor as either a member-type-descriptor or, when
 * an ellipsis follows, a rest-descriptor (flagging annotations, which are not
 * allowed on rest descriptors).
 *
 * @param annot    Parsed (possibly empty) annotation list
 * @param typeDesc Parsed type descriptor
 * @return Member-type-descriptor or rest-descriptor node
 */
private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
    STNode ellipsis = parseTypeDescInTupleRhs();
    if (ellipsis == null) {
        return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
    }
    // Rest descriptor: annotations are not permitted here.
    if (!((STNodeList) annot).isEmpty()) {
        typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
    }
    return STNodeFactory.createRestDescriptorNode(typeDesc, ellipsis);
}
/**
 * Consume all tuple members that (invalidly) follow a rest descriptor, attaching both the
 * separators and the members to the rest descriptor as invalid-node minutiae.
 *
 * @param restDescriptor Rest descriptor that must be the last tuple member
 * @return Rest descriptor with any trailing nodes folded in as invalid minutiae
 */
private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
    while (!isEndOfTypeList(peek().kind)) {
        STNode tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        // The comma is attached without a diagnostic; the member carries the diagnostic.
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(),
                DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
    }
    return restDescriptor;
}
/**
 * Parse the separator after a tuple member: a comma continues the list,
 * a close bracket ends it.
 *
 * @return Comma token, or <code>null</code> when the member list ends
 */
private STNode parseTupleMemberRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
    return parseTupleMemberRhs();
}
/**
 * Parse what follows a type descriptor inside a tuple: an ellipsis makes it a
 * rest descriptor; a comma or close bracket means it is a plain member.
 *
 * @return Ellipsis token, or <code>null</code> when the member is not a rest descriptor
 */
private STNode parseTypeDescInTupleRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseEllipsis();
    }
    recover(peek(), ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
    return parseTypeDescInTupleRhs();
}
/**
 * Check whether a type-descriptor list ends at the given token kind.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> if the type list ends here
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.EQUAL_TOKEN || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 * [query-construct-type] query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-construct-type := table key-specifier | stream | map
 * </code>
 *
 * @param isRhsExpr    Is expression in rhs context
 * @param allowActions Whether actions are allowed inside the expression
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(boolean isRhsExpr, boolean allowActions) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode parsedExpr = parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
    endContext();
    return parsedExpr;
}
/**
 * Dispatch on the leading token of a table-constructor-or-query expression:
 * <code>from</code> starts a plain query, <code>table</code> may start either a table
 * constructor or a table-typed query, and <code>stream</code>/<code>map</code> start a
 * query with that construct type.
 *
 * @param isRhsExpr    Is expression in rhs context
 * @param allowActions Whether actions are allowed inside the expression
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr, boolean allowActions) {
    STNode queryConstructType;
    switch (peek().kind) {
        case FROM_KEYWORD:
            // Bare query expression: no construct type.
            queryConstructType = STNodeFactory.createEmptyNode();
            return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
        case TABLE_KEYWORD:
            // Could still be either a table constructor or a query; decide later.
            STNode tableKeyword = parseTableKeyword();
            return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
        case STREAM_KEYWORD:
        case MAP_KEYWORD:
            STNode streamOrMapKeyword = consume();
            STNode keySpecifier = STNodeFactory.createEmptyNode();
            queryConstructType = parseQueryConstructType(streamOrMapKeyword, keySpecifier);
            return parseQueryExprRhs(queryConstructType, isRhsExpr, allowActions);
        default:
            recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
            return parseTableConstructorOrQueryInternal(isRhsExpr, allowActions);
    }
}
/**
 * Continue parsing after the <code>table</code> keyword: an open bracket means a table
 * constructor without a key specifier; a <code>key</code> keyword (either as a real token or
 * as an identifier spelled "key") means a key specifier followed by either form. Any other
 * token falls through to error recovery and a retry.
 *
 * @param tableKeyword Already-parsed <code>table</code> keyword
 * @param isRhsExpr    Is expression in rhs context
 * @param allowActions Whether actions are allowed inside the expression
 * @return Parsed node
 */
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr, boolean allowActions) {
    STNode keySpecifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // table [...] : table constructor with no key specifier.
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
        case IDENTIFIER_TOKEN:
            // "key" may arrive as a plain identifier; treat it as the keyword if it matches.
            if (isKeyKeyword(nextToken)) {
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
            }
            break;
        default:
            break;
    }
    // Unexpected token: recover and retry.
    recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
    return parseTableConstructorOrQuery(tableKeyword, isRhsExpr, allowActions);
}
/**
 * After <code>table key-specifier</code>, decide between a query expression
 * (<code>from</code> follows) and a table constructor (<code>[</code> follows).
 *
 * @param tableKeyword Already-parsed <code>table</code> keyword
 * @param keySpecifier Already-parsed key specifier
 * @param isRhsExpr    Is expression in rhs context
 * @param allowActions Whether actions are allowed inside the expression
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr,
                                               boolean allowActions) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.FROM_KEYWORD) {
        return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr, allowActions);
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
    }
    recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr, allowActions);
}
/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier | stream | map</code>
 *
 * @param keyword      Construct keyword (table / stream / map)
 * @param keySpecifier Key specifier, or an empty node when absent
 * @return Parsed node
 */
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
    STNode constructType = STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
    return constructType;
}
/**
 * Parse query action or expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 * query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 * <p>
 * A query must end in exactly one select (or collect) clause; clauses appearing after it are
 * attached to it as invalid minutiae. If neither is present, a missing select clause is
 * synthesized and a diagnostic is placed on the last clause (or the from-clause). A trailing
 * <code>do</code> keyword turns the pipeline into a query action instead.
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @param isRhsExpr          Is expression in rhs context
 * @param allowActions       Whether actions are allowed in clause expressions
 * @return Parsed node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr, boolean allowActions) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr, allowActions);
    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    STNode collectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr, allowActions);
        if (intermediateClause == null) {
            break;
        }
        // Anything after select/collect is invalid: fold it into that clause.
        if (selectClause != null) {
            selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
            continue;
        } else if (collectClause != null) {
            collectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(collectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_COLLECT_CLAUSE);
            continue;
        }
        if (intermediateClause.kind == SyntaxKind.SELECT_CLAUSE) {
            selectClause = intermediateClause;
        } else if (intermediateClause.kind == SyntaxKind.COLLECT_CLAUSE) {
            collectClause = intermediateClause;
        } else {
            clauses.add(intermediateClause);
            continue;
        }
        // After select/collect, only keep scanning in an outermost query when another
        // clause keyword follows (to collect it as invalid above).
        if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek())) {
            break;
        }
    }
    if (peek().kind == SyntaxKind.DO_KEYWORD) {
        // `do` makes this a query action rather than a query expression.
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return parseQueryAction(queryConstructType, queryPipeline, selectClause);
    }
    if (selectClause == null && collectClause == null) {
        // Synthesize a missing select clause and report it on the last parsed clause.
        STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
        if (clauses.isEmpty()) {
            fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
        } else {
            int lastIndex = clauses.size() - 1;
            STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                    DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            clauses.set(lastIndex, intClauseWithDiagnostic);
        }
    }
    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    STNode onConflictClause = parseOnConflictClause(isRhsExpr);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline,
            selectClause == null ? collectClause : selectClause, onConflictClause);
}
/**
 * Check whether the query expression currently being parsed is nested inside
 * another query expression, i.e. more than one query-expr context is active.
 *
 * @return <code>true</code> if currently parsing query-expr is a nested query-expr. <code>false</code> otherwise.
 */
private boolean isNestedQueryExpr() {
    int activeQueryContexts = 0;
    for (Object context : this.errorHandler.getContextStack()) {
        if (ParserRuleContext.QUERY_EXPRESSION.equals(context)) {
            activeQueryContexts++;
        }
    }
    return activeQueryContexts > 1;
}
/**
 * Check whether the given token can start an intermediate query clause.
 *
 * @param token Token to check
 * @return <code>true</code> if the token can start an intermediate clause. <code>false</code> otherwise.
 */
private boolean isValidIntermediateQueryStart(STToken token) {
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        // `group` and `collect` are contextual keywords lexed as identifiers.
        return isGroupOrCollectKeyword(token);
    }
    switch (token.kind) {
        case FROM_KEYWORD:
        case WHERE_KEYWORD:
        case LET_KEYWORD:
        case SELECT_KEYWORD:
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
        case LIMIT_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether the given token is the contextual keyword <code>group</code> or
 * <code>collect</code> (both are lexed as identifier tokens).
 *
 * @param nextToken Token to check
 * @return <code>true</code> if the token text matches either keyword
 */
private static boolean isGroupOrCollectKeyword(STToken nextToken) {
    return isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken)
            || isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken);
}
/**
 * Check whether an identifier token's text matches a contextual keyword.
 *
 * @param syntaxKind Keyword kind to match against
 * @param token Token to check
 * @return <code>true</code> if the token is an identifier whose text equals the keyword
 */
private static boolean isKeywordMatch(SyntaxKind syntaxKind, STToken token) {
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        return false;
    }
    return syntaxKind.stringValue().equals(token.text());
}
/**
 * Parse an intermediate clause of a query pipeline.
 * <p>
 * <code>
 * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause |
 * order-by-clause | select-clause | collect-clause | group-by-clause
 * </code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @param allowActions whether actions are allowed inside the clause expressions
 * @return Parsed node, or {@code null} when the next token ends the pipeline
 *         (e.g. <code>do</code>, <code>;</code>, <code>on conflict</code>)
 */
private STNode parseIntermediateClause(boolean isRhsExpr, boolean allowActions) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr, allowActions);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr, allowActions);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr, allowActions);
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
            return parseJoinClause(isRhsExpr);
        case ORDER_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            return parseOrderByClause(isRhsExpr);
        case LIMIT_KEYWORD:
            return parseLimitClause(isRhsExpr);
        case DO_KEYWORD:
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            // Not a clause start: signals the end of the intermediate-clause list.
            return null;
        default:
            // `collect` and `group` are contextual keywords lexed as identifiers.
            if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken)) {
                return parseCollectClause(isRhsExpr);
            }
            if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken)) {
                return parseGroupByClause(isRhsExpr);
            }
            recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
            return parseIntermediateClause(isRhsExpr, allowActions);
    }
}
/**
 * Parse a collect clause.
 * <p>
 * <code>collect-clause := collect expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Collect clause node
 */
private STNode parseCollectClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.COLLECT_CLAUSE);
    STNode keyword = parseCollectKeyword();
    STNode collectedExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createCollectClauseNode(keyword, collectedExpr);
}
/**
 * Parse the <code>collect</code> keyword, converting a matching identifier token
 * (contextual keyword) into a keyword token when necessary.
 *
 * @return collect-keyword node
 */
private STNode parseCollectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COLLECT_KEYWORD) {
        return consume();
    }
    if (isKeywordMatch(SyntaxKind.COLLECT_KEYWORD, nextToken)) {
        // Lexed as an identifier; re-create it as a keyword token.
        return getCollectKeyword(consume());
    }
    recover(nextToken, ParserRuleContext.COLLECT_KEYWORD);
    return parseCollectKeyword();
}
/**
 * Re-create an identifier token as a <code>collect</code> keyword token,
 * preserving its minutiae and diagnostics.
 *
 * @param identifierToken identifier token carrying the text "collect"
 * @return collect-keyword token
 */
private STNode getCollectKeyword(STToken identifierToken) {
    return STNodeFactory.createToken(SyntaxKind.COLLECT_KEYWORD, identifierToken.leadingMinutiae(),
            identifierToken.trailingMinutiae(), identifierToken.diagnostics());
}
/**
 * Parse the <code>join</code> keyword, recovering when the next token does not match.
 *
 * @return Join-keyword node
 */
private STNode parseJoinKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.JOIN_KEYWORD) {
        recover(nextToken, ParserRuleContext.JOIN_KEYWORD);
        return parseJoinKeyword();
    }
    return consume();
}
/**
 * Parse the <code>equals</code> keyword, recovering when the next token does not match.
 *
 * @return Equals-keyword node
 */
private STNode parseEqualsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EQUALS_KEYWORD) {
        recover(nextToken, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
    return consume();
}
/**
 * Check whether the given token kind terminates the intermediate-clause list of a
 * query pipeline (closing delimiters, statement/definition starters, `do`,
 * `on conflict`, or anything that can continue the enclosing expression).
 *
 * @param tokenKind Next token kind
 * @return <code>true</code> if no further intermediate clause should be parsed
 */
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_STRING:
        case PRIVATE_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case DO_KEYWORD:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            return true;
        default:
            // A binary/postfix operator continuing the surrounding expression also ends the clauses.
            return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
    }
}
/**
 * Parse a from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @param allowActions whether actions are allowed in the iterable expression
 * @return From clause node
 */
private STNode parseFromClause(boolean isRhsExpr, boolean allowActions) {
    STNode fromKeyword = parseFromKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode iterableExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
    return STNodeFactory.createFromClauseNode(fromKeyword, bindingPattern, inKeyword, iterableExpr);
}
/**
 * Parse the <code>from</code> keyword, recovering when the next token does not match.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FROM_KEYWORD) {
        recover(nextToken, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
    return consume();
}
/**
 * Parse a where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Where clause node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode condition = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, condition);
}
/**
 * Parse the <code>where</code> keyword, recovering when the next token does not match.
 *
 * @return Where-keyword node
 */
private STNode parseWhereKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHERE_KEYWORD) {
        recover(nextToken, ParserRuleContext.WHERE_KEYWORD);
        return parseWhereKeyword();
    }
    return consume();
}
/**
 * Parse the <code>limit</code> keyword, recovering when the next token does not match.
 *
 * @return limit-keyword node
 */
private STNode parseLimitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.LIMIT_KEYWORD);
        return parseLimitKeyword();
    }
    return consume();
}
/**
 * Parse a let clause.
 * <p>
 * <code>let-clause := let let-var-decl [, let-var-decl]*</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @param allowActions whether actions are allowed in the initializer expressions
 * @return Let clause node
 */
private STNode parseLetClause(boolean isRhsExpr, boolean allowActions) {
    STNode letKeyword = parseLetKeyword();
    STNode varDecls = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr,
            allowActions);
    // If no declarations were parsed, report the error on the `let` keyword itself.
    STNode validatedLetKeyword = cloneWithDiagnosticIfListEmpty(varDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    return STNodeFactory.createLetClauseNode(validatedLetKeyword, varDecls);
}
/**
 * Parse a group-by clause.
 * <p>
 * <code>group-by-clause := group by grouping-key-list</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Group-by clause node
 */
private STNode parseGroupByClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.GROUP_BY_CLAUSE);
    STNode groupKeyword = parseGroupKeyword();
    STNode byKeyword = parseByKeyword();
    STNode keyList = parseGroupingKeyList(isRhsExpr);
    // If no grouping keys were parsed, report the error on the `by` keyword.
    STNode validatedByKeyword = cloneWithDiagnosticIfListEmpty(keyList, byKeyword,
            DiagnosticErrorCode.ERROR_MISSING_GROUPING_KEY);
    endContext();
    return STNodeFactory.createGroupByClauseNode(groupKeyword, validatedByKeyword, keyList);
}
/**
 * Parse the <code>group</code> keyword, converting a matching identifier token
 * (contextual keyword) into a keyword token when necessary.
 *
 * @return group-keyword node
 */
private STNode parseGroupKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.GROUP_KEYWORD) {
        return consume();
    }
    if (isKeywordMatch(SyntaxKind.GROUP_KEYWORD, nextToken)) {
        // Lexed as an identifier; re-create it as a keyword token.
        return getGroupKeyword(consume());
    }
    recover(nextToken, ParserRuleContext.GROUP_KEYWORD);
    return parseGroupKeyword();
}
/**
 * Re-create an identifier token as a <code>group</code> keyword token,
 * preserving its minutiae and diagnostics.
 *
 * @param identifierToken identifier token carrying the text "group"
 * @return group-keyword token
 */
private STNode getGroupKeyword(STToken identifierToken) {
    return STNodeFactory.createToken(SyntaxKind.GROUP_KEYWORD, identifierToken.leadingMinutiae(),
            identifierToken.trailingMinutiae(), identifierToken.diagnostics());
}
/**
 * Parse the <code>order</code> keyword, recovering when the next token does not match.
 *
 * @return Order-keyword node
 */
private STNode parseOrderKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ORDER_KEYWORD) {
        recover(nextToken, ParserRuleContext.ORDER_KEYWORD);
        return parseOrderKeyword();
    }
    return consume();
}
/**
 * Parse the <code>by</code> keyword, recovering when the next token does not match.
 *
 * @return By-keyword node
 */
private STNode parseByKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BY_KEYWORD) {
        recover(nextToken, ParserRuleContext.BY_KEYWORD);
        return parseByKeyword();
    }
    return consume();
}
/**
 * Parse an order-by clause.
 * <p>
 * <code>order-by-clause := order by order-key-list</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Order-by clause node
 */
private STNode parseOrderByClause(boolean isRhsExpr) {
    STNode orderKeyword = parseOrderKeyword();
    STNode byKeyword = parseByKeyword();
    STNode keyList = parseOrderKeyList(isRhsExpr);
    // If no order keys were parsed, report the error on the `by` keyword.
    STNode validatedByKeyword =
            cloneWithDiagnosticIfListEmpty(keyList, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
    return STNodeFactory.createOrderByClauseNode(orderKeyword, validatedByKeyword, keyList);
}
/**
 * Parse a grouping-key list.
 * <p>
 * <code>grouping-key-list := grouping-key ["," grouping-key]*</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Node list of grouping keys and separating commas (possibly empty)
 */
private STNode parseGroupingKeyList(boolean isRhsExpr) {
    List<STNode> groupingKeys = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfGroupByKeyListElement(nextToken)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first key, then alternate separator/key until the list ends.
    STNode groupingKey = parseGroupingKey(isRhsExpr);
    groupingKeys.add(groupingKey);
    nextToken = peek();
    STNode groupingKeyListMemberEnd;
    while (!isEndOfGroupByKeyListElement(nextToken)) {
        groupingKeyListMemberEnd = parseGroupingKeyListMemberEnd();
        if (groupingKeyListMemberEnd == null) {
            // End of list (EOF or the start of the next query clause).
            break;
        }
        groupingKeys.add(groupingKeyListMemberEnd);
        groupingKey = parseGroupingKey(isRhsExpr);
        groupingKeys.add(groupingKey);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(groupingKeys);
}
/**
 * Parse an order-key list.
 * <p>
 * <code>order-key-list := order-key [, order-key]*</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Node list of order keys and separating commas (possibly empty)
 */
private STNode parseOrderKeyList(boolean isRhsExpr) {
    startContext(ParserRuleContext.ORDER_KEY_LIST);
    List<STNode> orderKeys = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfOrderKeys(nextToken)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first key, then alternate separator/key until the list ends.
    STNode orderKey = parseOrderKey(isRhsExpr);
    orderKeys.add(orderKey);
    nextToken = peek();
    STNode orderKeyListMemberEnd;
    while (!isEndOfOrderKeys(nextToken)) {
        orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
        if (orderKeyListMemberEnd == null) {
            // End of list (EOF or the start of the next query clause).
            break;
        }
        orderKeys.add(orderKeyListMemberEnd);
        orderKey = parseOrderKey(isRhsExpr);
        orderKeys.add(orderKey);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(orderKeys);
}
/**
 * Check whether the given token ends the grouping-key list.
 *
 * @param nextToken Next token
 * @return <code>true</code> at EOF or the start of the next query clause
 */
private boolean isEndOfGroupByKeyListElement(STToken nextToken) {
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.COMMA_TOKEN) {
        return false;
    }
    return kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken);
}
/**
 * Check whether the given token ends the order-key list.
 *
 * @param nextToken Next token
 * @return <code>true</code> at a semicolon, EOF, or the start of the next query clause
 */
private boolean isEndOfOrderKeys(STToken nextToken) {
    SyntaxKind kind = nextToken.kind;
    if (kind == SyntaxKind.COMMA_TOKEN || kind == SyntaxKind.ASCENDING_KEYWORD
            || kind == SyntaxKind.DESCENDING_KEYWORD) {
        return false;
    }
    if (kind == SyntaxKind.SEMICOLON_TOKEN || kind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    return isQueryClauseStartToken(nextToken);
}
/**
 * Check whether the given token can start a query clause.
 *
 * @param nextToken Next token
 * @return <code>true</code> if the token starts a query clause
 */
private boolean isQueryClauseStartToken(STToken nextToken) {
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        // `group` and `collect` are contextual keywords lexed as identifiers.
        return isGroupOrCollectKeyword(nextToken);
    }
    switch (nextToken.kind) {
        case SELECT_KEYWORD:
        case LET_KEYWORD:
        case WHERE_KEYWORD:
        case OUTER_KEYWORD:
        case JOIN_KEYWORD:
        case ORDER_KEYWORD:
        case DO_KEYWORD:
        case FROM_KEYWORD:
        case LIMIT_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Parse the token that ends a grouping-key list member.
 *
 * @return Comma token separating grouping keys, or {@code null} at the end of the list
 */
private STNode parseGroupingKeyListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken)) {
        return null;
    }
    recover(peek(), ParserRuleContext.GROUPING_KEY_LIST_ELEMENT_END);
    return parseGroupingKeyListMemberEnd();
}
/**
 * Parse the token that ends an order-key list member.
 *
 * @return Comma token separating order keys, or {@code null} at the end of the list
 */
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            // The token is already known to be a comma, so consume it directly instead of
            // going through parseComma(); this matches parseGroupingKeyListMemberEnd().
            return consume();
        case EOF_TOKEN:
            return null;
        default:
            if (isQueryClauseStartToken(nextToken)) {
                return null;
            }
            recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);
            return parseOrderKeyListMemberEnd();
    }
}
/**
 * Parse a grouping-key variable declaration.
 * <p>
 * <code>inferable-type-descriptor variable-name "=" expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Grouping-key variable declaration node
 */
private STNode parseGroupingKeyVariableDeclaration(boolean isRhsExpr) {
    STNode groupingKeyElementTypeDesc =
            parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER_IN_GROUPING_KEY);
    // The variable name is parsed as a capture (or wildcard) binding pattern.
    startContext(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STNode groupingKeySimpleBP = createCaptureOrWildcardBP(parseVariableName());
    endContext();
    STNode equalsToken = parseAssignOp();
    STNode groupingKeyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createGroupingKeyVarDeclarationNode(groupingKeyElementTypeDesc, groupingKeySimpleBP,
            equalsToken, groupingKeyExpression);
}
/**
 * Parse a grouping key.
 * <p>
 * <code>grouping-key := variable-name | inferable-type-descriptor variable-name "=" expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Grouping key node
 */
private STNode parseGroupingKey(boolean isRhsExpr) {
    STToken nextToken = peek();
    SyntaxKind nextKind = nextToken.kind;
    // A bare identifier (not followed by a declaration pattern) is a simple name reference.
    if (nextKind == SyntaxKind.IDENTIFIER_TOKEN && !isPossibleGroupingKeyVarDeclaration()) {
        return STNodeFactory.createSimpleNameReferenceNode(parseVariableName());
    }
    if (isTypeStartingToken(nextKind, nextToken)) {
        return parseGroupingKeyVariableDeclaration(isRhsExpr);
    }
    recover(nextToken, ParserRuleContext.GROUPING_KEY_LIST_ELEMENT);
    return parseGroupingKey(isRhsExpr);
}
/**
 * Look ahead to decide whether the upcoming tokens form a grouping-key variable
 * declaration (i.e. <code>name =</code> or <code>type name =</code>).
 *
 * @return <code>true</code> if a declaration shape is ahead
 */
private boolean isPossibleGroupingKeyVarDeclaration() {
    SyntaxKind secondTokenKind = getNextNextToken().kind;
    if (secondTokenKind == SyntaxKind.EQUAL_TOKEN) {
        return true;
    }
    return secondTokenKind == SyntaxKind.IDENTIFIER_TOKEN && peek(3).kind == SyntaxKind.EQUAL_TOKEN;
}
/**
 * Parse an order key.
 * <p>
 * <code>order-key := expression (ascending | descending)?</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Order key node
 */
private STNode parseOrderKey(boolean isRhsExpr) {
    STNode keyExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STToken nextToken = peek();
    STNode direction;
    if (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD || nextToken.kind == SyntaxKind.DESCENDING_KEYWORD) {
        direction = consume();
    } else {
        // Direction is optional; default is represented by an empty node.
        direction = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createOrderKeyNode(keyExpr, direction);
}
/**
 * Parse a select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @param allowActions whether actions are allowed in the selected expression
 * @return Select clause node
 */
private STNode parseSelectClause(boolean isRhsExpr, boolean allowActions) {
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKeyword = parseSelectKeyword();
    STNode selectedExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, allowActions);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKeyword, selectedExpr);
}
/**
 * Parse the <code>select</code> keyword, recovering when the next token does not match.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
    return consume();
}
/**
 * Parse an optional on-conflict clause.
 * <p>
 * <code>onConflictClause := on conflict expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return On-conflict clause node, or an empty node when absent
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    boolean hasOnConflict =
            nextToken.kind == SyntaxKind.ON_KEYWORD || nextToken.kind == SyntaxKind.CONFLICT_KEYWORD;
    if (!hasOnConflict) {
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode conflictExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, conflictExpr);
}
/**
 * Parse the <code>conflict</code> keyword, recovering when the next token does not match.
 *
 * @return Conflict-keyword node
 */
private STNode parseConflictKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
    return consume();
}
/**
 * Parse a limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Limit clause node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKeyword = parseLimitKeyword();
    STNode limitExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, limitExpr);
}
/**
 * Parse a join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return Join clause node
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    // The optional `outer` modifier precedes the `join` keyword.
    STNode outerKeyword =
            peek().kind == SyntaxKind.OUTER_KEYWORD ? consume() : STNodeFactory.createEmptyNode();
    STNode joinKeyword = parseJoinKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode iterableExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, bindingPattern, inKeyword,
            iterableExpr, onCondition);
}
/**
 * Parse the on clause of a join clause.
 * <p>
 * <code>on clause := `on` expression `equals` expression</code>
 *
 * @param isRhsExpr whether the enclosing expression is an expression-rhs
 * @return On clause node (a synthesized missing node when the clause is absent)
 */
private STNode parseOnClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // The on-clause is mandatory for joins; if the next query clause starts here,
    // synthesize a missing on-clause instead of attempting recovery.
    if (isQueryClauseStartToken(nextToken)) {
        return createMissingOnClauseNode();
    }
    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    endContext();
    STNode rhsExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpression, equalsKeyword, rhsExpression);
}
/**
 * Create an on-clause node made entirely of missing tokens, each carrying a
 * missing-token diagnostic. Used when a join clause lacks its mandatory on-clause.
 *
 * @return Synthesized on-clause node
 */
private STNode createMissingOnClauseNode() {
    STNode missingOnKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode missingEqualsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    // Both sides of `equals` share the same missing identifier.
    STNode lhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode rhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    return STNodeFactory.createOnClauseNode(missingOnKeyword, lhs, missingEqualsKeyword, rhs);
}
/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 *
 * @param annots annotations attached to the action (may be empty)
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            // Already a valid start-action operand.
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case FIELD_ACCESS:
        case ASYNC_SEND_ACTION:
            // Close-but-invalid shapes: coerce into a call by adding missing parens.
            expr = generateValidExprForStartAction(expr);
            break;
        default:
            // Anything else is invalid: attach it to the `start` keyword as invalid-node
            // minutiae and substitute a fully-missing function call.
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
                    STNodeFactory.createEmptyNodeList(), closeParenToken);
            break;
    }
    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
/**
 * Convert a near-miss start-action operand (name reference, field access, or async send)
 * into a valid call shape by appending missing parentheses with diagnostics.
 *
 * @param expr the invalid operand parsed after <code>start</code>
 * @return a function-call, method-call, or remote-method-call node
 */
private STNode generateValidExprForStartAction(STNode expr) {
    STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
    STNode arguments = STNodeFactory.createEmptyNodeList();
    STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
    switch (expr.kind) {
        case FIELD_ACCESS:
            // `a.b` becomes the method call `a.b()`.
            STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
            return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
                    fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
                    closeParenToken);
        case ASYNC_SEND_ACTION:
            // `a->b` becomes the remote method call `a->b()`.
            STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
            return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
                    asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
                    closeParenToken);
        default:
            // A bare name reference becomes the function call `name()`.
            return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
    }
}
/**
 * Parse the <code>start</code> keyword, recovering when the next token does not match.
 *
 * @return Start-keyword node
 */
private STNode parseStartKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.START_KEYWORD) {
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
    return consume();
}
/**
 * Parse a flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return Flush action node
 */
private STNode parseFlushAction() {
    STNode flushKeyword = parseFlushKeyword();
    STNode workerName = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, workerName);
}
/**
 * Parse the <code>flush</code> keyword, recovering when the next token does not match.
 *
 * @return flush-keyword node
 */
private STNode parseFlushKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
    return consume();
}
/**
 * Parse an optional peer worker name.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name node, or an empty node when absent
 */
private STNode parseOptionalPeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    // Peer worker is optional here; no recovery.
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse an intersection type descriptor.
 * <p>
 * <code>intersection-type-descriptor := type-descriptor &amp; type-descriptor</code>
 *
 * @param leftTypeDesc already-parsed lhs type
 * @param context current parser rule context
 * @param isTypedBindingPattern whether parsing within a typed binding pattern
 * @return Intersection type descriptor node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    STNode ampersandToken = consume();
    STNode rhsTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.INTERSECTION);
    // Merging honors type precedence/associativity for the combined descriptor.
    return mergeTypesWithIntersection(leftTypeDesc, ampersandToken, rhsTypeDesc);
}
/**
 * Creates an intersection type descriptor after validating the lhs and rhs types
 * for invalid usages of <code>var</code>.
 * <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when types are unknown; use
 * {@code mergeTypesWithIntersection} instead.</i>
 *
 * @param leftTypeDesc lhs type
 * @param bitwiseAndToken bitwise-and token
 * @param rightTypeDesc rhs type
 * @return an IntersectionTypeDescriptorNode
 */
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    STNode validatedLhs = validateForUsageOfVar(leftTypeDesc);
    STNode validatedRhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(validatedLhs, bitwiseAndToken, validatedRhs);
}
/**
 * Parse a singleton type descriptor.
 * <p>
 * <code>singleton-type-descriptor := simple-const-expr</code><br/>
 * <code>simple-const-expr := nil-literal | boolean-literal | [Sign] int-literal |
 * [Sign] floating-point-literal | string-literal | constant-reference-expr</code>
 *
 * @return Singleton type descriptor node
 */
private STNode parseSingletonTypeDesc() {
    return STNodeFactory.createSingletonTypeDescriptorNode(parseSimpleConstExpr());
}
/**
 * Parse a signed numeric literal: a unary sign operator followed by an int or
 * floating-point literal.
 *
 * @return Unary expression node wrapping the numeric literal
 */
private STNode parseSignedIntOrFloat() {
    STNode signOperator = parseUnaryOperator();
    STToken nextToken = peek();
    STNode numericLiteral;
    switch (nextToken.kind) {
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            numericLiteral = parseBasicLiteral();
            break;
        default:
            // Only a decimal int literal remains possible; recover towards it if absent.
            STNode intLiteral = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
            numericLiteral = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, intLiteral);
    }
    return STNodeFactory.createUnaryExpressionNode(signOperator, numericLiteral);
}
/**
 * Check whether the given token can start a singleton type descriptor.
 *
 * @param tokenKind kind of the next token
 * @param nextNextToken the token after that (used when the first token is a sign)
 * @return <code>true</code> if a singleton type descriptor starts here
 */
private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
    switch (tokenKind) {
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign only starts a singleton type when followed by a numeric literal.
            return isIntOrFloat(nextNextToken);
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether the given token is an int or floating-point literal.
 *
 * @param token Token to check
 * @return <code>true</code> for any numeric literal token
 */
static boolean isIntOrFloat(STToken token) {
    SyntaxKind kind = token.kind;
    return kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN
            || kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN
            || kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN;
}
/**
 * Check whether the parser reached to a valid expression start.
 *
 * @param nextTokenKind Kind of the next immediate token.
 * @param nextTokenIndex Index to the next token.
 * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
 */
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
    nextTokenIndex++;
    switch (nextTokenKind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // A literal could also be a singleton type in a union/intersection; look
            // past `|`/`&` separators to decide.
            SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
            if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
                nextTokenIndex++;
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            }
            return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||
                    nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                    nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                    isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case IDENTIFIER_TOKEN:
            // An identifier starts an expression only if an expression-rhs follows it.
            return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
        case OPEN_PAREN_TOKEN:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case TYPEOF_KEYWORD:
        case NEGATION_TOKEN:
        case EXCLAMATION_MARK_TOKEN:
        case TRAP_KEYWORD:
        case OPEN_BRACKET_TOKEN:
        case LT_TOKEN:
        case FROM_KEYWORD:
        case LET_KEYWORD:
        case BACKTICK_TOKEN:
        case NEW_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
        case BASE16_KEYWORD:
        case BASE64_KEYWORD:
            return true;
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // A sign starts an expression if what follows it does.
            return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
        case TABLE_KEYWORD:
        case MAP_KEYWORD:
            // `table`/`map` start an expression only as a query construct type.
            return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
        case STREAM_KEYWORD:
            STToken nextNextToken = peek(nextTokenIndex);
            return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                    nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                    nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
        case ERROR_KEYWORD:
            // `error(...)` is an error constructor expression.
            return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
        case XML_KEYWORD:
        case STRING_KEYWORD:
        case RE_KEYWORD:
            // Template expressions require a following backtick.
            return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case WAIT_KEYWORD:
        default:
            return false;
    }
}
/**
 * Parse a sync send action.
 * <p>
 * <code>sync-send-action := expression -&gt;&gt; peer-worker</code>
 *
 * @param expression LHS expression of the sync send action
 * @return Sync send action node
 */
private STNode parseSyncSendAction(STNode expression) {
    STNode sendToken = parseSyncSendToken();
    STNode workerName = parsePeerWorkerName();
    return STNodeFactory.createSyncSendActionNode(expression, sendToken, workerName);
}
/**
 * Parse a mandatory peer worker name.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name node
 */
private STNode parsePeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    recover(nextToken, ParserRuleContext.PEER_WORKER_NAME);
    return parsePeerWorkerName();
}
/**
 * Parse the sync send token, recovering when the next token does not match.
 * <p>
 * <code>sync-send-token := -&gt;&gt;</code>
 *
 * @return Sync send token
 */
private STNode parseSyncSendToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SYNC_SEND_TOKEN) {
        recover(nextToken, ParserRuleContext.SYNC_SEND_TOKEN);
        return parseSyncSendToken();
    }
    return consume();
}
/**
 * Parse a receive action.
 * <p>
 * <code>receive-action := single-receive-action | multiple-receive-action</code>
 *
 * @return Receive action node
 */
private STNode parseReceiveAction() {
    STNode leftArrowToken = parseLeftArrowToken();
    STNode workers = parseReceiveWorkers();
    return STNodeFactory.createReceiveActionNode(leftArrowToken, workers);
}
/**
 * Parse the worker part of a receive action: either a single peer worker name or a
 * braced list of receive fields.
 *
 * @return Receive workers node
 */
private STNode parseReceiveWorkers() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.FUNCTION_KEYWORD || nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parsePeerWorkerName();
    }
    if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMultipleReceiveWorkers();
    }
    recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
    return parseReceiveWorkers();
}
/**
 * Parse multiple worker receivers.
 * <p>
 * <code>{ receive-field (, receive-field)* }</code>
 *
 * @return Multiple worker receiver node
 */
private STNode parseMultipleReceiveWorkers() {
    startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
    STNode openBraceToken = parseOpenBrace();
    STNode fields = parseReceiveFields();
    STNode closeBraceToken = parseCloseBrace();
    endContext();
    // An empty field list is an error; report it on the opening brace.
    STNode validatedOpenBrace = cloneWithDiagnosticIfListEmpty(fields, openBraceToken,
            DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
    return STNodeFactory.createReceiveFieldsNode(validatedOpenBrace, fields, closeBraceToken);
}
/**
 * Parse the comma-separated receive-field list of a multiple-receive action.
 *
 * @return Node list of receive fields and separating commas (possibly empty)
 */
private STNode parseReceiveFields() {
    List<STNode> receiveFields = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfReceiveFields(nextToken.kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first field, then alternate separator/field until the list ends.
    STNode receiveField = parseReceiveField();
    receiveFields.add(receiveField);
    nextToken = peek();
    STNode recieveFieldEnd;
    while (!isEndOfReceiveFields(nextToken.kind)) {
        recieveFieldEnd = parseReceiveFieldEnd();
        if (recieveFieldEnd == null) {
            // End of list (close brace ahead).
            break;
        }
        receiveFields.add(recieveFieldEnd);
        receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
    }
    return STNodeFactory.createNodeList(receiveFields);
}
/**
 * Check whether the given token kind ends the receive-field list.
 *
 * @param nextTokenKind Next token kind
 * @return <code>true</code> at EOF or a closing brace
 */
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
/**
 * Parse the token that ends a receive-field list member.
 *
 * @return Comma token separating receive fields, or {@code null} at the end of the list
 */
private STNode parseReceiveFieldEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
    return parseReceiveFieldEnd();
}
/**
 * Parse a receive field.
 * <p>
 * <code>receive-field := peer-worker | field-name : peer-worker</code>
 *
 * @return Receive field node
 */
private STNode parseReceiveField() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
        return createQualifiedReceiveField(fieldName);
    }
    recover(peek(), ParserRuleContext.RECEIVE_FIELD);
    return parseReceiveField();
}
// Turn `identifier` into `identifier : peer-worker` when a colon follows;
// otherwise the identifier alone is the receive field.
private STNode createQualifiedReceiveField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode peerWorkerName = parsePeerWorkerName();
    return createQualifiedNameReferenceNode(identifier, colonToken, peerWorkerName);
}
/**
 * Parse left arrow (&lt;-) token.
 *
 * @return left arrow token
 */
private STNode parseLeftArrowToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LEFT_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.LEFT_ARROW_TOKEN);
        return parseLeftArrowToken();
    }
    return consume();
}
/**
 * Parse signed right shift token (&gt;&gt;).
 * Should only be called on a `DOUBLE_GT_TOKEN`, or on a `GT_TOKEN` followed by
 * another `GT_TOKEN`.
 *
 * @return Parsed node
 */
private STNode parseSignedRightShiftToken() {
    STNode openGTToken = consume();
    if (openGTToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
        return openGTToken;
    }

    // Merge the two `>` tokens into one `>>` token, keeping the outer minutiae.
    STToken endGTToken = consume();
    STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, openGTToken.leadingMinutiae(),
            endGTToken.trailingMinutiae());

    // Whitespace between the two `>` tokens is not allowed.
    if (hasTrailingMinutiae(openGTToken)) {
        doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
    }
    return doubleGTToken;
}
/**
 * Parse unsigned right shift token (&gt;&gt;&gt;).
 * Should only be called on a `TRIPPLE_GT_TOKEN`, or on a `GT_TOKEN` followed by
 * two more `GT_TOKEN`s.
 *
 * @return Parsed node
 */
private STNode parseUnsignedRightShiftToken() {
    STNode openGTToken = consume();
    if (openGTToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
        return openGTToken;
    }

    // Merge the three `>` tokens into one `>>>` token, keeping the outer minutiae.
    STNode middleGTToken = consume();
    STNode endGTToken = consume();
    STNode unsignedRShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
            openGTToken.leadingMinutiae(), endGTToken.trailingMinutiae());

    // Whitespace between any of the `>` tokens is not allowed.
    if (!hasTrailingMinutiae(openGTToken) && !hasTrailingMinutiae(middleGTToken)) {
        return unsignedRShiftToken;
    }
    return SyntaxErrors.addDiagnostic(unsignedRShiftToken,
            DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
}
/**
 * Parse wait action.
 * <p>
 * <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action</code>
 *
 * @return Wait action node
 */
private STNode parseWaitAction() {
    STNode waitKeyword = parseWaitKeyword();
    // An open brace distinguishes the multiple-wait form from the others.
    return peek().kind == SyntaxKind.OPEN_BRACE_TOKEN ? parseMultiWaitAction(waitKeyword)
            : parseSingleOrAlternateWaitAction(waitKeyword);
}
/**
 * Parse wait keyword.
 *
 * @return wait keyword
 */
private STNode parseWaitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WAIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.WAIT_KEYWORD);
        return parseWaitKeyword();
    }
    return consume();
}
/**
 * Parse single or alternate wait actions.
 * <p>
 * <code>
 * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
 * <br/>
 * wait-future-expr := expression but not mapping-constructor-expr
 * </code>
 *
 * @param waitKeyword wait keyword
 * @return Single or alternate wait action node
 */
private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
    STToken nextToken = peek();

    // No wait-future-expr present at all: synthesize a missing identifier with a
    // missing-wait-future-expression diagnostic and return early.
    if (isEndOfWaitFutureExprList(nextToken.kind)) {
        endContext();
        STNode waitFutureExprs = STNodeFactory
                .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
    }

    // Parse the first wait-future-expr, then any further exprs separated by `|`.
    // Elements alternate between expressions and their separator tokens.
    List<STNode> waitFutureExprList = new ArrayList<>();
    STNode waitField = parseWaitFutureExpr();
    waitFutureExprList.add(waitField);
    nextToken = peek();
    STNode waitFutureExprEnd;
    while (!isEndOfWaitFutureExprList(nextToken.kind)) {
        waitFutureExprEnd = parseWaitFutureExprEnd();
        if (waitFutureExprEnd == null) {
            // Separator parser signals the end of the list.
            break;
        }
        waitFutureExprList.add(waitFutureExprEnd);
        waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
    }
    endContext();
    // NOTE(review): the full alternate list (expressions and `|` separators) is parsed
    // above, yet only the first expression is used to build the wait action node —
    // presumably alternate-wait construction is pending; confirm before relying on
    // the rest of the list being represented in the tree.
    return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
}
/**
 * Check whether the given token terminates a wait-future-expr list.
 * Fix: removed the redundant {@code case PIPE_TOKEN:} label that simply fell
 * through to the default branch.
 *
 * @param nextTokenKind kind of the token to check
 * @return {@code true} if the list ends at this token, {@code false} otherwise
 */
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case OPEN_BRACE_TOKEN:
            return true;
        default:
            // A pipe (`|`) continues an alternate wait expression list.
            return false;
    }
}
// Parse a wait-future-expr and attach a diagnostic when the parsed node is a
// disallowed form (mapping constructor or an action).
private STNode parseWaitFutureExpr() {
    STNode futureExpr = parseActionOrExpression();
    if (futureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
        return SyntaxErrors.addDiagnostic(futureExpr,
                DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
    }
    if (isAction(futureExpr)) {
        return SyntaxErrors.addDiagnostic(futureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
    }
    return futureExpr;
}
// Parse the `|` separator between wait-future-exprs. Returns null when the list
// has ended or the next token cannot start an expression.
private STNode parseWaitFutureExprEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
        return null;
    }
    recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
    return parseWaitFutureExprEnd();
}
/**
 * Parse multiple wait action.
 * <p>
 * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
 *
 * @param waitKeyword Wait keyword
 * @return Multiple wait action node
 */
private STNode parseMultiWaitAction(STNode waitKeyword) {
    startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
    STNode lBrace = parseOpenBrace();
    STNode fields = parseWaitFields();
    STNode rBrace = parseCloseBrace();
    endContext();

    // An empty wait-field list is flagged on the open brace.
    lBrace = cloneWithDiagnosticIfListEmpty(fields, lBrace,
            DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
    STNode fieldsNode = STNodeFactory.createWaitFieldsListNode(lBrace, fields, rBrace);
    return STNodeFactory.createWaitActionNode(waitKeyword, fieldsNode);
}
// Parse a comma-separated list of wait fields; empty list when the next token
// already terminates the list.
private STNode parseWaitFields() {
    if (isEndOfWaitFields(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }

    List<STNode> fields = new ArrayList<>();
    fields.add(parseWaitField());
    while (!isEndOfWaitFields(peek().kind)) {
        STNode fieldEnd = parseWaitFieldEnd();
        if (fieldEnd == null) {
            break; // separator parser signals end of the list
        }
        fields.add(fieldEnd);
        fields.add(parseWaitField());
    }
    return STNodeFactory.createNodeList(fields);
}
// A wait-field list terminates at `}` or end of file.
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN;
}
// Parse the separator after a wait field: a comma continues the list,
// a close brace ends it (returns null), anything else triggers recovery.
private STNode parseWaitFieldEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.WAIT_FIELD_END);
    return parseWaitFieldEnd();
}
/**
 * Parse wait field.
 * <p>
 * <code>wait-field := variable-name | field-name : wait-future-expr</code>
 *
 * @return Receiver field node
 */
private STNode parseWaitField() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
        return createQualifiedWaitField(STNodeFactory.createSimpleNameReferenceNode(fieldName));
    }
    recover(nextToken, ParserRuleContext.WAIT_FIELD_NAME);
    return parseWaitField();
}
// Turn `identifier` into `identifier : wait-future-expr` when a colon follows;
// otherwise the identifier alone is the wait field.
private STNode createQualifiedWaitField(STNode identifier) {
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        return identifier;
    }
    STNode colonToken = parseColon();
    STNode futureExpr = parseWaitFutureExpr();
    return STNodeFactory.createWaitFieldNode(identifier, colonToken, futureExpr);
}
/**
 * Parse annot access expression.
 * <p>
 * <code>
 * annot-access-expr := expression .@ annot-tag-reference
 * <br/>
 * annot-tag-reference := qualified-identifier | identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the annot access
 * @param isInConditionalExpr whether parsing within a conditional expression
 * @return Parsed node
 */
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode chainingToken = parseAnnotChainingToken();
    STNode annotTagRef = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, chainingToken, annotTagRef);
}
/**
 * Parse annot-chaining-token (.@).
 *
 * @return Parsed node
 */
private STNode parseAnnotChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOT_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.ANNOT_CHAINING_TOKEN);
        return parseAnnotChainingToken();
    }
    return consume();
}
/**
 * Parse field access identifier.
 * <p>
 * <code>field-access-identifier := qualified-identifier | identifier</code>
 *
 * @param isInConditionalExpr whether parsing within a conditional expression
 * @return Parsed node
 */
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
    if (isPredeclaredIdentifier(peek().kind)) {
        return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
    }
    // No identifier available: fabricate a missing one carrying a diagnostic.
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    return parseQualifiedIdentifier(missingIdentifier, isInConditionalExpr);
}
/**
 * Parse query action.
 * <p>
 * <code>query-action := query-pipeline do-clause
 * <br/>
 * do-clause := do block-stmt
 * </code>
 *
 * @param queryConstructType Query construct type. This is only for validation
 * @param queryPipeline Query pipeline
 * @param selectClause Select clause if any. This is only for validation
 * @return Query action node
 */
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
    // A query action allows neither a construct type nor a select clause; attach
    // the offending nodes to the pipeline as invalid minutiae with diagnostics.
    if (queryConstructType != null) {
        queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
    }
    if (selectClause != null) {
        queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
    }

    startContext(ParserRuleContext.DO_CLAUSE);
    STNode doKeyword = parseDoKeyword();
    STNode doBlock = parseBlockNode();
    endContext();
    return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, doBlock);
}
/**
 * Parse 'do' keyword.
 *
 * @return do keyword node
 */
private STNode parseDoKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DO_KEYWORD) {
        recover(nextToken, ParserRuleContext.DO_KEYWORD);
        return parseDoKeyword();
    }
    return consume();
}
/**
 * Parse optional field access or xml optional attribute access expression.
 * <p>
 * <code>
 * optional-field-access-expr := expression ?. field-name
 * <br/>
 * xml-optional-attribute-access-expr := expression ?. xml-attribute-name
 * <br/>
 * xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
 * <br/>
 * xml-qualified-name := xml-namespace-prefix : identifier
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param lhsExpr Preceding expression of the optional access
 * @param isInConditionalExpr whether parsing within a conditional expression
 * @return Parsed node
 */
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode chainingToken = parseOptionalChainingToken();
    STNode fieldNameRef = parseFieldAccessIdentifier(isInConditionalExpr);
    return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, chainingToken, fieldNameRef);
}
/**
 * Parse optional chaining token (?.).
 *
 * @return parsed node
 */
private STNode parseOptionalChainingToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
        recover(nextToken, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
        return parseOptionalChainingToken();
    }
    return consume();
}
/**
 * Parse conditional expression.
 * <p>
 * <code>conditional-expr := expression ? expression : expression</code>
 *
 * @param lhsExpr Preceding expression of the question mark
 * @param isInConditionalExpr whether calling from a conditional-expr
 * @return Parsed node
 */
private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
    STNode questionMark = parseQuestionMark();
    STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);

    // If no colon follows, the colon of the conditional was likely consumed as part
    // of a qualified name reference (`prefix:identifier`) inside the middle
    // expression. Locate such a reference and re-split it at its colon so it yields
    // both the middle and end expressions of the conditional.
    if (peek().kind != SyntaxKind.COLON_TOKEN) {
        if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
            // Nested conditional: perform the same re-split inside the inner
            // conditional's middle expression, then rebuild the outer node around it.
            STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
            STNode innerMiddleExpr = innerConditionalExpr.middleExpression;
            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
            if (rightMostQNameRef != null) {
                middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                        innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                endContext();
                return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                        innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
            }
            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
            if (leftMostQNameRef != null) {
                middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                        innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                endContext();
                return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                        innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
            }
        }
        // Non-nested case: try the right-most qualified name reference first, then
        // the left-most one.
        STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
        if (rightMostQNameRef != null) {
            endContext();
            return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
        }
        STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
        if (leftMostQNameRef != null) {
            endContext();
            return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
        }
    }
    // Normal path: an explicit colon (possibly recovered) separates middle and end.
    return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
}
// Split the right-most `prefix:identifier` reference at its colon: the identifier
// becomes the conditional's end expression and the prefix replaces the qualified
// reference inside the middle expression.
private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                   STNode rightMostQualifiedNameRef) {
    STQualifiedNameReferenceNode qNameRef = (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
    STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qNameRef.identifier);
    STNode prefixRef = ConditionalExprResolver.getSimpleNameRefNode(qNameRef.modulePrefix);
    STNode newMiddleExpr = middleExpr.replace(rightMostQualifiedNameRef, prefixRef);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, newMiddleExpr, qNameRef.colon,
            endExpr);
}
// Split the left-most `prefix:identifier` reference at its colon: the prefix
// becomes the conditional's middle expression and the identifier replaces the
// qualified reference inside the end expression.
private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                  STNode leftMostQualifiedNameRef) {
    STQualifiedNameReferenceNode qNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
    STNode identifierRef = STNodeFactory.createSimpleNameReferenceNode(qNameRef.identifier);
    STNode newEndExpr = middleExpr.replace(leftMostQualifiedNameRef, identifierRef);
    STNode newMiddleExpr = ConditionalExprResolver.getSimpleNameRefNode(qNameRef.modulePrefix);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, newMiddleExpr, qNameRef.colon,
            newEndExpr);
}
// Parse the `: end-expr` tail of a conditional expression and assemble the node.
private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                       boolean isInConditionalExpr) {
    STNode colonToken = parseColon();
    endContext();
    STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, isInConditionalExpr);
    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colonToken, endExpr);
}
/**
 * Parse enum declaration.
 * <p>
 * module-enum-decl :=
 * metadata
 * [public] enum identifier { enum-member (, enum-member)* } [;]
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @param metadata  metadata preceding the declaration
 * @param qualifier optional visibility qualifier
 * @return Parsed enum node.
 */
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
    STNode enumKeyword = parseEnumKeyword();
    STNode enumName = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
    STNode openBrace = parseOpenBrace();
    STNode memberList = parseEnumMemberList();
    STNode closeBrace = parseCloseBrace();
    STNode semicolonToken = parseOptionalSemicolon();
    endContext();

    // An empty member list is flagged on the open brace.
    openBrace = cloneWithDiagnosticIfListEmpty(memberList, openBrace,
            DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
    return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeyword, enumName,
            openBrace, memberList, closeBrace, semicolonToken);
}
/**
 * Parse 'enum' keyword.
 *
 * @return enum keyword node
 */
private STNode parseEnumKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ENUM_KEYWORD) {
        recover(nextToken, ParserRuleContext.ENUM_KEYWORD);
        return parseEnumKeyword();
    }
    return consume();
}
/**
 * Parse enum member list.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return enum member list node.
 */
private STNode parseEnumMemberList() {
    startContext(ParserRuleContext.ENUM_MEMBER_LIST);

    // Empty member list: the body closes immediately.
    // Fix: close the ENUM_MEMBER_LIST context before returning. Previously this
    // early return skipped endContext(), leaving the parser's context stack
    // unbalanced relative to the normal exit path below.
    if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }

    // First enum member, then (separator, member) pairs until the close brace.
    List<STNode> enumMemberList = new ArrayList<>();
    STNode enumMember = parseEnumMember();
    STNode enumMemberRhs;
    while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
        enumMemberRhs = parseEnumMemberEnd();
        if (enumMemberRhs == null) {
            break;
        }
        enumMemberList.add(enumMember);
        enumMemberList.add(enumMemberRhs);
        enumMember = parseEnumMember();
    }

    // Append the trailing member parsed above.
    enumMemberList.add(enumMember);
    endContext();
    return STNodeFactory.createNodeList(enumMemberList);
}
/**
 * Parse enum member.
 * <p>
 * enum-member := metadata identifier [= const-expr]
 * </p>
 *
 * @return Parsed enum member node.
 */
private STNode parseEnumMember() {
    STToken nextToken = peek();
    STNode metadata;
    // Metadata (doc string and/or annotations) is optional for a member.
    if (nextToken.kind == SyntaxKind.DOCUMENTATION_STRING || nextToken.kind == SyntaxKind.AT_TOKEN) {
        metadata = parseMetaData();
    } else {
        metadata = STNodeFactory.createEmptyNode();
    }

    STNode memberName = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
    return parseEnumMemberRhs(metadata, memberName);
}
// Parse the optional `= const-expr` tail of an enum member.
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.EQUAL_TOKEN) {
        STNode equalToken = parseAssignOp();
        STNode constExpr = parseExpression();
        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExpr);
    }
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // No const-expr: both the `=` and the expression slots stay empty.
        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, STNodeFactory.createEmptyNode(),
                STNodeFactory.createEmptyNode());
    }
    recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
    return parseEnumMemberRhs(metadata, identifierNode);
}
// Parse the separator after an enum member: a comma continues the list,
// a close brace ends it (returns null), anything else triggers recovery.
private STNode parseEnumMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.ENUM_MEMBER_END);
    return parseEnumMemberEnd();
}
/**
 * Parse a statement that begins with the `transaction` keyword: either a
 * transaction statement (`transaction { ... }`) or a variable declaration whose
 * type descriptor is a qualified identifier with the `transaction` prefix.
 *
 * @param annots             annotations preceding the statement, if any
 * @param qualifiers         qualifiers preceding the statement
 * @param transactionKeyword already-consumed `transaction` keyword token
 * @return Parsed node
 */
private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
            // `transaction {` — a transaction statement; annots/qualifiers are invalid here.
            reportInvalidStatementAnnots(annots, qualifiers);
            reportInvalidQualifierList(qualifiers);
            return parseTransactionStatement(transactionKeyword);
        case COLON_TOKEN:
            // `transaction:Identifier ...` — a var-decl with a qualified type reference.
            if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            // Colon not followed by an identifier: deliberate fall-through to recovery.
        default:
            Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
            // If recovery kept the token or inserted a colon, treat it as the var-decl case.
            if (solution.action == Action.KEEP ||
                    (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
            }
            return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
    }
}
/**
 * Parse transaction statement.
 * <p>
 * <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
 *
 * @param transactionKeyword already-consumed `transaction` keyword
 * @return Transaction statement node
 */
private STNode parseTransactionStatement(STNode transactionKeyword) {
    startContext(ParserRuleContext.TRANSACTION_STMT);
    STNode transactionBody = parseBlockNode();
    endContext();
    // The optional on-fail clause follows the block, outside the context.
    return STNodeFactory.createTransactionStatementNode(transactionKeyword, transactionBody,
            parseOptionalOnFailClause());
}
/**
 * Parse commit action.
 * <p>
 * <code>commit-action := "commit"</code>
 *
 * @return Commit action node
 */
private STNode parseCommitAction() {
    return STNodeFactory.createCommitActionNode(parseCommitKeyword());
}
/**
 * Parse commit keyword.
 *
 * @return parsed node
 */
private STNode parseCommitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.COMMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.COMMIT_KEYWORD);
        return parseCommitKeyword();
    }
    return consume();
}
/**
 * Parse retry statement.
 * <p>
 * <code>
 * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
 * <br/>
 * retry-spec := [type-parameter] [ `(` arg-list `)` ]
 * </code>
 *
 * @return Retry statement node
 */
private STNode parseRetryStatement() {
    startContext(ParserRuleContext.RETRY_STMT);
    STNode retryKeyword = parseRetryKeyword();
    // The RETRY_STMT context is closed inside parseRetryTypeParamRhs().
    return parseRetryKeywordRhs(retryKeyword);
}
// Parse what follows the `retry` keyword: an optional type parameter, then the
// retry-spec argument list and body.
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
    switch (peek().kind) {
        case LT_TOKEN:
            return parseRetryTypeParamRhs(retryKeyword, parseTypeParameter());
        case OPEN_PAREN_TOKEN:
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            // No type parameter present.
            return parseRetryTypeParamRhs(retryKeyword, STNodeFactory.createEmptyNode());
        default:
            recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
            return parseRetryKeywordRhs(retryKeyword);
    }
}
// Parse the optional parenthesized argument list and the retry body, then close
// the RETRY_STMT context opened in parseRetryStatement().
private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
    STNode args;
    switch (peek().kind) {
        case OPEN_PAREN_TOKEN:
            args = parseParenthesizedArgList();
            break;
        case OPEN_BRACE_TOKEN:
        case TRANSACTION_KEYWORD:
            args = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
            return parseRetryTypeParamRhs(retryKeyword, typeParam);
    }

    STNode retryBody = parseRetryBody();
    endContext(); // closes RETRY_STMT
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, retryBody, onFailClause);
}
// Parse the retry body: either a block statement or a nested transaction statement.
private STNode parseRetryBody() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseBlockNode();
    }
    if (nextToken.kind == SyntaxKind.TRANSACTION_KEYWORD) {
        return parseTransactionStatement(consume());
    }
    recover(nextToken, ParserRuleContext.RETRY_BODY);
    return parseRetryBody();
}
/**
 * Parse optional on fail clause.
 *
 * @return Parsed node, or an empty node when no on-fail clause is present
 */
private STNode parseOptionalOnFailClause() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
        return parseOnFailClause();
    }
    if (!isEndOfRegularCompoundStmt(nextToken.kind)) {
        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }
    return STNodeFactory.createEmptyNode();
}
// A compound statement (without an on-fail clause) ends at `}`, `;`, `@`, EOF,
// or at any token that can begin the next statement.
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
    if (nodeKind == SyntaxKind.CLOSE_BRACE_TOKEN || nodeKind == SyntaxKind.SEMICOLON_TOKEN
            || nodeKind == SyntaxKind.AT_TOKEN || nodeKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    return isStatementStartingToken(nodeKind);
}
// Check whether the given token kind can start a statement.
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
            return true;
        default:
            // Type-descriptor or expression starting tokens can also begin a statement.
            return isTypeStartingToken(nodeKind) || isValidExpressionStart(nodeKind, 1);
    }
}
/**
 * Parse on fail clause.
 * <p>
 * <code>
 * on-fail-clause := on fail [typed-binding-pattern] statement-block
 * </code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    startContext(ParserRuleContext.ON_FAIL_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();

    // The typed binding pattern is optional: absent when the block follows directly.
    STNode typeDesc = STNodeFactory.createEmptyNode();
    STNode varName = STNodeFactory.createEmptyNode();
    if (peek().kind != SyntaxKind.OPEN_BRACE_TOKEN) {
        typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
                TypePrecedence.DEFAULT);
        varName = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
    }

    STNode blockStmt = parseBlockNode();
    endContext();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDesc, varName, blockStmt);
}
/**
 * Parse retry keyword.
 *
 * @return parsed node
 */
private STNode parseRetryKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETRY_KEYWORD) {
        recover(nextToken, ParserRuleContext.RETRY_KEYWORD);
        return parseRetryKeyword();
    }
    return consume();
}
/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();
    STNode expression;
    // The expression is optional: a bare `rollback;` is valid.
    if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
        expression = STNodeFactory.createEmptyNode();
    } else {
        expression = parseExpression();
    }

    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ROLLBACK_KEYWORD) {
        recover(nextToken, ParserRuleContext.ROLLBACK_KEYWORD);
        return parseRollbackKeyword();
    }
    return consume();
}
/**
 * Parse transactional expression.
 * <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    return STNodeFactory.createTransactionalExpressionNode(parseTransactionalKeyword());
}
/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRANSACTIONAL_KEYWORD) {
        recover(nextToken, ParserRuleContext.TRANSACTIONAL_KEYWORD);
        return parseTransactionalKeyword();
    }
    return consume();
}
/**
 * Parse base16 literal.
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    // The leading keyword decides whether content is validated as base16 or base64.
    STNode type;
    if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
        type = parseBase16Keyword();
    } else {
        type = parseBase64Keyword();
    }

    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    // If even the opening backtick could not be found, skip content parsing and
    // build a literal with missing backticks plus a missing-content diagnostic.
    if (startingBackTick.isMissing()) {
        startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        STNode content = STNodeFactory.createEmptyNode();
        STNode byteArrayLiteral =
                STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
        byteArrayLiteral =
                SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
        return byteArrayLiteral;
    }

    STNode content = parseByteArrayContent();
    // Validation and assembly of the final node happens in the 3-arg overload.
    return parseByteArrayLiteral(type, startingBackTick, content);
}
/**
 * Parse byte array literal.
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Single content item: validate it against the declared encoding. Invalid
        // content is attached to the opening backtick as invalid-node minutiae with
        // an encoding-specific diagnostic, and the content slot stays empty.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // Multiple items (e.g. interpolations) are never valid byte array content:
        // fold every item into the opening backtick's minutiae, then diagnose once.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }

    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}
/**
 * Parse <code>base16</code> keyword.
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BASE16_KEYWORD) {
        recover(nextToken, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
    return consume();
}
/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BASE64_KEYWORD) {
        recover(nextToken, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
    return consume();
}
/**
 * Parse byte array literal content between the backticks.
 * Validation of the collected items happens later.
 *
 * @return parsed node
 */
private STNode parseByteArrayContent() {
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        items.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(items);
}
/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    int hexDigitCount = 0;
    for (char c : content.toCharArray()) {
        // Whitespace may appear anywhere and is ignored.
        if (c == LexerTerminals.TAB || c == LexerTerminals.NEWLINE || c == LexerTerminals.CARRIAGE_RETURN
                || c == LexerTerminals.SPACE) {
            continue;
        }
        if (!isHexDigit(c)) {
            return false;
        }
        hexDigitCount++;
    }
    // Hex digits must come in pairs — two digits per encoded byte.
    return hexDigitCount % 2 == 0;
}
/**
 * Validate base64 literal content.
 * <p>
 * <code>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * <br/>
 * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
 * <br/>
 * PaddedBase64Group :=
 * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
 * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
 * <br/>
 * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
 * <br/>
 * PaddingChar := =
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase64LiteralContent(String content) {
    int base64CharCount = 0;
    int paddingCharCount = 0;
    for (int i = 0; i < content.length(); i++) {
        char c = content.charAt(i);
        // Whitespace may appear anywhere between characters.
        if (c == LexerTerminals.TAB || c == LexerTerminals.NEWLINE || c == LexerTerminals.CARRIAGE_RETURN
                || c == LexerTerminals.SPACE) {
            continue;
        }
        if (c == LexerTerminals.EQUAL) {
            paddingCharCount++;
            continue;
        }
        // Reject non-base64 characters, and base64 characters appearing after padding started.
        if (!isBase64Char(c) || paddingCharCount > 0) {
            return false;
        }
        base64CharCount++;
    }
    if (paddingCharCount > 2) {
        return false;
    }
    if (paddingCharCount == 0) {
        return base64CharCount % 4 == 0;
    }
    // With padding, the final group must still total four characters.
    return base64CharCount % 4 == 4 - paddingCharCount;
}
/**
 * <p>
 * Check whether a given char is a base64 char.
 * </p>
 * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
 *
 * @param c character to check
 * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
 */
static boolean isBase64Char(int c) {
    if (('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')) {
        return true;
    }
    return c == '+' || c == '/' || isDigit(c);
}
/**
 * Check whether a given char is a hexadecimal digit (0-9, a-f, A-F).
 *
 * @param c character to check
 * @return <code>true</code> if the character is a hex digit, <code>false</code> otherwise
 */
static boolean isHexDigit(int c) {
    return ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F') || isDigit(c);
}
/**
 * Check whether a given char is an ASCII decimal digit (0-9).
 *
 * @param c character to check
 * @return <code>true</code> if the character is a decimal digit, <code>false</code> otherwise
 */
static boolean isDigit(int c) {
    return c >= '0' && c <= '9';
}
/**
 * Parse xml filter expression.
 * <p>
 * <code>xml-filter-expr := expression .< xml-name-pattern ></code>
 *
 * @param lhsExpr Preceding expression of .< token
 * @return Parsed node
 */
private STNode parseXMLFilterExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, parseXMLFilterExpressionRhs());
}
/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>filer-expression-rhs := .< xml-name-pattern ></code>
 *
 * @return Parsed node
 */
private STNode parseXMLFilterExpressionRhs() {
    // The chain starts with the '.<' token, followed by the name pattern and '>'.
    return parseXMLNamePatternChain(parseDotLTToken());
}
/**
 * Parse xml name pattern chain.
 * <p>
 * <code>
 * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
 * <br/>
 * filer-expression-rhs := .< xml-name-pattern >
 * <br/>
 * xml-element-children-step := /< xml-name-pattern >
 * <br/>
 * xml-element-descendants-step := /**\/<xml-name-pattern >
 * </code>
 *
 * @param startToken Preceding token of xml name pattern
 * @return Parsed node
 */
private STNode parseXMLNamePatternChain(STNode startToken) {
    startContext(ParserRuleContext.XML_NAME_PATTERN);
    STNode namePattern = parseXMLNamePattern();
    STNode gtToken = parseGTToken();
    endContext();
    // An empty name pattern gets a missing-pattern diagnostic attached to the start token.
    startToken = cloneWithDiagnosticIfListEmpty(namePattern, startToken,
            DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
    return STNodeFactory.createXMLNamePatternChainingNode(startToken, namePattern, gtToken);
}
/**
 * Parse <code> .< </code> token.
 *
 * @return Parsed node
 */
private STNode parseDotLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DOT_LT_TOKEN) {
        // Unexpected token: run error recovery and retry.
        recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
        return parseDotLTToken();
    }
    return consume();
}
/**
 * Parse xml name pattern.
 * <p>
 * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
 *
 * @return Parsed node
 */
private STNode parseXMLNamePattern() {
    List<STNode> patternList = new ArrayList<>();
    if (isEndOfXMLNamePattern(peek().kind)) {
        // Empty pattern; caller reports the missing-pattern diagnostic.
        return STNodeFactory.createNodeList(patternList);
    }
    // First atomic name pattern, then alternating (separator, pattern) pairs.
    patternList.add(parseXMLAtomicNamePattern());
    while (!isEndOfXMLNamePattern(peek().kind)) {
        STNode separator = parseXMLNamePatternSeparator();
        if (separator == null) {
            break;
        }
        patternList.add(separator);
        patternList.add(parseXMLAtomicNamePattern());
    }
    return STNodeFactory.createNodeList(patternList);
}
/**
 * Check whether the given token kind terminates an xml name pattern.
 * Identifiers, '*' and ':' continue the pattern; '>' or EOF ends it.
 *
 * @param tokenKind kind of the next token
 * @return <code>true</code> if the pattern ends here, <code>false</code> otherwise
 */
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.GT_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the '|' separator between xml atomic name patterns.
 *
 * @return separator token, or <code>null</code> when the pattern list ends ('>' or EOF)
 */
private STNode parseXMLNamePatternSeparator() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN) {
        return consume();
    }
    if (token.kind == SyntaxKind.GT_TOKEN || token.kind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
    return parseXMLNamePatternSeparator();
}
/**
 * Parse xml atomic name pattern.
 * <p>
 * <code>
 * xml-atomic-name-pattern :=
 * *
 * | identifier
 * | xml-namespace-prefix : identifier
 * | xml-namespace-prefix : *
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePattern() {
    startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
    STNode pattern = parseXMLAtomicNamePatternBody();
    endContext();
    return pattern;
}
/**
 * Parse the body of an xml atomic name pattern: a bare '*', or an identifier
 * optionally followed by a ':'-qualified name part.
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePatternBody() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ASTERISK_TOKEN) {
        return consume();
    }
    if (token.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
        return parseXMLAtomicNamePatternBody();
    }
    return parseXMLAtomicNameIdentifier(consume());
}
/**
 * Continue an xml atomic name pattern after its leading identifier. When a ':' follows
 * and then an identifier or '*', produces a qualified atomic name pattern; otherwise the
 * identifier alone becomes a simple name reference.
 *
 * @param identifier the already-consumed leading identifier
 * @return qualified atomic name pattern, or a simple name reference
 */
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
    STToken token = peek();
    if (token.kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = consume();
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
            STToken endToken = consume();
            return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
        }
        // NOTE(review): the colon consumed above is not attached to any returned node when it
        // is not followed by an identifier or '*' — looks like the token is dropped; confirm
        // whether recovery elsewhere accounts for it.
    }
    return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
 * Parse xml step expression.
 * <p>
 * <code>xml-step-expr := expression xml-step-start</code>
 *
 * @param lhsExpr Preceding expression of /*, /<, or /**\/< token
 * @return Parsed node
 */
private STNode parseXMLStepExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLStepExpressionNode(lhsExpr, parseXMLStepStart());
}
/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>
 * xml-step-start :=
 * xml-all-children-step
 * | xml-element-children-step
 * | xml-element-descendants-step
 * <br/>
 * xml-all-children-step := /*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLStepStart() {
    STToken token = peek();
    if (token.kind == SyntaxKind.SLASH_ASTERISK_TOKEN) {
        // xml-all-children-step: no name pattern follows '/*'.
        return consume();
    }
    STNode startToken;
    if (token.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        startToken = parseDoubleSlashDoubleAsteriskLTToken();
    } else {
        // Covers SLASH_LT_TOKEN; any other token recovers inside parseSlashLTToken().
        startToken = parseSlashLTToken();
    }
    return parseXMLNamePatternChain(startToken);
}
/**
 * Parse <code> /< </code> token.
 *
 * @return Parsed node
 */
private STNode parseSlashLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SLASH_LT_TOKEN) {
        // Unexpected token: run error recovery and retry.
        recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
        return parseSlashLTToken();
    }
    return consume();
}
/**
 * Parse <code> /**\/< </code> token.
 *
 * @return Parsed node
 */
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        // Unexpected token: run error recovery and retry.
        recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        return parseDoubleSlashDoubleAsteriskLTToken();
    }
    return consume();
}
/**
 * Parse match statement.
 * <p>
 * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
 *
 * @return Match statement
 */
private STNode parseMatchStatement() {
    startContext(ParserRuleContext.MATCH_STMT);
    STNode matchKeyword = parseMatchKeyword();
    STNode actionOrExpr = parseActionOrExpression();
    startContext(ParserRuleContext.MATCH_BODY);
    STNode openBrace = parseOpenBrace();
    List<STNode> clauseList = new ArrayList<>();
    while (!isEndOfMatchClauses(peek().kind)) {
        clauseList.add(parseMatchClause());
    }
    STNode matchClauses = STNodeFactory.createNodeList(clauseList);
    if (isNodeListEmpty(matchClauses)) {
        // The grammar requires at least one match clause.
        openBrace = SyntaxErrors.addDiagnostic(openBrace,
                DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
    }
    STNode closeBrace = parseCloseBrace();
    endContext(); // MATCH_BODY
    endContext(); // MATCH_STMT
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
            onFailClause);
}
/**
 * Parse match keyword.
 *
 * @return Match keyword node
 */
private STNode parseMatchKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.MATCH_KEYWORD) {
        // Unexpected token: run error recovery and retry.
        recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
        return parseMatchKeyword();
    }
    return consume();
}
/**
 * Check whether the match-clause list ends at the given token kind.
 *
 * @param nextTokenKind kind of the next token
 * @return <code>true</code> if no more match clauses should be parsed
 */
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.TYPE_KEYWORD) {
        return true;
    }
    // Fall back to the generic end-of-statements check.
    return isEndOfStatements();
}
/**
 * Parse a single match match clause.
 * <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] => block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode matchPatterns = parseMatchPatternList();
    STNode matchGuard = parseMatchGuard();
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode blockStmt = parseBlockNode();
    if (isNodeListEmpty(matchPatterns)) {
        // No pattern was parsed: synthesize a missing identifier pattern and attach the
        // diagnostic to the nearest following node (guard if present, else the arrow).
        STToken missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        matchPatterns = STNodeFactory.createNodeList(
                STNodeFactory.createSimpleNameReferenceNode(missingIdentifier));
        DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
        if (matchGuard == null) {
            rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
        } else {
            matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
        }
    }
    return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}
/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard, or an empty node when absent
 */
private STNode parseMatchGuard() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IF_KEYWORD) {
        STNode ifKeyword = parseIfKeyword();
        STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
        return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
    }
    if (nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // Guard is optional; '=>' means there is none.
        return STNodeFactory.createEmptyNode();
    }
    recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
    return parseMatchGuard();
}
/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> patterns = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode pattern = parseMatchPattern();
        if (pattern == null) {
            break;
        }
        patterns.add(pattern);
        STNode separator = parseMatchPatternListMemberRhs();
        if (separator == null) {
            break;
        }
        patterns.add(separator);
    }
    endContext();
    return STNodeFactory.createNodeList(patterns);
}
/**
 * Check whether the match-pattern list ends at the given token kind.
 *
 * @param nextTokenKind kind of the next token
 * @return <code>true</code> if the pattern list ends here
 */
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.PIPE_TOKEN || nextTokenKind == SyntaxKind.IF_KEYWORD
            || nextTokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN;
}
/**
 * Parse match pattern.
 * <p>
 * <code>
 * match-pattern := var binding-pattern
 * | wildcard-match-pattern
 * | const-pattern
 * | list-match-pattern
 * | mapping-match-pattern
 * | error-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    STToken nextToken = peek();
    // A (possibly qualified) identifier may start either an error match pattern
    // (error-type-reference before '(') or a constant pattern.
    if (isPredeclaredIdentifier(nextToken.kind)) {
        STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
        return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
    switch (nextToken.kind) {
        // Literals and '(' start a constant pattern (incl. signed numeric literals).
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case VAR_KEYWORD:
            // var binding-pattern
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseErrorMatchPattern();
        default:
            // Unexpected token: recover and retry.
            recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}
/**
 * Parse the '|' separator between match patterns in a match-pattern list.
 *
 * @return separator token, or <code>null</code> when the list ends ('if' or '=>')
 */
private STNode parseMatchPatternListMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    if (nextToken.kind == SyntaxKind.IF_KEYWORD || nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
    return parseMatchPatternListMemberRhs();
}
/**
 * Parse var typed binding pattern.
 * <p>
 * <code>var binding-pattern</code>
 * </p>
 *
 * @return Parsed typed binding pattern node
 */
private STNode parseVarTypedBindingPattern() {
    // The 'var' keyword acts as the type descriptor of the binding pattern.
    STNode varTypeDesc = createBuiltinSimpleNameReference(parseVarKeyword());
    return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, parseBindingPattern());
}
/**
 * Parse var keyword.
 *
 * @return Var keyword node
 */
private STNode parseVarKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.VAR_KEYWORD) {
        // Unexpected token: run error recovery and retry.
        recover(nextToken, ParserRuleContext.VAR_KEYWORD);
        return parseVarKeyword();
    }
    return consume();
}
/**
 * Parse list match pattern.
 * <p>
 * <code>
 * list-match-pattern := [ list-member-match-patterns ]
 * list-member-match-patterns :=
 * match-pattern (, match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * </code>
 * </p>
 *
 * @return Parsed list match pattern node
 */
private STNode parseListMatchPattern() {
    startContext(ParserRuleContext.LIST_MATCH_PATTERN);
    STNode openBracketToken = parseOpenBracket();
    List<STNode> matchPatternList = new ArrayList<>();
    STNode listMatchPatternMemberRhs = null;
    boolean isEndOfFields = false;
    // Parse (member, separator) pairs. A rest pattern must be the last member, so
    // hitting one ends this loop and switches to invalidation mode below.
    while (!isEndOfListMatchPattern()) {
        STNode listMatchPatternMember = parseListMatchPatternMember();
        matchPatternList.add(listMatchPatternMember);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            isEndOfFields = true;
            break;
        }
        if (listMatchPatternMemberRhs != null) {
            matchPatternList.add(listMatchPatternMemberRhs);
        } else {
            break;
        }
    }
    // Anything after a rest match pattern is invalid: attach the trailing separator and
    // each extra member to the last valid node as invalid-node minutiae.
    while (isEndOfFields && listMatchPatternMemberRhs != null) {
        updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
            break;
        }
        STNode invalidField = parseListMatchPatternMember();
        updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
    }
    STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
}
/**
 * Check whether the list match pattern ends at the next token (']' or EOF).
 *
 * @return <code>true</code> if no more list members should be parsed
 */
public boolean isEndOfListMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse a single member of a list match pattern: either a rest match pattern
 * (starting with '...') or an ordinary match pattern.
 *
 * @return Parsed member node
 */
private STNode parseListMatchPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    return parseMatchPattern();
}
/**
 * Parse rest match pattern.
 * <p>
 * <code>
 * rest-match-pattern := ... var variable-name
 * </code>
 * </p>
 *
 * @return Parsed rest match pattern node
 */
private STNode parseRestMatchPattern() {
    startContext(ParserRuleContext.REST_MATCH_PATTERN);
    STNode ellipsisToken = parseEllipsis();
    STNode varKeywordToken = parseVarKeyword();
    STNode variableName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode varRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
    return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, varRef);
}
/**
 * Parse the ',' separator between list match pattern members.
 *
 * @return comma token, or <code>null</code> when the list ends (']' or EOF)
 */
private STNode parseListMatchPatternMemberRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
    return parseListMatchPatternMemberRhs();
}
/**
 * Parse mapping match pattern.
 * <p>
 * mapping-match-pattern := { field-match-patterns }
 * <br/>
 * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * <br/>
 * field-match-pattern := field-name : match-pattern
 * <br/>
 * rest-match-pattern := ... var variable-name
 * </p>
 *
 * @return Parsed Node.
 */
private STNode parseMappingMatchPattern() {
    startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
    STNode openBrace = parseOpenBrace();
    STNode fieldPatterns = parseFieldMatchPatternList();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingMatchPatternNode(openBrace, fieldPatterns, closeBrace);
}
/**
 * Parse the field match patterns of a mapping match pattern, starting with the first member.
 *
 * @return node list of field match patterns (possibly empty)
 */
private STNode parseFieldMatchPatternList() {
    STNode firstMember = parseFieldMatchPatternMember();
    if (firstMember == null) {
        // '{' immediately followed by '}' (or EOF): no fields.
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> fieldMatchPatterns = new ArrayList<>();
    fieldMatchPatterns.add(firstMember);
    if (firstMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
        // A rest pattern must be last; everything after it is invalidated.
        invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
        return STNodeFactory.createNodeList(fieldMatchPatterns);
    }
    return parseFieldMatchPatternList(fieldMatchPatterns);
}
/**
 * Continue parsing (separator, member) pairs of a mapping match pattern.
 *
 * @param fieldMatchPatterns list already containing the first member
 * @return node list of field match patterns
 */
private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode separator = parseFieldMatchPatternRhs();
        if (separator == null) {
            break;
        }
        fieldMatchPatterns.add(separator);
        STNode member = parseFieldMatchPatternMember();
        if (member == null) {
            // Trailing separator: synthesize a missing field match pattern.
            member = createMissingFieldMatchPattern();
        }
        fieldMatchPatterns.add(member);
        if (member.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // Rest pattern must be last; invalidate anything that follows.
            invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
            break;
        }
    }
    return STNodeFactory.createNodeList(fieldMatchPatterns);
}
/**
 * Create a synthetic <code>field-name : match-pattern</code> node made entirely of
 * missing tokens, carrying a missing-member diagnostic.
 *
 * @return field match pattern node with a diagnostic attached
 */
private STNode createMissingFieldMatchPattern() {
    STNode missingFieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    STNode missingColon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
    STNode missingPattern = STNodeFactory.createSimpleNameReferenceNode(
            SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
    STNode fieldMatchPattern =
            STNodeFactory.createFieldMatchPatternNode(missingFieldName, missingColon, missingPattern);
    return SyntaxErrors.addDiagnostic(fieldMatchPattern,
            DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
}
/**
 * Parse and invalidate all field match pattern members after a rest-match-pattern.
 *
 * @param fieldMatchPatterns field-match-patterns list
 */
private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            break;
        }
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // Separator with no member after it: attach the stray separator token itself
            // to the last valid node, reported as an invalid token.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
        } else {
            // Attach both the separator (no extra diagnostic) and the member (flagged as
            // a pattern after the rest pattern) to the last valid node.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        }
    }
}
/**
 * Parse one member of a mapping match pattern: a named field pattern or a rest pattern.
 *
 * @return member node, or <code>null</code> when the mapping ends ('}' or EOF)
 */
private STNode parseFieldMatchPatternMember() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseFieldMatchPattern();
    }
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN || nextToken.kind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
    return parseFieldMatchPatternMember();
}
/**
 * Parse field match pattern.
 * <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldName = parseVariableName();
    STNode colon = parseColon();
    return STNodeFactory.createFieldMatchPatternNode(fieldName, colon, parseMatchPattern());
}
/**
 * Check whether the mapping match pattern ends at the next token ('}' or EOF).
 *
 * @return <code>true</code> if no more field match patterns should be parsed
 */
public boolean isEndOfMappingMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the ',' separator between field match patterns.
 *
 * @return comma token, or <code>null</code> when the mapping ends ('}' or EOF)
 */
private STNode parseFieldMatchPatternRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
    return parseFieldMatchPatternRhs();
}
/**
 * Disambiguate a parsed (possibly qualified) identifier between an error match pattern
 * (identifier is a type reference followed by '(') and a constant pattern.
 *
 * @param typeRefOrConstExpr already-parsed identifier/qualified identifier
 * @return error match pattern or the identifier itself as a constant pattern
 */
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // Identifier followed by '(': treat it as an error match pattern whose
            // 'error' keyword is missing. The context opened here is ended by
            // parseErrorMatchPattern(errorKeyword, typeRef).
            STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                    ParserRuleContext.ERROR_KEYWORD);
            startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
            return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
        default:
            // A pattern-terminating token means the identifier is a constant pattern.
            if (isMatchPatternEnd(peek().kind)) {
                return typeRefOrConstExpr;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
            return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
}
/**
 * Check whether the given token kind terminates a match pattern.
 *
 * @param tokenKind kind of the next token
 * @return <code>true</code> if the pattern ends here
 */
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN || tokenKind == SyntaxKind.COMMA_TOKEN
            || tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || tokenKind == SyntaxKind.PIPE_TOKEN
            || tokenKind == SyntaxKind.IF_KEYWORD || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse functional match pattern.
 * <p>
 * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
 * error-arg-list-match-pattern :=
 * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
 * | [error-field-match-patterns]
 * error-message-match-pattern := simple-match-pattern
 * error-cause-match-pattern := simple-match-pattern | error-match-pattern
 * simple-match-pattern :=
 * wildcard-match-pattern
 * | const-pattern
 * | var variable-name
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * </p>
 *
 * @return Parsed functional match pattern node.
 */
private STNode parseErrorMatchPattern() {
    startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
    // Caller guarantees the next token is the 'error' keyword.
    return parseErrorMatchPattern(consume());
}
/**
 * Continue an error match pattern after the 'error' keyword: parse the optional
 * error-type-reference preceding the open parenthesis.
 *
 * @param errorKeyword already-parsed 'error' keyword
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // No type reference before the argument list.
        return parseErrorMatchPattern(errorKeyword, STNodeFactory.createEmptyNode());
    }
    if (isPredeclaredIdentifier(nextToken.kind)) {
        return parseErrorMatchPattern(errorKeyword, parseTypeReference());
    }
    recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
    return parseErrorMatchPattern(errorKeyword);
}
/**
 * Parse the parenthesized argument list of an error match pattern and assemble the node.
 * Also ends the ERROR_MATCH_PATTERN context opened by the caller.
 *
 * @param errorKeyword 'error' keyword (possibly a missing token)
 * @param typeRef optional error type reference (may be an empty node)
 * @return Parsed error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argList = parseErrorArgListMatchPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParen, argList, closeParen);
}
/**
 * Parse the full error-arg-list-match-pattern: an optional message pattern, an optional
 * second (cause/named/rest) arg, then the error field match patterns. Positionally
 * invalid args are attached to neighboring nodes as invalid-node minutiae.
 *
 * @return node list of argument match patterns
 */
private STNode parseErrorArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    if (isEndOfErrorFieldMatchPatterns()) {
        // Empty argument list: error( ).
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
    startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
    STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
    endContext();
    if (isSimpleMatchPattern(firstArg.kind)) {
        // First arg is the error-message-match-pattern; a second arg may follow.
        argListMatchPatterns.add(firstArg);
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
        if (argEnd != null) {
            STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
            if (isValidSecondArgMatchPattern(secondArg.kind)) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(secondArg);
            } else {
                // Invalid second arg: fold the separator and the arg into the previous node.
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        // Without a message pattern only named-arg or rest patterns may start the list.
        if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
        } else {
            argListMatchPatterns.add(firstArg);
        }
    }
    parseErrorFieldMatchPatterns(argListMatchPatterns);
    return STNodeFactory.createNodeList(argListMatchPatterns);
}
/**
 * Check whether the given node kind is a simple-match-pattern
 * (wildcard, constant pattern, or a var-typed binding pattern).
 *
 * @param matchPatternKind kind of the parsed pattern node
 * @return <code>true</code> if the kind is a simple match pattern
 */
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
    return matchPatternKind == SyntaxKind.IDENTIFIER_TOKEN
            || matchPatternKind == SyntaxKind.SIMPLE_NAME_REFERENCE
            || matchPatternKind == SyntaxKind.QUALIFIED_NAME_REFERENCE
            || matchPatternKind == SyntaxKind.NUMERIC_LITERAL
            || matchPatternKind == SyntaxKind.STRING_LITERAL
            || matchPatternKind == SyntaxKind.NULL_LITERAL
            || matchPatternKind == SyntaxKind.NIL_LITERAL
            || matchPatternKind == SyntaxKind.BOOLEAN_LITERAL
            || matchPatternKind == SyntaxKind.TYPED_BINDING_PATTERN
            || matchPatternKind == SyntaxKind.UNARY_EXPRESSION;
}
/**
 * Check whether the given node kind is allowed as the second argument of an error
 * match pattern: a cause pattern (simple or error), a named arg, or a rest pattern.
 *
 * @param syntaxKind kind of the parsed pattern node
 * @return <code>true</code> if the kind is valid as the second argument
 */
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
    if (syntaxKind == SyntaxKind.ERROR_MATCH_PATTERN || syntaxKind == SyntaxKind.NAMED_ARG_MATCH_PATTERN
            || syntaxKind == SyntaxKind.REST_MATCH_PATTERN) {
        return true;
    }
    return isSimpleMatchPattern(syntaxKind);
}
/**
 * Parse error field match patterns.
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * @param argListMatchPatterns list the parsed (separator, arg) pairs are appended to
 */
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
    // Tracks the kind of the last positionally-valid arg; seeded with NAMED_ARG so the
    // first field arg is validated as if it followed a named arg.
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
    while (!isEndOfErrorFieldMatchPatterns()) {
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
        if (argEnd == null) {
            // ')' reached: argument list is complete.
            break;
        }
        STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(argEnd);
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListMatchPatterns.size() == 0) {
            // Nothing valid parsed yet: attach the invalid nodes to the upcoming token.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            // Fold the invalid separator and arg into the last valid node.
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }
    }
}
/**
 * Check whether the error field match patterns end at the next token.
 * Error field match patterns terminate exactly where error field binding patterns do.
 *
 * @return <code>true</code> if no more field match patterns should be parsed
 */
private boolean isEndOfErrorFieldMatchPatterns() {
    return isEndOfErrorFieldBindingPatterns();
}
/**
 * Parse the ',' separator between error arg list match patterns.
 *
 * @param currentCtx recovery context to use on an unexpected token
 * @return comma token, or <code>null</code> when the arg list ends (')')
 */
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(peek(), currentCtx);
    return parseErrorArgListMatchPatternEnd(currentCtx);
}
/**
 * Parse a single argument of an error match pattern: a named-arg or simple pattern
 * (identifier start), a rest pattern, an ordinary match pattern, or a var-typed
 * binding pattern.
 *
 * @param context recovery context to use on an unexpected token
 * @return parsed argument node
 */
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    // Identifier start: either 'name = pattern' (named arg) or a constant pattern.
    if (isPredeclaredIdentifier(nextToken.kind)) {
        return parseNamedArgOrSimpleMatchPattern();
    }
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        // Tokens that start an ordinary match pattern.
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        case VAR_KEYWORD:
            // 'var name' becomes a typed binding pattern (capture or wildcard).
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            // Separator directly followed by ')': report a missing pattern.
            return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}
/**
 * Disambiguate an identifier-started error arg: a qualified reference, or an
 * unqualified name not followed by '=', is a simple pattern; otherwise it is the
 * arg-name of a named-arg match pattern.
 *
 * @return simple pattern reference, or a named-arg match pattern node
 */
private STNode parseNamedArgOrSimpleMatchPattern() {
    STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
    boolean isNamedArg =
            constRefExpr.kind != SyntaxKind.QUALIFIED_NAME_REFERENCE && peek().kind == SyntaxKind.EQUAL_TOKEN;
    if (!isNamedArg) {
        return constRefExpr;
    }
    return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);
}
/**
 * Parses the next named arg match pattern.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 * <br/>
 * <br/>
 *
 * @param identifier already-parsed arg-name
 * @return arg match pattern list node added the new arg match pattern
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equalToken = parseAssignOp();
    STNode matchPattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equalToken, matchPattern);
}
/**
 * Validate the ordering of error field match pattern args: only named-arg and rest
 * patterns are permitted, and nothing may follow a rest pattern.
 *
 * @param prevArgKind kind of the previous positionally-valid arg
 * @param currentArgKind kind of the arg being validated
 * @return error code describing the violation, or <code>null</code> when valid
 */
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN
            && currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
        return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
        // A rest match pattern must be the final argument.
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/**
 * Parse markdown documentation.
 *
 * @return markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> docLines = new ArrayList<>();
    // Consecutive documentation-string tokens merge into one markdown documentation node.
    while (peek().kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken documentationString = consume();
        appendParsedDocumentationLines(docLines, parseDocumentationString(documentationString));
    }
    return STNodeFactory.createMarkdownDocumentationNode(STNodeFactory.createNodeList(docLines));
}
/**
 * Parse documentation string.
 *
 * @param documentationStringToken documentation-string token to re-lex and parse
 * @return markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Carry the token's leading trivia and diagnostics into the documentation sub-parser.
    List<STNode> leadingTrivia = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>(documentationStringToken.diagnostics());
    DocumentationLexer lexer = new DocumentationLexer(
            CharReader.from(documentationStringToken.text()), leadingTrivia, diagnostics);
    DocumentationParser docParser = new DocumentationParser(new TokenReader(lexer));
    return docParser.parse();
}
/**
 * Collect the children of a leading-minutiae node into a list.
 *
 * @param leadingMinutiaeNode Leading minutiae node
 * @return List of leading trivia nodes
 */
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> trivia = new ArrayList<>(count);
    for (int bucket = 0; bucket < count; bucket++) {
        trivia.add(leadingMinutiaeNode.childInBucket(bucket));
    }
    return trivia;
}
/**
 * Append every child of the parsed doc-lines node to the accumulating list.
 *
 * @param markdownDocLineList Accumulator for markdown doc lines
 * @param parsedDocLines Parsed documentation lines node
 */
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    for (int bucket = 0; bucket < parsedDocLines.bucketCount(); bucket++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(bucket));
    }
}
/**
 * Parse any statement that starts with a token that is ambiguous between
 * a type-desc and an expression.
 *
 * @param annots Annotations preceding the statement
 * @param qualifiers Preceding type-desc qualifiers
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, parseTypedBindingPatternOrExpr(qualifiers, true));
}
/**
 * Continue an ambiguous statement once it has resolved to either a
 * typed-binding-pattern (var-decl) or an expression statement.
 *
 * @param annots Annotations preceding the statement
 * @param typedBindingPatternOrExpr Resolved typed-BP or expression node
 * @return Statement node
 */
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Resolved to a var-decl; there are no qualifiers at this point.
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, new ArrayList<>(), typedBindingPatternOrExpr, false);
    }
    STNode lhsExpr = getExpression(typedBindingPatternOrExpr);
    STNode fullExpr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, lhsExpr, false, true));
    return parseStatementStartWithExprRhs(fullExpr);
}
/**
 * Parse a typed-binding-pattern or expression with no preceding qualifiers.
 *
 * @param allowAssignment Whether this may be an lvalue expression
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    return parseTypedBindingPatternOrExpr(new ArrayList<>(), allowAssignment);
}
/**
 * Parse a construct that is ambiguous between a typed-binding-pattern and an expression,
 * dispatching on the first token and deferring the final decision to the rhs parser.
 *
 * @param qualifiers Preceding type-desc qualifiers (reported as invalid on expression paths)
 * @param allowAssignment Whether this may be a valid lvalue expression
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // An identifier could start either a type or an expression; defer to the rhs.
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        // Basic literals may start a singleton type or an expression; defer to the rhs.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Definitely an expression (or action) start.
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Otherwise treat it as the start of a typed-binding-pattern.
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
 * Parse the component after the ambiguous starting node. The ambiguous node could be either
 * an expr or a type-desc. The component following it could be the binding-pattern or the
 * expression-rhs; the next token decides which interpretation wins.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *            valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // `T |` / `T &`: could be a union/intersection type or a binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `T | = ...`: keep only the lhs; the `=` belongs to an assignment.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // The rhs resolved to a typed-BP, so the whole construct is a typed-BP
                // whose type is the merged union/intersection.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T1|T2 = ...`: a var-decl that is missing its variable name.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                    pipeOrAndToken, rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isExpression(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // A type-desc followed by `;` is a var-decl with a missing binding pattern.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // A variable name (or `?`) after an ambiguous node means it was a type-desc.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN: // mapping binding pattern follows
        case ERROR_KEYWORD: // error binding pattern follows
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            // If the next token continues a valid expression, the lhs is an expression.
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            // Recover: name references can still be either, others use the general rule.
            STToken token = peek();
            SyntaxKind typeOrExprKind = typeOrExpr.kind;
            if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                    typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);
            } else {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);
            }
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
/**
 * Build a typed-binding-pattern whose variable name is missing: merge the two
 * sides into one type-desc and attach a capture-BP with a missing identifier.
 *
 * @param lhsType Lhs of the merged type
 * @param separatorToken `|` or `&` token separating the two types
 * @param rhsType Rhs of the merged type
 * @return Typed-binding-pattern node with a missing variable name
 */
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    STNode mergedType = mergeTypes(getTypeDescFromExpr(lhsType), separatorToken, getTypeDescFromExpr(rhsType));
    STNode missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
    return STNodeFactory.createTypedBindingPatternNode(mergedType, captureBP);
}
/**
 * Continue a typed-binding-pattern once an ambiguous node has resolved to a type-desc.
 *
 * @param typeDesc Resolved type-desc
 * @return Typed-binding-pattern node
 */
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    STNode complexType =
            parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    return parseTypedBindingPatternTypeRhs(complexType, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse a typed-binding-pattern or expression that starts with `(`.
 *
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    return isDefiniteTypeDesc(exprOrTypeDesc.kind)
            ? parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc)
            : parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}
/**
 * Check whether a kind is unambiguously a type-desc.
 * Relies on the type-desc kinds being contiguous in the {@code SyntaxKind} enum.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> if the kind is a definite type-desc kind
 */
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return SyntaxKind.RECORD_TYPE_DESC.compareTo(kind) <= 0 && SyntaxKind.FUTURE_TYPE_DESC.compareTo(kind) >= 0;
}
/**
 * Check whether a kind is unambiguously an expression. Name references are
 * excluded since they may still resolve to a type reference.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> if the kind is a definite expression kind
 */
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        return false;
    }
    // Relies on the expression kinds being contiguous in the SyntaxKind enum.
    return SyntaxKind.BINARY_EXPRESSION.compareTo(kind) <= 0 && SyntaxKind.ERROR_CONSTRUCTOR.compareTo(kind) >= 0;
}
/**
 * Check whether a kind is unambiguously an action.
 * Relies on the action kinds being contiguous in the {@code SyntaxKind} enum.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> if the kind is a definite action kind
 */
private boolean isDefiniteAction(SyntaxKind kind) {
    return SyntaxKind.REMOTE_METHOD_CALL_ACTION.compareTo(kind) <= 0 &&
            SyntaxKind.CLIENT_RESOURCE_ACCESS_ACTION.compareTo(kind) >= 0;
}
/**
 * Parse a type or expression that starts with `(`. Possibilities:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - parenthesized type-desc
 * 3) (expr) - parenthesized expression
 * 4) (param, param, ..) - anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    if (peek().kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()`: nil literal, nil type, or empty anon-func params.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                parseCloseParenthesis());
    }
    if (isExpression(typeOrExpr.kind)) {
        // Could still be a braced expression or the params of an implicit anon func.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // Otherwise this is a parenthesized type-desc.
    STNode innerType = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr),
            ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, innerType, parseCloseParenthesis());
}
/**
 * Parse a type-desc or expression with no preceding qualifiers.
 * This method does not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    return parseTypeDescOrExpr(new ArrayList<>());
}
/**
 * Parse a construct that is ambiguous between a type-desc and an expression,
 * given its preceding qualifiers. This method does not handle binding patterns.
 *
 * @param qualifiers Preceding type-desc qualifiers (reported as invalid on expression paths)
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            // Identifier could name a type or a variable; defer the decision to the rhs.
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            break;
        // Basic literals may start a singleton type or an expression; defer to the rhs.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Definitely a type: continue as a complex type-desc (arrays, optional, etc.).
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
/**
 * Check whether a kind represents an expression. Literal kinds are listed
 * explicitly; other expression kinds are detected via the contiguous
 * expression range in the {@code SyntaxKind} enum.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> if the kind is an expression kind
 */
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN ||
            kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    return SyntaxKind.BINARY_EXPRESSION.compareTo(kind) <= 0 && SyntaxKind.ERROR_CONSTRUCTOR.compareTo(kind) >= 0;
}
/**
 * Parse the continuation of an empty parenthesis `()`, which can be
 * 1) Nil literal
 * 2) Nil type-desc
 * 3) Anon-function params
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // `() =>` starts an implicit anonymous function with zero parameters.
        STNode emptyParams = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParam =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, emptyParams, closeParen);
        return parseImplicitAnonFunc(anonFuncParam, false);
    }
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse a construct starting with `function` that is either an anon-func expression
 * or a typed-binding-pattern whose type is a function-type-desc.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func expression, action, or typed-binding-pattern node
 */
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (!isAction(exprOrTypeDesc) && !isExpression(exprOrTypeDesc.kind)) {
        // Resolved to a function-type-desc: a binding pattern must follow.
        return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
    }
    return exprOrTypeDesc;
}
/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // A signature is present; the construct could still be either. Parse the
        // signature first and decide based on what follows it.
        funcSignature = parseFuncSignature(true);
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);
        qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // No signature: this can only be a function-type-desc.
    funcSignature = STNodeFactory.createEmptyNode();
    STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);
    qualifierList = nodes[0];
    functionKeyword = nodes[1];
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        // Not inside an ambiguous bracketed list: treat this as the type of a var-decl.
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
/**
 * Parse the remainder of an anon-func-expr or function-type-desc, once the qualifiers,
 * `function` keyword, and signature have been consumed.
 *
 * @param qualifierList Function type qualifiers
 * @param functionKeyword `function` keyword
 * @param funcSignature Parsed function signature
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body follows: this is an anonymous function expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // A variable name follows: a function-type-desc in a var-decl.
            // fall through
        default:
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
/**
 * Parse the rhs of an ambiguous type-desc-or-expression, resolving the ambiguity
 * based on the next token.
 *
 * @param typeOrExpr Ambiguous node parsed so far
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `T | = ...`: keep only the lhs; the `=` belongs to an assignment.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        pipeOrAndToken, rhsTypeDescOrExpr);
            }
            // Both sides are types: merge into a union/intersection type-desc.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // A variable name or `?` follows: the lhs must be a type-desc.
            typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr),
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // `T...`: a rest descriptor, so the lhs must be a type-desc.
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            // If the next token continues a valid expression, keep parsing as an expression.
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
/**
 * Check whether a node is still ambiguous between being a type-desc and an expression.
 * Name references, nil/basic literals, bracketed lists, and `|`/`&` combinations of
 * such nodes remain ambiguous; anything else is definitely one or the other.
 *
 * @param node Node to check
 * @return <code>true</code> if the node could be either a type-desc or an expression
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // Only `|` (union) and `&` (intersection) keep the ambiguity alive. The previous
            // condition (`!= PIPE || == BITWISE_AND`) made the bitwise-and check dead code
            // and wrongly classified `T1 & T2` as unambiguous.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&
                    binaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // `a[...]` is ambiguous (member-access vs array type) only if the container
            // and every key (ignoring comma separators) are themselves ambiguous.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Check whether a node is built entirely from basic literals, possibly combined
 * with `|`/`&` operators, braces, bracketed lists, or a unary `+`/`-` sign.
 *
 * @param node Node to check
 * @return <code>true</code> if the node consists of basic literals only
 */
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // Only `|` and `&` are allowed combinators. The previous condition
            // (`!= PIPE || == BITWISE_AND`) made the bitwise-and check dead code
            // and rejected `&` combinations outright.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&
                    binaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            // NOTE(review): delegating to isAmbiguous (rather than recursing into
            // isAllBasicLiterals) mirrors the original code, although it also accepts
            // name references — confirm this is intended.
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            // Every member (ignoring comma separators) must itself be basic literals.
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only a signed numeric literal qualifies.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}
/**
 * Check whether a node is a numeric literal.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a numeric literal
 */
private boolean isNumericLiteral(STNode node) {
    return node.kind == SyntaxKind.NUMERIC_LITERAL;
}
/**
 * Parse a binding pattern, dispatching on the leading token.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 *                  | wildcard-binding-pattern
 *                  | list-binding-pattern
 *                  | mapping-binding-pattern
 *                  | functional-binding-pattern
 * </code>
 * <p>
 * A capture binding pattern is a variable name and a wildcard binding pattern is `_`
 * (both start with an identifier). A list binding pattern `[ ... ]` starts with `[`,
 * a mapping binding pattern `{ field-binding-patterns }` starts with `{`, and a
 * functional (error) binding pattern starts with the `error` keyword. List, mapping,
 * and error-arg member lists may each end with an optional rest-binding-pattern
 * (<code>... variable-name</code>).
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseListBindingPattern();
    }
    if (nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseBindingPatternStartsWithIdentifier();
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return parseMappingBindingPattern();
    }
    if (nextTokenKind == SyntaxKind.ERROR_KEYWORD) {
        return parseErrorBindingPattern();
    }
    recover(peek(), ParserRuleContext.BINDING_PATTERN);
    return parseBindingPattern();
}
/**
 * Parse a binding pattern that starts with an identifier, disambiguating between a
 * capture/wildcard binding pattern and an error binding pattern missing its `error` keyword.
 *
 * @return Binding pattern node
 */
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `T(...)`: an error binding pattern whose `error` keyword is missing.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A qualified name cannot be a capture-BP; substitute a missing identifier
        // and attach the invalid node as leading minutiae with a diagnostic.
        STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern,
                DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        return STNodeFactory.createCaptureBindingPatternNode(identifier);
    }
    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}
/**
 * Create either a wildcard binding pattern (for `_`) or a capture binding pattern
 * from a variable name.
 *
 * @param varName Variable name token
 * @return Wildcard or capture binding pattern node
 */
private STNode createCaptureOrWildcardBP(STNode varName) {
    return isWildcardBP(varName)
            ? getWildcardBindingPattern(varName)
            : STNodeFactory.createCaptureBindingPatternNode(varName);
}
/**
 * Parse a list-binding-pattern.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 *                               | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    STNode listBindingPattern = parseListBindingPattern(openBracket, new ArrayList<>());
    endContext();
    return listBindingPattern;
}
/**
 * Parse the members of a list-binding-pattern after the open bracket.
 *
 * @param openBracket Open bracket
 * @param bindingPatternsList Accumulator for member nodes
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (bindingPatternsList.isEmpty() && isEndOfListBindingPattern(peek().kind)) {
        // Empty list binding pattern: `[]`.
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
    STNode firstMember = parseListBindingPatternMember();
    bindingPatternsList.add(firstMember);
    return parseListBindingPattern(openBracket, firstMember, bindingPatternsList);
}
/**
 * Parse the remaining members of a list-binding-pattern, after the first member.
 *
 * @param openBracket Open bracket
 * @param firstMember First already-parsed member
 * @param bindingPatterns Accumulator for member and separator nodes
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    // Consume `, member` pairs. A rest-binding-pattern must be last, so stop after one.
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            // No separator found: the member list has ended.
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
/**
 * Parse the separator after a list-binding-pattern member.
 *
 * @return Comma token, or <code>null</code> when the member list has ended
 */
private STNode parseListBindingPatternMemberRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        // No separator: end of the member list.
        return null;
    }
    recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
    return parseListBindingPatternMemberRhs();
}
/**
 * Check whether a token kind terminates a list-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> if the list-binding-pattern has ended
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse a single list-binding-pattern member: either a rest-binding-pattern
 * (<code>... variable-name</code>) or any ordinary binding pattern.
 *
 * @return List binding pattern member
 */
private STNode parseListBindingPatternMember() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN || nextTokenKind == SyntaxKind.IDENTIFIER_TOKEN ||
            nextTokenKind == SyntaxKind.OPEN_BRACE_TOKEN || nextTokenKind == SyntaxKind.ERROR_KEYWORD) {
        return parseBindingPattern();
    }
    recover(peek(), ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
    return parseListBindingPatternMember();
}
/**
 * Parse a rest binding pattern.
 * <p>
 * <code>rest-binding-pattern := ... variable-name</code>
 *
 * @return Rest binding pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    STNode nameRef = STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, (STSimpleNameReferenceNode) nameRef);
}
/**
 * Parse a typed-binding-pattern with no preceding qualifiers.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @param context Parsing context of the enclosing statement
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    return parseTypedBindingPattern(new ArrayList<>(), context);
}
/**
 * Parse a typed-binding-pattern: a type-descriptor followed by a binding pattern.
 *
 * @param qualifiers Preceding type-desc qualifiers
 * @param context Parsing context of the enclosing statement
 * @return Typed binding pattern node
 */
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
            true, false, TypePrecedence.DEFAULT);
    return parseTypedBindingPatternTypeRhs(typeDesc, context);
}
/**
 * Parse a mapping-binding-pattern.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 *                         | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    if (isEndOfMappingBindingPattern(peek().kind)) {
        // Empty mapping binding pattern: `{}`.
        STNode closeBrace = parseCloseBrace();
        STNode emptyMembers = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, emptyMembers, closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode firstMember = parseMappingBindingPatternMember();
    // A rest-binding-pattern is appended by the continuation, after any separator.
    if (firstMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(firstMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, firstMember);
}
/**
 * Parse the remaining members of a mapping-binding-pattern, after the first member.
 *
 * @param openBrace Open brace
 * @param bindingPatterns Members parsed so far (excluding any rest-binding-pattern)
 * @param prevMember Most recently parsed member
 * @return mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    STNode mappingBindingPatternRhs = null;
    // Consume `, member` pairs. A rest-binding-pattern must be the last member.
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            // No separator found: the member list has ended.
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    // The rest-binding-pattern (if any) goes last, after its leading separator.
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}
/**
 * Parse one mapping-binding-pattern member: a rest-binding-pattern if `...`
 * follows, otherwise a field-binding-pattern.
 * <p>
 * <code>field-binding-pattern := field-name : binding-pattern | variable-name</code>
 *
 * @return mapping-binding-pattern member node
 */
private STNode parseMappingBindingPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    return parseFieldBindingPattern();
}
/**
 * Parse the separator after a mapping-binding-pattern member.
 *
 * @return Comma token, or <code>null</code> when the member list has ended
 */
private STNode parseMappingBindingPatternEnd() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // No separator: end of the member list.
        return null;
    }
    recover(peek(), ParserRuleContext.MAPPING_BINDING_PATTERN_END);
    return parseMappingBindingPatternEnd();
}
/**
 * Parse a field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    if (peek().kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
        return parseFieldBindingPattern();
    }
    STNode fieldName = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
    STNode fieldNameRef = STNodeFactory.createSimpleNameReferenceNode(fieldName);
    return parseFieldBindingPattern(fieldNameRef);
}
/**
 * Parse the rest of a field-binding-pattern, given its already-parsed field name.
 *
 * @param simpleNameReference Field name reference
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern(STNode simpleNameReference) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        // Shorthand form: just the variable name.
        return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
    }
    if (nextTokenKind == SyntaxKind.COLON_TOKEN) {
        // Full form: `field-name : binding-pattern`.
        STNode colon = parseColon();
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
    }
    recover(peek(), ParserRuleContext.FIELD_BINDING_PATTERN_END);
    return parseFieldBindingPattern(simpleNameReference);
}
/**
 * Check whether a token kind terminates a mapping-binding-pattern. The pattern also
 * ends at anything that terminates the enclosing module-level construct.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> if the mapping-binding-pattern has ended
 */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return true;
    }
    return isEndOfModuleLevelNode(1);
}
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    // Disambiguate an error binding pattern from an `error` type descriptor
    // by inspecting the tokens after the `error` keyword.
    STToken nextNextToken = peek(2);
    if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `error(` — definitely a binding pattern.
        return parseAsErrorBindingPattern();
    }
    if (nextNextToken.kind == SyntaxKind.LT_TOKEN) {
        // `error<` — a parameterized error type descriptor.
        return parseAsErrorTypeDesc(annots);
    }
    if (nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        // `error Foo:` or `error Foo(` — a binding pattern with a type
        // reference; any other continuation is a type descriptor.
        SyntaxKind tokenAfterIdentifier = peek(3).kind;
        if (tokenAfterIdentifier == SyntaxKind.COLON_TOKEN ||
                tokenAfterIdentifier == SyntaxKind.OPEN_PAREN_TOKEN) {
            return parseAsErrorBindingPattern();
        }
    }
    return parseAsErrorTypeDesc(annots);
}
private STNode parseAsErrorBindingPattern() {
    // Parse an error binding pattern followed by an assignment RHS.
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    STNode errorBindingPattern = parseErrorBindingPattern();
    return parseAssignmentStmtRhs(errorBindingPattern);
}
private STNode parseAsErrorTypeDesc(STNode annots) {
    // Parse as a variable declaration whose type-desc starts with `error`.
    // There is no `final` keyword at this point.
    STNode emptyFinalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), emptyFinalKeyword);
}
/**
* Parse error binding pattern node.
* <p>
* <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
* <br/><br/>
* error-arg-list-binding-pattern :=
* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
* | [error-field-binding-patterns]
* <br/><br/>
* error-message-binding-pattern := simple-binding-pattern
* <br/><br/>
* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
* <br/><br/>
* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
* <br/><br/>
* error-field-binding-patterns :=
* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
* | rest-binding-pattern
* <br/><br/>
* named-arg-binding-pattern := arg-name = binding-pattern
*
* @return Error binding pattern node.
*/
private STNode parseErrorBindingPattern() {
    // Entry point: open the error-binding-pattern context and consume the
    // `error` keyword before dispatching to the detailed parser.
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    return parseErrorBindingPattern(parseErrorKeyword());
}
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    // After `error` there may be an optional error-type-reference before the
    // open parenthesis.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        STNode emptyTypeRef = STNodeFactory.createEmptyNode();
        return parseErrorBindingPattern(errorKeyword, emptyTypeRef);
    }
    if (isPredeclaredIdentifier(nextToken.kind)) {
        return parseErrorBindingPattern(errorKeyword, parseTypeReference());
    }
    recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
    return parseErrorBindingPattern(errorKeyword);
}
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    // error [error-type-reference] ( error-arg-list-binding-pattern )
    STNode openParen = parseOpenParenthesis();
    STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParen, argListBindingPatterns,
            closeParen);
}
/**
* Parse error arg list binding pattern.
* <p>
* <code>
* error-arg-list-binding-pattern :=
* error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
* | [error-field-binding-patterns]
* <br/><br/>
* <p>
* error-message-binding-pattern := simple-binding-pattern
* <br/><br/>
* <p>
* error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
* <br/><br/>
* <p>
* simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
* <br/><br/>
* <p>
* error-field-binding-patterns :=
* named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
* | rest-binding-pattern
* <br/><br/>
* <p>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return Error arg list binding patterns.
*/
private STNode parseErrorArgListBindingPatterns() {
    // `error()` with no args: return an empty node list straight away.
    if (isEndOfErrorFieldBindingPatterns()) {
        return STNodeFactory.createNodeList(new ArrayList<>());
    }
    return parseErrorArgListBindingPatterns(new ArrayList<>());
}
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
    // Parse the first arg of the error arg list and decide, from its kind,
    // which section of the grammar it belongs to.
    STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
    if (firstArg == null) {
        // Immediately hit the close-paren: empty arg list.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    switch (firstArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Simple binding pattern: this is the error-message-binding-pattern.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        case ERROR_BINDING_PATTERN:
            // An error-binding-pattern here can only be the cause; the
            // mandatory error-message is missing, so synthesize it plus the
            // separating comma, each carrying a diagnostic.
            STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
            missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                    DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
            STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
            argListBindingPatterns.add(missingErrorMsgBP);
            argListBindingPatterns.add(missingComma);
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // No message/cause: go straight to the error-field section.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        default:
            // List/mapping binding patterns etc. are not allowed here; attach
            // the invalid node to the next token and retry.
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
}
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
    // The error-message has already been consumed; parse the optional cause
    // and/or error-field binding patterns.
    STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
    if (argEnd == null) {
        // Reached the close-paren: nothing follows the error message.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
    assert secondArg != null;
    switch (secondArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // Valid second arg (cause or first error-field member).
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(secondArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
        default:
            // Invalid second arg: attach the separator and the arg to the last
            // valid node as invalid trivia, then retry from here.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                    DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
    }
}
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                          SyntaxKind lastValidArgKind) {
    // Parse the remaining error-field binding patterns (named-args and an
    // optional trailing rest-binding-pattern), validating ordering rules.
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            // Reached the close-paren.
            break;
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
        assert currentArg != null;
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            // Valid member: keep the separator and the arg.
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // Nothing valid collected yet: attach invalid nodes to the next
            // token instead of a previous list entry.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            // Attach the invalid separator/arg to the last valid node.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}
private boolean isEndOfErrorFieldBindingPatterns() {
    // The arg list ends at the close-paren (or EOF on malformed input).
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    // Returns the consumed comma when more args follow, or null at the
    // closing parenthesis.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(nextToken, currentCtx);
    return parseErrorArgsBindingPatternEnd(currentCtx);
}
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
    // Parse one member of the error arg list: a rest-binding-pattern, a
    // named-arg or simple binding pattern, or any other binding pattern.
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // An identifier can start either a named-arg (`name = bp`) or a
            // simple capture/wildcard binding pattern.
            STNode argNameOrSimpleBindingPattern = consume();
            return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        case CLOSE_PAREN_TOKEN:
            if (isFirstArg) {
                // Empty arg list; the caller treats null as "no arg".
                return null;
            }
            // deliberate fall-through: `)` right after a comma is an error
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context, isFirstArg);
    }
}
/**
 * Parse the continuation of an identifier inside an error arg list: either a
 * named-arg binding pattern (<code>arg-name = binding-pattern</code>) when an
 * equal token follows, or a capture/wildcard binding pattern otherwise.
 *
 * @param argNameOrSimpleBindingPattern The already-consumed identifier token
 * @return Named-arg binding pattern, or a capture/wildcard binding pattern
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.EQUAL_TOKEN) {
        STNode equal = consume();
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                equal, bindingPattern);
    }
    // Any other token (typically `,` or `)`): the identifier stands alone as
    // a capture or wildcard binding pattern. (The original switch listed
    // COMMA_TOKEN and CLOSE_PAREN_TOKEN as dead labels falling into default.)
    return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
}
/**
 * Validate the ordering of members in the error-field-binding-patterns
 * section. Only named-arg and rest binding patterns are allowed there, and
 * nothing may follow a rest binding pattern.
 *
 * @param prevArgKind    Kind of the previous (last valid) arg
 * @param currentArgKind Kind of the arg being validated
 * @return {@code null} when the order is valid, otherwise the diagnostic to attach
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    if (currentArgKind == SyntaxKind.NAMED_ARG_BINDING_PATTERN ||
            currentArgKind == SyntaxKind.REST_BINDING_PATTERN) {
        if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
            return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
        }
        return null;
    }
    // Capture/wildcard/error/list/mapping binding patterns (and anything
    // else) are not permitted in the error-field section. (The original
    // switch enumerated these as dead labels falling into default.)
    return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
}
/*
* This parses Typed binding patterns and deals with ambiguity between types,
* and binding patterns. An example is 'T[a]'.
* The ambiguity lies in between:
* 1) Array Type
* 2) List binding pattern
* 3) Member access expression.
*/
/**
* Parse the component after the type-desc, of a typed-binding-pattern.
*
* @param typeDesc Starting type-desc of the typed-binding-pattern
* @return Typed-binding pattern
*/
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    // Root-level call: isRoot = true means recovery happens here instead of
    // returning the bare type-desc to a caller.
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // A capture, mapping or error binding pattern follows the type.
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case OPEN_BRACKET_TOKEN:
            // Ambiguous `T[...]`: array type-desc vs list binding pattern.
            STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
            assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
            return typedBindingPattern;
        case CLOSE_PAREN_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
            if (!isRoot) {
                // Nested invocation: let the caller handle the bare type-desc.
                return typeDesc;
            }
            // deliberate fall-through to recovery at the root level
        default:
            recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
            return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
    }
}
/**
* Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
*
* @param typeDescOrExpr Type desc or the expression at the start
* @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
* @return Parsed node
*/
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                      boolean allowAssignment, ParserRuleContext context) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    if (isBracketedListEnd(peek().kind)) {
        // `T[]`: definitely an array type-desc.
        return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
    }
    // Parse one bracketed member and classify the construct from its kind.
    STNode member = parseBracketedListMember(isTypedBindingPattern);
    SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
    switch (currentNodeType) {
        case ARRAY_TYPE_DESC:
            STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
            return typedBindingPattern;
        case LIST_BINDING_PATTERN:
            STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case INDEXED_EXPRESSION:
            return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
        case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
            // Still ambiguous; decide after the close bracket (below).
            break;
        case NONE:
        default:
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd != null) {
                // A comma follows, so this must be a list binding pattern.
                List<STNode> memberList = new ArrayList<>();
                memberList.add(getBindingPattern(member, true));
                memberList.add(memberEnd);
                bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            }
    }
    // Single ambiguous member: close the bracket and disambiguate from the
    // token that follows.
    STNode closeBracket = parseCloseBracket();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    // The bracketed member becomes the key expression of a member-access.
    STNode keyExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExprList = STNodeFactory.createNodeList(keyExpr);
    STNode memberAccessExpr =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExprList, closeBracket);
    // Continue with whatever expression follows the member-access.
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    // The bracketed list ends at `]` (or EOF on malformed input).
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
* Parse a member of an ambiguous bracketed list. This member could be:
* 1) Array length
* 2) Key expression of a member-access-expr
* 3) A member-binding pattern of a list-binding-pattern.
*
* @param isTypedBindingPattern Is this in a definite typed-binding pattern
* @return Parsed member node
*/
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
        case STRING_LITERAL_TOKEN:
            // Candidate array length or member-access key.
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // Empty member, e.g. `T[]`.
            return STNodeFactory.createEmptyNode();
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
        case ELLIPSIS_TOKEN:
        case OPEN_BRACKET_TOKEN:
            // Mapping/error/rest/list constructs that are themselves ambiguous.
            return parseStatementStartBracketedListMember();
        case IDENTIFIER_TOKEN:
            if (isTypedBindingPattern) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            // Not yet known to be a binding pattern: treat as expression start.
            break;
        default:
            if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                    isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                break;
            }
            ParserRuleContext recoverContext =
                    isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                            : ParserRuleContext.BRACKETED_LIST_MEMBER;
            recover(peek(), recoverContext);
            return parseBracketedListMember(isTypedBindingPattern);
    }
    // Reached via the `break`s above: parse an expression, mapping `_` to a
    // wildcard binding pattern.
    STNode expr = parseExpression();
    if (isWildcardBP(expr)) {
        return getWildcardBindingPattern(expr);
    }
    return expr;
}
/**
* Treat the current node as an array, and parse the remainder of the binding pattern.
*
* @param typeDesc Type-desc
* @param openBracket Open bracket
* @param member Member
* @return Parsed node
*/
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
    // Commit to the array-type interpretation of the open bracket.
    STNode elementTypeDesc = getTypeDescFromExpr(typeDesc);
    // Re-interpret the bracketed-list context as an array-type context.
    switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode closeBracket = parseCloseBracket();
    endContext();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(elementTypeDesc, openBracket, member, closeBracket, true, true,
            context);
}
private STNode parseBracketedListMemberEnd() {
    // Returns the consumed comma when more members follow, or null at `]`.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.BRACKETED_LIST_MEMBER_END);
    return parseBracketedListMemberEnd();
}
/**
* We reach here to break ambiguity of T[a]. This could be:
* 1) Array Type Desc
* 2) Member access on LHS
* 3) Typed-binding-pattern
*
* @param typeDescOrExpr Type name or the expr that precede the open-bracket.
* @param openBracket Open bracket
* @param member Member
* @param closeBracket Open bracket
* @param isTypedBindingPattern Is this is a typed-binding-pattern.
* @return Specific node that matches to T[a], after solving ambiguity.
*/
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    // Disambiguate `T[a]` (array type / member access / typed-binding-pattern)
    // from the token that follows the close bracket.
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // A binding pattern follows, so `T[a]` is an array type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            // Another `[`: either a further array dimension (typed binding
            // pattern) or a chained member access.
            if (isTypedBindingPattern) {
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            STNode keyExpr = getKeyExpr(member);
            STNode expr =
                    STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // `T[a]?` — an optional type-desc wrapping the array type.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // `T[a] | ...` or `T[a] & ...` — union/intersection, still ambiguous.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // `in` is only valid in foreach/from/join clauses; elsewhere fall
            // through to recovery.
            if (context != ParserRuleContext.FOREACH_STMT &&
                    context != ParserRuleContext.FROM_CLAUSE &&
                    context != ParserRuleContext.JOIN_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            // `=` terminates the typed-binding-pattern except in foreach/from,
            // where `in` is expected instead.
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // A valid lvalue: interpret as a member-access assignment target.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            // `;` ends the statement; only valid outside foreach/from.
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN:
        case COMMA_TOKEN:
            // Inside an ambiguous statement these terminate a member-access.
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // deliberate fall-through
        default:
            // Any expression-RHS starter (operator, `(`, etc.) means this was
            // a member-access expression.
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }
    // Recovery: choose the context that matches what we already know.
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
private STNode getKeyExpr(STNode member) {
    if (member != null) {
        return STNodeFactory.createNodeList(member);
    }
    // No key expression was parsed: synthesize a missing identifier so the
    // member-access node is still well-formed, with a diagnostic attached.
    STToken missingKey = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    STNode missingKeyRef = STNodeFactory.createSimpleNameReferenceNode(missingKey);
    return STNodeFactory.createNodeList(missingKeyRef);
}
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode memberBindingPatterns;
    if (isEmpty(member)) {
        memberBindingPatterns = STNodeFactory.createEmptyNodeList();
    } else if (member.kind == SyntaxKind.NUMERIC_LITERAL || member.kind == SyntaxKind.ASTERISK_LITERAL) {
        // A numeric or `*` member is an array length, so this is an
        // array-type-desc whose variable name is missing.
        STNode elementTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, elementTypeDesc);
        STToken missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
        STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
        return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, captureBP);
    } else {
        memberBindingPatterns = STNodeFactory.createNodeList(getBindingPattern(member, true));
    }
    // Otherwise the bracketed part is a list binding pattern.
    STNode listBindingPattern =
            STNodeFactory.createListBindingPatternNode(openBracket, memberBindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, listBindingPattern);
}
/**
* Parse a union or intersection type-desc/binary-expression that involves ambiguous
* bracketed list in lhs.
* <p>
* e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
* <p>
* Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
* is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
* if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
* the type-desc, and <code>[b]</code> becomes the binding pattern.
*
* @param typeDescOrExpr Type desc or the expression
* @param openBracket Open bracket
* @param member Member
* @param closeBracket Close bracket
* @return Parsed node
*/
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                      STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // The RHS resolved to a typed-binding-pattern: merge the lhs array
        // type with the rhs type-desc into one combined type.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
        STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }
    if (isTypedBindingPattern) {
        // Known to be a binding pattern but no variable name followed:
        // produce a typed-binding-pattern with a missing variable name.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }
    // Otherwise this is a binary expression whose lhs is a member-access.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr =
            STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}
/**
* Merges two types separated by <code>|</code> or <code>&</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param pipeOrAndToken pipe or bitwise-and token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
    // Dispatch on the operator token: `|` builds a union, `&` an intersection.
    boolean isUnion = pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN;
    return isUnion ? mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc)
            : mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
}
/**
* Merges two types separated by <code>|</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param pipeToken pipe token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
    if (rhsTypeDesc.kind != SyntaxKind.UNION_TYPE_DESC) {
        return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
    }
    // Union is left-associative: attach lhs at the leftmost leaf of rhs.
    return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
}
/**
* Merges two types separated by <code>&</code> into one type, while taking precedence
* and associativity into account.
*
* @param lhsTypeDesc lhs type
* @param bitwiseAndToken bitwise-and token
* @param rhsTypeDesc rhs type
* @return a TypeDescriptorNode
*/
private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
    // `&` binds tighter than `|`, so when either operand is a union the new
    // intersection must be attached below the union node(s).
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // Join lhs-union's right operand with rhs-intersection's leftmost
            // operand, keeping the union at the top.
            rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Both sides are unions: intersect the inner operands, then
            // re-join the outer union parts.
            rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                    lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
        } else {
            // Intersect lhs-union's right operand with the plain rhs.
            rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        }
    }
    if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // Plain lhs, union rhs: intersect with rhs's leftmost operand.
        STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
    } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Left-associativity: attach lhs at rhs-intersection's leftmost leaf.
        STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
    } else {
        return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
    }
}
private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                              STUnionTypeDescriptorNode unionTypeDesc) {
    // Walk down the left spine of the union until a non-union leaf is found,
    // then union `typeDesc` with that leaf.
    STNode leftMost = unionTypeDesc.leftTypeDesc;
    if (leftMost.kind == SyntaxKind.UNION_TYPE_DESC) {
        STNode rewrittenLeft =
                replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftMost);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, rewrittenLeft);
    }
    STNode newLeft = createUnionTypeDesc(typeDesc, pipeToken, leftMost);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, newLeft);
}
private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                     STUnionTypeDescriptorNode unionTypeDesc) {
    // Walk down the left spine; intersect `typeDesc` with the leftmost
    // non-union operand (recursing into a nested intersection if present).
    STNode leftMost = unionTypeDesc.leftTypeDesc;
    if (leftMost.kind == SyntaxKind.UNION_TYPE_DESC) {
        STNode rewrittenLeft = replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                (STUnionTypeDescriptorNode) leftMost);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, rewrittenLeft);
    }
    if (leftMost.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STNode rewrittenLeft = replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                (STIntersectionTypeDescriptorNode) leftMost);
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, rewrittenLeft);
    }
    STNode newLeft = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftMost);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, newLeft);
}
private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,
                                                            STNode bitwiseAndToken,
                                                            STIntersectionTypeDescriptorNode intersectionTypeDesc) {
    // Walk down the left spine of the intersection; intersect `typeDesc`
    // with the leftmost non-intersection operand.
    STNode leftMost = intersectionTypeDesc.leftTypeDesc;
    if (leftMost.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STNode rewrittenLeft = replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                (STIntersectionTypeDescriptorNode) leftMost);
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, rewrittenLeft);
    }
    STNode newLeft = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftMost);
    return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, newLeft);
}
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    // The array dimension binds tighter than `|` / `&`, so apply it to the
    // right-most operand and re-merge the result.
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode arrayOnRight = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        return mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, arrayOnRight);
    }
    if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode arrayOnRight =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        return mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, arrayOnRight);
    }
    return createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
}
/**
* Parse union (|) or intersection (&) type operator.
*
* @return pipe or bitwise and token
*/
private STNode parseUnionOrIntersectionToken() {
    STToken token = peek();
    boolean isUnionOrIntersection =
            token.kind == SyntaxKind.PIPE_TOKEN || token.kind == SyntaxKind.BITWISE_AND_TOKEN;
    if (!isUnionOrIntersection) {
        recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        return parseUnionOrIntersectionToken();
    }
    return consume();
}
/**
* Infer the type of the ambiguous bracketed list, based on the type of the member.
*
* @param memberNode Member node
* @return Inferred type of the bracketed list
*/
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // `[]` alone: cannot decide yet.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL:
            // `[*]` is an inferred array length.
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE:
        case REST_TYPE:
            // Qualified names and rest types only occur in tuple type-descs.
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL:
            if (isTypedBindingPattern) {
                return SyntaxKind.ARRAY_TYPE_DESC;
            }
            // Could be an array length (`T[2]`) or a member-access key.
            return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case SIMPLE_NAME_REFERENCE:
        case BRACKETED_LIST:
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            // Still ambiguous.
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
        default:
            // Any other expression kind.
            if (isTypedBindingPattern) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}
/*
* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
* The ambiguity lies in between:
* 1) Assignment that starts with list binding pattern
* 2) Var-decl statement that starts with tuple type
* 3) Statement that starts with list constructor, such as sync-send, etc.
*/
/**
* Parse any statement that starts with an open-bracket.
*
* @param annots Annotations attached to the statement.
* @return Parsed node
*/
/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots Annotations attached to the statement
 * @param possibleMappingField Whether this could be a mapping-constructor field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    final boolean isRoot = true;
    return parseStatementStartsWithOpenBracket(annots, isRoot, possibleMappingField);
}
/**
 * Parse a bracketed-list member that is itself a bracketed list.
 * No annotations are attached and this is not the root list.
 *
 * @return Parsed node
 */
private STNode parseMemberBracketedList() {
    STNode emptyAnnots = STNodeFactory.createEmptyNodeList();
    return parseStatementStartsWithOpenBracket(emptyAnnots, false, false);
}
/**
* The bracketed list at the start of a statement can be one of the following.
* 1) List binding pattern
* 2) Tuple type
* 3) List constructor
*
* @param isRoot Is this the root of the list
* @return Parsed node
*/
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
STNode openBracket = parseOpenBracket();
List<STNode> memberList = new ArrayList<>();
// Parse members one by one; as soon as a member disambiguates the construct,
// hand the already-collected members off to the specific parse routine.
while (!isBracketedListEnd(peek().kind)) {
STNode member = parseStatementStartBracketedListMember();
SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
switch (currentNodeType) {
case TUPLE_TYPE_DESC:
// Member is a definite type-desc: continue it as a complex type, wrap as a
// tuple member, and parse the rest as a tuple type.
member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
case MEMBER_TYPE_DESC:
case REST_TYPE:
return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
case LIST_BINDING_PATTERN:
return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
case LIST_CONSTRUCTOR:
return parseAsListConstructor(openBracket, memberList, member, isRoot);
case LIST_BP_OR_LIST_CONSTRUCTOR:
return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
case TUPLE_TYPE_DESC_OR_LIST_CONST:
return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
case NONE:
default:
// Still ambiguous: keep the member and continue.
memberList.add(member);
break;
}
STNode memberEnd = parseBracketedListMemberEnd();
if (memberEnd == null) {
break;
}
memberList.add(memberEnd);
}
// Reached the end with the list still ambiguous; decide based on what follows.
STNode closeBracket = parseCloseBracket();
STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
isRoot, possibleMappingField);
return bracketedList;
}
/**
* Parse a member of a list-binding-pattern, tuple-type-desc, or
* list-constructor-expr, when the parent is ambiguous.
*
* @return Parsed node
*/
/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr when the parent is ambiguous, starting with an
 * empty type-desc qualifier list.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    return parseStatementStartBracketedListMember(new ArrayList<>());
}
private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
parseTypeDescQualifiers(qualifiers);
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_BRACKET_TOKEN:
// Nested bracketed list; qualifiers cannot apply to it.
reportInvalidQualifierList(qualifiers);
return parseMemberBracketedList();
case IDENTIFIER_TOKEN:
reportInvalidQualifierList(qualifiers);
STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
if (isWildcardBP(identifier)) {
// '_' becomes a wildcard binding pattern.
STNode varName = ((STSimpleNameReferenceNode) identifier).name;
return getWildcardBindingPattern(varName);
}
nextToken = peek();
if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
// 'T...' is a rest descriptor.
STNode ellipsis = parseEllipsis();
return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
}
// A type-continuation token (but not '[') means the identifier starts a type.
if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
case OPEN_BRACE_TOKEN:
reportInvalidQualifierList(qualifiers);
return parseMappingBindingPatterOrMappingConstructor();
case ERROR_KEYWORD:
reportInvalidQualifierList(qualifiers);
STToken nextNextToken = getNextNextToken();
// 'error(' or 'error Identifier' starts an error BP/constructor,
// otherwise 'error' is a type descriptor.
if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return parseErrorBindingPatternOrErrorConstructor();
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case ELLIPSIS_TOKEN:
reportInvalidQualifierList(qualifiers);
return parseRestBindingOrSpreadMember();
case XML_KEYWORD:
case STRING_KEYWORD:
reportInvalidQualifierList(qualifiers);
// A following backtick makes it a template expression, else a type.
if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
return parseExpression(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case TABLE_KEYWORD:
case STREAM_KEYWORD:
reportInvalidQualifierList(qualifiers);
// 'table<'/'stream<' start a type, otherwise an expression.
if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
return parseExpression(false);
case OPEN_PAREN_TOKEN:
return parseTypeDescOrExpr(qualifiers);
case FUNCTION_KEYWORD:
return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
case AT_TOKEN:
// Annotation ahead: must be an annotated tuple member.
return parseTupleMember();
default:
if (isValidExpressionStart(nextToken.kind, 1)) {
reportInvalidQualifierList(qualifiers);
return parseExpression(false);
}
if (isTypeStartingToken(nextToken.kind)) {
return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
return parseStatementStartBracketedListMember(qualifiers);
}
}
/**
 * Parse a construct that starts with ellipsis: a rest-binding-pattern when the
 * following expression is a simple name reference, a spread-member otherwise.
 *
 * @return Rest-binding-pattern or spread-member node
 */
private STNode parseRestBindingOrSpreadMember() {
    STNode ellipsis = parseEllipsis();
    STNode expr = parseExpression();
    boolean isSimpleNameRef = expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
    return isSimpleNameRef
            ? STNodeFactory.createRestBindingPatternNode(ellipsis, expr)
            : STNodeFactory.createSpreadMemberNode(ellipsis, expr);
}
/**
 * Continue parsing an ambiguous bracketed list as a tuple-type-desc or
 * list-constructor, after the given member has been recognized as ambiguous.
 *
 * @return Parsed node (possibly still ambiguous)
 */
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                     STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();
    if (memberEnd != null) {
        // More members follow: keep parsing the ambiguous list.
        memberList.add(memberEnd);
        return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
    }
    // List ends here: decide based on what comes after the close bracket.
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
/**
* Parse tuple type desc or list constructor.
*
* @return Parsed node
*/
/**
 * Parse a tuple-type-desc or a list-constructor, starting a fresh
 * BRACKETED_LIST context at the open bracket.
 *
 * @param annots Annotations attached to the construct
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    return parseTupleTypeDescOrListConstructor(annots, openBracket, new ArrayList<>(), false);
}
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
boolean isRoot) {
STToken nextToken = peek();
// Consume members until one of them resolves the ambiguity.
while (!isBracketedListEnd(nextToken.kind)) {
STNode member = parseTupleTypeDescOrListConstructorMember(annots);
SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
switch (currentNodeType) {
case LIST_CONSTRUCTOR:
return parseAsListConstructor(openBracket, memberList, member, isRoot);
case REST_TYPE:
case MEMBER_TYPE_DESC:
return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
case TUPLE_TYPE_DESC:
// Definite type member: continue it as a complex type and wrap it as a
// tuple member before switching to tuple-type parsing.
member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
case TUPLE_TYPE_DESC_OR_LIST_CONST:
default:
// Still ambiguous: collect and continue.
memberList.add(member);
break;
}
STNode memberEnd = parseBracketedListMemberEnd();
if (memberEnd == null) {
break;
}
memberList.add(memberEnd);
nextToken = peek();
}
// Whole list consumed without disambiguation; decide from the right-hand side.
STNode closeBracket = parseCloseBracket();
return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
// Parse one member of an ambiguous tuple-type-desc / list-constructor.
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_BRACKET_TOKEN:
// Nested ambiguous bracketed list.
return parseTupleTypeDescOrListConstructor(annots);
case IDENTIFIER_TOKEN:
STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
// 'T...' is a rest descriptor.
STNode ellipsis = parseEllipsis();
return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
}
return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
case OPEN_BRACE_TOKEN:
// In this context '{' can only start a mapping constructor.
return parseMappingConstructorExpr();
case ERROR_KEYWORD:
STToken nextNextToken = getNextNextToken();
// 'error(' or 'error Identifier' is an error constructor; bare 'error' a type.
if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return parseErrorConstructorExpr(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case XML_KEYWORD:
case STRING_KEYWORD:
// A following backtick means a template expression, else a type.
if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
return parseExpression(false);
}
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
case TABLE_KEYWORD:
case STREAM_KEYWORD:
// 'table<'/'stream<' start a type, otherwise an expression.
if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
return parseExpression(false);
case OPEN_PAREN_TOKEN:
return parseTypeDescOrExpr();
case AT_TOKEN:
// Annotation ahead: must be an annotated tuple member.
return parseTupleMember();
default:
if (isValidExpressionStart(nextToken.kind, 1)) {
return parseExpression(false);
}
if (isTypeStartingToken(nextToken.kind)) {
return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
}
recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);
return parseTupleTypeDescOrListConstructorMember(annots);
}
}
/**
 * Classify a member of an ambiguous tuple-type-desc / list-constructor.
 * Members are classified exactly like statement-start bracketed-list members.
 */
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode member) {
    return getStmtStartBracketedListType(member);
}
// Decide whether a fully-consumed ambiguous bracketed list is a tuple type or a
// list constructor, based on the token that follows the close bracket.
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
boolean isRoot) {
STNode tupleTypeOrListConst;
switch (peek().kind) {
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_BRACKET_TOKEN:
case PIPE_TOKEN:
case BITWISE_AND_TOKEN:
// Nested and still ambiguous: hand back an ambiguous collection node.
// When this IS the root, deliberately fall through to the default branch.
if (!isRoot) {
endContext();
return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
closeBracket);
}
default:
// An expression-rhs start (or '=' at the root) means list constructor.
if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
(isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
members = getExpressionList(members, false);
STNode memberExpressions = STNodeFactory.createNodeList(members);
tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
memberExpressions, closeBracket);
break;
}
// Otherwise treat it as a tuple type and continue it as a complex type-desc.
STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));
STNode tupleTypeDesc =
STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
tupleTypeOrListConst =
parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
endContext();
if (!isRoot) {
return tupleTypeOrListConst;
}
STNode annots = STNodeFactory.createEmptyNodeList();
return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
}
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
private STNode parseObjectMember(ParserRuleContext context) {
STNode metadata;
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
// End of the member list.
return null;
case ASTERISK_TOKEN:
case PUBLIC_KEYWORD:
case PRIVATE_KEYWORD:
case FINAL_KEYWORD:
case REMOTE_KEYWORD:
case FUNCTION_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case ISOLATED_KEYWORD:
case RESOURCE_KEYWORD:
// A member start without metadata.
metadata = STNodeFactory.createEmptyNode();
break;
case DOCUMENTATION_STRING:
case AT_TOKEN:
// Documentation and/or annotations precede the member.
metadata = parseMetaData();
break;
default:
if (isTypeStartingToken(nextToken.kind)) {
metadata = STNodeFactory.createEmptyNode();
break;
}
// Recovery context differs for object-constructor vs class/object-type members.
ParserRuleContext recoveryCtx;
if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
} else {
recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
}
Solution solution = recover(peek(), recoveryCtx);
if (solution.action == Action.KEEP) {
metadata = STNodeFactory.createEmptyNode();
break;
}
return parseObjectMember(context);
}
return parseObjectMemberWithoutMeta(metadata, context);
}
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
private STNode parseObjectMember(ParserRuleContext context) {
STNode metadata;
STToken nextToken = peek();
switch (nextToken.kind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
// End of the member list.
return null;
case ASTERISK_TOKEN:
case PUBLIC_KEYWORD:
case PRIVATE_KEYWORD:
case FINAL_KEYWORD:
case REMOTE_KEYWORD:
case FUNCTION_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case ISOLATED_KEYWORD:
case RESOURCE_KEYWORD:
// A member start without metadata.
metadata = STNodeFactory.createEmptyNode();
break;
case DOCUMENTATION_STRING:
case AT_TOKEN:
// Documentation and/or annotations precede the member.
metadata = parseMetaData();
break;
default:
if (isTypeStartingToken(nextToken.kind)) {
metadata = STNodeFactory.createEmptyNode();
break;
}
// Recovery context differs for object-constructor vs class/object-type members.
ParserRuleContext recoveryCtx;
if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
} else {
recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
}
Solution solution = recover(peek(), recoveryCtx);
if (solution.action == Action.KEEP) {
metadata = STNodeFactory.createEmptyNode();
break;
}
return parseObjectMember(context);
}
return parseObjectMemberWithoutMeta(metadata, context);
}
Actually, we require Jandex 2.1+ and re-index dependencies if necessary (https://github.com/quarkusio/quarkus/blob/master/core/deployment/src/main/java/io/quarkus/deployment/index/ApplicationArchiveBuildStep.java#L200-L202). So `hasNoArgsConstructor()` should be enough. I'll update the PR. | public void addMissingConstructors() throws Exception {
// Collect every class annotated with a normal scope, then drop those that
// already declare a no-args constructor. Jandex 2.1+ indexes constructors,
// so hasNoArgsConstructor() alone is sufficient; the extra methods() scan for
// "<init>" the original performed here was redundant.
Set<ClassInfo> targetClasses = new HashSet<>();
Set<DotName> normalScopes = initNormalScopes();
for (DotName normalScope : normalScopes) {
    collectTargetClasses(targetClasses, normalScope);
}
for (Iterator<ClassInfo> iterator = targetClasses.iterator(); iterator.hasNext();) {
    ClassInfo targetClass = iterator.next();
    if (targetClass.hasNoArgsConstructor()) {
        iterator.remove();
    }
}
if (targetClasses.isEmpty()) {
    return;
}
Set<DotName> transformedClasses = new HashSet<>();
for (ClassInfo targetClass : targetClasses) {
    // The generated constructor chains to super(); so the SUPER class must
    // itself provide a no-args constructor (java/lang/Object when no super).
    String superClassName;
    if (targetClass.superName() == null) {
        superClassName = "java/lang/Object";
    } else {
        ClassInfo superClass = combinedIndex.getIndex().getClassByName(targetClass.superName());
        // BUGFIX: the original tested targetClass.methods() here; the check must
        // be against the super class whose constructor is invoked below.
        if (superClass != null && superClass.hasNoArgsConstructor()) {
            superClassName = superClass.name().toString().replace('.', '/');
        } else {
            superClassName = null;
        }
    }
    if (superClassName != null) {
        transformedClasses.add(targetClass.name());
        LOGGER.debugf("Adding no-args constructor to %s", targetClass);
        transformers.produce(new BytecodeTransformerBuildItem(targetClass.name().toString(),
                new BiFunction<String, ClassVisitor, ClassVisitor>() {
                    @Override
                    public ClassVisitor apply(String className, ClassVisitor classVisitor) {
                        ClassVisitor cv = new ClassVisitor(Opcodes.ASM6, classVisitor) {
                            @Override
                            public void visit(int version, int access, String name, String signature, String superName,
                                    String[] interfaces) {
                                super.visit(version, access, name, signature, superName, interfaces);
                                // Emit: public <init>() { super(); }
                                MethodVisitor mv = visitMethod(Modifier.PUBLIC, "<init>", "()V", null,
                                        null);
                                mv.visitCode();
                                mv.visitVarInsn(Opcodes.ALOAD, 0);
                                mv.visitMethodInsn(Opcodes.INVOKESPECIAL, superClassName, "<init>", "()V", false);
                                mv.visitInsn(Opcodes.RETURN);
                                mv.visitMaxs(1, 1);
                                mv.visitEnd();
                            }
                        };
                        return cv;
                    }
                }));
    }
}
if (!transformedClasses.isEmpty()) {
    // The generated constructors satisfy the rule, so skip the no-args
    // constructor validation for the transformed beans.
    validators.produce(new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
        @Override
        public boolean skipValidation(InjectionTargetInfo target, ValidationRule rule) {
            return ValidationRule.NO_ARGS_CONSTRUCTOR.equals(rule) && target.kind() == TargetKind.BEAN
                    && transformedClasses.contains(target.asBean().getBeanClass());
        }
    }));
}
} | || targetClass.methods().stream().anyMatch(m -> m.name().equals("<init>") && m.parameters().isEmpty())) { | public void addMissingConstructors() throws Exception {
// Collect every class annotated with a normal scope.
Set<ClassInfo> targetClasses = new HashSet<>();
Set<DotName> normalScopes = initNormalScopes();
for (DotName normalScope : normalScopes) {
collectTargetClasses(targetClasses, normalScope);
}
// Jandex 2.1+ indexes constructors, so hasNoArgsConstructor() is authoritative.
for (Iterator<ClassInfo> iterator = targetClasses.iterator(); iterator.hasNext();) {
ClassInfo targetClass = iterator.next();
if (targetClass.hasNoArgsConstructor()) {
iterator.remove();
}
}
if (targetClasses.isEmpty()) {
return;
}
Set<DotName> transformedClasses = new HashSet<>();
for (ClassInfo targetClass : targetClasses) {
// The generated constructor chains to super(); the super class must
// therefore have a no-args constructor (java/lang/Object when no super).
String superClassName;
if (targetClass.superName() == null) {
superClassName = "java/lang/Object";
} else {
ClassInfo superClass = combinedIndex.getIndex().getClassByName(targetClass.superName());
if (superClass != null && superClass.hasNoArgsConstructor()) {
superClassName = superClass.name().toString().replace('.', '/');
} else {
superClassName = null;
}
}
if (superClassName != null) {
transformedClasses.add(targetClass.name());
LOGGER.debugf("Adding no-args constructor to %s", targetClass);
transformers.produce(new BytecodeTransformerBuildItem(targetClass.name().toString(),
new BiFunction<String, ClassVisitor, ClassVisitor>() {
@Override
public ClassVisitor apply(String className, ClassVisitor classVisitor) {
ClassVisitor cv = new ClassVisitor(Opcodes.ASM6, classVisitor) {
@Override
public void visit(int version, int access, String name, String signature, String superName,
String[] interfaces) {
super.visit(version, access, name, signature, superName, interfaces);
// Emit: public <init>() { super(); }
MethodVisitor mv = visitMethod(Modifier.PUBLIC, "<init>", "()V", null,
null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, superClassName, "<init>", "()V", false);
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
}
};
return cv;
}
}));
}
}
if (!transformedClasses.isEmpty()) {
// The generated constructors satisfy the rule, so skip the no-args
// constructor validation for the transformed beans.
validators.produce(new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
@Override
public boolean skipValidation(InjectionTargetInfo target, ValidationRule rule) {
return ValidationRule.NO_ARGS_CONSTRUCTOR.equals(rule) && target.kind() == TargetKind.BEAN
&& transformedClasses.contains(target.asBean().getBeanClass());
}
}));
}
} | class NoArgsConstructorProcessor {
private static final Logger LOGGER = Logger.getLogger(NoArgsConstructorProcessor.class);
// ACC_ANNOTATION access flag (JVMS 4.1): marks a class as an annotation type.
private static final int ANNOTATION = 0x00002000;
@Inject
BeanArchiveIndexBuildItem beanArchiveIndex;
@Inject
CombinedIndexBuildItem combinedIndex;
@Inject
BuildProducer<BytecodeTransformerBuildItem> transformers;
@Inject
BuildProducer<BeanDeploymentValidatorBuildItem> validators;
@BuildStep
// NOTE(review): annotated @BuildStep yet private and returning a plain Set,
// while also being called directly from addMissingConstructors() — confirm
// the annotation is intentional.
private Set<DotName> initNormalScopes() {
// Built-in normal scopes plus every custom @NormalScope annotation type
// present in the combined index.
Set<DotName> normalScopes = new HashSet<>();
normalScopes.add(BuiltinScope.APPLICATION.getName());
normalScopes.add(BuiltinScope.REQUEST.getName());
combinedIndex.getIndex().getAnnotations(DotName.createSimple(NormalScope.class.getName())).stream()
.filter(a -> a.target().kind() == Kind.CLASS && isAnnotation(a.target().asClass())).map(a -> a.name())
.forEach(normalScopes::add);
return normalScopes;
}
/**
 * Adds to {@code targetClasses} every class in the bean archive index that
 * carries the given normal-scope annotation at class level.
 */
private void collectTargetClasses(Set<ClassInfo> targetClasses, DotName normalScope) {
    for (AnnotationInstance instance : beanArchiveIndex.getIndex().getAnnotations(normalScope)) {
        boolean onClass = instance.target().kind() == Kind.CLASS;
        if (onClass) {
            targetClasses.add(instance.target().asClass());
        }
    }
}
/** Tests the ACC_ANNOTATION bit of the class access flags. */
private boolean isAnnotation(ClassInfo classInfo) {
    int flags = classInfo.flags();
    return (flags & ANNOTATION) == ANNOTATION;
}
} | class NoArgsConstructorProcessor {
private static final Logger LOGGER = Logger.getLogger(NoArgsConstructorProcessor.class);
// ACC_ANNOTATION access flag (JVMS 4.1): marks a class as an annotation type.
private static final int ANNOTATION = 0x00002000;
@Inject
BeanArchiveIndexBuildItem beanArchiveIndex;
@Inject
CombinedIndexBuildItem combinedIndex;
@Inject
BuildProducer<BytecodeTransformerBuildItem> transformers;
@Inject
BuildProducer<BeanDeploymentValidatorBuildItem> validators;
@BuildStep
private Set<DotName> initNormalScopes() {
// Built-in normal scopes plus every custom @NormalScope annotation type
// present in the combined index.
Set<DotName> normalScopes = new HashSet<>();
normalScopes.add(BuiltinScope.APPLICATION.getName());
normalScopes.add(BuiltinScope.REQUEST.getName());
combinedIndex.getIndex().getAnnotations(DotName.createSimple(NormalScope.class.getName())).stream()
.filter(NoArgsConstructorProcessor::isTargetAnnotation)
.map(AnnotationInstance::name)
.forEach(normalScopes::add);
return normalScopes;
}
/**
 * Collects into {@code targetClasses} each class of the bean archive index
 * annotated at class level with {@code normalScope}.
 */
private void collectTargetClasses(Set<ClassInfo> targetClasses, DotName normalScope) {
    for (AnnotationInstance annotation : beanArchiveIndex.getIndex().getAnnotations(normalScope)) {
        if (annotation.target().kind() != Kind.CLASS) {
            continue;
        }
        targetClasses.add(annotation.target().asClass());
    }
}
/**
 * True when the annotation instance sits on a class whose access flags carry
 * the ACC_ANNOTATION bit, i.e. the annotated target is itself an annotation type.
 */
private static boolean isTargetAnnotation(AnnotationInstance annotationInstance) {
    if (annotationInstance.target().kind() != Kind.CLASS) {
        return false;
    }
    int flags = annotationInstance.target().asClass().flags();
    return (flags & ANNOTATION) != 0;
}
} |
Possible NPE if the storage volume for the given svId does not exist? | public String getStorageVolumeName(String svId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
// NOTE(review): getStorageVolume(svId) presumably returns null when the id is
// unknown — calling getName() on it would then NPE; a null check is needed.
return getStorageVolume(svId).getName();
}
} | return getStorageVolume(svId).getName(); | public String getStorageVolumeName(String svId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
    StorageVolume sv = getStorageVolume(svId);
    if (sv == null) {
        // Unknown storage volume id: report an empty name instead of throwing NPE.
        return "";
    }
    // Reuse the volume fetched above; the original redundantly looked the id up
    // a second time via getStorageVolume(svId).
    return sv.getName();
}
} | class StorageVolumeMgr implements GsonPostProcessable {
// Property key used to toggle a storage volume on/off.
private static final String ENABLED = "enabled";
public static final String DEFAULT = "default";
public static final String LOCAL = "local";
public static final String BUILTIN_STORAGE_VOLUME = "builtin_storage_volume";
// Id of the cluster-wide default storage volume; empty when none is set.
@SerializedName("defaultSVId")
protected String defaultStorageVolumeId = "";
// Guards the maps below and the default-volume id.
protected final ReadWriteLock rwLock = new ReentrantReadWriteLock();
// storage volume id -> ids of dbs bound to it (persisted).
@SerializedName("svToDbs")
protected Map<String, Set<Long>> storageVolumeToDbs = new HashMap<>();
// storage volume id -> ids of tables bound to it (persisted).
@SerializedName("svToTables")
protected Map<String, Set<Long>> storageVolumeToTables = new HashMap<>();
// Reverse indexes of the two maps above, rebuilt in gsonPostProcess().
protected Map<Long, String> dbToStorageVolume = new HashMap<>();
protected Map<Long, String> tableToStorageVolume = new HashMap<>();
// All legal cloud-configuration property names, harvested reflectively below.
protected static final Set<String> PARAM_NAMES = new HashSet<>();
static {
Field[] fields = CloudConfigurationConstants.class.getFields();
for (int i = 0; i < fields.length; ++i) {
try {
// NOTE(review): a fresh instance is created per field via the deprecated
// newInstance(); for static constant fields Field.get(null) would
// suffice — confirm before changing.
Object obj = CloudConfigurationConstants.class.newInstance();
Object value = fields[i].get(obj);
PARAM_NAMES.add((String) value);
} catch (InstantiationException | IllegalAccessException e) {
// Intentionally ignored: an unreadable field simply is not registered.
}
}
}
/** Creates a storage volume from a CREATE STORAGE VOLUME statement. */
public String createStorageVolume(CreateStorageVolumeStmt stmt)
throws AlreadyExistsException, DdlException {
Map<String, String> params = new HashMap<>();
// Split the raw properties into cloud params and the optional "enabled" flag.
Optional<Boolean> enabled = parseProperties(stmt.getProperties(), params);
return createStorageVolume(stmt.getName(), stmt.getStorageVolumeType(), stmt.getStorageLocations(), params,
enabled, stmt.getComment());
}
/**
 * Creates a storage volume under the write lock, after validating the
 * parameter names and the uniqueness of the volume name.
 *
 * @return the id of the created storage volume
 * @throws AlreadyExistsException when a volume with the same name exists
 * @throws DdlException when a parameter name is not recognized
 */
public String createStorageVolume(String name, String svType, List<String> locations, Map<String, String> params,
Optional<Boolean> enabled, String comment)
throws DdlException, AlreadyExistsException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
validateParams(params);
if (exists(name)) {
throw new AlreadyExistsException(String.format("Storage volume '%s' already exists", name));
}
return createInternalNoLock(name, svType, locations, params, enabled, comment);
}
}
/** Drops the storage volume referenced by a DROP STORAGE VOLUME statement. */
public void removeStorageVolume(DropStorageVolumeStmt stmt) throws DdlException, AnalysisException {
removeStorageVolume(stmt.getName());
}
/**
 * Drops the named storage volume. The volume must exist, must not be the
 * default volume, and must not be referenced by any db or table.
 */
public void removeStorageVolume(String name) throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
StorageVolume sv = getStorageVolumeByName(name);
Preconditions.checkState(sv != null,
"Storage volume '%s' does not exist", name);
Preconditions.checkState(!defaultStorageVolumeId.equals(sv.getId()),
"default storage volume can not be removed");
Set<Long> dbs = storageVolumeToDbs.get(sv.getId());
Set<Long> tables = storageVolumeToTables.get(sv.getId());
Preconditions.checkState(dbs == null && tables == null,
"Storage volume '%s' is referenced by dbs or tables, dbs: %s, tables: %s",
name, dbs != null ? dbs.toString() : "[]", tables != null ? tables.toString() : "[]");
removeInternalNoLock(sv);
}
}
/** Applies an ALTER STORAGE VOLUME statement. */
public void updateStorageVolume(AlterStorageVolumeStmt stmt) throws DdlException {
Map<String, String> params = new HashMap<>();
Optional<Boolean> enabled = parseProperties(stmt.getProperties(), params);
updateStorageVolume(stmt.getName(), params, enabled, stmt.getComment());
}
/**
 * Updates the named storage volume under the write lock. The update is
 * performed on a copy and only applied via updateInternalNoLock when all
 * checks pass.
 */
public void updateStorageVolume(String name, Map<String, String> params, Optional<Boolean> enabled, String comment)
throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
validateParams(params);
StorageVolume sv = getStorageVolumeByName(name);
Preconditions.checkState(sv != null, "Storage volume '%s' does not exist", name);
StorageVolume copied = new StorageVolume(sv);
if (enabled.isPresent()) {
boolean enabledValue = enabled.get();
if (!enabledValue) {
// The default volume must stay enabled.
Preconditions.checkState(!copied.getId().equals(defaultStorageVolumeId),
"Default volume can not be disabled");
}
copied.setEnabled(enabledValue);
}
// Empty comment/params mean "leave unchanged".
if (!comment.isEmpty()) {
copied.setComment(comment);
}
if (!params.isEmpty()) {
copied.setCloudConfiguration(params);
}
updateInternalNoLock(copied);
}
}
/** Applies a SET DEFAULT STORAGE VOLUME statement. */
public void setDefaultStorageVolume(SetDefaultStorageVolumeStmt stmt) throws AnalysisException {
setDefaultStorageVolume(stmt.getName());
}
/**
 * Marks the named (existing, enabled) storage volume as the cluster default.
 * The change is written to the edit log before the in-memory state is updated.
 */
public void setDefaultStorageVolume(String svKey) {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
StorageVolume sv = getStorageVolumeByName(svKey);
Preconditions.checkState(sv != null, "Storage volume '%s' does not exist", svKey);
Preconditions.checkState(sv.getEnabled(), "Storage volume '%s' is disabled", svKey);
SetDefaultStorageVolumeLog log = new SetDefaultStorageVolumeLog(sv.getId());
GlobalStateMgr.getCurrentState().getEditLog().logSetDefaultStorageVolume(log);
this.defaultStorageVolumeId = sv.getId();
}
}
// Returns the id of the default storage volume; empty string when unset.
public String getDefaultStorageVolumeId() {
return defaultStorageVolumeId;
}
/** Returns whether a storage volume with the given name exists. */
public boolean exists(String svKey) throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
StorageVolume sv = getStorageVolumeByName(svKey);
return sv != null;
}
}
// Copies all properties into params, extracting and removing the optional
// "enabled" flag; returns the flag when present.
private Optional<Boolean> parseProperties(Map<String, String> properties, Map<String, String> params) {
params.putAll(properties);
Optional<Boolean> enabled = Optional.empty();
if (params.containsKey(ENABLED)) {
enabled = Optional.of(Boolean.parseBoolean(params.get(ENABLED)));
params.remove(ENABLED);
}
return enabled;
}
// Returns the id of the storage volume bound to the table, or null when unbound.
public String getStorageVolumeIdOfTable(long tableId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return tableToStorageVolume.get(tableId);
}
}
// Returns the id of the storage volume bound to the db, or null when unbound.
public String getStorageVolumeIdOfDb(long dbId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return dbToStorageVolume.get(dbId);
}
}
// Returns the default storage volume object for the current default id.
public StorageVolume getDefaultStorageVolume() {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return getStorageVolume(getDefaultStorageVolumeId());
}
}
/** Edit-log replay: restores the default storage volume id on followers. */
public void replaySetDefaultStorageVolume(SetDefaultStorageVolumeLog log) {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
defaultStorageVolumeId = log.getId();
}
}
// The following replay hooks are no-ops here — presumably overridden by
// concrete subclasses (see the abstract members below); confirm.
public void replayCreateStorageVolume(StorageVolume sv) {
}
public void replayUpdateStorageVolume(StorageVolume sv) {
}
public void replayDropStorageVolume(DropStorageVolumeLog log) {
}
// Rejects any property key that is not a known cloud-configuration name.
protected void validateParams(Map<String, String> params) throws DdlException {
for (String key : params.keySet()) {
if (!PARAM_NAMES.contains(key)) {
throw new DdlException("Invalid properties " + key);
}
}
}
/** Serializes this manager as one JSON metadata block. */
public void save(DataOutputStream dos) throws IOException, SRMetaBlockException {
SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.STORAGE_VOLUME_MGR, 1);
writer.writeJson(this);
writer.close();
}
/**
 * Restores the persisted state from a metadata block. The reverse maps
 * (dbToStorageVolume/tableToStorageVolume) are rebuilt by gsonPostProcess()
 * during deserialization of the read object.
 */
public void load(SRMetaBlockReader reader)
throws SRMetaBlockEOFException, IOException, SRMetaBlockException {
StorageVolumeMgr data = reader.readJson(StorageVolumeMgr.class);
this.storageVolumeToDbs = data.storageVolumeToDbs;
this.storageVolumeToTables = data.storageVolumeToTables;
this.defaultStorageVolumeId = data.defaultStorageVolumeId;
}
@Override
public void gsonPostProcess() throws IOException {
// Rebuild the reverse indexes from the persisted forward maps.
for (Map.Entry<String, Set<Long>> entry : storageVolumeToDbs.entrySet()) {
for (Long dbId : entry.getValue()) {
dbToStorageVolume.put(dbId, entry.getKey());
}
}
for (Map.Entry<String, Set<Long>> entry : storageVolumeToTables.entrySet()) {
for (Long tableId : entry.getValue()) {
tableToStorageVolume.put(tableId, entry.getKey());
}
}
}
// Lookup by user-visible name.
public abstract StorageVolume getStorageVolumeByName(String svKey);
// Lookup by internal id.
public abstract StorageVolume getStorageVolume(String storageVolumeId);
public abstract List<String> listStorageVolumeNames() throws DdlException;
// Create/update/remove primitives; callers must already hold the write lock.
protected abstract String createInternalNoLock(String name, String svType, List<String> locations,
Map<String, String> params, Optional<Boolean> enabled, String comment)
throws DdlException;
protected abstract void updateInternalNoLock(StorageVolume sv) throws DdlException;
protected abstract void removeInternalNoLock(StorageVolume sv) throws DdlException;
// Binding of dbs/tables to storage volumes, plus the edit-log replay hooks.
public abstract boolean bindDbToStorageVolume(String svKey, long dbId) throws DdlException;
public abstract void replayBindDbToStorageVolume(String svId, long dbId);
public abstract void unbindDbToStorageVolume(long dbId);
public abstract boolean bindTableToStorageVolume(String svKey, long dbId, long tableId) throws DdlException;
public abstract void replayBindTableToStorageVolume(String svId, long tableId);
public abstract void unbindTableToStorageVolume(long tableId);
public abstract String createOrUpdateBuiltinStorageVolume() throws DdlException, AlreadyExistsException;
} | class StorageVolumeMgr implements GsonPostProcessable {
private static final String ENABLED = "enabled";
public static final String DEFAULT = "default";
public static final String LOCAL = "local";
public static final String BUILTIN_STORAGE_VOLUME = "builtin_storage_volume";
@SerializedName("defaultSVId")
protected String defaultStorageVolumeId = "";
protected final ReadWriteLock rwLock = new ReentrantReadWriteLock();
@SerializedName("svToDbs")
protected Map<String, Set<Long>> storageVolumeToDbs = new HashMap<>();
@SerializedName("svToTables")
protected Map<String, Set<Long>> storageVolumeToTables = new HashMap<>();
protected Map<Long, String> dbToStorageVolume = new HashMap<>();
protected Map<Long, String> tableToStorageVolume = new HashMap<>();
protected static final Set<String> PARAM_NAMES = new HashSet<>();
static {
Field[] fields = CloudConfigurationConstants.class.getFields();
for (int i = 0; i < fields.length; ++i) {
try {
Object obj = CloudConfigurationConstants.class.newInstance();
Object value = fields[i].get(obj);
PARAM_NAMES.add((String) value);
} catch (InstantiationException | IllegalAccessException e) {
}
}
}
public String createStorageVolume(CreateStorageVolumeStmt stmt)
throws AlreadyExistsException, DdlException {
Map<String, String> params = new HashMap<>();
Optional<Boolean> enabled = parseProperties(stmt.getProperties(), params);
return createStorageVolume(stmt.getName(), stmt.getStorageVolumeType(), stmt.getStorageLocations(), params,
enabled, stmt.getComment());
}
public String createStorageVolume(String name, String svType, List<String> locations, Map<String, String> params,
Optional<Boolean> enabled, String comment)
throws DdlException, AlreadyExistsException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
validateParams(params);
if (exists(name)) {
throw new AlreadyExistsException(String.format("Storage volume '%s' already exists", name));
}
return createInternalNoLock(name, svType, locations, params, enabled, comment);
}
}
public void removeStorageVolume(DropStorageVolumeStmt stmt) throws DdlException, AnalysisException {
removeStorageVolume(stmt.getName());
}
public void removeStorageVolume(String name) throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
StorageVolume sv = getStorageVolumeByName(name);
Preconditions.checkState(sv != null,
"Storage volume '%s' does not exist", name);
Preconditions.checkState(!defaultStorageVolumeId.equals(sv.getId()),
"default storage volume can not be removed");
Set<Long> dbs = storageVolumeToDbs.get(sv.getId());
Set<Long> tables = storageVolumeToTables.get(sv.getId());
Preconditions.checkState(dbs == null && tables == null,
"Storage volume '%s' is referenced by dbs or tables, dbs: %s, tables: %s",
name, dbs != null ? dbs.toString() : "[]", tables != null ? tables.toString() : "[]");
removeInternalNoLock(sv);
}
}
public void updateStorageVolume(AlterStorageVolumeStmt stmt) throws DdlException {
Map<String, String> params = new HashMap<>();
Optional<Boolean> enabled = parseProperties(stmt.getProperties(), params);
updateStorageVolume(stmt.getName(), params, enabled, stmt.getComment());
}
public void updateStorageVolume(String name, Map<String, String> params, Optional<Boolean> enabled, String comment)
throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
validateParams(params);
StorageVolume sv = getStorageVolumeByName(name);
Preconditions.checkState(sv != null, "Storage volume '%s' does not exist", name);
StorageVolume copied = new StorageVolume(sv);
if (enabled.isPresent()) {
boolean enabledValue = enabled.get();
if (!enabledValue) {
Preconditions.checkState(!copied.getId().equals(defaultStorageVolumeId),
"Default volume can not be disabled");
}
copied.setEnabled(enabledValue);
}
if (!comment.isEmpty()) {
copied.setComment(comment);
}
if (!params.isEmpty()) {
copied.setCloudConfiguration(params);
}
updateInternalNoLock(copied);
}
}
public void setDefaultStorageVolume(SetDefaultStorageVolumeStmt stmt) throws AnalysisException {
setDefaultStorageVolume(stmt.getName());
}
public void setDefaultStorageVolume(String svKey) {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
StorageVolume sv = getStorageVolumeByName(svKey);
Preconditions.checkState(sv != null, "Storage volume '%s' does not exist", svKey);
Preconditions.checkState(sv.getEnabled(), "Storage volume '%s' is disabled", svKey);
SetDefaultStorageVolumeLog log = new SetDefaultStorageVolumeLog(sv.getId());
GlobalStateMgr.getCurrentState().getEditLog().logSetDefaultStorageVolume(log);
this.defaultStorageVolumeId = sv.getId();
}
}
public String getDefaultStorageVolumeId() {
return defaultStorageVolumeId;
}
public boolean exists(String svKey) throws DdlException {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
StorageVolume sv = getStorageVolumeByName(svKey);
return sv != null;
}
}
private Optional<Boolean> parseProperties(Map<String, String> properties, Map<String, String> params) {
params.putAll(properties);
Optional<Boolean> enabled = Optional.empty();
if (params.containsKey(ENABLED)) {
enabled = Optional.of(Boolean.parseBoolean(params.get(ENABLED)));
params.remove(ENABLED);
}
return enabled;
}
public String getStorageVolumeIdOfTable(long tableId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return tableToStorageVolume.get(tableId);
}
}
public String getStorageVolumeIdOfDb(long dbId) {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return dbToStorageVolume.get(dbId);
}
}
public StorageVolume getDefaultStorageVolume() {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
return getStorageVolume(getDefaultStorageVolumeId());
}
}
public void replaySetDefaultStorageVolume(SetDefaultStorageVolumeLog log) {
try (LockCloseable lock = new LockCloseable(rwLock.writeLock())) {
defaultStorageVolumeId = log.getId();
}
}
public void replayCreateStorageVolume(StorageVolume sv) {
}
public void replayUpdateStorageVolume(StorageVolume sv) {
}
public void replayDropStorageVolume(DropStorageVolumeLog log) {
}
protected void validateParams(Map<String, String> params) throws DdlException {
for (String key : params.keySet()) {
if (!PARAM_NAMES.contains(key)) {
throw new DdlException("Invalid properties " + key);
}
}
}
public void save(DataOutputStream dos) throws IOException, SRMetaBlockException {
SRMetaBlockWriter writer = new SRMetaBlockWriter(dos, SRMetaBlockID.STORAGE_VOLUME_MGR, 1);
writer.writeJson(this);
writer.close();
}
public void load(SRMetaBlockReader reader)
throws SRMetaBlockEOFException, IOException, SRMetaBlockException {
StorageVolumeMgr data = reader.readJson(StorageVolumeMgr.class);
this.storageVolumeToDbs = data.storageVolumeToDbs;
this.storageVolumeToTables = data.storageVolumeToTables;
this.defaultStorageVolumeId = data.defaultStorageVolumeId;
}
@Override
public void gsonPostProcess() throws IOException {
for (Map.Entry<String, Set<Long>> entry : storageVolumeToDbs.entrySet()) {
for (Long dbId : entry.getValue()) {
dbToStorageVolume.put(dbId, entry.getKey());
}
}
for (Map.Entry<String, Set<Long>> entry : storageVolumeToTables.entrySet()) {
for (Long tableId : entry.getValue()) {
tableToStorageVolume.put(tableId, entry.getKey());
}
}
}
public abstract StorageVolume getStorageVolumeByName(String svKey);
public abstract StorageVolume getStorageVolume(String storageVolumeId);
public abstract List<String> listStorageVolumeNames() throws DdlException;
protected abstract String createInternalNoLock(String name, String svType, List<String> locations,
Map<String, String> params, Optional<Boolean> enabled, String comment)
throws DdlException;
protected abstract void updateInternalNoLock(StorageVolume sv) throws DdlException;
protected abstract void removeInternalNoLock(StorageVolume sv) throws DdlException;
public abstract boolean bindDbToStorageVolume(String svKey, long dbId) throws DdlException;
public abstract void replayBindDbToStorageVolume(String svId, long dbId);
public abstract void unbindDbToStorageVolume(long dbId);
public abstract boolean bindTableToStorageVolume(String svKey, long dbId, long tableId) throws DdlException;
public abstract void replayBindTableToStorageVolume(String svId, long tableId);
public abstract void unbindTableToStorageVolume(long tableId);
public abstract String createOrUpdateBuiltinStorageVolume() throws DdlException, AlreadyExistsException;
} |
Can a lang lib invocation requiredArgs be empty at desugar? | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
BLangUserDefinedType recordType =
new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
ASTBuilderUtil.createIdentifier(null, ""));
recordType.setBType(type);
return recordType;
}
@Override
public void visit(BLangWhile whileNode) {
if (whileNode.onFailClause != null) {
BLangOnFailClause onFailClause = whileNode.onFailClause;
whileNode.onFailClause = null;
whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
result = rewrite(doStmt, env);
} else {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
}
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
BLangOnFailClause onFailClause) {
BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);
doBlock.scope = new Scope(env.scope.owner);
bLDo.body = doBlock;
bLDo.pos = location;
bLDo.onFailClause = onFailClause;
bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
doBlock.stmts.add(statement);
return bLDo;
}
@Override
public void visit(BLangLock lockNode) {
BLangOnFailClause currentOnFailClause = this.onFailClause;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
if (lockNode.onFailClause != null) {
blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
rewrite(lockNode.onFailClause, env);
}
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
enclLocks.push(lockStmt);
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.setBType(symTable.nilType);
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.setBType(nillableError);
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError,
this.env.scope.owner, lockNode.pos, VIRTUAL);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr,
nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
unLockStmt.relatedLock = lockStmt;
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
result = unLockStmt;
}
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
BLangSimpleVarRef shouldRetryRef) {
BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
trxOnFailClause.pos = pos;
trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
trxOnFailClause.body.scope = new Scope(env.scope.owner);
trxOnFailClause.isInternal = true;
BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
"$trxError$", symTable.errorType, null, trxOnFailErrorSym);
trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
trxOnFailError);
trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
trxBlockId, shouldRetryRef);
BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
shouldNotPanic.setBType(symTable.booleanType);
shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = pos;
panicNode.expr = caughtError;
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
trxOnFailClause.body.stmts.add(exitIf);
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = caughtError;
failBlock.stmts.add(failStmt);
trxOnFailClause.bodyContainsFail = true;
return trxOnFailClause;
}
@Override
public void visit(BLangTransaction transactionNode) {
if (transactionNode.onFailClause != null) {
BLangOnFailClause onFailClause = transactionNode.onFailClause;
transactionNode.onFailClause = null;
transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
result = rewrite(doStmt, env);
} else {
BLangLiteral currentTrxBlockId = this.trxBlockId;
String uniqueId = String.valueOf(++transactionBlockCount);
this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
boolean currShouldReturnErrors = this.shouldReturnErrors;
this.shouldReturnErrors = true;
BLangOnFailClause currOnFailClause = this.onFailClause;
BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
shouldPanicVarSymbol.closure = true;
BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
"$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
shouldPanicVariable);
BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
shouldPanicVarSymbol);
BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
this.shouldRetryRef);
enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
boolean userDefinedOnFailAvbl = this.onFailClause != null;
analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
BLangBlockStmt transactionStmtBlock =
transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
transactionStmtBlock.stmts.add(0, shouldPanicDef);
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
result = rewrite(transactionStmtBlock, this.env);
this.shouldReturnErrors = currShouldReturnErrors;
this.trxBlockId = currentTrxBlockId;
swapAndResetEnclosingOnFail(currOnFailClause);
}
}
@Override
public void visit(BLangRollback rollbackNode) {
BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
result = rewrite(rollbackStmtExpr, env);
}
private BLangOnFailClause createRetryInternalOnFail(Location pos,
BLangSimpleVarRef retryResultRef,
BLangSimpleVarRef retryManagerRef,
BLangSimpleVarRef shouldRetryRef,
BLangSimpleVarRef continueLoopRef,
BLangSimpleVarRef returnResult) {
BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
internalOnFail.pos = pos;
internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
internalOnFail.body.scope = new Scope(env.scope.owner);
BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
"$caughtError$", symTable.errorType, null, caughtErrorSym);
internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
caughtError);
env.scope.define(caughtErrorSym.name, caughtErrorSym);
BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
internalOnFail.body.stmts.add(errorAssignment);
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
internalOnFail.body.stmts.add(continueLoopTrue);
BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
retryManagerRef, caughtErrorRef);
BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
shouldRetryInvocation);
internalOnFail.body.stmts.add(shouldRetryAssignment);
BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
shouldNotRetryCheck.setBType(symTable.booleanType);
shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
BLangGroupExpr exitCheck = new BLangGroupExpr();
exitCheck.setBType(symTable.booleanType);
exitCheck.expression = shouldNotRetryCheck;
BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
if (this.onFailClause != null) {
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = retryResultRef;
exitLogicBlock.stmts.add(failStmt);
internalOnFail.bodyContainsFail = true;
internalOnFail.body.stmts.add(exitIf);
BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
loopContinueStmt.pos = pos;
internalOnFail.body.stmts.add(loopContinueStmt);
} else {
BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
exitLogicBlock.stmts.add(returnErrorTrue);
internalOnFail.body.stmts.add(exitIf);
}
return internalOnFail;
}
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
List<BType> paramTypes = new ArrayList<>();
paramTypes.add(symTable.booleanType);
BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,
null);
BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(
names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,
symTable.builtinPos, VIRTUAL);
return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
OperatorKind.NOT, notOperatorSymbol);
}
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, BLangFunctionBody lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null));
return lambdaFunction;
}
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.getBType().tag == TypeTags.ARRAY
&& ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.setBType(literalExpr.getBType());
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replace("\t", "").replace("\n", "").replace("\r", "")
.replace(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
result = listConstructorSpreadOpExpr;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
BType listConstructorType = Types.getReferredType(listConstructor.getBType());
if (listConstructorType.tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
result = rewriteExpr(expr);
} else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructorType.tag == TypeTags.TYPEDESC) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
rewriteExprs(tableConstructorExpr.recordLiteralList);
result = tableConstructorExpr;
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
BType arrayLiteralType = Types.getReferredType(arrayLiteral.getBType());
if (arrayLiteralType.tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
return;
} else if (getElementType(arrayLiteralType).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
List<BLangExpression> exprs = tupleLiteral.exprs;
BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
List<BType> tupleMemberTypes = tupleType.tupleTypes;
int tupleMemberTypeSize = tupleMemberTypes.size();
int tupleExprSize = exprs.size();
boolean isInRestType = false;
int i = 0;
for (BLangExpression expr: exprs) {
if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
spreadOpType = Types.getReferredType(spreadOpType);
if (spreadOpType.tag == TypeTags.ARRAY) {
BArrayType spreadOpBArray = (BArrayType) spreadOpType;
if (spreadOpBArray.size >= 0) {
i += spreadOpBArray.size;
continue;
}
} else {
BTupleType spreadOpTuple = (BTupleType) spreadOpType;
if (types.isFixedLengthTuple(spreadOpTuple)) {
i += spreadOpTuple.tupleTypes.size();
continue;
}
}
isInRestType = true;
continue;
}
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
BType targetType = tupleType.restType;
if (!isInRestType && i < tupleMemberTypeSize) {
targetType = tupleMemberTypes.get(i);
}
types.setImplicitCastExpr(expr, expType, targetType);
i++;
}
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
result = rewriteExpr(groupExpr.expression);
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.setBType(symTable.stringType);
result = qnameExpr;
return;
}
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BType type = varRefExpr.getBType();
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
!((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
}
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
BType referredType = Types.getReferredType(constSymbol.literalType);
if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
if (!enclLocks.isEmpty()) {
BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
BLangLockStmt lockStmt = enclLocks.peek();
lockStmt.addLockVariable(symbol);
lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
}
}
genVarRefExpr.setBType(type);
genVarRefExpr.pos = varRefExpr.pos;
if ((varRefExpr.isLValue)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.isLValue = varRefExpr.isLValue;
genVarRefExpr.setBType(varRefExpr.symbol.type);
result = genVarRefExpr;
return;
}
genVarRefExpr.isLValue = varRefExpr.isLValue;
BType targetType = genVarRefExpr.getBType();
genVarRefExpr.setBType(genVarRefExpr.symbol.type);
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
/**
 * Desugars a field access ({@code expr.field}) into the concrete access node matching the
 * static type of {@code expr}: object/record field access, attached-function reference,
 * lax (JSON / xml-constrained map) access, plain map access, or XML access.
 * Safe-navigation accesses are rewritten first and re-desugared.
 */
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    if (safeNavigate(fieldAccessExpr)) {
        // `a?.b`-style chains are expanded separately, then desugared again.
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }

    BLangAccessExpression targetVarRef = fieldAccessExpr;

    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }

    // The accessed field name becomes a string-literal key (unescaped).
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached method as a value: wrap it in a delegating lambda.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;

            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                // A field store made from within the object's own init (or generated init)
                // function is treated as a store-on-creation.
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;

                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }

            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                                                          (BVarSymbol) fieldAccessExpr.symbol, false,
                                                          isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Record-attached function referenced as a value.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                // Lax access on a map with an xml constraint: needs value-or-error semantics.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            // Other lax values go through the JSON access expression.
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // xml / xml:Element lax access: attribute or element-name langlib call.
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }

    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Delegates to the shared field-access desugaring used by all field-access variants.
    rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Rewrites an access to an attached object method as a field value (e.g. {@code obj.fn})
 * into an anonymous lambda that closes over the receiver and delegates to the real method.
 * If the receiver is not a simple variable reference, it is first captured into a temporary
 * variable so the lambda closes over a stable symbol; in that case the lambda is wrapped in
 * a statement expression that defines the temporary before yielding the lambda.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;

    // Synthesize an anonymous delegate function mirroring the member method's signature.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
                                                       Names.fromString(funcName), env.enclPkg.packageID,
                                                       originalMemberFuncSymbol.type, env.scope.owner, pos,
                                                       VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();

    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        // Simple receiver: close over its symbol directly.
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        // Complex receiver expression: evaluate it once into a temp var and close over that.
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }

    // Mirror the member method's required parameters onto the delegate and forward them.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                                       VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);

        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }

    // Mirror the rest parameter (if any) and forward it as a spread (...) argument.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                                          VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);

        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }

    // Delegate body: `return receiver.method(args...);`
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

    // Register the synthesized function at package level and wrap it as a lambda value.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());

    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Define the captured temp receiver before evaluating the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
/**
 * Builds an invocation node for calling the attached method {@code field} on
 * {@code receiver} with the given required and rest arguments.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.symbol = invocableSymbol;
    invocation.expr = receiver;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    // The invocation's static type is the callee's declared return type.
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
/**
 * Desugars lax field access on an xml-constrained map into a statement expression that
 * yields the mapped value when the key is present, or an InvalidKey error otherwise:
 * the map is indexed with the field name, the result is tested against nil, and the
 * nil branch constructs an error carrying the key as a named detail.
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // Overall result type: the accessed value's type or error.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // $mapAccess = expr["<field>"]; typed value|() since the key may be absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);

    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // if ($mapAccess is ()) { <error branch> } else { <value branch> }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);

    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);

    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // error("{<map-lang-lib>}InvalidKey", key = "<field>")
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);

    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    // Attach the offending key as the `key` named detail.
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    // nil branch: $mapAccessResult$ = <error>
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    // value branch: $mapAccessResult$ = $mapAccess
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
/**
 * Desugars lax field access on xml values into langlib invocations: {@code x._} becomes
 * the element-name getter; any other field becomes an attribute access by (possibly
 * namespace-expanded) name.
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();

    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        // ns:attr form: expand the prefix to "{namespace-uri}attr".
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }

    // `x._` accesses the element name rather than an attribute.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }

    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    // Second arg tells the langlib whether this was an optional (`?.`) access.
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));

    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
// Wraps the optional-access flag (`x?.f` vs `x.f`) of the given access as a rewritten
// boolean literal, suitable for passing as a langlib argument.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    return rewrite(
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess()), env);
}
/**
 * Produces the expanded XML qualified-name form {@code "{namespace-uri}localName"}.
 */
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder expandedName = new StringBuilder("{");
    expandedName.append(nsURI).append('}').append(localName);
    return expandedName.toString();
}
/**
 * Desugars an index access ({@code expr[index]}) into the concrete access node for the
 * static type of {@code expr}: map, record-like mapping, list, xml, string, or table
 * access. Safe-navigation accesses are rewritten first.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }

    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }

    // Select the concrete access node based on the container's type.
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like mappings use struct field access with the index as the key.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                                                      indexAccessExpr.indexExpr,
                                                      (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        // Arrays and tuples.
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }

    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Plain (non-async) function invocation.
    rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor: defaults the second positional arg (the cause) to nil
 * when absent, and materializes the named args into a record literal that becomes the
 * error's (readonly-cloned) detail value.
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Positional args are (message, cause); default the cause to nil when only the
    // message was given.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);

    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        // Each named arg becomes a key-value field of the detail record literal.
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));

            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    // A non-async action invoked inside a transaction needs the transaction
    // coordinator started (once) before the call.
    if (!actionInvocation.async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }

    // An async (`start`) call on an isolated function gets the strand annotation
    // with thread "any" attached.
    if (actionInvocation.async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
// Attaches the strand-with-thread-"any" annotation to the given action invocation and
// its symbol. The annotation attachment is created lazily, once per desugar run, and
// reused for all subsequent invocations.
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    if (this.strandAnnotAttachement == null) {
        // First use: create the annotation attachment and cache it.
        BLangPackage pkgNode = env.enclPkg;
        List<BLangTypeDefinition> prevTypeDefinitions = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        // Creating the annotation may add record type definitions; give the new ones
        // their init functions.
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, prevTypeDefinitions);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
/**
 * Common desugaring for invocations: reorders named/defaultable arguments into positional
 * order, rewrites the argument and receiver expressions, and converts calls on objects or
 * records into attached-function invocations. {@code async} marks `start`-style calls.
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;

    if (!enclLocks.isEmpty()) {
        // Inside a lock statement: the callee's dependent global variables must be
        // covered by the lock as well.
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }

    // Reorder named/defaultable args into declared parameter order.
    reorderArguments(invocation);

    rewriteExprs(invocation.requiredArgs);
    if (!invocation.requiredArgs.isEmpty() && invocation.langLibInvocation) {
        // For langlib calls the receiver sits in requiredArgs[0] after reordering.
        invocation.expr = invocation.requiredArgs.get(0);
    } else {
        invocation.expr = rewriteExpr(invocation.expr);
    }
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);

    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);

    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    result = invRef;

    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        // Parameterized (dependently-typed) return: build the concrete return type,
        // wrapping it in a future for async calls.
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }

    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // Materialize the implicit receiver from its symbol.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }

    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            if (!invocation.langLibInvocation) {
                // Method call on an object/record: the receiver becomes the first argument
                // of an attached-function invocation.
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                                            invocation.symbol, invocation.getBType(),
                                                            invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }

    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        // For object-constructor expressions with closures, remember the first init
        // invocation on the class's dynamic-environment data.
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For langlib invocations, inserts a conversion on each required argument so that its
 * static type matches the corresponding parameter's declared type.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }

    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    int argCount = args.size();
    for (int idx = 0; idx < argCount; idx++) {
        BLangExpression converted = addConversionExprIfRequired(args.get(idx), paramSymbols.get(idx).type);
        args.set(idx, converted);
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments are the same.
 * 2. Due to the current type-param logic, the type-param flag is put on the original type.
 * 3. For an error type whose Cloneable type carries the type-param flag, this code changes
 *    the expression type.
 * 4. Using the error type is a problem because the Cloneable type is itself a type param
 *    (e.g. ExprBodiedFunctionTest); `never` was added to the Cloneable type param:
 * @typeParam type
 * CloneableType Cloneable|never;
 *
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    BType invokableRetType = ((BInvokableSymbol) iExpr.symbol).retType;
    // Only langlib calls and calls whose return type contains a type param need the cast.
    boolean needsCast = iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(invokableRetType);
    if (!needsCast) {
        return;
    }

    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(invokableRetType);
    }
    // Cast the invocation back to its originally inferred type.
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
/**
 * Inserts a conversion on every required argument whose corresponding parameter is a
 * stream, so the argument's static type matches the declared stream parameter type.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> paramSymbols = ((BInvokableSymbol) iExpr.symbol).params;
    if (paramSymbols.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BVarSymbol paramSymbol = paramSymbols.get(idx);
        if (Types.getReferredType(paramSymbol.type).tag != TypeTags.STREAM) {
            continue;
        }
        args.set(idx, addConversionExprIfRequired(args.get(idx), paramSymbol.type));
    }
}
// Builds a literal node representing the nil value `()`.
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
public void visit(BLangTypeInit typeInitExpr) {
    // Stream `new` expressions lower to the internal stream-construct invocation;
    // anything else is handled as an object instantiation.
    if (Types.getReferredType(typeInitExpr.getBType()).tag != TypeTags.STREAM) {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    } else {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    }
}
/**
 * Desugars a non-stream `new T(...)` into a statement expression that allocates the
 * object, invokes its generated initializer, and — when the initializer's return type is
 * not nil — yields either the initializer's error or the constructed object.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // $obj$ = <type init>; keep a reference to the allocated object.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    // Point the init invocation at the object's generated initializer.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        // Initializer returns nil: just run it and yield the object.
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // Initializer may return an error: capture its return value in $temp$ ...
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
                                                              typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    // ... and pick either the error or the object as the overall $result$.
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
/**
 * Desugars a stream `new` expression into a call to the internal stream-construct
 * function, passing typedesc arguments for the constraint and completion types and the
 * optional first constructor argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;

    // typedesc argument for the stream's constraint type.
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);

    // typedesc argument for the stream's completion type.
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);

    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        // Forward the user-supplied constructor argument, if present.
        args.add(typeInitExpr.argsExpr.get(0));
    }

    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}
/**
 * Creates a variable definition `type name = expr;` at the given location. If a symbol
 * with the same name already exists in the current scope's main space it is reused;
 * otherwise a fresh symbol is minted.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol objSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    // Reuse a previously-defined symbol of the same name; otherwise create a new one.
    if (objSym == null || objSym == symTable.notFoundSymbol) {
        objSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable objVar = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) objSym);
    BLangSimpleVariableDef objVarDef = ASTBuilderUtil.createVariableDef(location);
    objVarDef.var = objVar;
    objVarDef.setBType(objVar.getBType());
    return objVarDef;
}
/**
 * Extracts the object type behind a `new` expression's type: the type itself when it is
 * an object, or the first object member when it is a union ({@code noType} when the union
 * has no object member). Any other type is an illegal state in an object-init context.
 */
private BType getObjectType(BType bType) {
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }

    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
// Builds an `error` type node positioned at the builtin location.
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorType = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorType.pos = symTable.builtinPos;
    errorType.setBType(symTable.errorType);
    return errorType;
}
/**
 * Builds an `error?` type node.
 * Also sets {@code pos} to the builtin location, for consistency with
 * {@code getErrorTypeNode()} — previously the node was created without a position.
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * Desugar `cond ? thenExpr : elseExpr` to an if-else wrapped in a statement
     * expression:
     *
     * T $ternary_result$;
     * if (cond) {
     *     $ternary_result$ = thenExpr;
     * } else {
     *     $ternary_result$ = elseExpr;
     * }
     * => $ternary_result$
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

    // then branch: $ternary_result$ = thenExpr;
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);

    // else branch: $ternary_result$ = elseExpr;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);

    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());

    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // A multi-future wait parses as a binary expression tree; flatten it into the
    // expression list. A single-future wait becomes a one-element list.
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() != NodeKind.BINARY_EXPR) {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    } else {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    }
    result = waitExpr;
}
// Flattens a binary wait-expression tree into `exprs` (left operand first) and returns it.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    for (BLangExpression operand : new BLangExpression[]{binaryExpr.lhsExpr, binaryExpr.rhsExpr}) {
        visitBinaryExprOfWait(operand, exprs);
    }
    return exprs;
}
// Recurses into nested binary wait expressions; leaf expressions are rewritten and
// collected into `exprs`.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Rewrite each entry: a `key: expr` pair rewrites the value, a bare key rewrites
    // the key expression itself.
    for (var keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    }
    // Repackage the rewritten pairs as a wait literal and desugar that.
    BLangExpression expr = new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    expr.pos = waitExpr.pos;
    result = rewriteExpr(expr);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Rewrite the trapped expression, then convert it to the trap expression's own type
    // unless the trapped expression is nil-typed.
    BLangExpression rewritten = rewriteExpr(trapExpr.expr);
    boolean isNil = Types.getReferredType(rewritten.getBType()).tag == TypeTags.NIL;
    trapExpr.expr = isNil ? rewritten : addConversionExprIfRequired(rewritten, trapExpr.getBType());
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nil-lifted binary expressions (a nullable operand) are desugared into an
    // if/else statement expression; see createStmtExprForNullableBinaryExpr.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // Range operators (`...` / `..<`) become an internal createIntRange() call;
    // the half-open form first shifts the upper bound down by one.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    // Logical && / || need dedicated short-circuit desugaring.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // Arithmetic and bitwise operators: if the expected result is int, byte
    // operands are widened to int up front.
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // (Ref-)equality between a byte and an int subtype: widen the byte side.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Same type tag on both sides: nothing more to do, except that shift and
    // arithmetic operators on non-value types still fall through for casts.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml: wrap the string side in an xml text literal.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: decimal wins, then float — cast the other side up.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    // Remaining operator families get their own cast-insertion helpers.
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
/**
 * Desugars a binary expression with nullable operand(s) into a statement
 * expression that short-circuits to nil when either operand is nil.
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // The non-nil member of the T? union is the type used for the actual operation.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // Bitwise operators may have operands typed differently from the result;
    // strip nil off each operand's own type instead of using nonNilType.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ holds the nil-lifted outcome of the whole expression.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Evaluate each operand exactly once into a temp var.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // Condition: lhs is () || rhs is ()
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // then-branch: $result$ = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = <nonNil>lhs OP <nonNil>rhs;
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns {@code true} when this binary expression must be nil-lifted, i.e.
 * both operand types are known, at least one is nullable, and the operator is
 * an arithmetic, shift, or bitwise operator.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts the casts/wrappers an arithmetic binary expression needs so both
 * operands end up in a common family (int-like, string-like, xml, or the
 * expression's own numeric type).
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    // Both operands already in the same family: nothing to do.
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    // xml OP non-xml: wrap a string-containing operand as an xml text literal,
    // otherwise cast it to xml.
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Fix: the generated literal for the LHS operand must carry the LHS
            // expression's position (was binaryExpr.rhsExpr.pos — a copy-paste
            // slip from the mirrored branch above), so diagnostics and debug
            // info point at the correct operand.
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: cast both sides to the expression's result type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Widens shift-expression operands to int. An operand that is already an int
 * subtype or byte is left untouched; any other operand is cast to int.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Inserts casts so both operands of a relational (comparison) expression share
 * a comparable type: int subtypes and byte are unified to int, and string
 * subtypes are unified to string. Other combinations are left as-is.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean lhsIsInt = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean rhsIsInt = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    boolean lhsIsByte = lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsByte = rhsExprTypeTag == TypeTags.BYTE;
    // Both sides already agree (both int subtypes, or both byte).
    if ((lhsIsInt && rhsIsInt) || (lhsIsByte && rhsIsByte)) {
        return;
    }
    // Exactly one side is an int subtype: widen the other side to int.
    if (lhsIsInt != rhsIsInt) {
        if (lhsIsInt) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        }
        return;
    }
    // Neither side is an int subtype, but one is byte: widen both to int.
    if (lhsIsByte || rhsIsByte) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean lhsIsString = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean rhsIsString = TypeTags.isStringTypeTag(rhsExprTypeTag);
    // Both string-like (no cast needed) or neither (nothing to unify).
    if (lhsIsString == rhsIsString) {
        return;
    }
    // Exactly one side is a string subtype: cast the other side to string.
    if (lhsIsString) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
    } else {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}
/**
 * Builds the lang.__internal createIntRange(lhs, rhs) invocation that replaces
 * a range expression.
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    Scope internalModuleScope = symTable.langInternalModuleSymbol.scope;
    BInvokableSymbol rangeFnSymbol =
            (BInvokableSymbol) internalModuleScope.lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(location, rangeFnSymbol, rangeArgs, symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When a binary expression mixing byte operands is expected to produce an int,
 * widens each byte operand to int. No-op when there is no expected type or no
 * byte operand.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    int rhsTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    boolean lhsIsByte = lhsTag == TypeTags.BYTE;
    boolean rhsIsByte = rhsTag == TypeTags.BYTE;
    if (!lhsIsByte && !rhsIsByte) {
        return;
    }
    if (binaryExpr.expectedType.tag == TypeTags.INT) {
        if (rhsIsByte) {
            binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
        }
        if (lhsIsByte) {
            binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
        }
    }
}
/**
 * Checks whether the given binary expression is a shift operation.
 * If it is, the caller converts both the lhs and the rhs of the binary
 * expression to 'int' type.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where the binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
}
/**
 * Desugars `lhs ?: rhs` into a statement expression:
 * evaluate lhs once; if it is nil, the result is rhs, otherwise the result is
 * lhs cast to the elvis expression's type.
 */
public void visit(BLangElvisExpr elvisExpr) {
    Location pos = elvisExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = elvisExpr.getBType();
    // _$result$_ holds the final value of the whole elvis expression.
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Evaluate the lhs exactly once into its own temp var.
    String lhsResultVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable lhsResultVar =
            ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
                    new BVarSymbol(0, names.fromString(lhsResultVarName),
                            this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
                            this.env.scope.owner, elvisExpr.pos, VIRTUAL));
    BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
    BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
    // then-branch (lhs is ()): result = rhs
    BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(nilAssignment);
    // else-branch: result = <resultType>lhs
    BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(lhsResultVarRef, resultType));
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    elseBody.addStatement(notNilAssignment);
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(lhsResultVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a unary expression: nil-lifted operands become an if/else statement
 * expression, bitwise complement becomes an XOR, and unary +/- may need the
 * operand cast to the expression type before rewriting.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNilableUnaryExpr(unaryExpr), env);
        return;
    }
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        // '~x' is rewritten as 'x ^ allOnesMask'.
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    if (unaryExpr.operator == OperatorKind.ADD || unaryExpr.operator == OperatorKind.SUB) {
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Casts the operand of a unary +/- to the unary expression's type, unless the
 * operand is already an int subtype.
 */
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression expr = unaryExpr.expr;
    // NOTE(review): this checks the raw type tag without Types.getReferredType,
    // unlike the sibling helpers in this file — confirm type-reference operands
    // are already resolved when this is reached.
    if (TypeTags.isIntegerTypeTag(expr.getBType().tag)) {
        return;
    }
    unaryExpr.expr = createTypeCastExpr(expr, unaryExpr.getBType());
}
/**
 * This method desugars a bitwise complement (~) unary expression into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    if (TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag) {
        // byte complement: XOR with the 8-bit all-ones mask (0xff).
        binaryExpr.setBType(symTable.byteType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        // int complement: XOR with -1 (all 64 bits set).
        binaryExpr.setBType(symTable.intType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}
/**
 * Desugars a unary expression over a nullable operand into a statement
 * expression that yields nil when the operand is nil, and applies the unary
 * operator to the nil-stripped operand otherwise.
 */
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // The non-nil member of the T? union is the type the operator applies to.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Condition: operand is ()
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    // then-branch: $result$ = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else-branch: $result$ = OP <nilLiftType>operand;
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns {@code true} when this unary expression must be nil-lifted: its type
 * is known and nullable, and the operator is +, -, or ~.
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    switch (unaryExpr.operator) {
        case ADD:
        case SUB:
        case BITWISE_COMPLEMENT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars a type conversion expression. A conversion node that has no type
 * node but carries annotation attachments is a pure annotation carrier and is
 * replaced by its inner expression; otherwise the type node and inner
 * expression are rewritten in place.
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Removed an unused local that merely copied conversionExpr.targetType.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package at most once.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
/**
 * Desugars an arrow function (`(a, b) => expr`) into a lambda backed by a real
 * BLangFunction: builds the function node, creates and defines its symbol and
 * parameter symbols, then registers and rewrites the resulting lambda.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // The return type is the type of the arrow function's body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // The body expression becomes a block body with a single return statement.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the invokable symbol for the generated function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each parameter symbol in the function's scope and collect them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Keep the defining env so captured closure variables can be resolved later.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
/**
 * Attaches the given symbol to the invokable node, gives the symbol a fresh
 * scope, and points the invokable env at that same scope so parameters get
 * defined into it.
 */
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Rewrite both sides of the attribute; the node itself is kept.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
/**
 * Desugars an XML element literal: rewrites its attributes, collects inline
 * namespace declarations (choosing package- vs local-scoped XMLNS nodes based
 * on the owning scope), propagates enclosing inline namespaces while inside a
 * query, and rewrites tag names and children.
 */
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Turn each xmlns-declaring attribute into an inline namespace node.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-owned scopes get package-level XMLNS nodes; others are local.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    // While visiting a query, inherit the enclosing element's inline namespaces;
    // restore the previous set afterwards so siblings are unaffected.
    List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
    if (isVisitingQuery && this.inlineXMLNamespaces != null) {
        xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
    }
    this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    this.inlineXMLNamespaces = prevInlineNamespaces;
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        // NOTE(review): the rewritten node returned here is discarded, so the
        // xmlItems list keeps its original references — confirm rewriteExpr
        // mutates these nodes in place for every xml item kind.
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string-concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into a single concatenation expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Rewrite the PI target and fold its data fragments into one concatenation.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into one concatenation expression.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is just the concatenation of its parts.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 *      string name = "Pubudu";
 *      'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 *      type RawTemplate$Impl$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 *
 *
 *      'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate the concrete object class for this literal's abstract type.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Build the list constructor for the interpolated insertions.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // new RawTemplate$Impl$N(insertions) — the list is wired in as both the
    // init invocation arg and required arg.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 *      type $anonType$0 object {
 *          public string[] strings = ["Hello ", "!"];
 *          public (any|error)[] insertions;
 *
 *          function init((any|error)[] insertions) {
 *              self.insertions = insertions;
 *          }
 *      };
 * @param strings    The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos        The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // Create a fresh class symbol/type mirroring the abstract object's fields.
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    // Create a new object type node and a type def for the generated class.
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // The `strings` field is defaulted to the literal's string portions.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Create the user-defined init() method, which takes the remaining
    // (non-defaulted) fields as parameters.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    // Create and analyze the generated (compiler-internal) init function.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                     tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env       The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    // Register the function as the type's initializer.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // Every field without a default value becomes an init() parameter whose
    // value is assigned to the field inside the init body.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        // self.<field> = <param>;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Rewrite the sent expression and wrap it in a clone invocation —
    // presumably so the value crossing the worker boundary is not shared;
    // see visitCloneInvocation for the exact semantics.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Same treatment as async send: rewrite then clone the sent value.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Worker receive needs no desugaring at this stage.
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Collect the distinct worker names from the cached send statements;
    // these are the workers this flush refers to.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // Replace the `transactional` expression with a call to the internal
    // transaction module's isTransactional function.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                                        Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // Delegate the commit desugaring to the transaction desugar, then rewrite the result.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangFail failNode) {
    // When an enclosing on-fail clause is in scope (and we are not inside a query),
    // route the failure into the on-fail body; otherwise `fail` becomes a return of
    // the error value.
    if (this.onFailClause != null && !this.isVisitingQuery) {
        if (this.onFailClause.bodyContainsFail) {
            // The on-fail body itself contains a `fail`; needs the nested handling.
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}
@Override
public void visit(BLangLocalVarRef localVarRef) {
    // Already in desugared form; no rewriting required.
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    // Already in desugared form; no rewriting required.
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    // Already in desugared form; no rewriting required.
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    // Already in desugared form; no rewriting required.
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    // Already in desugared form; no rewriting required.
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    // Already in desugared form; no rewriting required.
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    // Already in desugared form; no rewriting required.
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    // Already in desugared form; no rewriting required.
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    // Already in desugared form; no rewriting required.
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    // Already in desugared form; no rewriting required.
    result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    // Already in desugared form; no rewriting required.
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    // Already in desugared form; no rewriting required.
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    // Already in desugared form; no rewriting required.
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // Desugar `x.<elemName>` element access into xml:getElements(x, expandedNames).
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    // Expand each element filter into a namespace-qualified name string.
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
/**
 * Expands XML element filters into string-literal arguments of expanded qualified names.
 * A filter with a known namespace prefix becomes {@code {nsUri}name}; a filter with no
 * prefix picks up the default namespace when one is in scope (except for the wildcard
 * {@code *}, which is passed through as-is).
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    // The default (un-prefixed) namespace, if declared in the current scope.
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
    ArrayList<BLangExpression> args = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol == symTable.notFoundSymbol) {
            // No explicit prefix: qualify with the default namespace unless it's the wildcard.
            if (defaultNS != null && !filter.name.equals("*")) {
                String expandedName = createExpandedQName(defaultNS, filter.name);
                args.add(createStringLiteral(filter.elemNamePos, expandedName));
            } else {
                args.add(createStringLiteral(filter.elemNamePos, filter.name));
            }
        } else {
            // Prefix resolved: qualify with that namespace's URI.
            BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
            String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
            BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
            args.add(stringLiteral);
        }
    }
    return args;
}
/**
 * Builds an invocation of an xml langlib method on the given expression.
 * The receiver expression is rewritten and also passed as the first required
 * argument (langlib calling convention).
 *
 * @param pos          position for the generated nodes
 * @param functionName langlib function to call
 * @param invokeOnExpr receiver expression
 * @param args         additional required arguments
 * @param restArgs     rest arguments (rewritten here)
 * @return the constructed langlib invocation node
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    // Resolve the function against the xml langlib.
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
    // Receiver goes first, then the explicit args.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    // Desugar xml step/navigation access into the matching langlib invocation.
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            // Descendant access: selectDescendants(expr, ...filters).
            BLangInvocation descendantsCall = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
            result = rewriteExpr(descendantsCall);
            break;
        case CHILDREN:
            // Children access: children(expr); filters are not passed here.
            BLangInvocation childrenCall = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
            result = rewriteExpr(childrenCall);
            break;
        default:
            // Filtered-children access; -1 marks "no explicit child index".
            BLangExpression childIndexExpr = xmlNavigation.childIndex == null
                    ? new BLangLiteral(Long.valueOf(-1), symTable.intType)
                    : xmlNavigation.childIndex;
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            BLangInvocation filteredChildrenCall = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
            result = rewriteExpr(filteredChildrenCall);
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the LHS needs rewriting; the RHS is a type, not an expression.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already produced by this desugar phase; pass through unchanged.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    // Rewrite the referenced type node; the typedesc wrapper itself stays.
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // The rest-args wrapper is transparent here; desugar to its inner expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // Named args have already been reordered into position; keep only the value expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // `check expr`: on error, return it from the enclosing function.
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic expr`: on error, panic instead of returning.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check}/{@code checkpanic} into a statement expression of the shape:
 * evaluate the inner expression once into a temp; if the temp is of the non-error
 * result type, cast and assign it to {@code _$result$_}; otherwise return the error
 * (check) or panic with it (checkpanic). The whole expression yields {@code _$result$_}.
 *
 * @param checkedExpr  the check/checkpanic expression
 * @param isCheckPanic true for {@code checkpanic}, false for {@code check}
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    // If the static type proves the expression can never be an error, the wrapper
    // is redundant; just desugar the inner expression.
    if (checkedExpr.isRedundantChecking) {
        result = rewriteExpr(checkedExpr.expr);
        return;
    }
    Location pos = checkedExpr.pos;
    // _$result$_ holds the non-error value the whole expression evaluates to.
    String resultVarName = "_$result$_";
    BType resultType = checkedExpr.getBType();
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Temp that evaluates the checked expression exactly once.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BType checkedExprType = checkedExpr.expr.getBType();
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
                    checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
                            this.env.scope.owner.pkgID, checkedExprType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
    BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
    // Success branch: cast the temp to the result type and store it.
    BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(checkedExprVarRef, resultType));
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(successAssignment);
    // Error branch: return the error, or panic when isCheckPanic is set.
    BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
            checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    checkedExprTypeNode.setBType(resultType);
    checkedExprTypeNode.typeKind = resultType.getKind();
    // `if temp is ResultType { ... } else { <error handling> }`
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(checkedExprVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor becomes a `new` of the generated service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
    // Desugar the anonymous class first, then reduce the expression to its type-init.
    visit(bLangObjectConstructorExpression.classNode);
    bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Desugar annotation access into a synthetic binary expression with the
    // ANNOT_ACCESS operator: lhs is the typedesc expression, rhs is the
    // annotation's alias as a string literal.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
            annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthetic operator symbol; the backend recognizes ANNOT_ACCESS by kind.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
            new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                    binaryExpr.rhsExpr.getBType()),
                    annotAccessExpr.getBType(), null), null,
            symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    // Box value-typed operands so the test runs against a reference value.
    if (types.isValueType(expr.getBType())) {
        expr = addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // Desugar `x !is T` into `!(x is T)`.
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                typeTestExpr.getBType(),
                OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the tested expression needs rewriting.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Rewrite both the yielded expression and the preceding statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Track that we are inside a query (affects e.g. fail handling), restore after.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
/**
 * Collects the statements (to be propagated into a query body) that correspond to the
 * XML namespaces visible in the given environment. Namespaces with no recorded
 * statement are skipped.
 */
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = symResolver.resolveAllNamespaces(env);
    List<BLangStatement> propagatedStmts = new ArrayList<>();
    for (Name nsName : visibleNamespaces.keySet()) {
        BLangStatement stmt = this.stmtsToBePropagatedToQuery.get(nsName);
        if (stmt != null) {
            propagatedStmts.add(stmt);
        }
    }
    return propagatedStmts;
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Same query-context bookkeeping as for query expressions.
    boolean prevIsVisitingQuery = this.isVisitingQuery;
    this.isVisitingQuery = true;
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
    result = rewrite(stmtExpr, env);
    this.isVisitingQuery = prevIsVisitingQuery;
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Rewrite the member expressions; the literal node itself stays.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    BType refType = Types.getReferredType(constSymbol.literalType);
    // Simple-typed constants (int/byte/float/decimal/string/boolean or nil — tags up to
    // BOOLEAN, plus NIL) are replaced by a literal carrying the resolved constant value.
    if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value at this point.
        if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
                constSymbol.value.value == null)) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        // Structured constants keep their original expression, rewritten.
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment -> rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder expression; nothing to desugar.
    result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    // Rewrite both the conditionally-supplied argument and its guard condition.
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference is folded to a literal of its resolved value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
/**
 * Builds `$iterator$ = collection.iterator()` as a variable definition.
 *
 * @param pos                       position for generated nodes
 * @param collectionSymbol          symbol of the iterated collection variable
 * @param iteratorInvokableSymbol   the resolved iterator() function symbol
 * @param isIteratorFuncFromLangLib whether iterator() comes from a langlib module
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The collection is both the receiver and the first argument (langlib convention).
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
            iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds `$result$ = $iterator$.next()` as a variable definition; the result type is
 * nillable because next() yields nil at the end of iteration.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
            nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}
/**
 * Builds an invocation of the iterator object's `next` method on the given
 * iterator variable.
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // Resolve `next` on the iterator's object type.
    BInvokableSymbol nextFuncSymbol =
            getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Finds the attached `next` method of the given iterator object type.
 *
 * @return the attached function, or null when the type has no `next` method
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals("next")) {
            return attachedFunc;
        }
    }
    return null;
}
// Convenience wrapper: builds `result.value` field access on the iterator result.
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
/**
 * Builds a field access expression {@code resultSymbol.fieldName} typed as varType.
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}
/**
 * Converts an arrow function's single-expression body into a block body
 * containing `return <expr>;`.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}
/**
 * Builds an invocation of a function resolved from the root scope by name.
 *
 * @param functionName name looked up in the root scope
 * @param args         required arguments
 * @param retType      type assigned to the invocation node
 */
protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Symbol is resolved from the root (built-in) scope.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds an invocation of a langlib method on the given receiver expression.
 * The receiver is resolved against its own type's langlib and passed as the
 * first required argument.
 *
 * @param retType explicit return type, or null to use the resolved symbol's return type
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
    // Receiver goes first, then the explicit args (langlib calling convention).
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an invocation of a function in the lang.__internal module (no receiver
 * expression).
 *
 * @param retType explicit return type, or null to use the resolved symbol's return type
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
            names.fromString(functionName), env);
    // Copy-construct the arg list instead of create-then-addAll: same behavior,
    // idiomatic, and presized to the argument count.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Creates an empty array literal node typed as {@code any[]}.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.exprs = new ArrayList<>();
    arrayLiteral.setBType(new BArrayType(symTable.anyType));
    return arrayLiteral;
}
/**
 * Desugars an invocation through a function pointer: the pointer value is either a
 * plain variable reference (no receiver) or a field access (receiver.field), and the
 * invocation becomes a BFunctionPointerInvocation over that rewritten value.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        // Bare function-pointer variable.
        expr = new BLangSimpleVarRef();
    } else {
        // Function pointer stored in a field: build receiver.field access.
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps the expression in a `clone()` langlib call unless cloning is unnecessary
 * (value types and errors are returned as-is), converting the result to lhsType
 * when required.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    // Value types and errors do not need cloning.
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps the expression in a `cloneReadOnly()` langlib call unless it is unnecessary
 * (value types and errors are returned as-is), converting the result to lhsType
 * when required.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    BType exprType = expr.getBType();
    // Value types and errors do not need a read-only clone.
    if (types.isValueType(exprType) || exprType.tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
            expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Core rewrite driver: desugars a single node under the given environment.
 * Nodes already marked desugared are returned untouched; otherwise the node is
 * visited, the visitor's `result` becomes the replacement, and the previous
 * environment is restored afterwards.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    // `result` is set by the visit method; clear it so stale values can't leak.
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Expression rewrite driver. If the expression carries an implicit conversion
 * (impConversionExpr), that wrapper is desugared instead — and detached first to
 * avoid re-applying it on re-entry.
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Statement rewrite driver; delegates to the generic node rewrite and casts back.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    return (E) stmt;
}
/**
 * Rewrites each statement of the list in place (same list instance is returned).
 * Delegates to the generic node-list rewrite: the loop bodies were identical, and
 * a statement is rewritten exactly the same way as any other node.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    return rewrite(nodeList, env);
}
/**
 * Rewrites each node of the list in place, preserving order and list identity.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Rewrites each expression of the list in place, preserving order and list identity.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Creates a positioned string-typed literal node with the given value.
 */
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Creates an int-typed literal node with the given value (no position attached).
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.value = value;
    intLiteral.setBType(symTable.intType);
    return intLiteral;
}
/**
 * Creates a byte-typed literal node. The signed Java byte is widened to its
 * unsigned 0-255 value, matching Ballerina byte semantics.
 */
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
/**
 * Wraps the expression in a cast to targetType; when the types are already the
 * same, the expression is returned unchanged.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    // Marked internal: this cast was generated by the compiler, not written by the user.
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Unwraps nested array types (following type references) down to the innermost
 * non-array element type. Non-array inputs are returned as-is.
 */
private BType getElementType(BType bType) {
    BType current = bType;
    BType referred = Types.getReferredType(current);
    while (referred.tag == TypeTags.ARRAY) {
        current = ((BArrayType) referred).getElementType();
        referred = Types.getReferredType(current);
    }
    return current;
}
/**
 * Appends an implicit `return ();` to a block-bodied function whose return type is
 * nillable and whose body does not already end with a return. Native functions and
 * non-block bodies are left alone.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
            || funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos;
        // Generated init functions get no position for the synthetic return.
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            returnStmtPos = null;
        } else {
            // Anchor the synthetic return at the function's closing line.
            returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().endLine().line(),
                    invPos.lineRange().startLine().offset(),
                    invPos.lineRange().startLine().offset(), 0, 0);
        }
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 * Positional and named arguments are placed in declaration order, defaultable
 * gaps are filled, and rest arguments (including a trailing spread `...x`) are
 * collapsed into a single array value for the rest parameter.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // When a spread vararg may feed required params, evaluate it once into a temp
    // ($vararg$N) so each extracted element reads from the same value.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // There is a spread arg and not all required params are filled positionally.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-process the named args and the positional args, and link the given args
        // to the parameters in declaration order.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case 1: no trailing spread arg — individual rest args are packed into an array
    // literal for the rest parameter (if any).
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case 2: the only rest arg is a spread `...x`.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        // If no required args were extracted from the vararg, nothing further to do.
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            return;
        }
        // Parts of the vararg fed the required params. Attach the temp-var block to
        // the first non-rest arg so it evaluates before any extracted element is read.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        // If there's no rest param, the vararg only fed required/defaultable params.
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        // A record-typed vararg feeds only named params; the rest param gets an empty array.
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        // Otherwise build: new array $vararg$N; foreach leftover index of the vararg,
        // push vararg[i] into the new array; the rest param receives that array.
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Iterate from the first vararg element not consumed by required params.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // A closed array fully consumed by required args: loop never runs;
                // use the rest param's element type for the (unreachable) access.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            // Tuple (or other) vararg: element type is not statically uniform.
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr),
                restParamType, pos);
        // push() takes the value as a rest arg, not a required arg.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case 3: individual rest args followed by a spread arg. Build an array of the
    // individual args, then push the spread's contents onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Reorders the required arguments of an invocation so they line up positionally with the
 * invokable symbol's parameter list. Sources for each parameter slot, in order of preference:
 * a positional arg already at that index, a named arg with the parameter's name, a synthesized
 * record literal for an included-record param, an ignore-expression (when there is no vararg),
 * or a member/field access on the spread vararg. Leftover named args are folded into the
 * included-record literals at the end.
 *
 * @param iExpr           the invocation whose requiredArgs list is rewritten in place
 * @param invokableSymbol symbol supplying the parameter list
 * @param varargRef       reference to the spread vararg value, or null if there is none
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
List<BLangExpression> args = new ArrayList<>();
// Index the named args by name so they can be matched to parameters out of order.
Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
iExpr.requiredArgs.stream()
.filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
.forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
List<BVarSymbol> params = invokableSymbol.params;
// Record literals synthesized for included-record params; leftover named args land in these.
List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
// The (at most one) included-record literal whose record type permits additional fields.
BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
int varargIndex = 0;
BType varargType = null;
boolean tupleTypedVararg = false;
if (varargRef != null) {
varargType = Types.getReferredType(varargRef.getBType());
tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
}
for (int i = 0; i < params.size(); i++) {
BVarSymbol param = params.get(i);
if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
// A positional arg was provided for this slot; keep it as-is.
args.add(iExpr.requiredArgs.get(i));
} else if (namedArgs.containsKey(param.name.value)) {
// A named arg matches this parameter; consume it (remove so leftovers can be detected).
args.add(namedArgs.remove(param.name.value));
} else if (param.getFlags().contains(Flag.INCLUDED)) {
// Included-record param: synthesize an empty record literal to be filled later.
BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
BType paramType = param.type;
recordLiteral.setBType(paramType);
args.add(recordLiteral);
incRecordLiterals.add(recordLiteral);
if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
incRecordParamAllowAdditionalFields = recordLiteral;
}
} else if (varargRef == null) {
// No value available at all: mark the slot as ignored (defaults apply at the callee).
BLangExpression expr = new BLangIgnoreExpr();
expr.setBType(param.type);
args.add(expr);
} else {
if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
// Record-typed vararg: pull the value out by field name.
if (param.isDefaultable) {
// Guard the access with hasKey so a missing field falls back to the default.
BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
BLangIndexBasedAccess memberAccessExpr =
ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
hasKeyInvocation, memberAccessExpr, ignoreExpr);
args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
} else {
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(varargRef,
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
fieldBasedAccessExpression.setBType(param.type);
args.add(fieldBasedAccessExpression);
}
} else {
// List-typed vararg: take the next member positionally.
BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
BType memberAccessExprType = tupleTypedVararg ?
((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
varargRef, indexExpr), param.type));
varargIndex++;
}
}
}
if (namedArgs.size() > 0) {
// Named args not matching any parameter become fields of the included-record literals.
setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
}
iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named arguments into the record literals synthesized for
 * included-record parameters. An argument goes to the first literal whose record type
 * declares a non-never field of that name; otherwise it is treated as an additional
 * field of the literal whose record type allows a rest field.
 *
 * @param namedArgs                           named args not matched to any parameter
 * @param incRecordLiterals                   literals synthesized for included-record params
 * @param incRecordParamAllowAdditionalFields literal accepting additional (rest) fields, may be null
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (Map.Entry<String, BLangExpression> namedArg : namedArgs.entrySet()) {
        String argName = namedArg.getKey();
        BLangNamedArgsExpression argExpr = (BLangNamedArgsExpression) namedArg.getValue();
        BLangRecordLiteral targetLiteral = null;
        for (BLangRecordLiteral candidate : incRecordLiterals) {
            LinkedHashMap<String, BField> candidateFields =
                    ((BRecordType) Types.getReferredType(candidate.getBType())).fields;
            BField field = candidateFields.get(argName);
            // A declared field of type `never` cannot hold a value, so keep looking.
            if (field != null && Types.getReferredType(field.type).tag != TypeTags.NEVER) {
                targetLiteral = candidate;
                break;
            }
        }
        if (targetLiteral != null) {
            createAndAddRecordFieldForIncRecordLiteral(targetLiteral, argExpr);
        } else {
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, argExpr);
        }
    }
}
/**
 * Appends a key-value field to the given record literal, keyed by the named
 * argument's name and valued by its expression.
 *
 * @param recordLiteral literal to extend
 * @param expr          named argument supplying the key and the value
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    // The record key is a simple var-ref carrying the named arg's identifier.
    BLangSimpleVarRef keyRef = new BLangSimpleVarRef();
    keyRef.variableName = expr.name;
    recordLiteral.fields.add(ASTBuilderUtil.createBLangRecordKeyValue(keyRef, expr.expr));
}
/**
 * Builds the failure-path block for a checked expression: binds the value (cast to error)
 * to a fresh variable, then either fails (propagating to an on-fail clause or returning
 * when the enclosing function's return type covers the error) or panics.
 *
 * @param location             position for all synthesized nodes
 * @param ref                  reference holding the value known to be an error
 * @param invokableSymbol      symbol of the enclosing function (for its return type)
 * @param equivalentErrorTypes error types the checked expression may produce
 * @param isCheckPanicExpr     true for `checkpanic`, forcing a panic instead of fail/return
 * @return the synthesized block statement
 */
private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
BSymbol invokableSymbol,
List<BType> equivalentErrorTypes,
boolean isCheckPanicExpr) {
// Normalize the return type to a set of member types for the assignability check below.
BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
((BUnionType) enclosingFuncReturnType).getMemberTypes() :
new LinkedHashSet<>() {{
add(enclosingFuncReturnType);
}};
// The error can be returned directly only if every possible error type is assignable
// to some member of the return type.
boolean returnOnError = equivalentErrorTypes.stream()
.allMatch(errorType -> returnTypeSet.stream()
.anyMatch(retType -> types.isAssignable(errorType, retType)));
String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
BLangSimpleVariable errorVar =
ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
createTypeCastExpr(ref, symTable.errorType),
new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
this.env.scope.owner.pkgID, symTable.errorType,
this.env.scope.owner, location, VIRTUAL));
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
blockStmt.addStatement(errorVarDef);
BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
// `check` with a handler available: emit a fail statement; when the error is also
// returnable, attach the return so fail can fall through to it.
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = location;
failStmt.expr = errorVarRef;
blockStmt.addStatement(failStmt);
if (returnOnError && this.shouldReturnErrors) {
BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
errorReturn.desugared = true;
failStmt.exprStmt = errorReturn;
}
} else {
// `checkpanic`, or no way to propagate the error: panic with it.
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = location;
panicNode.expr = errorVarRef;
blockStmt.addStatement(panicNode);
}
return blockStmt;
}
/**
 * Wraps the expression in a type-conversion node targeting lhsType, unless a conversion
 * is unnecessary or was already supplied. The skip cases are checked in a deliberate order:
 * no target type, identical types, an implicit cast already set by the type checker, and
 * a few pairs that are safe without conversion (nil into json, nullable into nil,
 * tuple into array).
 *
 * @param expr    expression to (possibly) convert
 * @param lhsType target type of the conversion
 * @return the original expression, its implicit-cast wrapper, or a new conversion node
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
if (lhsType.tag == TypeTags.NONE) {
return expr;
}
BType rhsType = expr.getBType();
if (types.isSameType(rhsType, lhsType)) {
return expr;
}
// Let the type checker attach an implicit cast if one applies; prefer that over our own.
types.setImplicitCastExpr(expr, rhsType, lhsType);
if (expr.impConversionExpr != null) {
BLangExpression impConversionExpr = expr.impConversionExpr;
expr.impConversionExpr = null;
return impConversionExpr;
}
if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
return expr;
}
if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
return expr;
}
if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
return expr;
}
// Fall through: synthesize an unchecked, internal conversion node.
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
TreeBuilder.createTypeConversionNode();
conversionExpr.expr = expr;
conversionExpr.targetType = lhsType;
conversionExpr.setBType(lhsType);
conversionExpr.pos = expr.pos;
conversionExpr.checkTypes = false;
conversionExpr.internal = true;
return conversionExpr;
}
/**
 * Computes (and for record/error patterns, synthesizes) the type corresponding to a
 * structured binding pattern variable. Tuple patterns recurse over members; record and
 * error patterns create fresh anonymous type symbols plus matching type-definition nodes
 * so the synthesized types are materialized in the package.
 *
 * @param bindingPatternVariable the binding pattern to type
 * @return the pattern's type; for plain variables, the variable's own type
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
// [p1, p2, ...rest] — recurse per member; the rest pattern yields an array whose
// element type becomes the tuple's rest type.
BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
List<BType> memberTypes = new ArrayList<>();
for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
}
BTupleType tupleType = new BTupleType(memberTypes);
if (tupleVariable.restVariable != null) {
BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
tupleType.restType = restArrayType.eType;
}
return tupleType;
}
if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
// {f1, f2, ...} — synthesize an anonymous record type with one required field per
// binding, plus an init function, and register a type definition for it.
BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
BRecordTypeSymbol recordSymbol =
Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
VIRTUAL);
recordSymbol.initializerFunc = createRecordInitFunc();
recordSymbol.scope = new Scope(recordSymbol);
recordSymbol.scope.define(
names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
recordSymbol.initializerFunc.symbol);
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (int i = 0; i < recordVariable.variableList.size(); i++) {
String fieldNameStr = recordVariable.variableList.get(i).key.value;
Name fieldName = names.fromString(fieldNameStr);
BType fieldType = getStructuredBindingPatternType(
recordVariable.variableList.get(i).valueBindingPattern);
BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
recordSymbol, bindingPatternVariable.pos, VIRTUAL);
fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
recordSymbol.scope.define(fieldName, fieldSymbol);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
// Rest type comes from the rest binding's record type when present; otherwise anydata.
recordVarType.restFieldType = recordVariable.restParam != null ?
((BRecordType) recordVariable.restParam.getBType()).restFieldType :
symTable.anydataType;
recordSymbol.type = recordVarType;
recordVarType.tsymbol = recordSymbol;
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
recordVarType,
bindingPatternVariable.pos);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
return recordVarType;
}
if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
// error(msg, ...detail) — synthesize an anonymous error type; its detail type is the
// generic detail type for a bare rest binding, otherwise a synthesized detail record.
BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
SymTag.ERROR,
Flags.PUBLIC,
names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
env.enclPkg.symbol.pkgID,
null, null, errorVariable.pos, VIRTUAL);
BType detailType;
if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
detailType = symTable.detailType;
} else {
detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
errorVariable.pos);
BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
recordTypeNode.initFunction = TypeDefBuilderHelper
.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
recordTypeNode, env);
}
BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
errorTypeSymbol.type = errorType;
TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
createErrorTypeNode(errorType), env);
return errorType;
}
// Simple binding pattern: no synthesis needed.
return bindingPatternVariable.getBType();
}
/**
 * Builds a record type node for an error variable's detail record, one field per
 * detail entry. Entries whose binding pattern carries no symbol get a synthesized
 * public symbol of pure type.
 *
 * @param errorVariable error binding pattern supplying the detail entries
 * @param detailType    the detail record type the node describes
 * @return the synthesized record type node
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> detailFields = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // No symbol was bound to this entry; synthesize one named after the key.
            fieldSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID, symTable.pureType, null,
                    entry.valueBindingPattern.pos, VIRTUAL);
        }
        detailFields.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.getBType(),
                entry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(detailFields, detailType, errorVariable.pos);
}
/**
 * Synthesizes an anonymous record type describing an error's detail mapping: one
 * public field per detail entry, sealed when there is no rest binding.
 *
 * @param detail     detail entries of the error binding pattern
 * @param restDetail rest binding, or null; absence seals the record
 * @param errorNo    kept for interface compatibility (not used here)
 * @param pos        position for the synthesized type symbol
 * @return the synthesized detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecordType = createAnonRecordType(pos);
    if (restDetail == null) {
        // No rest binding: the detail record admits exactly the listed fields.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name fieldName = names.fromIdNode(entry.key);
        BType fieldType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID,
                fieldType, detailRecordType.tsymbol, entry.key.pos, VIRTUAL);
        detailRecordType.fields.put(fieldName.value, new BField(fieldName, entry.key.pos, fieldSymbol));
        detailRecordType.tsymbol.scope.define(fieldName, fieldSymbol);
    }
    return detailRecordType;
}
/**
 * Creates a fresh anonymous record type with a public symbol, an init function
 * registered in its scope, and an anydata rest field type.
 *
 * @param pos position for the synthesized symbol
 * @return the new anonymous record type
 */
private BRecordType createAnonRecordType(Location pos) {
    BRecordTypeSymbol recordSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    recordSymbol.initializerFunc = createRecordInitFunc();
    recordSymbol.scope = new Scope(recordSymbol);
    // The init function is registered under "<recordName>.<initName>".
    Name initFuncName = names.fromString(
            recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value);
    recordSymbol.scope.define(initFuncName, recordSymbol.initializerFunc.symbol);
    BRecordType anonRecordType = new BRecordType(recordSymbol);
    anonRecordType.restFieldType = symTable.anydataType;
    return anonRecordType;
}
/**
 * Creates a synthesized no-arg record init function (attached function named with
 * the init suffix) whose return type is nil.
 *
 * @return the attached init function
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner,
            false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType,
            symTable.builtinPos);
}
/**
 * Wraps the given semantic error type in a fresh AST error-type node.
 *
 * @param errorType the semantic type to carry
 * @return the new error type node
 */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.setBType(errorType);
    return node;
}
/**
 * Builds the boolean condition for matching varRef against a match pattern expression:
 * group expressions unwrap and recurse, binary patterns become an OR of the two sides'
 * conditions, the `_` wildcard becomes an `is any` type test, and anything else becomes
 * an equality check with the appropriate operator symbol resolved.
 *
 * @param pos        position for synthesized nodes
 * @param varRef     the value being matched
 * @param expression the pattern expression
 * @return a boolean-typed expression encoding the match condition
 */
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
if (NodeKind.GROUP_EXPR == expression.getKind()) {
// Parentheses are transparent for matching.
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
// `p1 | p2` style pattern: match if either side matches.
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
// `_` matches anything: desugar to `varRef is any`.
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.setBType(symTable.anyType);
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
// Constant/value pattern: `varRef == expression`, resolving the equality operator,
// falling back to the anydata type-set equality when no direct operator exists.
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
expression.getBType());
if (opSymbol == symTable.notFoundSymbol) {
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
binaryExpr, env);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
/**
 * Builds an `expr is-like <type>` expression with a boolean result type.
 *
 * @param pos  position for the synthesized node
 * @param expr expression under test
 * @param type type to test against
 * @return the is-like expression node
 */
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(
            pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
/**
 * Turns a variable's initializer into an explicit assignment statement:
 * `<var> = <var.expr>`, using a fresh var-ref for the left-hand side.
 *
 * @param variable variable whose initializer becomes the assignment RHS
 * @return the synthesized assignment statement
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    // LHS: a var-ref mirroring the variable's identity and type.
    BLangSimpleVarRef lhsRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    lhsRef.pos = variable.pos;
    lhsRef.variableName = variable.name;
    lhsRef.symbol = variable.symbol;
    lhsRef.setBType(variable.getBType());
    // RHS: the variable's original initializer expression.
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = variable.expr;
    assignment.pos = variable.pos;
    assignment.setVariable(lhsRef);
    return assignment;
}
/**
 * Convenience overload: builds a `self.<field> = <expr>` assignment for the given
 * variable, delegating to the general overload with the variable's own metadata.
 *
 * @param function   init function the assignment belongs to
 * @param variable   field variable supplying expr, symbol, type and name
 * @param selfSymbol symbol of the receiver (`self`)
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(),
            selfSymbol, variable.name);
}
/**
 * Builds and rewrites a `self.<fieldName> = <expr>` assignment inside the given
 * function's environment (marking the field access as a store-on-creation).
 *
 * @param function    function providing position and the env for rewriting
 * @param expr        right-hand side expression
 * @param fieldSymbol symbol of the field being written
 * @param fieldType   static type of the field
 * @param selfSymbol  symbol of the receiver (`self`)
 * @param fieldName   identifier of the field
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    // LHS: self.<fieldName>, flagged as an initial store.
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess lhsAccess = ASTBuilderUtil.createFieldAccessExpr(selfRef, fieldName);
    lhsAccess.symbol = fieldSymbol;
    lhsAccess.setBType(fieldType);
    lhsAccess.isStoreOnCreation = true;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.expr = expr;
    assignment.pos = function.pos;
    assignment.setVariable(lhsAccess);
    // Rewrite within the init function's own scope so inner refs resolve correctly.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Decides whether this access expression participates in safe navigation: it must be a
 * read (not an lvalue) with a receiver, and either be flagged safe-navigating itself or
 * sit on top of a chained access expression that is.
 *
 * @param accessExpr the access expression to inspect
 * @return true if safe-navigation desugaring applies
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise, recurse down the access chain looking for a safe-navigating link.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean receiverIsAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return receiverIsAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access chain into a statement expression: a temp result
 * variable plus the nested match statements built by handleSafeNavigation, evaluating
 * to the temp variable. Resets the match/access stacks and per-chain state afterwards.
 *
 * @param accessExpr root of the safe-navigation chain
 * @return a statement expression with the chain's original type
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.getBType();
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar =
ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.getBType(),
this.env.scope.owner, accessExpr.pos, VIRTUAL));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
// Builds the nested match statements onto this.matchStmtStack as a side effect.
handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
// The outermost match statement is at the bottom of the stack.
BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.setBType(originalExprType);
// Clear the per-chain state so the next safe-navigation chain starts fresh.
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successClause = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
/**
 * Recursively desugars one link of a safe-navigation chain. Non-safe links only have
 * their type adjusted; safe links (`?.` / error-lifting access) become a match statement
 * over the receiver with clauses for nil, error, and the success pattern, which is pushed
 * onto the match statement stack so outer links chain into it.
 *
 * @param accessExpr    the access expression for this link
 * @param type          the result type carried by the synthesized match statements
 * @param tempResultVar the temp variable that accumulates the chain's result
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
// Depth-first: desugar the receiver chain before this link.
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
// Not a safe link: just restore the access's effective type (xml and map<json>
// accesses can still surface an error) and keep any pending assignment in sync.
BType originalType = Types.getReferredType(accessExpr.originalType);
if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
} else {
accessExpr.setBType(originalType);
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangExpression matchExpr = accessExpr.expr;
BType matchExprType = accessExpr.expr.getBType();
Location pos = accessExpr.pos;
BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
boolean isAllTypesRecords = false;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
BType referredType = Types.getReferredType(matchExpr.getBType());
if (referredType.tag == TypeTags.UNION) {
memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
}
// Nil / error clauses short-circuit the chain; remove the handled member types so the
// success clauses below only cover the remaining ones.
if (accessExpr.nilSafeNavigation) {
matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
memTypes.remove(symTable.nilType);
}
if (accessExpr.errorSafeNavigation) {
matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
matchStmt.setBType(type);
matchStmt.pos = pos;
memTypes.remove(symTable.errorType);
}
BLangMatchClause successClause = null;
Name field = getFieldName(accessExpr);
if (field == Names.EMPTY) {
// Field name not statically known (e.g. computed index): one generic success clause.
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
if (isAllTypesRecords) {
// Union of records: one success clause per member that can hold the field,
// plus a catch-all clause yielding nil.
for (BType memberType : memTypes) {
BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
}
}
matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
pushToMatchStatementStack(matchStmt, successClause, pos);
return;
}
successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.addMatchClause(successClause);
pushToMatchStatementStack(matchStmt, successClause, pos);
}
/**
 * Returns true when the given type is a map whose constraint is json.
 *
 * @param originalType type to check
 * @return true for map&lt;json&gt;, false otherwise
 */
private boolean isMapJson(BType originalType) {
    if (originalType.tag != TypeTags.MAP) {
        return false;
    }
    return ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
/**
 * Pushes the match statement for the current safe-navigation link and chains it into
 * the previous link's success clause (if any), then records the current link's success
 * clause for the next iteration.
 *
 * @param matchStmt     match statement for the current link
 * @param successClause success clause of the current link
 * @param pos           position for the chaining block statement
 */
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
                                       Location pos) {
    this.matchStmtStack.push(matchStmt);
    BLangMatchClause previousSuccessClause = this.successClause;
    if (previousSuccessClause != null) {
        // Nest the new match statement inside the previous success clause's body.
        previousSuccessClause.blockStmt =
                ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
    }
    this.successClause = successClause;
}
/**
 * Extracts the statically-known field name from an access expression: the field
 * identifier for field access, the literal value for a literal-indexed access,
 * and the empty name otherwise.
 *
 * @param accessExpr access expression to inspect
 * @return the field name, or Names.EMPTY if it cannot be determined statically
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    NodeKind kind = accessExpr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    }
    if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            // Only a literal index yields a statically-known name.
            return new Name(((BLangLiteral) indexExpr).value.toString());
        }
    }
    return Names.EMPTY;
}
/**
 * Checks whether every member of the union is a record, error, or nil
 * (after resolving type references).
 *
 * @param memTypes member types of the union
 * @return true if no member falls outside record/error/nil
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    for (BType memType : memTypes) {
        int tag = Types.getReferredType(memType).tag;
        boolean permitted = tag == TypeTags.RECORD || tag == TypeTags.ERROR || tag == TypeTags.NIL;
        if (!permitted) {
            return false;
        }
    }
    return true;
}
/**
 * Builds the error-handling clause for a safe-navigation match statement: a capture
 * binding guarded by `is error` whose body assigns the captured error to the chain's
 * temp result variable.
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar temp variable receiving the error value
 * @return the synthesized match clause
 */
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
Location pos = matchExpr.pos;
// Capture variable typed any|error so the guard (not the pattern) does the narrowing.
BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
// Body: tempResult = <captured error>.
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
// Guard: <captured> is error.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Builds the nil-handling clause for a safe-navigation match statement: a capture
 * binding guarded by `is ()` whose body assigns the captured nil to the chain's
 * temp result variable.
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar temp variable receiving the nil value
 * @return the synthesized match clause
 */
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
Location pos = matchExpr.pos;
// Capture variable typed any|error; the `is ()` guard does the narrowing.
BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
// Body: tempResult = <captured value> (which is nil under the guard).
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Builds the catch-all clause for a safe-navigation match over record unions:
 * a wildcard pattern whose body assigns nil to the chain's temp result variable.
 *
 * @param matchExpr     the expression being matched
 * @param tempResultVar temp variable receiving nil
 * @return the synthesized wildcard match clause
 */
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
BLangSimpleVariable tempResultVar) {
Location pos = matchExpr.pos;
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
// Body: tempResult = ().
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,
Names.NIL_VALUE));
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
wildCardMatchPattern.setBType(symTable.anyType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);
}
/**
 * Builds the success clause for a safe-navigation match statement: captures the receiver
 * (with nil/error lifted out of its type), re-applies the original access on the captured
 * value, and assigns the result — converted if needed — to the chain's temp result variable.
 * The clause is guarded by an `is <type>` test on the captured value.
 *
 * @param type          receiver type before lifting
 * @param matchExpr     the expression being matched
 * @param accessExpr    the access to re-apply on the captured value
 * @param tempResultVar temp variable receiving the access result
 * @param liftError     whether error should be lifted from the receiver type too
 * @return the synthesized match clause
 */
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
// Drop nil (and error, when lifting) from the receiver type for the success path.
type = types.getSafeType(type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
Location pos = accessExpr.pos;
BVarSymbol successPatternSymbol;
// Function-typed receivers need an invokable symbol so later phases treat them as callable.
if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
} else {
successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
successPatternVar.symbol);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
// Clone the access so it can be re-targeted at the captured value; carry over the
// pieces the cloner does not copy (index expr, namespace symbol).
BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
}
if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
}
tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
// The clone itself must not safe-navigate again.
tempAccessExpr.errorSafeNavigation = false;
tempAccessExpr.nilSafeNavigation = false;
accessExpr.cloneRef = null;
// XML access can still yield error or nil on the success path.
if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
symTable.nilType));
} else {
tempAccessExpr.setBType(accessExpr.originalType);
}
tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
Lists.of(assignmentStmt));
// Guard: <captured> is <lifted type>.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Creates an AST type node for the nil type `()`.
 *
 * @return a value-type node carrying the nil type
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.setBType(symTable.nilType);
    return nilTypeNode;
}
/**
 * Wraps the given semantic type in a value-type AST node, deriving the node's
 * kind from the type itself.
 *
 * @param type semantic type to carry
 * @return the new value-type node
 */
BLangValueType createTypeNode(BType type) {
    BLangValueType node = (BLangValueType) TreeBuilder.createValueTypeNode();
    node.typeKind = type.getKind();
    node.setBType(type);
    return node;
}
/**
 * Clones a variable reference or an access expression; any other node kind is a
 * programming error here.
 *
 * @param expr expression to clone (simple var-ref or field/index access)
 * @return the cloned expression
 * @throws IllegalStateException for unsupported node kinds
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Deep-clones a field/index access expression chain for reuse in desugared code.
 * Safe-navigation flags are cleared on the clone because by this point the
 * nil/error checks have been (or will be) made explicit in the desugared form.
 *
 * @param originalAccessExpr the access expression to clone
 * @return a clone typed with the original's {@code originalType}; returns the
 *         original node unchanged when it has no receiver expression
 * @throws IllegalStateException if the node is neither a field nor index access
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }
    BLangExpression varRef;
    NodeKind kind = originalAccessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        // Recursively clone nested access chains (e.g. a.b.c).
        varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        varRef = cloneExpression(originalAccessExpr.expr);
    }
    // Type the receiver via getSafeType — presumably the nil/error-stripped
    // variant of the receiver type; confirm against Types.getSafeType.
    varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
    BLangAccessExpression accessExpr;
    switch (originalAccessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
                    ((BLangFieldBasedAccess) originalAccessExpr).field);
            break;
        case INDEX_BASED_ACCESS_EXPR:
            accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
                    ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
            break;
        default:
            throw new IllegalStateException();
    }
    // Carry over metadata from the original node.
    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.isLValue = originalAccessExpr.isLValue;
    accessExpr.symbol = originalAccessExpr.symbol;
    // The clone is a plain (non safe-navigated) access.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.setBType(originalAccessExpr.originalType);
    return accessExpr;
}
/**
 * Builds the expression {@code expr + 1}, used to adjust an int-range start bound.
 *
 * @param expr the original start-bound expression
 * @return a binary ADD expression of type int
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.ADD, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Builds the expression {@code expr - 1}, used to adjust an int-range end bound.
 *
 * @param expr the original end-bound expression
 * @return a binary SUB expression of type int
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(
            OperatorKind.SUB, symTable.intType, symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Creates a boolean literal node at the builtin position.
 *
 * @param value the literal's value
 * @return a boolean-typed {@code BLangLiteral}
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral boolLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    boolLiteral.pos = symTable.builtinPos;
    boolLiteral.setBType(symTable.booleanType);
    boolLiteral.value = value;
    return boolLiteral;
}
/**
 * Checks whether the given type is a mapping type with a default value
 * (json, map, or record), unwrapping type-reference descriptors.
 *
 * @param type the type to inspect
 * @return true for json/map/record (possibly behind a type reference)
 */
private boolean isDefaultableMappingType(BType type) {
    int tag = types.getSafeType(type, true, false).tag;
    if (tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD) {
        return true;
    }
    if (tag == TypeTags.TYPEREFDESC) {
        // Unwrap the reference and re-check the referred type.
        return isDefaultableMappingType(Types.getReferredType(type));
    }
    return false;
}
/**
 * Creates and rewrites the generated {@code $init$} function for a class
 * definition, attaching it to the class's object type symbol.
 *
 * @param classDefinition the class being desugared
 * @param env             the enclosing symbol environment
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
    // The generated init shares the user-defined init's return type when one exists;
    // otherwise it returns nil.
    BType returnType = symTable.nilType;
    if (classDefinition.initFunction != null) {
        returnType = classDefinition.initFunction.getBType().getReturnType();
    }
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
                                                                    env, names, GENERATED_INIT_SUFFIX,
                                                                    classDefinition.getBType(), returnType);
    // Record the generated initializer on the object type symbol so callers
    // (e.g. type-init desugaring) can find it.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
    typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
                                                                (BInvokableType) initFunction.getBType(), null);
    classDefinition.generatedInitFunction = initFunction;
    initFunction.returnTypeNode.setBType(returnType);
    return rewrite(initFunction, env);
}
/**
 * Desugars short-circuiting logical AND/OR into an if-else over a temporary
 * result variable, preserving lazy evaluation of the right-hand side.
 *
 * @param binaryExpr a binary expression whose operator is AND or OR
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    /*
     * Desugar (lhsExpr && rhsExpr) to following if-else:
     *
     * logical AND:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = rhsExpr;
     * } else {
     *    $result$ = false;
     * }
     *
     * logical OR:
     * -------------
     * T $result$;
     * if (lhsExpr) {
     *    $result$ = true;
     * } else {
     *    $result$ = rhsExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
                                                       symTable.builtinPos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    // then branch: rhs for AND (lhs was true), constant true for OR.
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    BLangExpression thenResult;
    if (binaryExpr.opKind == OperatorKind.AND) {
        thenResult = binaryExpr.rhsExpr;
    } else {
        thenResult = getBooleanLiteral(true);
    }
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
    thenBody.addStatement(thenAssignment);
    // else branch: constant false for AND, rhs for OR (lhs was false).
    BLangExpression elseResult;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    if (binaryExpr.opKind == OperatorKind.AND) {
        elseResult = getBooleanLiteral(false);
    } else {
        elseResult = binaryExpr.rhsExpr;
    }
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
    elseBody.addStatement(elseAssignment);
    // Wrap var-def + if/else into a statement expression evaluating to $result$.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Checks whether the expression is (or wraps, via check/conversion) a mapping
 * constructor, object constructor, or type-init expression.
 *
 * @param expression the expression to inspect
 * @return true for type-init, record-literal, or object-ctor expressions,
 *         looking through check and type-conversion wrappers
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
        return true;
    }
    if (kind == NodeKind.CHECK_EXPR) {
        // Look through `check expr`.
        return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
    }
    if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
        // Look through `<T> expr`.
        return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
    }
    return false;
}
/**
 * Returns the rest parameter's type for the given invokable symbol.
 *
 * @param invokableSymbol symbol to read from; may be null
 * @return the rest parameter type, or null when absent
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    return (invokableSymbol == null || invokableSymbol.restParam == null)
            ? null
            : invokableSymbol.restParam.type;
}
/**
 * Returns the rest parameter's type for the given function node.
 *
 * @param function function to read from; may be null
 * @return the rest parameter type, or null when absent
 */
private BType getRestType(BLangFunction function) {
    return (function == null || function.restParam == null)
            ? null
            : function.restParam.getBType();
}
/**
 * Returns the rest parameter's symbol for the given function node.
 *
 * @param function function to read from; may be null
 * @return the rest parameter symbol, or null when absent
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    return (function == null || function.restParam == null)
            ? null
            : function.restParam.symbol;
}
/**
 * Checks whether a record field is a key-value field with a computed
 * (bracketed) key expression.
 *
 * @param field the record field to inspect
 * @return true only for key-value fields whose key is computed
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor into either a struct literal (record target
 * type) or a map literal, normalizing every field to an explicit key form:
 * identifier keys become string literals, var-ref shorthand fields become
 * key-value pairs, and spread fields have their expression rewritten.
 *
 * @param mappingConstructorExpr the mapping constructor to rewrite
 * @return a {@code BLangStructLiteral} or {@code BLangMapLiteral}
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
    List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
    BType type = mappingConstructorExpr.getBType();
    Location pos = mappingConstructorExpr.pos;
    List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
    for (RecordLiteralNode.RecordField field : fields) {
        if (field.isKeyValueField()) {
            BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
                    (BLangRecordLiteral.BLangRecordKeyValueField) field;
            BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
            BLangExpression origKey = key.expr;
            BLangExpression keyExpr;
            if (key.computedKey) {
                // Computed keys keep their original expression.
                keyExpr = origKey;
            } else {
                // Identifier keys are turned into string literals (unescaped);
                // otherwise the key is already a literal.
                keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
                        StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
                        ((BLangLiteral) origKey);
            }
            BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
                    ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
                                                             rewriteExpr(keyValueField.valueExpr));
            rewrittenField.pos = keyValueField.pos;
            rewrittenField.key.pos = key.pos;
            rewrittenFields.add(rewrittenField);
        } else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            // Shorthand `{x}` becomes `{"x": x}`.
            BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
            rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
                    rewriteExpr(createStringLiteral(pos,
                            StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
                    rewriteExpr(varRefField)));
        } else {
            // Spread-operator field: only the spread expression needs rewriting.
            BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
                    (BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
            spreadOpField.expr = rewriteExpr(spreadOpField.expr);
            rewrittenFields.add(spreadOpField);
        }
    }
    // The original field list is consumed; the rewritten literal owns the new fields.
    fields.clear();
    BType refType = Types.getReferredType(type);
    return refType.tag == TypeTags.RECORD ?
            new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
            new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an implicit import of the internal transaction module (aliased "trx")
 * to the current package, unless the current package IS that module. Both the
 * AST import list and the package symbol's import list are updated.
 */
protected void addTransactionInternalModuleImport() {
    // Importing the module into itself would be circular; skip in that case.
    if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
        BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
        List<BLangIdentifier> pkgNameComps = new ArrayList<>();
        pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
        importDcl.pkgNameComps = pkgNameComps;
        importDcl.pos = env.enclPkg.symbol.pos;
        importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
        importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
        importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
        // The module symbol is pre-resolved in the symbol table; no lookup needed.
        importDcl.symbol = symTable.internalTransactionModuleSymbol;
        env.enclPkg.imports.add(importDcl);
        env.enclPkg.symbol.imports.add(importDcl.symbol);
    }
}
} | if (!invocation.requiredArgs.isEmpty() && invocation.langLibInvocation) { | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangStatementExpression statementExpression = ASTBuilderUtil.createStatementExpression(
resultVariableDefinition, resultReferenceInWhile);
statementExpression.setBType(foreach.nillableResultType);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, statementExpression, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(whileNode);
return blockNode;
}
/**
 * Wraps a semantic type in an anonymous user-defined type node (empty package
 * alias and empty type name).
 *
 * @param type the semantic type to carry
 * @return a {@code BLangUserDefinedType} node typed with the given type
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangIdentifier emptyAlias = ASTBuilderUtil.createIdentifier(null, "");
    BLangIdentifier emptyName = ASTBuilderUtil.createIdentifier(null, "");
    BLangUserDefinedType userDefinedType = new BLangUserDefinedType(emptyAlias, emptyName);
    userDefinedType.setBType(type);
    return userDefinedType;
}
/**
 * Desugars a while statement. A while with an on-fail clause is first wrapped
 * in a do/on-fail statement and re-desugared; otherwise the condition and body
 * are rewritten in place.
 */
@Override
public void visit(BLangWhile whileNode) {
    if (whileNode.onFailClause == null) {
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
        return;
    }
    // Detach the on-fail clause and delegate failure handling to a wrapping do-stmt.
    BLangOnFailClause onFailClause = whileNode.onFailClause;
    whileNode.onFailClause = null;
    whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
    BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
    result = rewrite(doStmt, env);
}
/**
 * Wraps a statement in a do-block carrying the given on-fail clause, so that
 * failures inside the statement break out to the enclosing block.
 *
 * @param location     position for the generated nodes
 * @param statement    the statement to wrap
 * @param onFailClause the on-fail clause to attach to the do-stmt
 * @return the wrapping do statement
 */
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
                                      BLangOnFailClause onFailClause) {
    BLangBlockStmt doBody = ASTBuilderUtil.createBlockStmt(location);
    doBody.scope = new Scope(env.scope.owner);
    doBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
    doBody.stmts.add(statement);
    BLangDo doStmt = (BLangDo) TreeBuilder.createDoNode();
    doStmt.pos = location;
    doStmt.body = doBody;
    doStmt.onFailClause = onFailClause;
    return doStmt;
}
/**
 * Desugars a lock statement into: lock; trap the body's result into
 * {@code $errorResult}; unlock; then re-panic if the trapped result is an
 * error. This guarantees the unlock runs even when the body panics.
 */
@Override
public void visit(BLangLock lockNode) {
    BLangOnFailClause currentOnFailClause = this.onFailClause;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    if (lockNode.onFailClause != null) {
        blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        rewrite(lockNode.onFailClause, env);
    }
    BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
    blockStmt.addStatement(lockStmt);
    // Track the enclosing lock so var-refs inside the body can register
    // themselves as lock variables (see visit(BLangSimpleVarRef)).
    enclLocks.push(lockStmt);
    // Evaluate the body under a trap so any panic becomes an error value.
    BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
    BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
    BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
    statementExpression.setBType(symTable.nilType);
    BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
    trapExpr.setBType(nillableError);
    trapExpr.expr = statementExpression;
    BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
                                                       this.env.scope.owner.pkgID, nillableError,
                                                       this.env.scope.owner, lockNode.pos, VIRTUAL);
    BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
                                                                       nillableError, trapExpr,
                                                                       nillableErrorVarSymbol);
    BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
    blockStmt.addStatement(simpleVariableDef);
    // Unlock BEFORE inspecting the trapped result, so the lock is always released.
    BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
    unLockStmt.relatedLock = lockStmt; // Used to establish the lock-unlock pair.
    blockStmt.addStatement(unLockStmt);
    // if ($errorResult is error) { panic <error>$errorResult; }
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = lockNode.pos;
    panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
    ifBody.addStatement(panicNode);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
    blockStmt.addStatement(ifelse);
    result = rewrite(blockStmt, env);
    enclLocks.pop();
    this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
    // Lock statements are already in their lowest-level form; pass through unchanged.
    result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    // Unlock statements are already in their lowest-level form; pass through unchanged.
    result = unLockStmt;
}
/**
 * Builds the internal on-fail clause for a transaction block: it rolls back
 * the failed transaction, then either fails (propagating the caught error to
 * an outer handler) or panics, depending on the {@code $shouldPanic$} flag.
 *
 * @param pos            position for the generated nodes
 * @param shouldPanicRef reference to the {@code $shouldPanic$} flag variable
 * @param shouldRetryRef reference to the retry flag, passed to rollback desugaring
 * @return the generated internal on-fail clause
 */
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
                                                  BLangSimpleVarRef shouldRetryRef) {
    BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    trxOnFailClause.pos = pos;
    trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
    trxOnFailClause.body.scope = new Scope(env.scope.owner);
    trxOnFailClause.isInternal = true;
    // on fail var $trxError$ — captures the error that failed the transaction.
    BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
            "$trxError$", symTable.errorType, null, trxOnFailErrorSym);
    trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            trxOnFailError);
    trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
    // Roll back the transaction before deciding how to propagate the error.
    transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
            trxBlockId, shouldRetryRef);
    BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
    shouldNotPanic.setBType(symTable.booleanType);
    shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
    BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
    BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
    panicNode.pos = pos;
    panicNode.expr = caughtError;
    // if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; }
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
    trxOnFailClause.body.stmts.add(exitIf);
    BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
    failStmt.pos = pos;
    failStmt.expr = caughtError;
    failBlock.stmts.add(failStmt);
    trxOnFailClause.bodyContainsFail = true;
    return trxOnFailClause;
}
/**
 * Desugars a transaction statement. With a user on-fail clause the transaction
 * is wrapped in a do/on-fail and re-desugared; otherwise a fresh transaction
 * block id and a {@code $shouldPanic$} flag are introduced, an internal on-fail
 * clause (rollback + fail/panic) is attached, and the transaction is lowered
 * via {@code transactionDesugar}. Saved desugar state (block id, error-return
 * mode, enclosing on-fail) is restored afterwards.
 */
@Override
public void visit(BLangTransaction transactionNode) {
    if (transactionNode.onFailClause != null) {
        // Detach user on-fail and delegate to the do-stmt desugaring path.
        BLangOnFailClause onFailClause = transactionNode.onFailClause;
        transactionNode.onFailClause = null;
        transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
        BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
        result = rewrite(doStmt, env);
    } else {
        // Save state that nested desugaring will overwrite.
        BLangLiteral currentTrxBlockId = this.trxBlockId;
        String uniqueId = String.valueOf(++transactionBlockCount);
        this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
        boolean currShouldReturnErrors = this.shouldReturnErrors;
        this.shouldReturnErrors = true;
        BLangOnFailClause currOnFailClause = this.onFailClause;
        // $shouldPanic$ = false; — closure-captured by the internal on-fail clause.
        BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
        BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
                env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
        shouldPanicVarSymbol.closure = true;
        BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
                "$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
        BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
                shouldPanicVariable);
        BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
                shouldPanicVarSymbol);
        BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
                this.shouldRetryRef);
        enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
        boolean userDefinedOnFailAvbl = this.onFailClause != null;
        analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
        BLangBlockStmt transactionStmtBlock =
                transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
        // The flag definition must precede the transaction body in the block.
        transactionStmtBlock.stmts.add(0, shouldPanicDef);
        transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
        transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
                BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
                BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
        result = rewrite(transactionStmtBlock, this.env);
        // Restore the saved desugar state.
        this.shouldReturnErrors = currShouldReturnErrors;
        this.trxBlockId = currentTrxBlockId;
        swapAndResetEnclosingOnFail(currOnFailClause);
    }
}
/**
 * Desugars a rollback statement via the transaction desugar, using the
 * current transaction block id and retry flag.
 */
@Override
public void visit(BLangRollback rollbackNode) {
    BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
    result = rewrite(rollbackStmtExpr, env);
}
/**
 * Builds the internal on-fail clause for a retry loop. On failure it records
 * the caught error, marks the loop to continue, asks the retry manager whether
 * to retry, and — when retry is denied — either fails to a user on-fail clause
 * (then continues the loop) or sets the return-result flag.
 *
 * @param pos             position for the generated nodes
 * @param retryResultRef  variable holding the last failure result
 * @param retryManagerRef the retry manager instance
 * @param shouldRetryRef  flag set from the retry manager's decision
 * @param continueLoopRef flag signaling the retry loop to iterate again
 * @param returnResult    flag signaling the enclosing code to return the error
 * @return the generated internal on-fail clause
 */
private BLangOnFailClause createRetryInternalOnFail(Location pos,
                                                    BLangSimpleVarRef retryResultRef,
                                                    BLangSimpleVarRef retryManagerRef,
                                                    BLangSimpleVarRef shouldRetryRef,
                                                    BLangSimpleVarRef continueLoopRef,
                                                    BLangSimpleVarRef returnResult) {
    BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
    internalOnFail.pos = pos;
    internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
    internalOnFail.body.scope = new Scope(env.scope.owner);
    // on fail var $caughtError$
    BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
            env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
            "$caughtError$", symTable.errorType, null, caughtErrorSym);
    internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
            caughtError);
    env.scope.define(caughtErrorSym.name, caughtErrorSym);
    BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
    // $retryResult$ = $caughtError$;
    BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
    internalOnFail.body.stmts.add(errorAssignment);
    // $continueLoop$ = true;
    BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
            ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
    internalOnFail.body.stmts.add(continueLoopTrue);
    // $shouldRetry$ = retryManager.shouldRetry($caughtError$);
    BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
            retryManagerRef, caughtErrorRef);
    BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
            shouldRetryInvocation);
    internalOnFail.body.stmts.add(shouldRetryAssignment);
    BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
    shouldNotRetryCheck.setBType(symTable.booleanType);
    shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
    BLangGroupExpr exitCheck = new BLangGroupExpr();
    exitCheck.setBType(symTable.booleanType);
    exitCheck.expression = shouldNotRetryCheck;
    BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
    BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
    if (this.onFailClause != null) {
        // A user on-fail exists: fail to it when retries are exhausted,
        // otherwise continue the retry loop.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = pos;
        failStmt.expr = retryResultRef;
        exitLogicBlock.stmts.add(failStmt);
        internalOnFail.bodyContainsFail = true;
        internalOnFail.body.stmts.add(exitIf);
        BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
        loopContinueStmt.pos = pos;
        internalOnFail.body.stmts.add(loopContinueStmt);
    } else {
        // No user on-fail: flag the enclosing code to return the error result.
        BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
                ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
        exitLogicBlock.stmts.add(returnErrorTrue);
        internalOnFail.body.stmts.add(exitIf);
    }
    return internalOnFail;
}
/**
 * Builds the logical negation {@code !expression} with a synthetic
 * boolean NOT operator symbol.
 *
 * @param pos        position for the generated node
 * @param expression boolean expression to negate
 * @return a boolean-typed unary NOT expression
 */
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
    List<BType> notOpParams = new ArrayList<>(1);
    notOpParams.add(symTable.booleanType);
    BInvokableType notOpType = new BInvokableType(notOpParams, symTable.booleanType, null);
    BOperatorSymbol notOperator = new BOperatorSymbol(
            names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, notOpType,
            symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL);
    return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
                                          OperatorKind.NOT, notOperator);
}
/**
 * Creates a lambda function node with a freshly defined backing function,
 * registered in the enclosing package. The function name is the given prefix
 * plus a running counter to keep generated names unique.
 *
 * @param pos                    position for the generated nodes
 * @param functionNamePrefix     prefix for the generated function name
 * @param lambdaFunctionVariable required parameters of the lambda
 * @param returnType             return type node of the lambda
 * @param lambdaBody             function body (not yet desugared)
 * @return the lambda function node, typed as an invokable
 */
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                         List<BLangSimpleVariable> lambdaFunctionVariable,
                                         TypeNode returnType, BLangFunctionBody lambdaBody) {
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    BLangFunction func =
            ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
    lambdaFunction.function = func;
    func.requiredParams.addAll(lambdaFunctionVariable);
    func.setReturnTypeNode(returnType);
    func.desugaredReturnType = true;
    // Registers the function symbol and adds it to the package's top level.
    defineFunction(func, env.enclPkg);
    // Re-read params after definition; defineFunction may have rewritten them.
    lambdaFunctionVariable = func.requiredParams;
    func.body = lambdaBody;
    // Mark not-yet-desugared so a later pass processes the body.
    func.desugared = false;
    lambdaFunction.pos = pos;
    List<BType> paramTypes = new ArrayList<>();
    lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
    lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
                                               null));
    return lambdaFunction;
}
/**
 * Convenience overload: builds a block function body from the given statements
 * (rewriting them in a fresh function-body environment) and delegates to the
 * body-based {@code createLambdaFunction}.
 *
 * @param pos                    position for the generated nodes
 * @param functionNamePrefix     prefix for the generated function name
 * @param lambdaFunctionVariable required parameters of the lambda
 * @param returnType             return type node of the lambda
 * @param fnBodyStmts            statements forming the lambda body
 * @param env                    enclosing symbol environment
 * @param bodyScope              scope to attach to the generated body
 * @return the lambda function node
 */
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                                   List<BLangSimpleVariable> lambdaFunctionVariable,
                                                   TypeNode returnType, List<BLangStatement> fnBodyStmts,
                                                   SymbolEnv env, Scope bodyScope) {
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    funcBody.scope = bodyScope;
    SymbolEnv funcBodyEnv = SymbolEnv.createFuncBodyEnv(funcBody, env);
    funcBody.stmts = rewriteStmt(fnBodyStmts, funcBodyEnv);
    return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, funcBody);
}
/**
 * Defines a generated function in the target package: registers its symbol in
 * the package environment and adds the node to the package's function and
 * top-level-node lists.
 *
 * @param funcNode  the function to define
 * @param targetPkg the package to define it in
 */
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, packageEnv);
    BLangPackage enclPkg = packageEnv.enclPkg;
    enclPkg.functions.add(funcNode);
    enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
    // Fork-join needs no further desugaring here; pass through unchanged.
    result = forkJoin;
}
/**
 * Desugars literals. Byte-array literals (base16/base64 blob literals typed as
 * {@code byte[]}) are expanded into array literals of byte values; every other
 * literal passes through unchanged.
 */
@Override
public void visit(BLangLiteral literalExpr) {
    BType literalType = literalExpr.getBType();
    if (literalType.tag == TypeTags.ARRAY
            && ((BArrayType) literalType).eType.tag == TypeTags.BYTE) {
        result = rewriteBlobLiteral(literalExpr);
        return;
    }
    result = literalExpr;
}
/**
 * Expands a blob literal ({@code base16 `...`} or {@code base64 `...`}) into
 * an array literal whose elements are the decoded byte values.
 *
 * @param literalExpr the blob literal; its value is the raw literal text
 * @return an array literal node carrying one byte literal per decoded byte
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] parts = getBlobTextValue((String) literalExpr.value);
    // parts[0] is the encoding prefix, parts[1] the payload between backticks.
    byte[] bytes = BASE_64.equals(parts[0])
            ? Base64.getDecoder().decode(parts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(parts[1]);
    BLangArrayLiteral byteArrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    byteArrayLiteral.pos = literalExpr.pos;
    byteArrayLiteral.setBType(literalExpr.getBType());
    byteArrayLiteral.exprs = new ArrayList<>();
    for (byte value : bytes) {
        byteArrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, value));
    }
    return byteArrayLiteral;
}
/**
 * Splits a blob literal's text into its encoding prefix and payload.
 * All whitespace (tabs, newlines, carriage returns, spaces) is removed first;
 * the prefix is everything before the first backtick, the payload everything
 * between the first and last backtick.
 *
 * @param blobLiteralNodeText raw literal text, e.g. {@code base16 `AB CD`}
 * @return a two-element array: {encoding, payload}
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    String compact = blobLiteralNodeText
            .replace("\t", "")
            .replace("\n", "")
            .replace("\r", "")
            .replace(" ", "");
    int openTick = compact.indexOf('`');
    int closeTick = compact.lastIndexOf('`');
    return new String[]{compact.substring(0, openTick), compact.substring(openTick + 1, closeTick)};
}
/**
 * Converts a hexadecimal string into its byte-array representation, folding
 * each pair of hex digits into one byte (high nibble first).
 *
 * Previously an odd-length input crashed mid-loop with an opaque
 * index-out-of-bounds error, and non-hex characters ({@code Character.digit}
 * returning -1) silently produced garbage bytes; both now fail fast.
 *
 * @param str hex digits; must have even length and contain only [0-9a-fA-F]
 * @return the decoded bytes (empty array for an empty string)
 * @throws IllegalArgumentException on odd length or a non-hex character
 */
private static byte[] hexStringToByteArray(String str) {
    int len = str.length();
    if ((len & 1) != 0) {
        throw new IllegalArgumentException("hex string length must be even: " + len);
    }
    byte[] data = new byte[len / 2];
    for (int i = 0; i < len; i += 2) {
        int high = Character.digit(str.charAt(i), 16);
        int low = Character.digit(str.charAt(i + 1), 16);
        if (high < 0 || low < 0) {
            throw new IllegalArgumentException("invalid hex digit at index " + i);
        }
        data[i / 2] = (byte) ((high << 4) + low);
    }
    return data;
}
@Override
public void visit(BLangListConstructorSpreadOpExpr listConstructorSpreadOpExpr) {
    // Only the spread expression itself needs rewriting; the wrapper stays.
    listConstructorSpreadOpExpr.expr = rewriteExpr(listConstructorSpreadOpExpr.expr);
    result = listConstructorSpreadOpExpr;
}
/**
 * Desugars a list constructor into the concrete literal node matching its
 * contextually-expected type: tuple literal, JSON array literal, typedesc
 * expression (for typedesc-typed constructors), or plain array literal.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BLangExpression expr;
    BType listConstructorType = Types.getReferredType(listConstructor.getBType());
    if (listConstructorType.tag == TypeTags.TUPLE) {
        expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructorType.tag == TypeTags.JSON) {
        // json target: element type is the constructor's own type wrapped in an array.
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
        result = rewriteExpr(expr);
    } else if (getElementType(listConstructorType).tag == TypeTags.JSON) {
        expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    } else if (listConstructorType.tag == TypeTags.TYPEDESC) {
        // The constructor denotes a type; replace it with a typedesc expression.
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
    } else {
        expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
        result = rewriteExpr(expr);
    }
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    // Rewrite each member record literal in place; the constructor node itself is kept.
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    result = tableConstructorExpr;
}
/**
 * Desugars an array literal: JSON-typed arrays (or arrays of JSON elements)
 * become JSON array literals; everything else passes through with its
 * elements rewritten.
 */
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType literalType = Types.getReferredType(arrayLiteral.getBType());
    if (literalType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
    } else if (getElementType(literalType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
    } else {
        result = arrayLiteral;
    }
}
/**
 * Desugars a tuple literal: typedesc-valued literals become typedesc
 * expressions; otherwise each member gets an implicit cast to its positional
 * member type (or the rest type once past the fixed members). Spread members
 * of statically known length advance the member index; an open-length spread
 * switches all later members to the rest type.
 */
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
    if (tupleLiteral.isTypedescExpr) {
        final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = tupleLiteral.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    List<BLangExpression> exprs = tupleLiteral.exprs;
    BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
    List<BType> tupleMemberTypes = tupleType.tupleTypes;
    int tupleMemberTypeSize = tupleMemberTypes.size();
    int tupleExprSize = exprs.size();
    // True once a spread of unknown length makes positional indexing impossible.
    boolean isInRestType = false;
    int i = 0; // index into the tuple's member-type list
    for (BLangExpression expr: exprs) {
        if (expr.getKind() == NodeKind.LIST_CONSTRUCTOR_SPREAD_OP) {
            BType spreadOpType = ((BLangListConstructorSpreadOpExpr) expr).expr.getBType();
            spreadOpType = Types.getReferredType(spreadOpType);
            if (spreadOpType.tag == TypeTags.ARRAY) {
                BArrayType spreadOpBArray = (BArrayType) spreadOpType;
                if (spreadOpBArray.size >= 0) {
                    // Fixed-length array spread: skip that many member slots.
                    i += spreadOpBArray.size;
                    continue;
                }
            } else {
                BTupleType spreadOpTuple = (BTupleType) spreadOpType;
                if (types.isFixedLengthTuple(spreadOpTuple)) {
                    // Fixed-length tuple spread: skip its member count.
                    i += spreadOpTuple.tupleTypes.size();
                    continue;
                }
            }
            // Open-length spread: all following members map to the rest type.
            isInRestType = true;
            continue;
        }
        // Use the implicit-conversion type when one was already attached.
        BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
        BType targetType = tupleType.restType;
        if (!isInRestType && i < tupleMemberTypeSize) {
            targetType = tupleMemberTypes.get(i);
        }
        types.setImplicitCastExpr(expr, expType, targetType);
        i++;
    }
    tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
    result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // Parentheses carry no runtime meaning; desugar to the inner expression.
    result = rewriteExpr(groupExpr.expression);
}
/**
 * Desugars a record literal: computed-key fields are moved to the end so they
 * are evaluated after all literal-key fields, then the mapping constructor is
 * rewritten into a struct/map literal.
 */
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    recordLiteral.fields.sort(
            (first, second) -> Boolean.compare(isComputedKey(first), isComputedKey(second)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
/**
 * Desugars a simple variable reference into its concrete var-ref form based on
 * the referenced symbol: XML-namespace refs become qualified-name expressions,
 * function refs / type loads / local, field, and package var-refs are created
 * per symbol tag, simple constants are inlined as literals, and refs to
 * module-level variables inside a lock register the variable with the
 * enclosing lock statement. Non-lvalue refs get an implicit conversion to the
 * expression's target type when required.
 */
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
    BLangSimpleVarRef genVarRefExpr = varRefExpr;
    // A ref whose package symbol is an XML namespace becomes a string-typed QName.
    if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
        qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
        qnameExpr.localname = varRefExpr.variableName;
        qnameExpr.prefix = varRefExpr.pkgAlias;
        qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
        qnameExpr.isUsedInXML = false;
        qnameExpr.pos = varRefExpr.pos;
        qnameExpr.setBType(symTable.stringType);
        result = qnameExpr;
        return;
    }
    if (varRefExpr.symbol == null) {
        result = varRefExpr;
        return;
    }
    // Prefer the original symbol when this one is a clone/alias of it.
    if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
        BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
        if (varSymbol.originalSymbol != null) {
            varRefExpr.symbol = varSymbol.originalSymbol;
        }
    }
    BType type = varRefExpr.getBType();
    BSymbol ownerSymbol = varRefExpr.symbol.owner;
    if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
            Types.getReferredType(varRefExpr.symbol.type).tag == TypeTags.INVOKABLE) {
        // Reference to a function value (function pointer).
        genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
            !((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
        // Reference to a type name (but not a constant) becomes a type load.
        genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
        if (varRefExpr.symbol.tag == SymTag.TYPE_DEF) {
            type = ((BTypeDefinitionSymbol) varRefExpr.symbol).referenceType;
        }
    } else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
            (ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
        // Owned by a function or let expression: a local variable.
        genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
        // Owned by a structure: an object/record field reference.
        genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
    } else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
            (ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
        // Simple-typed (or nil) constants are inlined as literals.
        if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
            BType referredType = Types.getReferredType(constSymbol.literalType);
            if (referredType.tag <= TypeTags.BOOLEAN || referredType.tag == TypeTags.NIL) {
                BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
                                                                    constSymbol.value.value);
                result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
                return;
            }
        }
        genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
        // Inside a lock, module-level variables (and their dependency closure)
        // must be registered with the enclosing lock statement.
        if (!enclLocks.isEmpty()) {
            BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
            BLangLockStmt lockStmt = enclLocks.peek();
            lockStmt.addLockVariable(symbol);
            lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
        }
    }
    genVarRefExpr.setBType(type);
    genVarRefExpr.pos = varRefExpr.pos;
    // Lvalues (and `_`) keep the symbol's own type; no conversion is added.
    if ((varRefExpr.isLValue)
            || genVarRefExpr.symbol.name.equals(IGNORE)) {
        genVarRefExpr.isLValue = varRefExpr.isLValue;
        genVarRefExpr.setBType(varRefExpr.symbol.type);
        result = genVarRefExpr;
        return;
    }
    // Rvalue: type with the symbol's type, then convert to the target type if needed.
    genVarRefExpr.isLValue = varRefExpr.isLValue;
    BType targetType = genVarRefExpr.getBType();
    genVarRefExpr.setBType(genVarRefExpr.symbol.type);
    BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
    result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedAccess) {
    // A namespace-qualified field access (e.g. `x.ns:attr`) follows the same
    // desugaring path as a plain field access.
    rewriteFieldBasedAccess(nsPrefixedAccess);
}
/**
 * Common desugaring for {@code expr.field} access: dispatches on the static
 * type of the receiver to the matching specialized access node (object field,
 * record field, json, xml, map, attached-function value, ...).
 */
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Safe-navigation chains (`a?.b...`) are handled by a dedicated rewrite.
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }

    BLangAccessExpression targetVarRef = fieldAccessExpr;

    // Rewrite the receiver, casting it back to its effective type if the
    // rewrite changed its static type.
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }

    // The accessed field name as an (unescaped) string literal.
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    BType refType = Types.getReferredType(varRefType);
    int varRefTypeTag = refType.tag;
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Attached method accessed as a value: wrap it in a closure.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            // Plain object field access. Writes made inside the object's own
            // init (or generated init) count as store-on-creation.
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;

            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) refType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;

                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }

            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) refType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Attached-function value on a record.
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(refType)) {
        // Lax typing: field access may fail at runtime.
        if (!(refType.tag == TypeTags.XML || refType.tag == TypeTags.XML_ELEMENT)) {
            if (refType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) refType).constraint.tag)) {
                // map<xml>-style lax access gets its own error-producing form.
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            // Treat the receiver as json and use a json access node.
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            // XML attribute / element-name access desugars to a langlib call.
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }

    // Carry over lvalue-ness, static type, and the optional-access flag.
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Plain field access shares the common desugaring path with the
    // namespace-prefixed variant.
    rewriteFieldBasedAccess(fieldAccessExpr);
}
/**
 * Rewrites {@code obj.method} (a method accessed as a value rather than
 * called) into a synthetic lambda that closes over the receiver and delegates
 * to the method, so the result is an ordinary function value.
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;

    // Delegate function with the same signature as the member method.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$anon$method$delegate$" + originalMemberFuncSymbol.name.value + "$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
            Names.fromString(funcName), env.enclPkg.packageID,
            originalMemberFuncSymbol.type, env.scope.owner, pos,
            VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();

    // The receiver becomes a closure variable. A non-trivial receiver
    // expression is first captured in a temp variable so it is evaluated
    // exactly once.
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }

    // Mirror the member's required parameters on the delegate and forward
    // them as arguments.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);

        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }

    // Mirror the rest parameter, forwarding it as a spread (`...rest`) arg.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);

        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }

    // Delegate body: `return receiver.method(args...);`
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);

    // Register the function at package level and wrap it as a lambda value.
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());

    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver temp first, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    // Builds a method-call node `receiver.field(requiredArgs..., restArgs...)`
    // bound to the given invokable symbol.
    BLangInvocation call = (BLangInvocation) TreeBuilder.createInvocationNode();
    call.name = field;
    call.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    call.expr = receiver;
    call.symbol = invocableSymbol;
    call.requiredArgs = requiredArgs;
    call.restArgs = restArgs;
    // The call's static type is the invoked method's return type.
    call.setBType(((BInvokableType) invocableSymbol.type).retType);
    return call;
}
/**
 * Desugars lax field access on a map of xml into a statement expression that
 * yields either the member value or an InvalidKey error:
 * <pre>
 *   T|error $mapAccessResult$;
 *   var $mapAccess = m["field"];                       // T|()
 *   if $mapAccess is () { $mapAccessResult$ = error(...InvalidKey, key = "field"); }
 *   else                { $mapAccessResult$ = $mapAccess; }
 *   => $mapAccessResult$
 * </pre>
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // Overall result: the member value or an error.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);

    // Index access returns the member value, or nil when the key is absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);

    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);

    // if ($mapAccess is ()) { ...error... } else { ...value... }
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);

    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);

    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;

    // error("{<map lang lib>}InvalidKey", key = <field name>)
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);

    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;

    // then-branch: $mapAccessResult$ = <error>
    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;

    // else-branch: $mapAccessResult$ = $mapAccess
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;

    statementExpression.expr = resultRef;
    return statementExpression;
}
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    // Resolve the accessed name, expanding it to `{nsURI}local` form when the
    // access uses a namespace prefix.
    String fieldName = fieldAccessExpr.field.value;
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsAccess.nsSymbol.namespaceURI, fieldName);
    }

    // `x._` is the special element-name access; it has its own nil-lifting
    // langlib function.
    if ("_".equals(fieldName)) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }

    // Ordinary attribute access: pass the attribute name plus whether this was
    // an optional access (`x?.attr`) to the langlib helper.
    ArrayList<BLangExpression> args = new ArrayList<>();
    args.add(createStringLiteral(fieldAccessExpr.field.pos, fieldName));
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    // Materialize the optional-access flag (`x?.f` vs `x.f`) as a rewritten
    // boolean literal so it can be passed as an argument.
    BLangExpression optionalFlag =
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess());
    return rewrite(optionalFlag, env);
}
private String createExpandedQName(String nsURI, String localName) {
    // Expanded-name syntax: "{namespaceURI}localName". Arguments are inserted
    // verbatim; only the format string itself is interpreted.
    return String.format("{%s}%s", nsURI, localName);
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Desugars `expr[index]` into the container-specific access node based on
    // the static type of the receiver.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }

    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);

    // Rewrite the receiver and cast it back to its effective type if the
    // rewrite changed it.
    BType effectiveType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    BType varRefType = Types.getReferredType(effectiveType);
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }

    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Mapping-like receiver (e.g. record): member access by key.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        // Arrays and tuples.
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // Member access on a string: cast the receiver to string first.
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }

    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
@Override
public void visit(BLangInvocation invocation) {
    // A plain invocation is never asynchronous here; `start` calls arrive as
    // BLangActionInvocation instead.
    rewriteInvocation(invocation, false);
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Positional args are (message[, cause]); ensure the cause slot exists
    // (nil when omitted) and is typed as error.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);

    // Build the detail mapping from the named args and make it read-only.
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));

            // For a record-typed detail, widen the value to `any`; otherwise
            // keep the value's own static type.
            if (Types.getReferredType(recordLiteral.getBType()).tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) Types.getReferredType(errorConstructorExpr.getBType())).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    boolean async = actionInvocation.async;
    // A synchronous action call made inside a transaction needs the
    // transaction coordinator started before the call executes.
    if (!async && actionInvocation.invokedInsideTransaction) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    // `start` invocations of isolated functions get a @strand {thread: "any"}
    // annotation so the new strand may run on any thread.
    if (async && Symbols.isFlagOn(actionInvocation.symbol.type.flags, Flags.ISOLATED)) {
        addStrandAnnotationWithThreadAny(actionInvocation);
    }
    rewriteInvocation(actionInvocation, async);
}
private void addStrandAnnotationWithThreadAny(BLangInvocation.BLangActionInvocation actionInvocation) {
    // Lazily create the shared @strand {thread: "any"} attachment once, then
    // attach it to both the invocation node and its symbol.
    if (this.strandAnnotAttachement == null) {
        BLangPackage pkgNode = env.enclPkg;
        List<BLangTypeDefinition> typeDefsBefore = new ArrayList<>(pkgNode.typeDefinitions);
        this.strandAnnotAttachement =
                annotationDesugar.createStrandAnnotationWithThreadAny(actionInvocation.pos, env);
        // Creating the annotation may add new record type definitions; give
        // any newly added ones their init functions.
        addInitFunctionForRecordTypeNodeInTypeDef(pkgNode, typeDefsBefore);
    }
    actionInvocation.addAnnotationAttachment(this.strandAnnotAttachement);
    ((BInvokableSymbol) actionInvocation.symbol)
            .addAnnotation(this.strandAnnotAttachement.annotationAttachmentSymbol);
}
/**
 * Common invocation desugaring: reorders and rewrites arguments, resolves the
 * receiver, and produces the final invocation node. {@code async} marks a
 * {@code start} invocation.
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;

    // Inside a lock, the callee's global-variable dependencies must also be
    // covered by the lock.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }

    // Named/defaulted arguments are placed into positional order first.
    reorderArguments(invocation);

    rewriteExprs(invocation.requiredArgs);
    // For langlib calls the receiver was moved into the first required arg.
    if (invocation.langLibInvocation && !invocation.requiredArgs.isEmpty()) {
        invocation.expr = invocation.requiredArgs.get(0);
    } else {
        invocation.expr = rewriteExpr(invocation.expr);
    }
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    rewriteExprs(invocation.restArgs);

    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);

    // Function-pointer calls have a dedicated rewrite.
    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    result = invRef;

    // A parameterized (dependently-typed) return type is resolved to a
    // concrete type; an async call yields a future of it.
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }

    if (invocation.expr == null) {
        // No receiver expression; reconstruct one from exprSymbol if present.
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }
    switch (Types.getReferredType(invocation.expr.getBType()).tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            // Method call on an object/record: the receiver becomes the first
            // argument of an attached-function invocation.
            if (!invocation.langLibInvocation) {
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(),
                                invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }

    // For init calls on object-constructor expressions with closures, record
    // the invocation in the class's OCE environment data (once).
    if (invocation.objectInitMethod && Symbols.isFlagOn(invocation.expr.getBType().flags, Flags.OBJECT_CTOR)) {
        BObjectType initializingObject = (BObjectType) invocation.expr.getBType();
        BLangClassDefinition classDef = initializingObject.classDef;
        if (classDef.hasClosureVars) {
            OCEDynamicEnvironmentData oceEnvData = initializingObject.classDef.oceEnvData;
            if (oceEnvData.attachedFunctionInvocation == null) {
                oceEnvData.attachedFunctionInvocation = (BLangAttachedFunctionInvocation) result;
            }
        }
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    // Only langlib calls use type-parameterized signatures that may require a
    // cast on each non-rest argument to its declared parameter type.
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BType paramType = params.get(idx).type;
        args.set(idx, addConversionExprIfRequired(args.get(idx), paramType));
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments are the same invocation.
 * 2. Due to the current type-param logic, the type-param flag is set on the original type.
 * 3. For an error type whose Cloneable type carries the type-param flag, this code changes the expression type.
 * 4. Using the error type is a problem because Cloneable is itself a type param (e.g. ExprBodiedFunctionTest);
 *    `never` was added to the CloneableType type param:
 * @typeParam type
 * CloneableType Cloneable|never;
 *
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    // Workaround: for langlib calls, or calls whose declared return type
    // contains a type param, restore the declared return type on the rewritten
    // invocation and cast back to the originally inferred type.
    BType declaredRetType = ((BInvokableSymbol) iExpr.symbol).retType;
    boolean needsFix = iExpr.langLibInvocation || TypeParamAnalyzer.containsTypeParam(declaredRetType);
    if (!needsFix) {
        return;
    }
    BType inferredType = genIExpr.getBType();
    // An async (`start`) invocation's type is a future and must not be
    // replaced with the raw return type.
    if (!genIExpr.async) {
        genIExpr.setBType(declaredRetType);
    }
    this.result = addConversionExprIfRequired(genIExpr, inferredType);
}
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    // Arguments bound to stream-typed parameters get an explicit cast to the
    // declared stream type.
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }
    List<BLangExpression> args = iExpr.requiredArgs;
    for (int idx = 0; idx < args.size(); idx++) {
        BVarSymbol param = params.get(idx);
        if (Types.getReferredType(param.type).tag != TypeTags.STREAM) {
            continue;
        }
        args.set(idx, addConversionExprIfRequired(args.get(idx), param.type));
    }
}
private BLangLiteral createNilLiteral() {
    // A `()` literal: null value, static type nil.
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
public void visit(BLangTypeInit typeInitExpr) {
    // `new stream<...>(...)` has its own construction path; any other `new`
    // expression creates an object.
    boolean isStreamInit = Types.getReferredType(typeInitExpr.getBType()).tag == TypeTags.STREAM;
    if (isStreamInit) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
/**
 * Desugars {@code new T(args)} into a statement expression that allocates the
 * object, invokes its generated initializer, and — when init can return an
 * error — yields either the object or the init error:
 * <pre>
 *   T $obj$ = new;
 *   var $temp$ = $obj$.&lt;generated-init&gt;(args);
 *   T|error $result$;
 *   if $temp$ is error { $result$ = $temp$; } else { $result$ = $obj$; }
 *   =&gt; $result$
 * </pre>
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);

    // Allocate the object: T $obj$ = new;
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);

    // Redirect the init call to the object's generated initializer.
    BLangInvocation typeInitInvocation = typeInitExpr.initInvocation;
    typeInitInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    typeInitInvocation.objectInitMethod = true;

    // Init returning nil: just run it and yield the object.
    if (Types.getReferredType(typeInitInvocation.getBType()).tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitInvocation;
        typeInitInvocation.name.value = GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }

    // Init may return an error: capture its result in $temp$.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitInvocation.getBType(),
            typeInitInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);

    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);

    // if ($temp$ is error) { $result$ = $temp$; } else { $result$ = $obj$; }
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);

    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);

    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);

    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);

    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    // `new stream<C, E>(...)` lowers to the lang.internal construct-stream
    // function: constructStream(typedesc<C>, typedesc<E> [, iteratorObj]).
    BInvokableSymbol constructStreamSym = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;

    // typedesc argument for the stream's constraint type.
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BLangTypedescExpr constraintTd = new BLangTypedescExpr();
    constraintTd.resolvedType = constraintType;
    constraintTd.setBType(new BTypedescType(constraintType, symTable.typeDesc.tsymbol));

    // typedesc argument for the stream's completion type.
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BLangTypedescExpr completionTd = new BLangTypedescExpr();
    completionTd.resolvedType = completionType;
    completionTd.setBType(new BTypedescType(completionType, symTable.typeDesc.tsymbol));

    // The optional constructor argument supplies the stream's values.
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTd, completionTd));
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }

    BLangInvocation constructCall = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, constructStreamSym, args, symResolver);
    constructCall.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return constructCall;
}
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    // Reuse an existing symbol with this name if one is already defined in the
    // current scope; otherwise mint a fresh variable symbol.
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean symbolMissing = varSym == null || varSym == symTable.notFoundSymbol;
    if (symbolMissing) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(location);
    varDef.var = variable;
    varDef.setBType(variable.getBType());
    return varDef;
}
private BType getObjectType(BType bType) {
    // Unwrap type references, then extract the object type: either directly,
    // or the first object member of a union (e.g. Obj|error from init).
    BType type = Types.getReferredType(bType);
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        // No object member found in the union.
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
BLangErrorType getErrorTypeNode() {
    // Synthetic `error` type node positioned at the builtin location.
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.pos = symTable.builtinPos;
    errorTypeNode.setBType(symTable.errorType);
    return errorTypeNode;
}
BLangErrorType getErrorOrNillTypeNode() {
    // Synthetic `error?` type node.
    // NOTE(review): unlike getErrorTypeNode(), this does not set `pos`;
    // confirm whether callers depend on the position being unset.
    BLangErrorType errorOrNilTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorOrNilTypeNode.setBType(symTable.errorOrNilType);
    return errorOrNilTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     */
    // Result variable shared by both branches.
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);

    // then-branch: $ternary_result$ = thenExpr;
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);

    // else-branch: $ternary_result$ = elseExpr;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);

    // Wrap the if-else into a statement expression yielding the result var.
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression: an alternate-wait (binary `|` tree) is flattened into a list of
 * waited expressions; a single wait keeps a one-element list.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
/**
 * Collects the leaf expressions of a binary wait tree (left then right, depth-first)
 * into {@code collected} and returns it.
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> collected) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, collected);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, collected);
    return collected;
}
/**
 * Recurses into nested binary wait expressions; rewrites and records leaf expressions.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() == NodeKind.BINARY_EXPR) {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
        return;
    }
    exprs.add(rewriteExpr(expr));
}
/**
 * Desugars a multiple-wait expression: each key-value entry's expression is rewritten
 * (value expression when present, otherwise the key expression itself), and the whole
 * node is replaced by a wait literal carrying the rewritten pairs.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    }
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: rewrites the trapped expression and, unless its type is nil,
 * inserts a conversion to the trap expression's own type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    BType trappedType = Types.getReferredType(trapExpr.expr.getBType());
    if (trappedType.tag != TypeTags.NIL) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
// Desugars binary expressions. Branch order is significant: nil-lifting, ranges and logical
// operators are rewritten into other node kinds first; the remaining cases insert numeric /
// string / xml casts so both operands reach the backend with matching types.
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Nullable operand(s): lift nil via an if-else statement expression.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // Range operators become an internal createIntRange(...) invocation; a half-open range
    // end is decremented first (see getModifiedIntRangeEndExpr).
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        BLangExpression lhsExpr = binaryExpr.lhsExpr;
        BLangExpression rhsExpr = binaryExpr.rhsExpr;
        lhsExpr = createTypeCastExpr(lhsExpr, symTable.intType);
        rhsExpr = createTypeCastExpr(rhsExpr, symTable.intType);
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            rhsExpr = getModifiedIntRangeEndExpr(rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, lhsExpr, rhsExpr));
        return;
    }
    // Short-circuiting && / || get their own desugar path.
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag;
    int lhsExprTypeTag = Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag;
    // (in)equality between byte and int: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.REF_EQUAL ||
                                                     binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Same-tag operands need no conversion unless shift/arithmetic over non-value types.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // string + xml: turn the string side into an xml text literal so both sides are xml.
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
            (rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                binaryExpr.lhsExpr.pos, symTable.xmlType);
        return;
    }
    if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
            (lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
        binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                binaryExpr.rhsExpr.pos, symTable.xmlType);
        return;
    }
    // Mixed numeric operands: decimal dominates float, float dominates the rest.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
// Lifts nil out of a binary expression with nullable operand(s); see the inline sketch below.
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    /*
     * int? x = 3;
     * int? y = 5;
     * int? z = x + y;
     * Above is desugared to
     * int? $result$;
     *
     * int? $lhsExprVar$ = x;
     * int? $rhsExprVar$ = y;
     * if (lhsVar is () or rhsVar is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = $lhsExprVar$ + $rhsExprVar$;
     * }
     * int z = $result$;
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    // First member of the T? union is taken as the nil-lifted result type T.
    BType nonNilType = exprBType.getMemberTypes().iterator().next();
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
    boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
    boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
    // For bitwise operators the operand cast types come from each operand's own
    // (nil-stripped) type instead of the shared non-nil result type.
    BType rhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.rhsExpr.getBType().isNullable()) {
            rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
        } else {
            rhsType = binaryExpr.rhsExpr.getBType();
        }
    }
    BType lhsType = nonNilType;
    if (isBitWiseOperator) {
        if (binaryExpr.lhsExpr.getBType().isNullable()) {
            lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
        } else {
            lhsType = binaryExpr.lhsExpr.getBType();
        }
    }
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    }
    // $result$ temp var receives either nil or the non-nil computation.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), null, binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // Capture both operands in temps so each is evaluated exactly once.
    BLangSimpleVariableDef lhsVarDef = createVarDef("$lhsExprVar$", binaryExpr.lhsExpr.getBType(),
            binaryExpr.lhsExpr, binaryExpr.pos);
    BLangSimpleVarRef lhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, lhsVarDef.var.symbol);
    blockStmt.addStatement(lhsVarDef);
    BLangSimpleVariableDef rhsVarDef = createVarDef("$rhsExprVar$", binaryExpr.rhsExpr.getBType(),
            binaryExpr.rhsExpr, binaryExpr.pos);
    BLangSimpleVarRef rhsVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, rhsVarDef.var.symbol);
    blockStmt.addStatement(rhsVarDef);
    // Condition: ($lhsExprVar$ is ()) || ($rhsExprVar$ is ()).
    BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, lhsVarRef, getNillTypeNode());
    typeTestExprOne.setBType(symTable.booleanType);
    BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos, rhsVarRef, getNillTypeNode());
    typeTestExprTwo.setBType(symTable.booleanType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else: $result$ = <lhsType>$lhsExprVar$ <op> <rhsType>$rhsExprVar$.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, lhsVarRef, rhsVarRef,
            nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
    newBinaryExpr.lhsExpr = createTypeCastExpr(lhsVarRef, lhsType);
    newBinaryExpr.rhsExpr = createTypeCastExpr(rhsVarRef, rhsType);
    bLangAssignmentElse.expr = newBinaryExpr;
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * True when this binary expression needs nil-lifting: both operand types are known, at least
 * one is nullable, and the operator is arithmetic or bitwise/shift.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts casts (or xml-text wrapping) on the operands of an arithmetic expression so both
 * sides agree: matching int/string/xml pairs are left alone; a string operand next to an xml
 * operand becomes an xml text literal; otherwise both operands are cast to the result type.
 * <p>
 * Fix: the string-lhs-with-xml-rhs branch previously built the xml text literal with
 * {@code binaryExpr.rhsExpr.pos}; it now uses the lhs operand's own position, mirroring the
 * symmetric branch above and the analogous code in {@code visit(BLangBinaryExpr)}.
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Use the lhs operand's position (was rhsExpr.pos — copy-paste defect).
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Ensures both operands of a shift expression are int-compatible: any operand that is
 * neither an integer type nor byte is cast to int. Int/byte operands are left untouched.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
// Inserts casts on comparison operands so both sides share a representation:
// int-vs-other and byte-involving pairs are widened to int; string-vs-other pairs
// are cast to string. Branch order matters — the byte branch casts BOTH operands.
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean isLhsIntegerType = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean isRhsIntegerType = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    // Matching int/int or byte/byte pairs need no conversion.
    if ((isLhsIntegerType && isRhsIntegerType) || (lhsExprTypeTag == TypeTags.BYTE &&
            rhsExprTypeTag == TypeTags.BYTE)) {
        return;
    }
    if (isLhsIntegerType && !isRhsIntegerType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        return;
    }
    if (!isLhsIntegerType && isRhsIntegerType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    // A byte on either side (the other side non-integer): widen both to int.
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean isLhsStringType = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean isRhsStringType = TypeTags.isStringTypeTag(rhsExprTypeTag);
    if (isLhsStringType && isRhsStringType) {
        return;
    }
    if (isLhsStringType && !isRhsStringType) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
        return;
    }
    if (!isLhsStringType && isRhsStringType) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}
/**
 * Builds an invocation of the lang.internal {@code createIntRange(lhs, rhs)} helper,
 * typed as an int range, to replace a range (`...`/`..<`) expression.
 */
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol createIntRangeSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation rangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location,
            createIntRangeSymbol, rangeArgs, symResolver);
    rangeInvocation.setBType(symTable.intRangeType);
    return rangeInvocation;
}
/**
 * When the expected result of a byte-involving arithmetic/bitwise expression is int,
 * widens any byte operand to int with a conversion expression. No-op otherwise.
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    BType expectedType = binaryExpr.expectedType;
    if (expectedType == null || expectedType.tag != TypeTags.INT) {
        return;
    }
    if (Types.getReferredType(binaryExpr.rhsExpr.getBType()).tag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (Types.getReferredType(binaryExpr.lhsExpr.getBType()).tag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression is a bitwise shift operation
 * ({@code <<}, {@code >>} or {@code >>>}).
 * <p>
 * Note: this is a pure predicate — it performs no conversion itself. Callers use it to
 * decide whether both operands should be converted to 'int' (see
 * {@code createTypeCastExprForBinaryShiftExpr}). Examples:
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    return binaryExpr.opKind == OperatorKind.BITWISE_LEFT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_RIGHT_SHIFT ||
            binaryExpr.opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into:
 * <pre>
 *   T _$result$_;
 *   var lhsTmp = lhs;
 *   if (lhsTmp is ()) { _$result$_ = rhs; } else { _$result$_ = <T>lhsTmp; }
 * </pre>
 * wrapped in a statement expression yielding {@code _$result$_}.
 * <p>
 * Fix: added the {@code @Override} annotation, consistent with every sibling visit method.
 */
@Override
public void visit(BLangElvisExpr elvisExpr) {
    Location pos = elvisExpr.pos;
    String resultVarName = "_$result$_";
    BType resultType = elvisExpr.getBType();
    // Result temp var holding either the rhs default or the cast lhs value.
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
                    new BVarSymbol(0, names.fromString(resultVarName),
                            this.env.scope.owner.pkgID, resultType,
                            this.env.scope.owner, pos, VIRTUAL));
    BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
    resultVarDef.desugared = true;
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
    // Capture the lhs in a temp so it is evaluated exactly once.
    String lhsResultVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable lhsResultVar =
            ASTBuilderUtil.createVariable(pos, lhsResultVarName, elvisExpr.lhsExpr.getBType(), elvisExpr.lhsExpr,
                    new BVarSymbol(0, names.fromString(lhsResultVarName),
                            this.env.scope.owner.pkgID, elvisExpr.lhsExpr.getBType(),
                            this.env.scope.owner, elvisExpr.pos, VIRTUAL));
    BLangSimpleVariableDef lhsResultVarDef = ASTBuilderUtil.createVariableDef(pos, lhsResultVar);
    BLangSimpleVarRef lhsResultVarRef = ASTBuilderUtil.createVariableRef(pos, lhsResultVar.symbol);
    // if (lhsTmp is ()) -> take the rhs; else -> cast lhsTmp to the result type.
    BLangAssignment nilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef, elvisExpr.rhsExpr);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
    ifBody.addStatement(nilAssignment);
    BLangAssignment notNilAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
            createTypeCastExpr(lhsResultVarRef, resultType));
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(pos);
    elseBody.addStatement(notNilAssignment);
    BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
            createTypeCheckExpr(pos, lhsResultVarRef, getNillTypeNode()), ifBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
        add(resultVarDef);
        add(lhsResultVarDef);
        add(ifStmt);
    }});
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultType);
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a unary expression. Nullable operands are nil-lifted, bitwise complement is
 * rewritten as an xor, and unary +/- get an operand cast when needed; anything else is
 * rewritten in place.
 */
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNilableUnaryExpr(unaryExpr), env);
        return;
    }
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    boolean isPlusOrMinus = unaryExpr.operator == OperatorKind.ADD || unaryExpr.operator == OperatorKind.SUB;
    if (isPlusOrMinus) {
        createTypeCastExprForUnaryPlusAndMinus(unaryExpr);
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
// Casts the operand of a unary +/- to the expression's own type unless the operand is
// already an integer type.
// NOTE(review): this checks expr.getBType().tag directly, while neighboring code wraps the
// type in Types.getReferredType(...) first — confirm type-reference operands are intended
// to be cast here.
private void createTypeCastExprForUnaryPlusAndMinus(BLangUnaryExpr unaryExpr) {
    BLangExpression expr = unaryExpr.expr;
    if (TypeTags.isIntegerTypeTag(expr.getBType().tag)) {
        return;
    }
    unaryExpr.expr = createTypeCastExpr(expr, unaryExpr.getBType());
}
/**
 * This method desugar a bitwise complement (~) unary expressions into a bitwise xor binary expression as below.
 * Example : ~a -> a ^ -1;
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    // byte complement xors with 0xff; int complement xors with -1 (all bits set).
    if (TypeTags.BYTE == Types.getReferredType(unaryExpr.getBType()).tag) {
        binaryExpr.setBType(symTable.byteType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.byteType, 0xffL);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.byteType, symTable.byteType);
    } else {
        binaryExpr.setBType(symTable.intType);
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, symTable.intType, -1L);
        binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
                symTable.intType, symTable.intType);
    }
    result = rewriteExpr(binaryExpr);
}
// Lifts nil out of a unary expression with a nullable operand; see the inline sketch below.
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *    $result$ = ();
     * } else {
     *    $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    // First member of the T? union is taken as the nil-lifted type T.
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // if (operand is ()) -> $result = (); else -> $result = <T>op(operand).
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * True when this unary expression needs nil-lifting: its type is known and nullable,
 * and the operator is +, - or ~.
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    OperatorKind op = unaryExpr.operator;
    return op == OperatorKind.ADD || op == OperatorKind.SUB || op == OperatorKind.BITWISE_COMPLEMENT;
}
/**
 * Desugars a type conversion expression. A conversion with no type node but with annotation
 * attachments carries no actual cast, so only its inner expression is rewritten; otherwise
 * both the type node and the expression are rewritten in place.
 * <p>
 * Fix: removed the unused local {@code targetType} (dead store of
 * {@code conversionExpr.targetType}).
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
// Registers the lambda with the enclosing package (deduplicated) so it is emitted once.
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
// Desugars an arrow function (x => expr) into a full lambda function: builds a BLangFunction
// with the arrow's parameters and a block body returning the arrow's expression, defines its
// invokable symbol, and registers the resulting lambda with the package.
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type of the generated function is the type of the arrow's body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the function symbol in a fresh function env.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Define each parameter symbol in the function scope and collect them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Closures captured by the arrow function resolve against the current env.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    result = rewriteExpr(lambdaFunction);
}
// Attaches a fresh scope to the invokable symbol and wires it into both the AST node
// and the invokable's symbol environment.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
// XML qualified names need no desugaring; pass through unchanged.
@Override
public void visit(BLangXMLQName xmlQName) {
    result = xmlQName;
}
// Rewrites both the attribute name and value expressions in place.
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
// Desugars an XML element literal: namespace-declaring attributes are converted into
// package- or local-level xmlns nodes, inline namespaces are made visible to nested
// literals (saving/restoring the previous set), then tag names and children are rewritten.
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Convert every xmlns-declaring attribute into an explicit XMLNS node.
    Iterator<BLangXMLAttribute> attributesItr = xmlElementLiteral.attributes.iterator();
    while (attributesItr.hasNext()) {
        BLangXMLAttribute attribute = attributesItr.next();
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-owned literals get package-level xmlns nodes; others get local ones.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    // Inside a query, inherit the enclosing literal's inline namespaces as well.
    List<BLangXMLNS> prevInlineNamespaces = this.inlineXMLNamespaces;
    if (isVisitingQuery && this.inlineXMLNamespaces != null) {
        xmlElementLiteral.inlineNamespaces.addAll(this.inlineXMLNamespaces);
    }
    this.inlineXMLNamespaces = xmlElementLiteral.inlineNamespaces;
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    // Restore the previous namespace set for sibling literals.
    this.inlineXMLNamespaces = prevInlineNamespaces;
    result = xmlElementLiteral;
}
// Rewrites each item of an XML sequence literal.
// NOTE(review): the return value of rewriteExpr is discarded here, so the xmlItems list is
// only updated if rewriting mutates nodes in place — confirm this is intentional.
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
// Concatenates the text fragments of an XML text literal into a single string expression.
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
// Concatenates the text fragments of an XML comment literal into a single string expression.
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
// Rewrites the processing-instruction target and concatenates its data fragments.
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
// Concatenates the text fragments of a quoted XML string into a single string expression.
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
// Desugars a string template literal into a chain of string concatenations.
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) Types.getReferredType(rawTemplateLiteral.getBType());
    // Generate the concrete object class for this literal's abstract template type.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // The literal's interpolated expressions become the single `insertions` init argument.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    // Class symbol/type mirror the abstract object type's fields and type-id set.
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // The `strings` field is pre-initialized with the literal's string segments.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // User-defined init() takes the insertions; generated init handles field defaults.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                     tempGeneratedInitFunction.symbol.scope, env);
    SemanticAnalyzer.AnalyzerData data = new SemanticAnalyzer.AnalyzerData(env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, data);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // Only fields without a default value become init() parameters.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        // Body statement: self.<field> = <param>.
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
    // Worker send: clone the sent value so the receiving worker gets an isolated copy.
    @Override
    public void visit(BLangWorkerSend workerSendNode) {
        workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
        result = workerSendNode;
    }
    // Worker sync send: clone the sent value so the receiving worker gets an isolated copy.
    @Override
    public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
        syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
        result = syncSendExpr;
    }
    // Worker receive needs no further desugaring here; pass the node through as-is.
    @Override
    public void visit(BLangWorkerReceive workerReceiveNode) {
        result = workerReceiveNode;
    }
    // Flush expression: derive the distinct worker names from the cached send statements.
    @Override
    public void visit(BLangWorkerFlushExpr workerFlushExpr) {
        workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
                .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
        result = workerFlushExpr;
    }
    // `transactional` expression: desugar into a call to the internal transaction module's
    // isTransactional() function.
    @Override
    public void visit(BLangTransactionalExpr transactionalExpr) {
        BInvokableSymbol isTransactionalSymbol =
                (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
        result = ASTBuilderUtil
                .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                        Collections.emptyList(), symResolver);
    }
    // `commit` expression: delegate to the transaction desugar, then rewrite the result.
    @Override
    public void visit(BLangCommitExpr commitExpr) {
        BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
        result = rewriteExpr(stmtExpr);
    }
    // `fail` statement: inside an on-fail scope (and not inside a query), route the failure to the
    // enclosing on-fail clause; otherwise desugar into a plain return of the error value.
    @Override
    public void visit(BLangFail failNode) {
        if (this.onFailClause != null && !this.isVisitingQuery) {
            if (this.onFailClause.bodyContainsFail) {
                // The on-fail body itself can fail; needs the nested-on-fail rewriting.
                result = rewriteNestedOnFail(this.onFailClause, failNode);
            } else {
                result = createOnFailInvocation(onFailClause, failNode);
            }
        } else {
            BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
            stmt.desugared = true;
            result = stmt;
        }
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangLocalVarRef localVarRef) {
        result = localVarRef;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangFieldVarRef fieldVarRef) {
        result = fieldVarRef;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangPackageVarRef packageVarRef) {
        result = packageVarRef;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
        result = fieldAccessExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangStructFunctionVarRef functionVarRef) {
        result = functionVarRef;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
        result = mapKeyAccessExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
        result = arrayIndexAccessExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
        result = tableKeyAccessExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangMapLiteral mapLiteral) {
        result = mapLiteral;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangStructLiteral structLiteral) {
        result = structLiteral;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
        result = waitLiteral;
    }
    // XML element access (e.g. `x.<elem>`): desugar into a lang.xml internal getElements()
    // invocation, passing the expanded (namespace-qualified) filter names as rest args.
    @Override
    public void visit(BLangXMLElementAccess xmlElementAccess) {
        xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
        ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
                xmlElementAccess.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    }
    /**
     * Expands XML element filters into string-literal arguments, qualifying each filter name with
     * its resolved namespace URI ({@code {uri}name}) where a namespace is in scope.
     *
     * @param filters the element filters from an XML access expression
     * @return one string-literal expression per filter, with names namespace-expanded
     */
    private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
        Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
        BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
        String defaultNS = defaultNSSymbol != null ? defaultNSSymbol.namespaceURI : null;
        ArrayList<BLangExpression> args = new ArrayList<>();
        for (BLangXMLElementFilter filter : filters) {
            BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
            if (nsSymbol == symTable.notFoundSymbol) {
                // No explicit prefix binding: fall back to the default namespace, except for the
                // wildcard filter `*`, which stays unqualified.
                if (defaultNS != null && !filter.name.equals("*")) {
                    String expandedName = createExpandedQName(defaultNS, filter.name);
                    args.add(createStringLiteral(filter.elemNamePos, expandedName));
                } else {
                    args.add(createStringLiteral(filter.elemNamePos, filter.name));
                }
            } else {
                // Prefix resolved to a declared namespace; qualify the name with its URI.
                BXMLNSSymbol bxmlnsSymbol = (BXMLNSSymbol) nsSymbol;
                String expandedName = createExpandedQName(bxmlnsSymbol.namespaceURI, filter.name);
                BLangLiteral stringLiteral = createStringLiteral(filter.elemNamePos, expandedName);
                args.add(stringLiteral);
            }
        }
        return args;
    }
    /**
     * Builds a lang.xml lang-lib method invocation {@code invokeOnExpr.functionName(args..., restArgs...)}.
     * The receiver expression is rewritten first and also passed as the first required argument.
     */
    private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                       BLangExpression invokeOnExpr,
                                                       ArrayList<BLangExpression> args,
                                                       ArrayList<BLangExpression> restArgs) {
        invokeOnExpr = rewriteExpr(invokeOnExpr);
        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        invocationNode.pos = pos;
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        name.pos = pos;
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        invocationNode.expr = invokeOnExpr;
        // Lang-lib methods always resolve against the xml type.
        invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName), env);
        ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
        requiredArgs.add(invokeOnExpr);
        requiredArgs.addAll(args);
        invocationNode.requiredArgs = requiredArgs;
        invocationNode.restArgs = rewriteExprs(restArgs);
        invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
        invocationNode.langLibInvocation = true;
        return invocationNode;
    }
    // XML navigation (`x/**/<e>`, `x/*`, `x/<e>[i]`): desugar into the matching lang.xml internal
    // function call depending on the navigation kind.
    @Override
    public void visit(BLangXMLNavigationAccess xmlNavigation) {
        xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
        xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
        ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
        if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
            result = rewriteExpr(invocationNode);
        } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
            // `x/*` selects all children; no filters are passed here.
            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
            result = rewriteExpr(invocationNode);
        } else {
            BLangExpression childIndexExpr;
            if (xmlNavigation.childIndex == null) {
                // -1 signals "no index" to the internal getFilteredChildrenFlat function.
                childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
            } else {
                childIndexExpr = xmlNavigation.childIndex;
            }
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
            result = rewriteExpr(invocationNode);
        }
    }
    // Rewrite only the LHS; the node itself stays as-is.
    @Override
    public void visit(BLangIsAssignableExpr assignableExpr) {
        assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
        result = assignableExpr;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BFunctionPointerInvocation fpInvocation) {
        result = fpInvocation;
    }
    // Rewrite the referenced type node; the typedesc expression itself is unchanged.
    @Override
    public void visit(BLangTypedescExpr typedescExpr) {
        typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
        result = typedescExpr;
    }
    // Unwrap the rest-args (`...x`) expression down to its underlying expression.
    @Override
    public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
        result = rewriteExpr(bLangVarArgsExpression.expr);
    }
    // Unwrap the named-arg (`name = x`) expression down to its underlying expression.
    @Override
    public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
        bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
        result = bLangNamedArgsExpression.expr;
    }
    // `check` expression: shared desugaring with `checkpanic` (isCheckPanic = false).
    @Override
    public void visit(BLangCheckedExpr checkedExpr) {
        visitCheckAndCheckPanicExpr(checkedExpr, false);
    }
    // `checkpanic` expression: shared desugaring with `check` (isCheckPanic = true).
    @Override
    public void visit(BLangCheckPanickedExpr checkedExpr) {
        visitCheckAndCheckPanicExpr(checkedExpr, true);
    }
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
if (checkedExpr.isRedundantChecking) {
result = rewriteExpr(checkedExpr.expr);
return;
}
Location pos = checkedExpr.pos;
String resultVarName = "_$result$_";
BType resultType = checkedExpr.getBType();
BLangSimpleVariable resultVar =
ASTBuilderUtil.createVariable(pos, resultVarName, resultType, null,
new BVarSymbol(0, names.fromString(resultVarName),
this.env.scope.owner.pkgID, resultType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef resultVarDef = ASTBuilderUtil.createVariableDef(pos, resultVar);
resultVarDef.desugared = true;
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(pos, resultVar.symbol);
String checkedExprVarName = GEN_VAR_PREFIX.value;
BType checkedExprType = checkedExpr.expr.getBType();
BLangSimpleVariable checkedExprVar =
ASTBuilderUtil.createVariable(pos, checkedExprVarName, checkedExprType,
checkedExpr.expr, new BVarSymbol(0, names.fromString(checkedExprVarName),
this.env.scope.owner.pkgID, checkedExprType,
this.env.scope.owner, pos, VIRTUAL));
BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(pos, checkedExprVar);
BLangSimpleVarRef checkedExprVarRef = ASTBuilderUtil.createVariableRef(pos, checkedExprVar.symbol);
BLangAssignment successAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultVarRef,
createTypeCastExpr(checkedExprVarRef, resultType));
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(pos);
ifBody.addStatement(successAssignment);
BLangBlockStmt elseBody = getSafeErrorAssignment(pos, checkedExprVarRef, this.env.enclInvokable.symbol,
checkedExpr.equivalentErrorTypeList, isCheckPanic);
BLangValueType checkedExprTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
checkedExprTypeNode.setBType(resultType);
checkedExprTypeNode.typeKind = resultType.getKind();
BLangIf ifStmt = ASTBuilderUtil.createIfElseStmt(pos,
createTypeCheckExpr(pos, checkedExprVarRef, checkedExprTypeNode), ifBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(pos, new ArrayList<>() {{
add(resultVarDef);
add(checkedExprVarDef);
add(ifStmt);
}});
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultType);
result = rewriteExpr(stmtExpr);
}
    // Service constructor: desugar into a type-init of the service's generated class, after
    // rewriting the service's annotation attachments.
    @Override
    public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
        final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
                serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
        serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment ->  rewrite(attachment, env));
        result = rewriteExpr(typeInit);
    }
    // Object constructor expression: desugar the anonymous class first, then its annotation
    // attachments, and finally the type-init that instantiates it.
    @Override
    public void visit(BLangObjectConstructorExpression bLangObjectConstructorExpression) {
        visit(bLangObjectConstructorExpression.classNode);
        bLangObjectConstructorExpression.classNode.annAttachments.forEach(attachment ->  rewrite(attachment, env));
        result = rewriteExpr(bLangObjectConstructorExpression.typeInit);
    }
    // Annotation access (`x.@annot`): desugar into a synthetic binary expression with the
    // ANNOT_ACCESS operator, whose RHS is the annotation's alias as a string literal.
    @Override
    public void visit(BLangAnnotAccessExpr annotAccessExpr) {
        BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
        binaryExpr.pos = annotAccessExpr.pos;
        binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
        binaryExpr.lhsExpr = annotAccessExpr.expr;
        binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
                annotAccessExpr.annotationSymbol.bvmAlias());
        binaryExpr.setBType(annotAccessExpr.getBType());
        // Synthetic operator symbol; no pre-declared symbol exists for ANNOT_ACCESS.
        binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                        binaryExpr.rhsExpr.getBType()),
                        annotAccessExpr.getBType(), null), null,
                symTable.builtinPos, VIRTUAL);
        result = rewriteExpr(binaryExpr);
    }
    // Type test (`x is T` / `x !is T`): value-typed operands are boxed to any first; a negated
    // test is rewritten as `!(x is T)` and re-desugared.
    @Override
    public void visit(BLangTypeTestExpr typeTestExpr) {
        BLangExpression expr = typeTestExpr.expr;
        if (types.isValueType(expr.getBType())) {
            expr = addConversionExprIfRequired(expr, symTable.anyType);
        }
        if (typeTestExpr.isNegation) {
            // Rebuild as NOT(group(x is T)) and run it through the desugar again.
            BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                    typeTestExpr.expr, typeTestExpr.typeNode);
            BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
            bLangGroupExpr.expression = bLangTypeTestExpr;
            bLangGroupExpr.setBType(typeTestExpr.getBType());
            BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                    typeTestExpr.getBType(),
                    OperatorKind.NOT, null);
            result = rewriteExpr(unaryExpr);
            return;
        }
        typeTestExpr.expr = rewriteExpr(expr);
        typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
        result = typeTestExpr;
    }
    // Rewrite only the tested expression; the node itself stays as-is.
    @Override
    public void visit(BLangIsLikeExpr isLikeExpr) {
        isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
        result = isLikeExpr;
    }
    // Rewrite both the value expression and the backing statement of a statement expression.
    @Override
    public void visit(BLangStatementExpression bLangStatementExpression) {
        bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
        bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
        result = bLangStatementExpression;
    }
    // Query expression: delegate to the query desugar while tracking that we are inside a query
    // (affects e.g. `fail` handling); propagate visible XMLNS declarations into the query.
    @Override
    public void visit(BLangQueryExpr queryExpr) {
        boolean prevIsVisitingQuery = this.isVisitingQuery;
        this.isVisitingQuery = true;
        BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env, getVisibleXMLNSStmts(env));
        result = rewrite(stmtExpr, env);
        this.isVisitingQuery = prevIsVisitingQuery;
    }
List<BLangStatement> getVisibleXMLNSStmts(SymbolEnv env) {
Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
return nameBXMLNSSymbolMap.keySet().stream()
.map(key -> this.stmtsToBePropagatedToQuery.get(key))
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
    // Query action (`from ... do ...`): same handling as a query expression.
    @Override
    public void visit(BLangQueryAction queryAction) {
        boolean prevIsVisitingQuery = this.isVisitingQuery;
        this.isVisitingQuery = true;
        BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env, getVisibleXMLNSStmts(env));
        result = rewrite(stmtExpr, env);
        this.isVisitingQuery = prevIsVisitingQuery;
    }
    // Rewrite each member expression of a JSON array literal.
    @Override
    public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
        jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
        result = jsonArrayLiteral;
    }
    // Constant declaration: for simple-value constants (numeric/string/boolean/nil) replace the
    // expression with a literal of the already-resolved constant value; otherwise rewrite the
    // original expression. Annotation attachments are rewritten in both cases.
    @Override
    public void visit(BLangConstant constant) {
        BConstantSymbol constSymbol = constant.symbol;
        BType refType = Types.getReferredType(constSymbol.literalType);
        if (refType.tag <= TypeTags.BOOLEAN || refType.tag == TypeTags.NIL) {
            // A non-nil simple constant must have a resolved value by this phase.
            if (refType.tag != TypeTags.NIL && (constSymbol.value == null ||
                    constSymbol.value.value == null)) {
                throw new IllegalStateException();
            }
            BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                    constSymbol.value.value);
            constant.expr = rewriteExpr(literal);
        } else {
            constant.expr = rewriteExpr(constant.expr);
        }
        constant.annAttachments.forEach(attachment ->  rewrite(attachment, env));
        result = constant;
    }
    // Already in desugared form; nothing to rewrite.
    @Override
    public void visit(BLangIgnoreExpr ignoreExpr) {
        result = ignoreExpr;
    }
    // Rewrite both the conditional argument and its guarding condition.
    @Override
    public void visit(BLangDynamicArgExpr dynamicParamExpr) {
        dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
        dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
        result = dynamicParamExpr;
    }
    // Constant reference: fold into a literal of the constant's resolved value.
    @Override
    public void visit(BLangConstRef constantRef) {
        result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
    }
    /**
     * Builds {@code var $iterator$ = collection.iterator();} — the variable definition that
     * obtains an iterator from the given collection symbol.
     *
     * @param iteratorInvokableSymbol symbol of the iterator() function to invoke
     * @param isIteratorFuncFromLangLib whether that function is a lang-lib method
     */
    BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                         BInvokableSymbol iteratorInvokableSymbol,
                                                         boolean isIteratorFuncFromLangLib) {
        BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
        BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        iteratorInvocation.pos = pos;
        iteratorInvocation.expr = dataReference;
        iteratorInvocation.symbol = iteratorInvokableSymbol;
        iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
        iteratorInvocation.argExprs = Lists.of(dataReference);
        iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
        iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
        BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
                iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
                iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
        return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
    }
    /**
     * Builds {@code var $result$ = $iterator$.next();} — the variable definition holding the
     * (nillable) result of one iterator step.
     */
    BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                             BVarSymbol iteratorSymbol,
                                                             BVarSymbol resultSymbol) {
        BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
        BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
                nillableResultType, nextInvocation, resultSymbol);
        return ASTBuilderUtil.createVariableDef(pos, resultVariable);
    }
    /**
     * Builds the {@code $iterator$.next()} invocation for the iterator object's attached
     * {@code next} method.
     */
    BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
        BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
        BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
        BInvokableSymbol nextFuncSymbol =
                getNextFunc((BObjectType) Types.getReferredType(iteratorSymbol.type)).symbol;
        BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
        nextInvocation.pos = pos;
        nextInvocation.name = nextIdentifier;
        nextInvocation.expr = iteratorReferenceInNext;
        // The receiver is also passed as the single required argument.
        nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
        nextInvocation.argExprs = nextInvocation.requiredArgs;
        nextInvocation.symbol = nextFuncSymbol;
        nextInvocation.setBType(nextFuncSymbol.retType);
        return nextInvocation;
    }
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
if (bAttachedFunction.funcName.value.equals("next")) {
return bAttachedFunction;
}
}
return null;
}
    // Convenience wrapper: builds `$result$.value` with the given static type.
    BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                                   BVarSymbol resultSymbol) {
        return getFieldAccessExpression(location, "value", varType, resultSymbol);
    }
    /**
     * Builds a field access expression {@code <resultSymbol>.<fieldName>} typed as {@code varType}.
     */
    BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                                   BVarSymbol resultSymbol) {
        BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
        BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
        BLangFieldBasedAccess fieldBasedAccessExpression =
                ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
        fieldBasedAccessExpression.pos = pos;
        fieldBasedAccessExpression.setBType(varType);
        fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
        return fieldBasedAccessExpression;
    }
    // Wraps an arrow function's single-expression body into a block body containing
    // `return <expr>;`, so it matches a normal function body shape.
    private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
        BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
        BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
        returnNode.pos = bLangArrowFunction.body.expr.pos;
        returnNode.setExpression(bLangArrowFunction.body.expr);
        blockNode.addStatement(returnNode);
        return blockNode;
    }
    /**
     * Builds an invocation of a function resolved from the root scope by name.
     *
     * @param functionName name looked up in the symbol table's root scope
     * @param args         required arguments for the call
     * @param retType      static type assigned to the invocation node
     */
    protected BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
        invocationNode.setBType(retType);
        invocationNode.requiredArgs = args;
        return invocationNode;
    }
    /**
     * Builds a lang-lib method invocation {@code onExpr.functionName(args...)}. The receiver is
     * also prepended to the required arguments. When {@code retType} is null, the resolved
     * symbol's return type is used.
     */
    private BLangInvocation createLangLibInvocationNode(String functionName,
                                                        BLangExpression onExpr,
                                                        List<BLangExpression> args,
                                                        BType retType,
                                                        Location pos) {
        BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
        invocationNode.pos = pos;
        BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        name.setLiteral(false);
        name.setValue(functionName);
        name.pos = pos;
        invocationNode.name = name;
        invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
        invocationNode.expr = onExpr;
        // Resolve the lang-lib method against the receiver's static type.
        invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName), env);
        ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
        requiredArgs.add(onExpr);
        requiredArgs.addAll(args);
        invocationNode.requiredArgs = requiredArgs;
        invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
        invocationNode.langLibInvocation = true;
        return invocationNode;
    }
private BLangInvocation createLangLibInvocationNode(String functionName,
List<BLangExpression> args,
BType retType,
Location pos) {
BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
invocationNode.pos = pos;
BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
name.setLiteral(false);
name.setValue(functionName);
name.pos = pos;
invocationNode.name = name;
invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
names.fromString(functionName), env);
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
requiredArgs.addAll(args);
invocationNode.requiredArgs = requiredArgs;
invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
invocationNode.langLibInvocation = true;
return invocationNode;
}
private BLangArrayLiteral createArrayLiteralExprNode() {
BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
expr.exprs = new ArrayList<>();
expr.setBType(new BArrayType(symTable.anyType));
return expr;
}
    // Desugars a function-pointer call: the callee is rebuilt as either a simple var ref (plain
    // `f(...)`) or a field access (`x.f(...)`), rewritten, and wrapped in a
    // BFunctionPointerInvocation node.
    private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
        BLangValueExpression expr;
        if (iExpr.expr == null) {
            expr = new BLangSimpleVarRef();
        } else {
            BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
            fieldBasedAccess.expr = iExpr.expr;
            fieldBasedAccess.field = iExpr.name;
            expr = fieldBasedAccess;
        }
        expr.symbol = iExpr.symbol;
        expr.setBType(iExpr.symbol.type);
        BLangExpression rewritten = rewriteExpr(expr);
        result = new BFunctionPointerInvocation(iExpr, rewritten);
    }
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
if (types.isValueType(expr.getBType())) {
return expr;
}
if (expr.getBType().tag == TypeTags.ERROR) {
return expr;
}
BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
return addConversionExprIfRequired(cloneInvok, lhsType);
}
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
if (types.isValueType(expr.getBType())) {
return expr;
}
if (expr.getBType().tag == TypeTags.ERROR) {
return expr;
}
BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
expr.getBType(),
expr.pos);
return addConversionExprIfRequired(cloneInvok, lhsType);
}
    // Core rewrite driver: dispatches the node through this visitor under the given env and
    // returns the (possibly replaced) node, marking it desugared so it is not processed twice.
    @SuppressWarnings("unchecked")
    <E extends BLangNode> E rewrite(E node, SymbolEnv env) {
        if (node == null) {
            return null;
        }
        if (node.desugared) {
            // Already processed; avoid re-desugaring.
            return node;
        }
        SymbolEnv previousEnv = this.env;
        this.env = env;
        node.accept(this);
        // The visitor communicates its replacement via this.result.
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;
        this.env = previousEnv;
        return (E) resultNode;
    }
    // Expression rewrite driver: like rewrite(node, env) but hoists an implicit conversion
    // expression (if one is attached) so the conversion itself gets desugared.
    @SuppressWarnings("unchecked")
    <E extends BLangExpression> E rewriteExpr(E node) {
        if (node == null) {
            return null;
        }
        if (node.desugared) {
            return node;
        }
        BLangExpression expr = node;
        if (node.impConversionExpr != null) {
            // Desugar the implicit conversion wrapper instead of the bare expression.
            expr = node.impConversionExpr;
            node.impConversionExpr = null;
        }
        expr.accept(this);
        BLangNode resultNode = this.result;
        this.result = null;
        resultNode.desugared = true;
        return (E) resultNode;
    }
    // Statement rewrite driver: delegates to the generic node rewrite and casts back.
    @SuppressWarnings("unchecked")
    <E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
        if (statement == null) {
            return null;
        }
        BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
        return (E) stmt;
    }
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewrite(nodeList.get(i), env));
}
return nodeList;
}
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
for (int i = 0; i < nodeList.size(); i++) {
nodeList.set(i, rewriteExpr(nodeList.get(i)));
}
return nodeList;
}
private BLangLiteral createStringLiteral(Location pos, String value) {
BLangLiteral stringLit = new BLangLiteral(value, symTable.stringType);
stringLit.pos = pos;
return stringLit;
}
    // Builds an int literal node carrying the given value (no source position attached).
    private BLangLiteral createIntLiteral(long value) {
        BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
        literal.value = value;
        literal.setBType(symTable.intType);
        return literal;
    }
    // Builds a byte literal node; the signed Java byte is widened to its unsigned int value.
    private BLangLiteral createByteLiteral(Location pos, Byte value) {
        BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
        byteLiteral.pos = pos;
        return byteLiteral;
    }
    // Wraps the expression in a cast to targetType, or returns it unchanged when the types
    // already match.
    private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
        if (types.isSameType(expr.getBType(), targetType)) {
            // No cast needed.
            return expr;
        }
        BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
        conversionExpr.pos = expr.pos;
        conversionExpr.expr = expr;
        conversionExpr.setBType(targetType);
        conversionExpr.targetType = targetType;
        conversionExpr.internal = true;
        return conversionExpr;
    }
private BType getElementType(BType bType) {
BType type = Types.getReferredType(bType);
if (type.tag != TypeTags.ARRAY) {
return bType;
}
return getElementType(((BArrayType) type).getElementType());
}
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
if (Symbols.isNative(invokableNode.symbol) ||
(invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
return;
}
BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
if (invokableNode.symbol.type.getReturnType().isNullable() && (funcBody.stmts.size() < 1
|| funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
Location invPos = invokableNode.pos;
Location returnStmtPos;
if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
returnStmtPos = null;
} else {
returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
invPos.lineRange().endLine().line(),
invPos.lineRange().endLine().line(),
invPos.lineRange().startLine().offset(),
invPos.lineRange().startLine().offset(), 0, 0);
}
BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
funcBody.addStatement(returnStmt);
}
}
/**
 * Reorder the invocation arguments to match the original function signature.
 *
 * Handles four shapes of call site:
 * (1) no rest-args spread: collect trailing positional args into a rest array;
 * (2) a single spread vararg that also fills required params: index into a temp
 *     variable holding the vararg for each remaining param, and copy the tail
 *     into a new rest array via a generated foreach;
 * (3) a spread vararg mixed with leading positional rest args: push the spread
 *     onto an array literal of the leading args;
 * (4) no rest param at all: nothing to collect.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || Types.getReferredType(symbol.type).tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Temp var state for a spread vararg (`...x`) that has to fill required params too.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // The vararg expression may have side effects, so evaluate it once into a
        // synthetic variable and index that variable for each parameter it fills.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-order named args and fill defaults/included-record params positionally.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case (1)/(4): no spread vararg at the end — gather positional rest args into an array.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case (2): the only rest arg is a spread (`f(...x)` possibly after named-arg filling).
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // The vararg did not fill any required params; leave the call as-is.
            return;
        }
        // Hoist the temp-var definition block in front of the first filled argument so
        // the vararg is evaluated before any of its indexed reads.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (Types.getReferredType(restArgsExpression.getBType()).tag == TypeTags.RECORD) {
            // A record spread only supplies named params; the rest param gets an empty array.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Copy the tail of the vararg (the part not consumed by required params) into the
        // new rest array: foreach i in start..<length { newArray.push(vararg[i]); }
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        BType refType = Types.getReferredType(varargVarType);
        if (refType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) refType;
            // NOTE(review): for a closed array consumed entirely by required params, the
            // member access is typed as the rest param's element type — confirm intent.
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                List.of(pushExpr),
                restParamType, pos);
        // push() takes its values as rest args; move the pushed value out of requiredArgs.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case (3): positional rest args followed by a spread — build an array literal of the
    // leading args and push the spread onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
            this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
            new ArrayList<BLangExpression>() {{
                add(pushRestArgsExpr);
            }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rebuilds {@code iExpr.requiredArgs} so that each parameter of the callee gets exactly
 * one positional argument, resolving, in priority order: an existing positional arg,
 * a named arg, an included-record param (fresh record literal), a placeholder for a
 * defaultable param, or a member/field read from the spread vararg ({@code varargRef}).
 * Leftover named args are attached to included-record literals at the end.
 *
 * @param iExpr           invocation whose args are rewritten in place
 * @param invokableSymbol callee symbol providing the parameter list
 * @param varargRef       reference to the temp var holding the spread vararg, or null
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    // Collect named args by name; entries are removed as parameters consume them.
    Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
    // The (at most one) included-record param with an open rest field, which may absorb
    // named args that match no declared field.
    BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = Types.getReferredType(varargRef.getBType());
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument already in place.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Consume the matching named argument.
            args.add(namedArgs.remove(param.name.value));
        } else if (param.getFlags().contains(Flag.INCLUDED)) {
            // Included-record param: start with an empty record literal; unmatched named
            // args may be folded into it later.
            BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            BType paramType = param.type;
            recordLiteral.setBType(paramType);
            args.add(recordLiteral);
            incRecordLiterals.add(recordLiteral);
            if (((BRecordType) Types.getReferredType(paramType)).restFieldType != symTable.noType) {
                incRecordParamAllowAdditionalFields = recordLiteral;
            }
        } else if (varargRef == null) {
            // No value supplied: placeholder signals "use the param's default".
            BLangExpression expr = new BLangIgnoreExpr();
            expr.setBType(param.type);
            args.add(expr);
        } else {
            // Fill the parameter from the spread vararg.
            if (Types.getReferredType(varargRef.getBType()).tag == TypeTags.RECORD) {
                if (param.isDefaultable) {
                    // record.hasKey(name) ? record[name] : <use default>
                    BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                            List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                    BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                    BLangIndexBasedAccess memberAccessExpr =
                            ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                    BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                    BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                            hasKeyInvocation, memberAccessExpr, ignoreExpr);
                    args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                } else {
                    // Required param must be present in the record: plain field access.
                    BLangFieldBasedAccess fieldBasedAccessExpression =
                            ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                    ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                    fieldBasedAccessExpression.setBType(param.type);
                    args.add(fieldBasedAccessExpression);
                }
            } else {
                // List-typed vararg: take the next member by index.
                BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                BType memberAccessExprType = tupleTypedVararg ?
                        ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                        varargRef, indexExpr), param.type));
                varargIndex++;
            }
        }
    }
    if (namedArgs.size() > 0) {
        // Remaining named args belong to included-record params.
        setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
    }
    iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named arguments into the record literals created for
 * included-record parameters. A named arg goes to the first literal whose record type
 * declares a matching (non-never) field; otherwise it is treated as an additional field
 * of the one literal whose type allows a rest field.
 *
 * @param namedArgs                           unconsumed named args, keyed by name
 * @param incRecordLiterals                   literals for included-record params, in param order
 * @param incRecordParamAllowAdditionalFields literal accepting extra fields (may be null if
 *                                            the type checker guarantees no extras remain)
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (String name : namedArgs.keySet()) {
        boolean isAdditionalField = true;
        BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
        for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> fields =
                    ((BRecordType) Types.getReferredType(recordLiteral.getBType())).fields;
            // A declared field typed `never` cannot hold a value, so skip it.
            if (fields.containsKey(name) &&
                    Types.getReferredType(fields.get(name).type).tag != TypeTags.NEVER) {
                isAdditionalField = false;
                createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
                break;
            }
        }
        if (isAdditionalField) {
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
        }
    }
}
/**
 * Appends a key-value field (keyed by the named-arg's name, valued by its expression)
 * to the given included-record literal.
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    BLangSimpleVarRef keyRef = new BLangSimpleVarRef();
    keyRef.variableName = expr.name;
    recordLiteral.fields.add(ASTBuilderUtil.createBLangRecordKeyValue(keyRef, expr.expr));
}
/**
 * Builds the error-path block for a checked expression: binds the error value to a
 * temp variable, then either fails (re-raising toward an enclosing on-fail clause or
 * returning when the enclosing function can return the error) or panics.
 *
 * @param location             source position for the generated nodes
 * @param ref                  reference holding the error value
 * @param invokableSymbol      enclosing function's symbol (provides the return type)
 * @param equivalentErrorTypes the error types the checked expression can produce
 * @param isCheckPanicExpr     true for `checkpanic` — always panic, never fail/return
 * @return block statement containing the temp-var def plus the fail/panic statement
 */
private BLangBlockStmt getSafeErrorAssignment(Location location, BLangSimpleVarRef ref,
                                              BSymbol invokableSymbol,
                                              List<BType> equivalentErrorTypes,
                                              boolean isCheckPanicExpr) {
    // Flatten the enclosing function's return type into a set of member types.
    BType enclosingFuncReturnType = Types.getReferredType(((BInvokableType) invokableSymbol.type).retType);
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<>() {{
                add(enclosingFuncReturnType);
            }};
    // The error may be returned only if every possible error type is assignable to
    // some member of the return type.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable errorVar =
            ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType,
                                          createTypeCastExpr(ref, symTable.errorType),
                                          new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                                                         this.env.scope.owner.pkgID, symTable.errorType,
                                                         this.env.scope.owner, location, VIRTUAL));
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(location);
    BLangSimpleVariableDef errorVarDef = ASTBuilderUtil.createVariableDef(location, errorVar);
    blockStmt.addStatement(errorVarDef);
    BLangVariableReference errorVarRef = ASTBuilderUtil.createVariableRef(location, errorVar.symbol);
    if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
        // `check` with a viable fail path: emit a fail statement; attach a return when
        // the function can (and should) propagate the error itself.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = location;
        failStmt.expr = errorVarRef;
        blockStmt.addStatement(failStmt);
        if (returnOnError && this.shouldReturnErrors) {
            BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location, rewrite(errorVarRef, env));
            errorReturn.desugared = true;
            failStmt.exprStmt = errorReturn;
        }
    } else {
        // `checkpanic`, or no way to propagate the error: panic with it.
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = location;
        panicNode.expr = errorVarRef;
        blockStmt.addStatement(panicNode);
    }
    return blockStmt;
}
/**
 * Wraps {@code expr} in a type-conversion expression targeting {@code lhsType} when a
 * conversion is actually needed. Returns the expression unchanged for same types, for
 * a NONE target, and for a handful of combinations that need no cast (nil-to-json,
 * nullable-to-nil, tuple-to-array). The order of these early returns is significant.
 *
 * @param expr    expression to (possibly) convert
 * @param lhsType target type
 * @return the original expression, an implicit cast set by the type checker, or a new
 *         synthetic conversion node
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.getBType();
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // Let the type checker attach an implicit cast if it deems one necessary.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        // Detach and return the implicit conversion so it isn't applied twice.
        BLangExpression impConversionExpr = expr.impConversionExpr;
        expr.impConversionExpr = null;
        return impConversionExpr;
    }
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Fall back to an explicit (unchecked) conversion node.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Derives the structural type implied by a binding-pattern variable, recursively:
 * tuple variables yield a tuple type, record variables yield a fresh anonymous record
 * type (with symbol, init function and a type definition registered), and error
 * variables yield a fresh error type with a synthesized detail record. Simple
 * variables just return their own type.
 *
 * @param bindingPatternVariable the pattern variable to type
 * @return the derived type
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            // The rest pattern's type is an array; the tuple's rest type is its element type.
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        // Synthesize a named anonymous record type symbol with its own scope and init func.
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                                           env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                                           VIRTUAL);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        // One required field per sub-pattern, typed recursively.
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                                                    recordSymbol, bindingPatternVariable.pos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // Rest field type comes from an explicit rest pattern, else defaults to anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BRecordType) recordVariable.restParam.getBType()).restFieldType :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                       recordVarType,
                                                                                       bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null, errorVariable.pos, VIRTUAL);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            // Only a rest detail: reuse the generic detail map type.
            detailType = symTable.detailType;
        } else {
            // Build a concrete detail record from the detail entries.
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                                          errorVariable.pos);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper.createTypeDefinitionForTSymbol(detailType, detailType.tsymbol,
                                                                recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.createTypeDefinitionForTSymbol(errorType, errorTypeSymbol,
                                                            createErrorTypeNode(errorType), env);
        return errorType;
    }
    return bindingPatternVariable.getBType();
}
/**
 * Builds the AST record-type node for an error binding pattern's detail record, with
 * one field per detail entry. Entries whose value pattern has no symbol yet get a
 * synthetic public symbol (name suffixed with '$', typed `pure`).
 *
 * @param errorVariable error binding pattern supplying the detail entries
 * @param detailType    semantic record type the node represents
 * @return the constructed record type node
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> fieldList = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry field : errorVariable.detail) {
        BVarSymbol symbol = field.valueBindingPattern.symbol;
        if (symbol == null) {
            symbol = new BVarSymbol(Flags.PUBLIC, names.fromString(field.key.value + "$"),
                                    this.env.enclPkg.packageID, symTable.pureType, null,
                                    field.valueBindingPattern.pos, VIRTUAL);
        }
        BLangSimpleVariable fieldVar = ASTBuilderUtil.createVariable(
                field.valueBindingPattern.pos,
                symbol.name.value,
                field.valueBindingPattern.getBType(),
                field.valueBindingPattern.expr,
                symbol);
        fieldList.add(fieldVar);
    }
    return TypeDefBuilderHelper.createRecordTypeNode(fieldList, detailType, errorVariable.pos);
}
/**
 * Creates the semantic record type for an error pattern's detail mapping: one public
 * field per detail entry (typed via the entry's binding pattern), sealed when there is
 * no rest detail.
 *
 * @param detail     detail entries of the error binding pattern
 * @param restDetail rest-detail variable, or null; its absence seals the record
 * @param errorNo    counter value (unused here beyond uniqueness bookkeeping by callers)
 * @param pos        position for the anonymous record symbol
 * @return the populated detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecordType = createAnonRecordType(pos);
    if (restDetail == null) {
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry detailEntry : detail) {
        Name fieldName = names.fromIdNode(detailEntry.key);
        BType fieldType = getStructuredBindingPatternType(detailEntry.valueBindingPattern);
        BVarSymbol fieldSym = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID, fieldType,
                                             detailRecordType.tsymbol, detailEntry.key.pos, VIRTUAL);
        detailRecordType.fields.put(fieldName.value, new BField(fieldName, detailEntry.key.pos, fieldSym));
        // Keep the type symbol's scope in sync with the field map.
        detailRecordType.tsymbol.scope.define(fieldName, fieldSym);
    }
    return detailRecordType;
}
/**
 * Creates an empty anonymous record type (rest field type `anydata`) with a fresh
 * public symbol, its own scope, and a registered no-op init function.
 *
 * @param pos position to attach to the generated symbol
 * @return the new, field-less record type
 */
private BRecordType createAnonRecordType(Location pos) {
    BRecordTypeSymbol detailRecordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD,
            Flags.PUBLIC,
            names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID)),
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    detailRecordTypeSymbol.initializerFunc = createRecordInitFunc();
    detailRecordTypeSymbol.scope = new Scope(detailRecordTypeSymbol);
    // Register the init function under "<typeName>.<initName>" in the type's scope.
    detailRecordTypeSymbol.scope.define(
            names.fromString(detailRecordTypeSymbol.name.value + "." +
                                     detailRecordTypeSymbol.initializerFunc.funcName.value),
            detailRecordTypeSymbol.initializerFunc.symbol);
    BRecordType detailRecordType = new BRecordType(detailRecordTypeSymbol);
    detailRecordType.restFieldType = symTable.anydataType;
    return detailRecordType;
}
/**
 * Synthesizes the attached init function for a generated record type: a public,
 * parameterless function returning nil.
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFnType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFnSymbol = Symbols.createFunctionSymbol(Flags.PUBLIC, Names.EMPTY, Names.EMPTY,
            env.enclPkg.symbol.pkgID, initFnType, env.scope.owner, false, symTable.builtinPos, VIRTUAL);
    initFnSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFnSymbol, initFnType, symTable.builtinPos);
}
/**
 * Wraps the given semantic error type in a fresh error-type AST node.
 */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    var node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.setBType(errorType);
    return node;
}
/**
 * Builds the boolean condition for a match-pattern check against {@code varRef}.
 * Group expressions are unwrapped; binary (OR) patterns recurse on both sides and are
 * joined with `||`; the wildcard `_` becomes an `is any` test; any other pattern
 * becomes an equality (`==`) comparison, with a set-based equality fallback when no
 * direct operator exists.
 *
 * @param pos        position for the generated nodes
 * @param varRef     reference to the matched value
 * @param expression the pattern expression
 * @return a boolean-typed expression implementing the check
 */
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        // `p1 | p2` pattern: check(varRef, p1) || check(varRef, p2).
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                                                     (BOperatorSymbol) symResolver
                                                             .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // Wildcard `_` matches anything: desugar to `varRef is any`.
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.setBType(symTable.anyType);
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        // Constant pattern: varRef == expression.
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                                                             expression.getBType());
        if (opSymbol == symTable.notFoundSymbol) {
            // No direct == operator for these operand types; fall back to the
            // anydata set-based equality resolution.
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                                                  binaryExpr, env);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
/**
 * Builds a boolean-typed `expr is like <type>` test expression.
 */
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
    var typeNode = ASTBuilderUtil.createTypeNode(type);
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, typeNode, symTable.booleanType);
}
/**
 * Turns a variable (with an initializer expression) into an assignment statement
 * `variable = variable.expr`, referencing the variable's existing symbol.
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    // Left-hand side: a reference to the variable itself.
    BLangSimpleVarRef lhsRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    lhsRef.pos = variable.pos;
    lhsRef.variableName = variable.name;
    lhsRef.symbol = variable.symbol;
    lhsRef.setBType(variable.getBType());
    // Assignment node: lhsRef = <initializer>.
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.expr = variable.expr;
    assignment.setVariable(lhsRef);
    return assignment;
}
/**
 * Convenience overload: unpacks the variable's initializer, symbol, type and name,
 * and delegates to the full {@code createStructFieldUpdate}.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(),
                                   selfSymbol, variable.name);
}
/**
 * Builds (and desugars) the assignment `self.<fieldName> = expr` used inside a
 * generated init function to populate a struct field.
 *
 * @param function    the init function the assignment belongs to
 * @param expr        value to assign
 * @param fieldSymbol symbol of the target field
 * @param fieldType   static type of the target field
 * @param selfSymbol  symbol of the receiver (`self`)
 * @param fieldName   identifier of the target field
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    BLangSimpleVarRef selfVarRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfVarRef, fieldName);
    fieldAccess.symbol = fieldSymbol;
    fieldAccess.setBType(fieldType);
    // Mark as a store during value creation (skips certain runtime checks).
    fieldAccess.isStoreOnCreation = true;
    BLangAssignment assignmentStmt = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignmentStmt.expr = expr;
    assignmentStmt.pos = function.pos;
    assignmentStmt.setVariable(fieldAccess);
    // Rewrite within the init function's own environment, not the current one.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignmentStmt, initFuncEnv);
}
/**
 * Decides whether an access expression needs safe-navigation desugaring: true when the
 * expression itself (or any field/index access in its receiver chain) is marked for
 * error- or nil-safe navigation. L-values and receiver-less accesses never qualify.
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise the answer is inherited from a chained field/index access receiver.
    NodeKind receiverKind = accessExpr.expr.getKind();
    return (receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR)
            && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigation access chain (`a?.b?.c`) into a statement expression:
 * a temp result variable plus the nested match statement built by
 * {@link #handleSafeNavigation}. Resets the per-expression navigation state
 * (match stack, access stack, success clause, pending assignment) before returning.
 *
 * @param accessExpr root of the safe-navigation chain
 * @return statement expression yielding the chain's value
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.getBType();
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                                          new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                                         this.env.scope.owner.pkgID, accessExpr.getBType(),
                                                         this.env.scope.owner, accessExpr.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    // Populates this.matchStmtStack as a side effect.
    handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
    // The outermost match statement is at the bottom of the stack.
    BLangMatchStatement matchStmt = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matchStmt));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.setBType(originalExprType);
    // Reset shared navigation state for the next safe-navigation expression.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successClause = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
/**
 * Recursively converts one link of a safe-navigation chain into a match statement:
 * error/nil clauses capture failures into {@code tempResultVar}, and the success
 * clause carries the rest of the chain. Works inner-most receiver first, chaining
 * each generated match into the previous success clause via
 * {@link #pushToMatchStatementStack}.
 *
 * @param accessExpr    the access link being desugared
 * @param type          result type used for the generated match statements
 * @param tempResultVar variable receiving the chain's final value
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        return;
    }
    // Process the receiver chain first (inner-most access handled first).
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }
    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Plain access link: just fix its type and hook it into any pending assignment.
        BType originalType = Types.getReferredType(accessExpr.originalType);
        if (TypeTags.isXMLTypeTag(originalType.tag) || isMapJson(originalType)) {
            accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
        } else {
            accessExpr.setBType(originalType);
        }
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
        }
        return;
    }
    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but {                      <--- parent match expr
     *   error e => e,
     *   T t => t.y but {          <--- current expr
     *           error e => e,
     *           R r => r.z
     *         }
     * }
     */
    BLangExpression matchExpr = accessExpr.expr;
    BType matchExprType = accessExpr.expr.getBType();
    Location pos = accessExpr.pos;
    BLangMatchStatement matchStmt = ASTBuilderUtil.createMatchStatement(matchExpr, pos);
    boolean isAllTypesRecords = false;
    LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
    BType referredType = Types.getReferredType(matchExpr.getBType());
    if (referredType.tag == TypeTags.UNION) {
        memTypes = new LinkedHashSet<>(((BUnionType) referredType).getMemberTypes());
        isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
    }
    // Failure clauses: nil and/or error are captured and removed from the member set.
    if (accessExpr.nilSafeNavigation) {
        matchStmt.addMatchClause(getMatchNullClause(matchExpr, tempResultVar));
        matchStmt.setBType(type);
        memTypes.remove(symTable.nilType);
    }
    if (accessExpr.errorSafeNavigation) {
        matchStmt.addMatchClause(getMatchErrorClause(matchExpr, tempResultVar));
        matchStmt.setBType(type);
        matchStmt.pos = pos;
        memTypes.remove(symTable.errorType);
    }
    BLangMatchClause successClause = null;
    Name field = getFieldName(accessExpr);
    if (field == Names.EMPTY) {
        // No statically-known field name (e.g. computed index): single success clause.
        successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                                                accessExpr.errorSafeNavigation);
        matchStmt.addMatchClause(successClause);
        pushToMatchStatementStack(matchStmt, successClause, pos);
        return;
    }
    if (isAllTypesRecords) {
        // One success clause per record member that can actually carry the field
        // (declares it, or is open), plus a catch-all clause assigning nil.
        for (BType memberType : memTypes) {
            BRecordType recordType = (BRecordType) Types.getReferredType(memberType);
            if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                successClause = getSuccessPatternClause(memberType, matchExpr, accessExpr, tempResultVar,
                                                        accessExpr.errorSafeNavigation);
                matchStmt.addMatchClause(successClause);
            }
        }
        matchStmt.addMatchClause(getMatchAllAndNilReturnClause(matchExpr, tempResultVar));
        pushToMatchStatementStack(matchStmt, successClause, pos);
        return;
    }
    successClause = getSuccessPatternClause(matchExprType, matchExpr, accessExpr, tempResultVar,
                                            accessExpr.errorSafeNavigation);
    matchStmt.addMatchClause(successClause);
    pushToMatchStatementStack(matchStmt, successClause, pos);
}
/**
 * Returns true when the given type is exactly a {@code map<json>}.
 */
private boolean isMapJson(BType originalType) {
    if (originalType.tag != TypeTags.MAP) {
        return false;
    }
    return ((BMapType) originalType).getConstraint().tag == TypeTags.JSON;
}
/**
 * Pushes the new match statement onto the navigation stack, nests it inside the body
 * of the previously recorded success clause (if any), and records the new success
 * clause for the next link of the chain.
 */
private void pushToMatchStatementStack(BLangMatchStatement matchStmt, BLangMatchClause successClause,
                                       Location pos) {
    this.matchStmtStack.push(matchStmt);
    BLangMatchClause previousSuccess = this.successClause;
    if (previousSuccess != null) {
        // The parent's success path now executes this match statement.
        previousSuccess.blockStmt = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(matchStmt));
    }
    this.successClause = successClause;
}
/**
 * Extracts the statically-known field name from an access expression: the field
 * identifier for field access, the literal's string form for a literal-indexed
 * access, and {@link Names#EMPTY} otherwise (e.g. a computed index).
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    if (accessExpr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    }
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            return new Name(((BLangLiteral) indexExpr).value.toString());
        }
    }
    return Names.EMPTY;
}
/**
 * Returns true when every member of the union is (after unwrapping type references)
 * a record, an error, or nil.
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    return memTypes.stream()
            .map(memType -> Types.getReferredType(memType).tag)
            .allMatch(tag -> tag == TypeTags.RECORD || tag == TypeTags.ERROR || tag == TypeTags.NIL);
}
/**
 * Builds the error-propagation clause of a safe-navigation match statement:
 * a capture binding pattern guarded by an {@code is error} type test whose body
 * assigns the captured error to the temporary result variable.
 *
 * @param matchExpr the expression being matched
 * @param tempResultVar the temporary variable that accumulates the navigation result
 * @return the generated match clause
 */
private BLangMatchClause getMatchErrorClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String errorPatternVarName = GEN_VAR_PREFIX.value + "t_match_error";
Location pos = matchExpr.pos;
// Capture variable is typed any|error so it can bind whatever the match expression produces.
BVarSymbol errorPatternVarSymbol = new BVarSymbol(0, Names.fromString(errorPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(errorPatternVarSymbol, errorPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, errorPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
// Guard restricts this clause to error values: `<var> is error`.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getErrorTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Builds the nil-propagation clause of a safe-navigation match statement:
 * a capture binding pattern guarded by an {@code is ()} type test whose body
 * assigns the captured nil value to the temporary result variable.
 *
 * @param matchExpr the expression being matched
 * @param tempResultVar the temporary variable that accumulates the navigation result
 * @return the generated match clause
 */
private BLangMatchClause getMatchNullClause(BLangExpression matchExpr, BLangSimpleVariable tempResultVar) {
String nullPatternVarName = GEN_VAR_PREFIX.value + "t_match_null";
Location pos = matchExpr.pos;
// Capture variable is typed any|error so it can bind whatever the match expression produces.
BVarSymbol nullPatternVarSymbol = new BVarSymbol(0, Names.fromString(nullPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(nullPatternVarSymbol, nullPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
BLangSimpleVarRef assignmentRhsExpr = ASTBuilderUtil.createVariableRef(pos, nullPatternVarSymbol);
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
// Guard restricts this clause to nil values: `<var> is ()`.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, assignmentRhsExpr, getNillTypeNode());
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Builds the catch-all clause of a safe-navigation match statement: a wildcard
 * pattern (no guard) whose body assigns nil to the temporary result variable.
 *
 * @param matchExpr the expression being matched
 * @param tempResultVar the temporary variable that accumulates the navigation result
 * @return the generated wildcard match clause
 */
private BLangMatchClause getMatchAllAndNilReturnClause(BLangExpression matchExpr,
BLangSimpleVariable tempResultVar) {
Location pos = matchExpr.pos;
BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(pos, tempResultVar.symbol);
// Body: tempResult = ();
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(pos, tempResultVarRef, createLiteral(pos, symTable.nilType,
Names.NIL_VALUE));
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(pos, this.env.scope, Lists.of(assignmentStmt));
BLangWildCardMatchPattern wildCardMatchPattern = ASTBuilderUtil.createWildCardMatchPattern(matchExpr);
wildCardMatchPattern.setBType(symTable.anyType);
// No match guard: this clause matches anything not handled by earlier clauses.
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, null, wildCardMatchPattern);
}
/**
 * Builds the success clause of a safe-navigation match statement: when the matched
 * value is of the non-error/non-nil "safe" type, re-apply the original access
 * expression to the captured value and assign the result to the temporary variable.
 *
 * @param type the candidate type of the navigated value (error/nil lifted below)
 * @param matchExpr the expression being matched
 * @param accessExpr the original access expression being desugared
 * @param tempResultVar the temporary variable that accumulates the navigation result
 * @param liftError whether error should be lifted out of {@code type}
 * @return the generated match clause
 */
private BLangMatchClause getSuccessPatternClause(BType type, BLangExpression matchExpr,
BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar, boolean liftError) {
type = types.getSafeType(type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
Location pos = accessExpr.pos;
BVarSymbol successPatternSymbol;
// Invokable values need an invokable symbol so later stages can treat the capture as callable.
if (Types.getReferredType(type).tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
} else {
successPatternSymbol = new BVarSymbol(0, Names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, symTable.anyOrErrorType, this.env.scope.owner, pos, VIRTUAL);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
BLangSimpleVarRef successPatternVarRef = ASTBuilderUtil.createVariableRef(accessExpr.pos,
successPatternVar.symbol);
BLangCaptureBindingPattern captureBindingPattern =
ASTBuilderUtil.createCaptureBindingPattern(successPatternSymbol, successPatternVarName);
BLangVarBindingPatternMatchPattern varBindingPatternMatchPattern =
ASTBuilderUtil.createVarBindingPatternMatchPattern(captureBindingPattern, matchExpr);
// Clone the original access expression and re-point it at the captured (safe-typed) value.
BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
}
if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
}
tempAccessExpr.expr = addConversionExprIfRequired(successPatternVarRef, type);
// The clone is applied on an already-narrowed value, so it must not safe-navigate again.
tempAccessExpr.errorSafeNavigation = false;
tempAccessExpr.nilSafeNavigation = false;
accessExpr.cloneRef = null;
if (TypeTags.isXMLTypeTag(Types.getReferredType(tempAccessExpr.expr.getBType()).tag)) {
tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
symTable.nilType));
} else {
tempAccessExpr.setBType(accessExpr.originalType);
}
tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr);
BLangBlockStmt clauseBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, this.env.scope,
Lists.of(assignmentStmt));
// Guard restricts this clause to values of the lifted "safe" type.
BLangExpression matchGuard = ASTBuilderUtil.createTypeTestExpr(pos, successPatternVarRef, createTypeNode(type));
matchGuard.setBType(symTable.booleanType);
return ASTBuilderUtil.createMatchClause(matchExpr, clauseBody, matchGuard, varBindingPatternMatchPattern);
}
/**
 * Creates a value-type node representing nil, used e.g. as the RHS of generated
 * {@code is ()} type tests.
 */
BLangValueType getNillTypeNode() {
BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nilTypeNode.typeKind = TypeKind.NIL;
nilTypeNode.setBType(symTable.nilType);
return nilTypeNode;
}
/**
 * Wraps the given semantic type in a value-type AST node carrying the same kind and type.
 */
BLangValueType createTypeNode(BType type) {
BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
valueTypeNode.setBType(type);
valueTypeNode.typeKind = type.getKind();
return valueTypeNode;
}
/**
 * Clones a value expression used in safe-navigation desugaring. Only simple variable
 * references and access expressions are expected here; anything else is a bug.
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
NodeKind kind = expr.getKind();
if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
}
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
return cloneAccessExpr((BLangAccessExpression) expr);
}
// Unexpected expression kind: fail fast, mirroring the original contract.
throw new IllegalStateException();
}
/**
 * Recursively clones a field/index access expression chain, stripping safe-navigation
 * flags and retyping each link with its "safe" (nil-lifted) type.
 *
 * @param originalAccessExpr the access expression to clone; returned as-is if it has no receiver
 * @return the cloned access expression
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
if (originalAccessExpr.expr == null) {
return originalAccessExpr;
}
BLangExpression varRef;
NodeKind kind = originalAccessExpr.expr.getKind();
// Clone the receiver first: recurse for nested access chains, otherwise clone the leaf.
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
} else {
varRef = cloneExpression(originalAccessExpr.expr);
}
varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
BLangAccessExpression accessExpr;
switch (originalAccessExpr.getKind()) {
case FIELD_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
((BLangFieldBasedAccess) originalAccessExpr).field);
break;
case INDEX_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
break;
default:
throw new IllegalStateException();
}
// Carry over metadata from the original node.
accessExpr.originalType = originalAccessExpr.originalType;
accessExpr.pos = originalAccessExpr.pos;
accessExpr.isLValue = originalAccessExpr.isLValue;
accessExpr.symbol = originalAccessExpr.symbol;
// The clone runs on an already-narrowed value, so safe navigation is disabled.
accessExpr.errorSafeNavigation = false;
accessExpr.nilSafeNavigation = false;
accessExpr.setBType(originalAccessExpr.originalType);
return accessExpr;
}
/**
 * Builds {@code expr + 1}, shifting an exclusive int-range start to its inclusive equivalent.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
BLangLiteral oneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
BOperatorSymbol addSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
symTable.intType, symTable.intType);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, oneLiteral, symTable.intType, OperatorKind.ADD,
addSymbol);
}
/**
 * Builds {@code expr - 1}, shifting an exclusive int-range end to its inclusive equivalent.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
BLangLiteral oneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
BOperatorSymbol subSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
symTable.intType, symTable.intType);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, oneLiteral, symTable.intType, OperatorKind.SUB,
subSymbol);
}
/**
 * Creates a boolean literal node with the given value at the builtin position.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
BLangLiteral booleanLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
booleanLiteral.pos = symTable.builtinPos;
booleanLiteral.setBType(symTable.booleanType);
booleanLiteral.value = value;
return booleanLiteral;
}
/**
 * Returns true if the (nil-lifted) type is a mapping type with a default value:
 * json, map, or record. Type references are unwrapped and re-checked.
 */
private boolean isDefaultableMappingType(BType type) {
int tag = types.getSafeType(type, true, false).tag;
if (tag == TypeTags.JSON || tag == TypeTags.MAP || tag == TypeTags.RECORD) {
return true;
}
if (tag == TypeTags.TYPEREFDESC) {
// Unwrap the reference and test the referred type.
return isDefaultableMappingType(Types.getReferredType(type));
}
return false;
}
/**
 * Creates the generated initializer function for a class definition, registers it as the
 * class's generated-initializer attached function, and desugars it.
 *
 * @param classDefinition the class to create the initializer for
 * @param env the environment of the class definition
 * @return the rewritten generated init function
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
// Mirror the user-defined init's return type when one exists; default to nil.
BType returnType = symTable.nilType;
if (classDefinition.initFunction != null) {
returnType = classDefinition.initFunction.getBType().getReturnType();
}
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(null, classDefinition.symbol,
env, names, GENERATED_INIT_SUFFIX,
classDefinition.getBType(), returnType);
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
// Attach the generated initializer to the class's type symbol.
typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(), null);
classDefinition.generatedInitFunction = initFunction;
initFunction.returnTypeNode.setBType(returnType);
return rewrite(initFunction, env);
}
/**
 * Desugars a short-circuiting logical binary expression ({@code &&} / {@code ||})
 * into an if-else wrapped in a statement expression; see the scheme below.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
/*
* Desugar (lhsExpr && rhsExpr) to following if-else:
*
* logical AND:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = rhsExpr;
* } else {
* $result$ = false;
* }
*
* logical OR:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = true;
* } else {
* $result$ = rhsExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
symTable.builtinPos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
// Then branch: AND still needs the RHS; OR is already decided (true).
BLangExpression thenResult;
if (binaryExpr.opKind == OperatorKind.AND) {
thenResult = binaryExpr.rhsExpr;
} else {
thenResult = getBooleanLiteral(true);
}
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
thenBody.addStatement(thenAssignment);
// Else branch: AND is already decided (false); OR still needs the RHS.
BLangExpression elseResult;
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
if (binaryExpr.opKind == OperatorKind.AND) {
elseResult = getBooleanLiteral(false);
} else {
elseResult = binaryExpr.rhsExpr;
}
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(binaryExpr.getBType());
result = rewriteExpr(stmtExpr);
}
/**
 * Returns true if the expression is (or, through check/conversion wrappers, wraps)
 * a mapping constructor, object constructor, or object init expression.
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
NodeKind kind = expression.getKind();
if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
|| kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
return true;
}
// Look through check and conversion wrappers at the inner expression.
if (kind == NodeKind.CHECK_EXPR) {
return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
}
if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
}
return false;
}
/**
 * Returns the rest parameter's type of the given invokable symbol, or null if absent.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
if (invokableSymbol == null || invokableSymbol.restParam == null) {
return null;
}
return invokableSymbol.restParam.type;
}
/**
 * Returns the rest parameter's type of the given function node, or null if absent.
 */
private BType getRestType(BLangFunction function) {
if (function == null || function.restParam == null) {
return null;
}
return function.restParam.getBType();
}
/**
 * Returns the rest parameter's symbol of the given function node, or null if absent.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
if (function == null || function.restParam == null) {
return null;
}
return function.restParam.symbol;
}
/**
 * Returns true if the record field is a key-value field whose key is a computed expression.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
return field.isKeyValueField()
&& ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor's fields (normalizing keys to string literals and
 * recursively rewriting values/spread operands) and repackages the result as a struct
 * literal for record types or a map literal otherwise.
 *
 * @param mappingConstructorExpr the mapping constructor to rewrite
 * @return the rewritten struct or map literal
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
BType type = mappingConstructorExpr.getBType();
Location pos = mappingConstructorExpr.pos;
List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
(BLangRecordLiteral.BLangRecordKeyValueField) field;
BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
BLangExpression origKey = key.expr;
BLangExpression keyExpr;
if (key.computedKey) {
// Computed keys ([expr]) are kept as-is and rewritten below.
keyExpr = origKey;
} else {
// Identifier keys become string literals (unescaped); literal keys stay literals.
keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
((BLangLiteral) origKey);
}
BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
rewriteExpr(keyValueField.valueExpr));
rewrittenField.pos = keyValueField.pos;
rewrittenField.key.pos = key.pos;
rewrittenFields.add(rewrittenField);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
// Shorthand field {x} expands to "x": x.
BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
rewriteExpr(createStringLiteral(pos,
StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
rewriteExpr(varRefField)));
} else {
// Spread-operator field: only the spread expression needs rewriting.
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
spreadOpField.expr = rewriteExpr(spreadOpField.expr);
rewrittenFields.add(spreadOpField);
}
}
fields.clear();
BType refType = Types.getReferredType(type);
return refType.tag == TypeTags.RECORD ?
new BLangStructLiteral(pos, type, refType.tsymbol, rewrittenFields) :
new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Injects an import of the internal transaction module (aliased "trx") into the current
 * package, unless the current package is that module itself.
 */
protected void addTransactionInternalModuleImport() {
// The transaction-internal module must not import itself.
if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
importDcl.pkgNameComps = pkgNameComps;
importDcl.pos = env.enclPkg.symbol.pos;
importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
// Symbol is pre-resolved; register the import on both the AST and the package symbol.
importDcl.symbol = symTable.internalTransactionModuleSymbol;
env.enclPkg.imports.add(importDcl);
env.enclPkg.symbol.imports.add(importDcl.symbol);
}
}
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
// Creates the generated initializer for the class; when a user-defined init function
// exists, wires the generated initializer up to delegate to it.
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
// No user-written init function: the generated initializer stands alone.
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
}
It would be safer to use `cacheMaxSize <= 0 || cacheExpireMs <= 0`. | public void open(FunctionContext context) {
// Open the async HBase connection/table and (optionally) the lookup cache.
LOG.info("start open ...");
Configuration config = prepareRuntimeConfiguration();
CompletableFuture<AsyncConnection> asyncConnectionFuture = ConnectionFactory.createAsyncConnection(config);
try {
asyncConnection = asyncConnectionFuture.get();
table = asyncConnection.getTable(TableName.valueOf(hTableName), (ExecutorService) Executors.directExecutor());
// Disable the cache for ANY non-positive size or TTL, not just the -1/0 sentinels:
// a negative maximumSize or expireAfterWrite duration would make CacheBuilder throw,
// and 0/negative settings all mean "caching off".
this.cache = cacheMaxSize <= 0 || cacheExpireMs <= 0 ? null : CacheBuilder.newBuilder()
.recordStats()
.expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
.maximumSize(cacheMaxSize)
.build();
if (cache != null && context != null) {
// Expose the cache hit rate as a gauge metric.
context.getMetricGroup().gauge("lookupCacheHitRate", (Gauge<Double>) () -> cache.stats().hitRate());
}
} catch (InterruptedException | ExecutionException e) {
LOG.error("Exception while creating connection to HBase.", e);
throw new RuntimeException("Cannot create connection to HBase.", e);
}
this.serde = new HBaseSerde(hbaseTableSchema, nullStringLiteral);
LOG.info("end open.");
} | this.cache = cacheMaxSize == -1 || cacheExpireMs == 0 ? null : CacheBuilder.newBuilder() | public void open(FunctionContext context) {
LOG.info("start open ...");
final ExecutorService threadPool =
Executors.newFixedThreadPool(
THREAD_POOL_SIZE,
new ExecutorThreadFactory(
"hbase-aysnc-lookup-worker", Threads.LOGGING_EXCEPTION_HANDLER));
Configuration config = prepareRuntimeConfiguration();
CompletableFuture<AsyncConnection> asyncConnectionFuture =
ConnectionFactory.createAsyncConnection(config);
try {
asyncConnection = asyncConnectionFuture.get();
table = asyncConnection.getTable(TableName.valueOf(hTableName), threadPool);
this.cache =
cacheMaxSize <= 0 || cacheExpireMs <= 0
? null
: CacheBuilder.newBuilder()
.recordStats()
.expireAfterWrite(cacheExpireMs, TimeUnit.MILLISECONDS)
.maximumSize(cacheMaxSize)
.build();
if (cache != null && context != null) {
context.getMetricGroup()
.gauge("lookupCacheHitRate", (Gauge<Double>) () -> cache.stats().hitRate());
}
} catch (InterruptedException | ExecutionException e) {
LOG.error("Exception while creating connection to HBase.", e);
throw new RuntimeException("Cannot create connection to HBase.", e);
}
this.serde = new HBaseSerde(hbaseTableSchema, nullStringLiteral);
LOG.info("end open.");
} | class HBaseRowDataAsyncLookupFunction extends AsyncTableFunction<RowData> {
private static final Logger LOG = LoggerFactory.getLogger(HBaseRowDataAsyncLookupFunction.class);
private static final long serialVersionUID = 1L;
private final String hTableName;
private final byte[] serializedConfig;
private final HBaseTableSchema hbaseTableSchema;
private final String nullStringLiteral;
private transient AsyncConnection asyncConnection;
private transient AsyncTable<ScanResultConsumer> table;
private transient HBaseSerde serde;
private final long cacheMaxSize;
private final long cacheExpireMs;
private final int maxRetryTimes;
private transient Cache<Object, RowData> cache;
public HBaseRowDataAsyncLookupFunction(
Configuration configuration,
String hTableName,
HBaseTableSchema hbaseTableSchema,
String nullStringLiteral, HBaseLookupOptions lookupOptions) {
this.serializedConfig = HBaseConfigurationUtil.serializeConfiguration(configuration);
this.hTableName = hTableName;
this.hbaseTableSchema = hbaseTableSchema;
this.nullStringLiteral = nullStringLiteral;
this.cacheMaxSize = lookupOptions.getCacheMaxSize();
this.cacheExpireMs = lookupOptions.getCacheExpireMs();
this.maxRetryTimes = lookupOptions.getMaxRetryTimes();
}
@Override
/**
* The invoke entry point of lookup function.
* @param feature The result or exception is returned.
* @param rowKey the lookup key. Currently only support single rowkey.
*/
public void eval(CompletableFuture<Collection<RowData>> feature, Object rowKey) {
int currentRetry = 0;
if (cache != null){
RowData cacheRowData = cache.getIfPresent(rowKey);
if (cacheRowData != null){
if (cacheRowData.getArity() == 0){
feature.complete(Collections.emptyList());
} else {
feature.complete(Collections.singletonList(cacheRowData));
}
return;
}
}
fetchResult(feature, currentRetry, rowKey);
}
/**
* Execute async fetch result .
* @param resultFuture The result or exception is returned.
* @param currentRetry Current number of retries.
* @param rowKey the lookup key.
*/
// Issues the async GET for the row key, completing resultFuture with the decoded row(s);
// retries with linear backoff on transient failures and populates the cache when enabled.
private void fetchResult(CompletableFuture<Collection<RowData>> resultFuture, int currentRetry, Object rowKey){
Get get = serde.createGet(rowKey);
CompletableFuture<Result> responseFuture = table.get(get);
responseFuture.whenCompleteAsync(
(result, throwable) -> {
if (throwable != null) {
if (throwable instanceof TableNotFoundException) {
// A missing table is not retryable: fail immediately.
LOG.error("Table '{}' not found ", hTableName, throwable);
resultFuture.completeExceptionally(
new RuntimeException("HBase table '" + hTableName + "' not found.", throwable));
} else {
LOG.error(String.format("HBase asyncLookup error, retry times = %d", currentRetry), throwable);
if (currentRetry >= maxRetryTimes) {
resultFuture.completeExceptionally(throwable);
} else {
// Linear backoff: sleep currentRetry seconds before retrying.
try {
Thread.sleep(1000 * currentRetry);
} catch (InterruptedException e1) {
resultFuture.completeExceptionally(e1);
}
fetchResult(resultFuture, currentRetry + 1, rowKey);
}
}
} else {
if (result.isEmpty()) {
resultFuture.complete(Collections.emptyList());
if (cache != null) {
// Cache an arity-0 row as the "no such key" marker (checked in eval()).
cache.put(rowKey, new GenericRowData(0));
}
} else {
if (cache != null){
RowData rowData = serde.convertToRow(result, false);
resultFuture.complete(Collections.singletonList(rowData));
cache.put(rowKey, rowData);
} else {
resultFuture.complete(Collections.singletonList(serde.convertToRow(result, true)));
}
}
}
});
}
private Configuration prepareRuntimeConfiguration() {
Configuration runtimeConfig = HBaseConfigurationUtil.deserializeConfiguration(
serializedConfig,
HBaseConfigurationUtil.getHBaseConfiguration());
if (StringUtils.isNullOrWhitespaceOnly(runtimeConfig.get(HConstants.ZOOKEEPER_QUORUM))) {
LOG.error("can not connect to HBase without {} configuration", HConstants.ZOOKEEPER_QUORUM);
throw new IllegalArgumentException("check HBase configuration failed, lost: '" + HConstants.ZOOKEEPER_QUORUM + "'!");
}
return runtimeConfig;
}
@Override
public void close() {
LOG.info("start close ...");
// Release the shared async connection; the field is cleared only after a
// successful close, so a failed close leaves it set (logged as a warning).
if (null != asyncConnection) {
try {
asyncConnection.close();
asyncConnection = null;
} catch (IOException e) {
LOG.warn("exception when close connection", e);
}
}
LOG.info("end close.");
}
@VisibleForTesting
public String getHTableName() {
return hTableName;
}
} | class HBaseRowDataAsyncLookupFunction extends AsyncTableFunction<RowData> {
private static final Logger LOG =
LoggerFactory.getLogger(HBaseRowDataAsyncLookupFunction.class);
private static final long serialVersionUID = 1L;
private final String hTableName;
private final byte[] serializedConfig;
private final HBaseTableSchema hbaseTableSchema;
private final String nullStringLiteral;
private transient AsyncConnection asyncConnection;
private transient AsyncTable<ScanResultConsumer> table;
private transient HBaseSerde serde;
private final long cacheMaxSize;
private final long cacheExpireMs;
private final int maxRetryTimes;
private transient Cache<Object, RowData> cache;
/** The size for thread pool. */
private static final int THREAD_POOL_SIZE = 16;
public HBaseRowDataAsyncLookupFunction(
Configuration configuration,
String hTableName,
HBaseTableSchema hbaseTableSchema,
String nullStringLiteral,
HBaseLookupOptions lookupOptions) {
this.serializedConfig = HBaseConfigurationUtil.serializeConfiguration(configuration);
this.hTableName = hTableName;
this.hbaseTableSchema = hbaseTableSchema;
this.nullStringLiteral = nullStringLiteral;
this.cacheMaxSize = lookupOptions.getCacheMaxSize();
this.cacheExpireMs = lookupOptions.getCacheExpireMs();
this.maxRetryTimes = lookupOptions.getMaxRetryTimes();
}
@Override
/**
* The invoke entry point of lookup function.
*
* @param future The result or exception is returned.
* @param rowKey the lookup key. Currently only support single rowkey.
*/
public void eval(CompletableFuture<Collection<RowData>> future, Object rowKey) {
int currentRetry = 0;
if (cache != null) {
RowData cacheRowData = cache.getIfPresent(rowKey);
if (cacheRowData != null) {
if (cacheRowData.getArity() == 0) {
future.complete(Collections.emptyList());
} else {
future.complete(Collections.singletonList(cacheRowData));
}
return;
}
}
fetchResult(future, currentRetry, rowKey);
}
/**
* Execute async fetch result .
*
* @param resultFuture The result or exception is returned.
* @param currentRetry Current number of retries.
* @param rowKey the lookup key.
*/
private void fetchResult(
CompletableFuture<Collection<RowData>> resultFuture, int currentRetry, Object rowKey) {
Get get = serde.createGet(rowKey);
CompletableFuture<Result> responseFuture = table.get(get);
responseFuture.whenCompleteAsync(
(result, throwable) -> {
if (throwable != null) {
if (throwable instanceof TableNotFoundException) {
LOG.error("Table '{}' not found ", hTableName, throwable);
resultFuture.completeExceptionally(
new RuntimeException(
"HBase table '" + hTableName + "' not found.",
throwable));
} else {
LOG.error(
String.format(
"HBase asyncLookup error, retry times = %d",
currentRetry),
throwable);
if (currentRetry >= maxRetryTimes) {
resultFuture.completeExceptionally(throwable);
} else {
try {
Thread.sleep(1000 * currentRetry);
} catch (InterruptedException e1) {
resultFuture.completeExceptionally(e1);
}
fetchResult(resultFuture, currentRetry + 1, rowKey);
}
}
} else {
if (result.isEmpty()) {
resultFuture.complete(Collections.emptyList());
if (cache != null) {
cache.put(rowKey, new GenericRowData(0));
}
} else {
if (cache != null) {
RowData rowData = serde.convertToNewRow(result);
resultFuture.complete(Collections.singletonList(rowData));
cache.put(rowKey, rowData);
} else {
resultFuture.complete(
Collections.singletonList(serde.convertToNewRow(result)));
}
}
}
});
}
private Configuration prepareRuntimeConfiguration() {
Configuration runtimeConfig =
HBaseConfigurationUtil.deserializeConfiguration(
serializedConfig, HBaseConfigurationUtil.getHBaseConfiguration());
if (StringUtils.isNullOrWhitespaceOnly(runtimeConfig.get(HConstants.ZOOKEEPER_QUORUM))) {
LOG.error(
"can not connect to HBase without {} configuration",
HConstants.ZOOKEEPER_QUORUM);
throw new IllegalArgumentException(
"check HBase configuration failed, lost: '"
+ HConstants.ZOOKEEPER_QUORUM
+ "'!");
}
return runtimeConfig;
}
@Override
public void close() {
LOG.info("start close ...");
if (null != table) {
table = null;
}
if (null != asyncConnection) {
try {
asyncConnection.close();
asyncConnection = null;
} catch (IOException e) {
LOG.warn("exception when close connection", e);
}
}
LOG.info("end close.");
}
@VisibleForTesting
public String getHTableName() {
return hTableName;
}
} |
Okay, I added a test for this case as well. But this required adding the MonitoringInfoSpec for SampledByteCount, which I was going to do in this PR which is queued up after this one. But I have pulled it into this PR. https://github.com/apache/beam/pull/8416/files | public void testMonitoringInfosArePopulatedForUserDistributions() {
// Two user distributions are updated once each ...
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
DistributionCell c1 = testObject.getDistribution(MetricName.named("ns", "name1"));
DistributionCell c2 = testObject.getDistribution(MetricName.named("ns", "name2"));
c1.update(5L);
c2.update(4L);
// ... and the expected MonitoringInfos are built by hand: one USER_DISTRIBUTION_COUNTER
// per cell, labelled with namespace/name/ptransform, carrying (sum, count, min, max).
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1
.setUrn(MonitoringInfoConstants.Urns.USER_DISTRIBUTION_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name1")
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1")
.setInt64DistributionValue(DistributionData.create(5, 1, 5, 5));
SimpleMonitoringInfoBuilder builder2 = new SimpleMonitoringInfoBuilder();
builder2
.setUrn(MonitoringInfoConstants.Urns.USER_DISTRIBUTION_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name2")
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1")
.setInt64DistributionValue(DistributionData.create(4, 1, 4, 4));
// Timestamps are nondeterministic, so clear them before comparing.
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build(), builder2.build()));
} | DistributionCell c2 = testObject.getDistribution(MetricName.named("ns", "name2")); | public void testMonitoringInfosArePopulatedForUserDistributions() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
DistributionCell c1 = testObject.getDistribution(MetricName.named("ns", "name1"));
DistributionCell c2 = testObject.getDistribution(MetricName.named("ns", "name2"));
c1.update(5L);
c2.update(4L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1
.setUrn(MonitoringInfoConstants.Urns.USER_DISTRIBUTION_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name1")
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1")
.setInt64DistributionValue(DistributionData.create(5, 1, 5, 5));
SimpleMonitoringInfoBuilder builder2 = new SimpleMonitoringInfoBuilder();
builder2
.setUrn(MonitoringInfoConstants.Urns.USER_DISTRIBUTION_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name2")
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1")
.setInt64DistributionValue(DistributionData.create(4, 1, 4, 4));
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build(), builder2.build()));
} | class MetricsContainerImplTest {
@Test
// Verifies delta semantics of getUpdates(): counters start dirty, commitUpdates()
// clears pending deltas, and subsequent increments report the cumulative value again.
public void testCounterDeltas() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
CounterCell c1 = container.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = container.getCounter(MetricName.named("ns", "name2"));
assertThat(
"All counters should start out dirty",
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 0L), metricUpdate("name2", 0L)));
container.commitUpdates();
assertThat(
"After commit no counters should be dirty",
container.getUpdates().counterUpdates(),
emptyIterable());
c1.inc(5L);
c2.inc(4L);
assertThat(
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
// getUpdates() is idempotent until commitUpdates() is called.
assertThat(
"Since we haven't committed, updates are still included",
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
container.commitUpdates();
assertThat(
"After commit there are no updates",
container.getUpdates().counterUpdates(),
emptyIterable());
c1.inc(8L);
assertThat(container.getUpdates().counterUpdates(), contains(metricUpdate("name1", 13L)));
// tryGetCounter must not create a cell for an unknown name.
CounterCell dne = container.tryGetCounter(MetricName.named("ns", "dne"));
assertEquals(dne, null);
}
@Test
public void testCounterCumulatives() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
CounterCell c1 = container.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = container.getCounter(MetricName.named("ns", "name2"));
c1.inc(2L);
c2.inc(4L);
c1.inc(3L);
container.getUpdates();
container.commitUpdates();
assertThat(
"Committing updates shouldn't affect cumulative counter values",
container.getCumulative().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
c1.inc(8L);
assertThat(
container.getCumulative().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 13L), metricUpdate("name2", 4L)));
CounterCell readC1 = container.tryGetCounter(MetricName.named("ns", "name1"));
assertEquals(13L, (long) readC1.getCumulative());
}
@Test
public void testDistributionDeltas() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
DistributionCell c1 = container.getDistribution(MetricName.named("ns", "name1"));
DistributionCell c2 = container.getDistribution(MetricName.named("ns", "name2"));
assertThat(
"Initial update includes initial zero-values",
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.EMPTY),
metricUpdate("name2", DistributionData.EMPTY)));
container.commitUpdates();
assertThat(
"No updates after commit", container.getUpdates().distributionUpdates(), emptyIterable());
c1.update(5L);
c2.update(4L);
assertThat(
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.create(5, 1, 5, 5)),
metricUpdate("name2", DistributionData.create(4, 1, 4, 4))));
assertThat(
"Updates stay the same without commit",
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.create(5, 1, 5, 5)),
metricUpdate("name2", DistributionData.create(4, 1, 4, 4))));
container.commitUpdates();
assertThat(
"No updatess after commit", container.getUpdates().distributionUpdates(), emptyIterable());
c1.update(8L);
c1.update(4L);
assertThat(
container.getUpdates().distributionUpdates(),
contains(metricUpdate("name1", DistributionData.create(17, 3, 4, 8))));
container.commitUpdates();
DistributionCell dne = container.tryGetDistribution(MetricName.named("ns", "dne"));
assertEquals(dne, null);
}
@Test
public void testMonitoringInfosArePopulatedForUserCounters() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
CounterCell c1 = testObject.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = testObject.getCounter(MetricName.named("ns", "name2"));
c1.inc(2L);
c2.inc(4L);
c1.inc(3L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1
.setUrn(MonitoringInfoConstants.Urns.USER_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name1")
.setInt64Value(5)
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1");
SimpleMonitoringInfoBuilder builder2 = new SimpleMonitoringInfoBuilder();
builder2
.setUrn(MonitoringInfoConstants.Urns.USER_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name2")
.setInt64Value(4)
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1");
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build(), builder2.build()));
}
@Test
@Test
public void testMonitoringInfosArePopulatedForABeamCounter() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
HashMap<String, String> labels = new HashMap<String, String>();
labels.put(MonitoringInfoConstants.Labels.PCOLLECTION, "pcollection");
MetricName name =
MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.ELEMENT_COUNT, labels);
CounterCell c1 = testObject.getCounter(name);
c1.inc(2L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT);
builder1.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, "pcollection");
builder1.setInt64Value(2);
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build()));
}
@Test
public void testEquals() {
MetricsContainerImpl metricsContainerImpl = new MetricsContainerImpl("stepName");
MetricsContainerImpl equal = new MetricsContainerImpl("stepName");
Assert.assertEquals(metricsContainerImpl, equal);
Assert.assertEquals(metricsContainerImpl.hashCode(), equal.hashCode());
}
@Test
public void testNotEquals() {
MetricsContainerImpl metricsContainerImpl = new MetricsContainerImpl("stepName");
Assert.assertNotEquals(metricsContainerImpl, new Object());
MetricsContainerImpl differentStepName = new MetricsContainerImpl("DIFFERENT");
Assert.assertNotEquals(metricsContainerImpl, differentStepName);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentStepName.hashCode());
MetricsContainerImpl differentCounters = new MetricsContainerImpl("stepName");
differentCounters.getCounter(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentCounters);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentCounters.hashCode());
MetricsContainerImpl differentDistributions = new MetricsContainerImpl("stepName");
differentDistributions.getDistribution(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentDistributions);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentDistributions.hashCode());
MetricsContainerImpl differentGauges = new MetricsContainerImpl("stepName");
differentGauges.getGauge(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentGauges);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentGauges.hashCode());
}
} | class MetricsContainerImplTest {
@Test
public void testCounterDeltas() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
CounterCell c1 = container.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = container.getCounter(MetricName.named("ns", "name2"));
assertThat(
"All counters should start out dirty",
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 0L), metricUpdate("name2", 0L)));
container.commitUpdates();
assertThat(
"After commit no counters should be dirty",
container.getUpdates().counterUpdates(),
emptyIterable());
c1.inc(5L);
c2.inc(4L);
assertThat(
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
assertThat(
"Since we haven't committed, updates are still included",
container.getUpdates().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
container.commitUpdates();
assertThat(
"After commit there are no updates",
container.getUpdates().counterUpdates(),
emptyIterable());
c1.inc(8L);
assertThat(container.getUpdates().counterUpdates(), contains(metricUpdate("name1", 13L)));
CounterCell dne = container.tryGetCounter(MetricName.named("ns", "dne"));
assertEquals(dne, null);
}
@Test
public void testCounterCumulatives() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
CounterCell c1 = container.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = container.getCounter(MetricName.named("ns", "name2"));
c1.inc(2L);
c2.inc(4L);
c1.inc(3L);
container.getUpdates();
container.commitUpdates();
assertThat(
"Committing updates shouldn't affect cumulative counter values",
container.getCumulative().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 5L), metricUpdate("name2", 4L)));
c1.inc(8L);
assertThat(
container.getCumulative().counterUpdates(),
containsInAnyOrder(metricUpdate("name1", 13L), metricUpdate("name2", 4L)));
CounterCell readC1 = container.tryGetCounter(MetricName.named("ns", "name1"));
assertEquals(13L, (long) readC1.getCumulative());
}
@Test
public void testDistributionDeltas() {
MetricsContainerImpl container = new MetricsContainerImpl("step1");
DistributionCell c1 = container.getDistribution(MetricName.named("ns", "name1"));
DistributionCell c2 = container.getDistribution(MetricName.named("ns", "name2"));
assertThat(
"Initial update includes initial zero-values",
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.EMPTY),
metricUpdate("name2", DistributionData.EMPTY)));
container.commitUpdates();
assertThat(
"No updates after commit", container.getUpdates().distributionUpdates(), emptyIterable());
c1.update(5L);
c2.update(4L);
assertThat(
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.create(5, 1, 5, 5)),
metricUpdate("name2", DistributionData.create(4, 1, 4, 4))));
assertThat(
"Updates stay the same without commit",
container.getUpdates().distributionUpdates(),
containsInAnyOrder(
metricUpdate("name1", DistributionData.create(5, 1, 5, 5)),
metricUpdate("name2", DistributionData.create(4, 1, 4, 4))));
container.commitUpdates();
assertThat(
"No updatess after commit", container.getUpdates().distributionUpdates(), emptyIterable());
c1.update(8L);
c1.update(4L);
assertThat(
container.getUpdates().distributionUpdates(),
contains(metricUpdate("name1", DistributionData.create(17, 3, 4, 8))));
container.commitUpdates();
DistributionCell dne = container.tryGetDistribution(MetricName.named("ns", "dne"));
assertEquals(dne, null);
}
@Test
public void testMonitoringInfosArePopulatedForUserCounters() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
CounterCell c1 = testObject.getCounter(MetricName.named("ns", "name1"));
CounterCell c2 = testObject.getCounter(MetricName.named("ns", "name2"));
c1.inc(2L);
c2.inc(4L);
c1.inc(3L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1
.setUrn(MonitoringInfoConstants.Urns.USER_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name1")
.setInt64Value(5)
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1");
SimpleMonitoringInfoBuilder builder2 = new SimpleMonitoringInfoBuilder();
builder2
.setUrn(MonitoringInfoConstants.Urns.USER_COUNTER)
.setLabel(MonitoringInfoConstants.Labels.NAMESPACE, "ns")
.setLabel(MonitoringInfoConstants.Labels.NAME, "name2")
.setInt64Value(4)
.setLabel(MonitoringInfoConstants.Labels.PTRANSFORM, "step1");
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build(), builder2.build()));
}
@Test
@Test
public void testMonitoringInfosArePopulatedForSystemDistributions() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
HashMap<String, String> labels = new HashMap<>();
labels.put(MonitoringInfoConstants.Labels.PCOLLECTION, "pcoll1");
DistributionCell c1 =
testObject.getDistribution(
MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.SAMPLED_BYTE_SIZE, labels));
c1.update(5L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1
.setUrn(MonitoringInfoConstants.Urns.SAMPLED_BYTE_SIZE)
.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, "pcoll1")
.setInt64DistributionValue(DistributionData.create(5, 1, 5, 5));
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build()));
}
@Test
public void testMonitoringInfosArePopulatedForABeamCounter() {
MetricsContainerImpl testObject = new MetricsContainerImpl("step1");
HashMap<String, String> labels = new HashMap<String, String>();
labels.put(MonitoringInfoConstants.Labels.PCOLLECTION, "pcollection");
MetricName name =
MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.ELEMENT_COUNT, labels);
CounterCell c1 = testObject.getCounter(name);
c1.inc(2L);
SimpleMonitoringInfoBuilder builder1 = new SimpleMonitoringInfoBuilder();
builder1.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT);
builder1.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, "pcollection");
builder1.setInt64Value(2);
ArrayList<MonitoringInfo> actualMonitoringInfos = new ArrayList<MonitoringInfo>();
for (MonitoringInfo mi : testObject.getMonitoringInfos()) {
actualMonitoringInfos.add(SimpleMonitoringInfoBuilder.copyAndClearTimestamp(mi));
}
assertThat(actualMonitoringInfos, containsInAnyOrder(builder1.build()));
}
@Test
public void testEquals() {
MetricsContainerImpl metricsContainerImpl = new MetricsContainerImpl("stepName");
MetricsContainerImpl equal = new MetricsContainerImpl("stepName");
Assert.assertEquals(metricsContainerImpl, equal);
Assert.assertEquals(metricsContainerImpl.hashCode(), equal.hashCode());
}
@Test
public void testNotEquals() {
MetricsContainerImpl metricsContainerImpl = new MetricsContainerImpl("stepName");
Assert.assertNotEquals(metricsContainerImpl, new Object());
MetricsContainerImpl differentStepName = new MetricsContainerImpl("DIFFERENT");
Assert.assertNotEquals(metricsContainerImpl, differentStepName);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentStepName.hashCode());
MetricsContainerImpl differentCounters = new MetricsContainerImpl("stepName");
differentCounters.getCounter(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentCounters);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentCounters.hashCode());
MetricsContainerImpl differentDistributions = new MetricsContainerImpl("stepName");
differentDistributions.getDistribution(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentDistributions);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentDistributions.hashCode());
MetricsContainerImpl differentGauges = new MetricsContainerImpl("stepName");
differentGauges.getGauge(MetricName.named("namespace", "name"));
Assert.assertNotEquals(metricsContainerImpl, differentGauges);
Assert.assertNotEquals(metricsContainerImpl.hashCode(), differentGauges.hashCode());
}
} |
If no transformers are registered, this method should return the original set of annotations immediately. | public Set<AnnotationInstance> applyTransformers(Type type, AnnotationTarget target, Set<AnnotationInstance> qualifiers) {
TransformationContextImpl transformationContext = new TransformationContextImpl(target, qualifiers,
annotationStore);
for (InjectionPointsTransformer transformer : transformers) {
if (transformer.appliesTo(type)) {
transformer.transform(transformationContext);
}
}
return transformationContext.getQualifiers();
} | transformer.transform(transformationContext); | public Set<AnnotationInstance> applyTransformers(Type type, AnnotationTarget target, Set<AnnotationInstance> qualifiers) {
if (transformers.isEmpty()) {
return qualifiers;
}
TransformationContextImpl transformationContext = new TransformationContextImpl(target, qualifiers,
annotationStore);
for (InjectionPointsTransformer transformer : transformers) {
if (transformer.appliesTo(type)) {
transformer.transform(transformationContext);
}
}
return transformationContext.getQualifiers();
} | class InjectionPointModifier {
private List<InjectionPointsTransformer> transformers;
private BuildExtension.BuildContext buildContext;
private AnnotationStore annotationStore;
InjectionPointModifier(List<InjectionPointsTransformer> tranformers, BuildExtension.BuildContext buildContext) {
this.buildContext = buildContext;
this.transformers = tranformers;
this.annotationStore = buildContext != null ? buildContext.get(BuildExtension.Key.ANNOTATION_STORE) : null;
}
class TransformationContextImpl implements InjectionPointsTransformer.TransformationContext {
private AnnotationTarget target;
private Set<AnnotationInstance> qualifiers;
private AnnotationStore annotationStore;
TransformationContextImpl(AnnotationTarget target, Set<AnnotationInstance> qualifiers,
AnnotationStore annotationStore) {
this.target = target;
this.qualifiers = qualifiers;
this.annotationStore = annotationStore;
}
@Override
public AnnotationTarget getTarget() {
return target;
}
@Override
public Set<AnnotationInstance> getQualifiers() {
return qualifiers;
}
@Override
public Collection<AnnotationInstance> getAllAnnotations() {
if (annotationStore == null) {
throw new IllegalStateException(
"Attempted to use TransformationContext
}
return annotationStore.getAnnotations(getTarget());
}
@Override
public InjectionPointsTransformer.Transformation transform() {
return new InjectionPointsTransformer.Transformation(this);
}
@Override
public <V> V get(BuildExtension.Key<V> key) {
return buildContext.get(key);
}
@Override
public <V> V put(BuildExtension.Key<V> key, V value) {
return buildContext.put(key, value);
}
public void setQualifiers(Set<AnnotationInstance> qualifiers) {
this.qualifiers = qualifiers;
}
}
} | class InjectionPointModifier {
private List<InjectionPointsTransformer> transformers;
private BuildExtension.BuildContext buildContext;
private AnnotationStore annotationStore;
InjectionPointModifier(List<InjectionPointsTransformer> transformers, BuildExtension.BuildContext buildContext) {
this.buildContext = buildContext;
this.transformers = transformers;
this.annotationStore = buildContext != null ? buildContext.get(BuildExtension.Key.ANNOTATION_STORE) : null;
}
class TransformationContextImpl implements InjectionPointsTransformer.TransformationContext {
private AnnotationTarget target;
private Set<AnnotationInstance> qualifiers;
private AnnotationStore annotationStore;
TransformationContextImpl(AnnotationTarget target, Set<AnnotationInstance> qualifiers,
AnnotationStore annotationStore) {
this.target = target;
this.qualifiers = qualifiers;
this.annotationStore = annotationStore;
}
@Override
public AnnotationTarget getTarget() {
return target;
}
@Override
public Set<AnnotationInstance> getQualifiers() {
return qualifiers;
}
@Override
public Collection<AnnotationInstance> getAllAnnotations() {
if (annotationStore == null) {
throw new IllegalStateException(
"Attempted to use TransformationContext
}
return annotationStore.getAnnotations(getTarget());
}
@Override
public InjectionPointsTransformer.Transformation transform() {
return new InjectionPointsTransformer.Transformation(this);
}
@Override
public <V> V get(BuildExtension.Key<V> key) {
return buildContext.get(key);
}
@Override
public <V> V put(BuildExtension.Key<V> key, V value) {
return buildContext.put(key, value);
}
public void setQualifiers(Set<AnnotationInstance> qualifiers) {
this.qualifiers = qualifiers;
}
}
} |
Nothing else @menghaoranss. Will make that commit soon | public void watch(final String key, final DataChangedEventListener dataChangedEventListener) {
Watch.Listener listener = Watch.listener(response -> {
for (WatchEvent each : response.getEvents()) {
ChangedType changedType = getEventChangedType(each);
if (ChangedType.IGNORED != changedType) {
dataChangedEventListener.onChange(new DataChangedEvent(each.getKeyValue().getKey().toString(StandardCharsets.UTF_8)
, each.getKeyValue().getValue().toString(StandardCharsets.UTF_8), changedType));
}
}
});
client.getWatchClient().watch(ByteSequence.from(key, StandardCharsets.UTF_8), listener);
} | , each.getKeyValue().getValue().toString(StandardCharsets.UTF_8), changedType)); | public void watch(final String key, final DataChangedEventListener dataChangedEventListener) {
Watch.Listener listener = Watch.listener(response -> {
for (WatchEvent each : response.getEvents()) {
ChangedType changedType = getEventChangedType(each);
if (ChangedType.IGNORED != changedType) {
dataChangedEventListener.onChange(new DataChangedEvent(each.getKeyValue().getKey().toString(StandardCharsets.UTF_8),
each.getKeyValue().getValue().toString(StandardCharsets.UTF_8), changedType));
}
}
});
client.getWatchClient().watch(ByteSequence.from(key, StandardCharsets.UTF_8), listener);
} | class EtcdRepository implements ConfigurationRepository, RegistryRepository {
private Client client;
@Getter
@Setter
private Properties props = new Properties();
private EtcdProperties etcdProperties;
@Override
public void init(final String name, final GovernanceCenterConfiguration config) {
etcdProperties = new EtcdProperties(props);
client = Client.builder().endpoints(Util.toURIs(Splitter.on(",").trimResults().splitToList(config.getServerLists()))).namespace(ByteSequence.from(name, StandardCharsets.UTF_8)).build();
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public String get(final String key) {
List<KeyValue> keyValues = client.getKVClient().get(ByteSequence.from(key, StandardCharsets.UTF_8)).get().getKvs();
return keyValues.isEmpty() ? null : keyValues.iterator().next().getValue().toString(StandardCharsets.UTF_8);
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public List<String> getChildrenKeys(final String key) {
String prefix = key + PATH_SEPARATOR;
ByteSequence prefixByteSequence = ByteSequence.from(prefix, StandardCharsets.UTF_8);
GetOption getOption = GetOption.newBuilder().withPrefix(prefixByteSequence).withSortField(GetOption.SortTarget.KEY).withSortOrder(GetOption.SortOrder.ASCEND).build();
List<KeyValue> keyValues = client.getKVClient().get(prefixByteSequence, getOption).get().getKvs();
return keyValues.stream().map(e -> getSubNodeKeyName(prefix, e.getKey().toString(StandardCharsets.UTF_8))).distinct().collect(Collectors.toList());
}
private String getSubNodeKeyName(final String prefix, final String fullPath) {
String pathWithoutPrefix = fullPath.substring(prefix.length());
return pathWithoutPrefix.contains(PATH_SEPARATOR) ? pathWithoutPrefix.substring(0, pathWithoutPrefix.indexOf(PATH_SEPARATOR)) : pathWithoutPrefix;
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public void persist(final String key, final String value) {
client.getKVClient().put(ByteSequence.from(key, StandardCharsets.UTF_8), ByteSequence.from(value, StandardCharsets.UTF_8)).get();
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public void persistEphemeral(final String key, final String value) {
long leaseId = client.getLeaseClient().grant(etcdProperties.getValue(EtcdPropertyKey.TIME_TO_LIVE_SECONDS)).get().getID();
client.getLeaseClient().keepAlive(leaseId, Observers.observer(response -> { }));
client.getKVClient().put(ByteSequence.from(key, StandardCharsets.UTF_8), ByteSequence.from(value, StandardCharsets.UTF_8), PutOption.newBuilder().withLeaseId(leaseId).build()).get();
}
@Override
public void delete(final String key) {
client.getKVClient().delete(ByteSequence.from(key, StandardCharsets.UTF_8));
}
@Override
private ChangedType getEventChangedType(final WatchEvent event) {
switch (event.getEventType()) {
case PUT:
return ChangedType.UPDATED;
case DELETE:
return ChangedType.DELETED;
default:
return ChangedType.IGNORED;
}
}
@Override
public void close() {
client.close();
}
@Override
public String getType() {
return "etcd";
}
} | class EtcdRepository implements ConfigurationRepository, RegistryRepository {
private Client client;
@Getter
@Setter
private Properties props = new Properties();
private EtcdProperties etcdProperties;
@Override
public void init(final String name, final GovernanceCenterConfiguration config) {
etcdProperties = new EtcdProperties(props);
client = Client.builder().endpoints(Util.toURIs(Splitter.on(",").trimResults().splitToList(config.getServerLists()))).namespace(ByteSequence.from(name, StandardCharsets.UTF_8)).build();
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public String get(final String key) {
List<KeyValue> keyValues = client.getKVClient().get(ByteSequence.from(key, StandardCharsets.UTF_8)).get().getKvs();
return keyValues.isEmpty() ? null : keyValues.iterator().next().getValue().toString(StandardCharsets.UTF_8);
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public List<String> getChildrenKeys(final String key) {
String prefix = key + PATH_SEPARATOR;
ByteSequence prefixByteSequence = ByteSequence.from(prefix, StandardCharsets.UTF_8);
GetOption getOption = GetOption.newBuilder().withPrefix(prefixByteSequence).withSortField(GetOption.SortTarget.KEY).withSortOrder(GetOption.SortOrder.ASCEND).build();
List<KeyValue> keyValues = client.getKVClient().get(prefixByteSequence, getOption).get().getKvs();
return keyValues.stream().map(e -> getSubNodeKeyName(prefix, e.getKey().toString(StandardCharsets.UTF_8))).distinct().collect(Collectors.toList());
}
private String getSubNodeKeyName(final String prefix, final String fullPath) {
String pathWithoutPrefix = fullPath.substring(prefix.length());
return pathWithoutPrefix.contains(PATH_SEPARATOR) ? pathWithoutPrefix.substring(0, pathWithoutPrefix.indexOf(PATH_SEPARATOR)) : pathWithoutPrefix;
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public void persist(final String key, final String value) {
client.getKVClient().put(ByteSequence.from(key, StandardCharsets.UTF_8), ByteSequence.from(value, StandardCharsets.UTF_8)).get();
}
@SneakyThrows({InterruptedException.class, ExecutionException.class})
@Override
public void persistEphemeral(final String key, final String value) {
long leaseId = client.getLeaseClient().grant(etcdProperties.getValue(EtcdPropertyKey.TIME_TO_LIVE_SECONDS)).get().getID();
client.getLeaseClient().keepAlive(leaseId, Observers.observer(response -> { }));
client.getKVClient().put(ByteSequence.from(key, StandardCharsets.UTF_8), ByteSequence.from(value, StandardCharsets.UTF_8), PutOption.newBuilder().withLeaseId(leaseId).build()).get();
}
@Override
public void delete(final String key) {
client.getKVClient().delete(ByteSequence.from(key, StandardCharsets.UTF_8));
}
@Override
private ChangedType getEventChangedType(final WatchEvent event) {
switch (event.getEventType()) {
case PUT:
return ChangedType.UPDATED;
case DELETE:
return ChangedType.DELETED;
default:
return ChangedType.IGNORED;
}
}
@Override
public void close() {
client.close();
}
@Override
public String getType() {
return "etcd";
}
} |
If a ballerina test fails it will anyway have the mentioned text in the program output. But, if it fails due to a compilation error, this fails to capture that. So, I'll add an else statement to capture unforeseen errors. | public static void assertForTestFailures(String programOutput, String errMessage) throws BallerinaTestException {
if (programOutput.contains("error: there are test failures")) {
throw new BallerinaTestException("Test failed due to " + errMessage + " in test framework");
}
} | if (programOutput.contains("error: there are test failures")) { | public static void assertForTestFailures(String programOutput, String errMessage) throws BallerinaTestException {
if (programOutput.contains("error: there are test failures")) {
throw new BallerinaTestException("Test failed due to " + errMessage + " in test framework");
} else if (programOutput.contains("error: compilation contains errors")) {
throw new BallerinaTestException("Test failed due to a compilation error with following output\n" +
programOutput);
}
} | class AssertionUtils {
} | class AssertionUtils {
} |
```suggestion // as "." are ignored. This is to be consistent with the "ballerina test" command, which only executes tests ``` | public void execute(BuildContext buildContext) {
Path sourceRootPath = buildContext.get(BuildContextField.SOURCE_ROOT);
Map<BLangPackage, TestarinaClassLoader> programFileMap = new HashMap<>();
List<BLangPackage> moduleBirMap = buildContext.getModules();
for (BLangPackage bLangPackage : moduleBirMap) {
PackageID packageID = bLangPackage.packageID;
if (!buildContext.moduleDependencyPathMap.containsKey(packageID)) {
continue;
}
Path jarPath = buildContext.getTestJarPathFromTargetCache(packageID);
Path modulejarPath = buildContext.getJarPathFromTargetCache(packageID);
if (Files.notExists(jarPath)) {
jarPath = modulejarPath;
}
HashSet<Path> moduleDependencies = buildContext.moduleDependencyPathMap.get(packageID).platformLibs;
HashSet<Path> dependencyJarPaths = new HashSet<>(moduleDependencies);
if (bLangPackage.containsTestablePkg()) {
for (BLangTestablePackage testablePackage : bLangPackage.getTestablePkgs()) {
updateDependencyJarPaths(testablePackage.symbol.imports, buildContext, dependencyJarPaths);
}
}
TestarinaClassLoader classLoader = new TestarinaClassLoader(jarPath, dependencyJarPaths);
programFileMap.put(bLangPackage, classLoader);
}
if (programFileMap.size() > 0) {
TesterinaUtils.listTestGroups(sourceRootPath, programFileMap, buildContext.out(), buildContext.err());
}
} | public void execute(BuildContext buildContext) {
Path sourceRootPath = buildContext.get(BuildContextField.SOURCE_ROOT);
Map<BLangPackage, TestarinaClassLoader> programFileMap = new HashMap<>();
List<BLangPackage> moduleBirMap = buildContext.getModules();
for (BLangPackage bLangPackage : moduleBirMap) {
PackageID packageID = bLangPackage.packageID;
if (!buildContext.moduleDependencyPathMap.containsKey(packageID)) {
continue;
}
Path jarPath = buildContext.getTestJarPathFromTargetCache(packageID);
Path modulejarPath = buildContext.getJarPathFromTargetCache(packageID);
if (Files.notExists(jarPath)) {
jarPath = modulejarPath;
}
HashSet<Path> moduleDependencies = buildContext.moduleDependencyPathMap.get(packageID).platformLibs;
HashSet<Path> dependencyJarPaths = new HashSet<>(moduleDependencies);
if (bLangPackage.containsTestablePkg()) {
for (BLangTestablePackage testablePackage : bLangPackage.getTestablePkgs()) {
updateDependencyJarPaths(testablePackage.symbol.imports, buildContext, dependencyJarPaths);
}
}
TestarinaClassLoader classLoader = new TestarinaClassLoader(jarPath, dependencyJarPaths);
programFileMap.put(bLangPackage, classLoader);
}
if (programFileMap.size() > 0) {
TesterinaUtils.listTestGroups(sourceRootPath, programFileMap, buildContext.out(), buildContext.err());
}
} | class ListTestGroupsTask implements Task {
@Override
} | class ListTestGroupsTask implements Task {
@Override
} | |
Normally, a Context, as the name suggests, does not perform actions | public Collection<Integer> transform(Transformation<?> transformation) {
return streamGraphGenerator.transform(transformation);
} | return streamGraphGenerator.transform(transformation); | public Collection<Integer> transform(Transformation<?> transformation) {
return streamGraphGenerator.transform(transformation);
} | class ContextImpl implements TransformationTranslator.Context {
private final StreamGraphGenerator streamGraphGenerator;
private final StreamGraph streamGraph;
private final String slotSharingGroup;
private final ReadableConfig config;
/**
 * Creates a translation context bound to a single transformation.
 *
 * @param streamGraphGenerator generator owning the overall translation run
 * @param streamGraph          graph being populated during translation
 * @param slotSharingGroup     slot sharing group determined for the transformation
 * @param config               configuration visible to the translator
 */
public ContextImpl(
final StreamGraphGenerator streamGraphGenerator,
final StreamGraph streamGraph,
final String slotSharingGroup,
final ReadableConfig config) {
// All collaborators are mandatory; fail fast on null.
this.streamGraphGenerator = checkNotNull(streamGraphGenerator);
this.streamGraph = checkNotNull(streamGraph);
this.slotSharingGroup = checkNotNull(slotSharingGroup);
this.config = checkNotNull(config);
}
@Override
public StreamGraph getStreamGraph() {
    // Expose the graph currently being built by this translation run.
    return this.streamGraph;
}
@Override
public Collection<Integer> getStreamNodeIds(final Transformation<?> transformation) {
    checkNotNull(transformation);
    // A parent transformation must already have been translated before its
    // stream-node ids can be queried.
    final Collection<Integer> transformedIds =
            streamGraphGenerator.alreadyTransformed.get(transformation);
    if (transformedIds == null) {
        throw new IllegalStateException(
                "Parent transformation \"" + transformation + "\" has not been transformed.");
    }
    return transformedIds;
}
@Override
public String getSlotSharingGroup() {
    // Precomputed by the generator for the transformation being translated.
    return this.slotSharingGroup;
}
@Override
public long getDefaultBufferTimeout() {
    // Delegates to the generator-wide default buffer timeout.
    return this.streamGraphGenerator.defaultBufferTimeout;
}
@Override
public ReadableConfig getGraphGeneratorConfig() {
    // Read-only view of the generator configuration.
    return this.config;
}
@Override
} | class ContextImpl implements TransformationTranslator.Context {
private final StreamGraphGenerator streamGraphGenerator;
private final StreamGraph streamGraph;
private final String slotSharingGroup;
private final ReadableConfig config;
public ContextImpl(
final StreamGraphGenerator streamGraphGenerator,
final StreamGraph streamGraph,
final String slotSharingGroup,
final ReadableConfig config) {
this.streamGraphGenerator = checkNotNull(streamGraphGenerator);
this.streamGraph = checkNotNull(streamGraph);
this.slotSharingGroup = checkNotNull(slotSharingGroup);
this.config = checkNotNull(config);
}
@Override
public StreamGraph getStreamGraph() {
return streamGraph;
}
@Override
public Collection<Integer> getStreamNodeIds(final Transformation<?> transformation) {
checkNotNull(transformation);
final Collection<Integer> ids =
streamGraphGenerator.alreadyTransformed.get(transformation);
checkState(
ids != null,
"Parent transformation \"" + transformation + "\" has not been transformed.");
return ids;
}
@Override
public String getSlotSharingGroup() {
return slotSharingGroup;
}
@Override
public long getDefaultBufferTimeout() {
return streamGraphGenerator.defaultBufferTimeout;
}
@Override
public ReadableConfig getGraphGeneratorConfig() {
return config;
}
@Override
} |
Would it be possible to add a test case for the backward-compatible code path? | public Params decode(InputStream inStream) throws IOException {
String prefix = STRING_CODER.decode(inStream);
String shardTemplate = STRING_CODER.decode(inStream);
String suffix = STRING_CODER.decode(inStream);
ResourceId baseFilename;
if (inStream.available() > 0) {
baseFilename = FileSystems.matchNewResource(prefix, BOOLEAN_CODER.decode(inStream));
} else {
baseFilename = FileBasedSink.convertToFileResourceIfPossible(prefix);
}
return new Params()
.withBaseFilename(baseFilename)
.withShardTemplate(shardTemplate)
.withSuffix(suffix);
} | baseFilename = FileBasedSink.convertToFileResourceIfPossible(prefix); | public Params decode(InputStream inStream) throws IOException {
ResourceId prefix =
FileBasedSink.convertToFileResourceIfPossible(STRING_CODER.decode(inStream));
String shardTemplate = STRING_CODER.decode(inStream);
String suffix = STRING_CODER.decode(inStream);
return new Params()
.withBaseFilename(prefix)
.withShardTemplate(shardTemplate)
.withSuffix(suffix);
} | class ParamsCoder extends AtomicCoder<Params> {
private static final ParamsCoder INSTANCE = new ParamsCoder();
private static final Coder<String> STRING_CODER = StringUtf8Coder.of();
private static final Coder<Boolean> BOOLEAN_CODER = BooleanCoder.of();
/** Returns the shared, stateless {@code ParamsCoder} instance. */
public static ParamsCoder of() {
    return ParamsCoder.INSTANCE;
}
/**
 * Serializes {@code value} as: base filename string, shard template, suffix, then a
 * trailing isDirectory flag that lets {@code decode} rebuild the {@code ResourceId}
 * exactly.
 *
 * @throws CoderException if {@code value} is null
 * @throws IOException on write failure
 */
@Override
public void encode(Params value, OutputStream outStream) throws IOException {
if (value == null) {
throw new CoderException("cannot encode a null value");
}
STRING_CODER.encode(value.baseFilename.get().toString(), outStream);
STRING_CODER.encode(value.shardTemplate, outStream);
STRING_CODER.encode(value.suffix, outStream);
// Written last so older payloads (without the flag) remain readable by decode().
BOOLEAN_CODER.encode(value.baseFilename.get().isDirectory(), outStream);
}
@Override
} | class ParamsCoder extends AtomicCoder<Params> {
private static final ParamsCoder INSTANCE = new ParamsCoder();
private static final Coder<String> STRING_CODER = StringUtf8Coder.of();
public static ParamsCoder of() {
return INSTANCE;
}
@Override
public void encode(Params value, OutputStream outStream) throws IOException {
if (value == null) {
throw new CoderException("cannot encode a null value");
}
STRING_CODER.encode(value.baseFilename.get().toString(), outStream);
STRING_CODER.encode(value.shardTemplate, outStream);
STRING_CODER.encode(value.suffix, outStream);
}
@Override
} |
I think we can log an error via dlog here, since the rest param can only be of an array type? | public void visit(BLangTupleVarRef varRefExpr) {
List<BType> results = new ArrayList<>();
for (int i = 0; i < varRefExpr.expressions.size(); i++) {
((BLangVariableReference) varRefExpr.expressions.get(i)).lhsVar = true;
results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType));
}
BTupleType actualType = new BTupleType(results);
if (varRefExpr.restParam != null) {
BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
((BLangVariableReference) restExpr).lhsVar = true;
BType checkedType = checkExpr(restExpr, env, symTable.noType);
if (checkedType.tag == TypeTags.ARRAY) {
actualType.restType = ((BArrayType) checkedType).eType;
} else {
actualType.restType = checkedType;
}
}
resultType = types.checkType(varRefExpr, actualType, expType);
} | actualType.restType = checkedType; | public void visit(BLangTupleVarRef varRefExpr) {
List<BType> results = new ArrayList<>();
for (int i = 0; i < varRefExpr.expressions.size(); i++) {
((BLangVariableReference) varRefExpr.expressions.get(i)).lhsVar = true;
results.add(checkExpr(varRefExpr.expressions.get(i), env, symTable.noType));
}
BTupleType actualType = new BTupleType(results);
if (varRefExpr.restParam != null) {
BLangExpression restExpr = (BLangExpression) varRefExpr.restParam;
((BLangVariableReference) restExpr).lhsVar = true;
BType checkedType = checkExpr(restExpr, env, symTable.noType);
if (checkedType.tag != TypeTags.ARRAY) {
dlog.error(varRefExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_REST_DESCRIPTOR, checkedType);
resultType = symTable.semanticError;
return;
}
actualType.restType = ((BArrayType) checkedType).eType;
}
resultType = types.checkType(varRefExpr, actualType, expType);
} | class TypeChecker extends BLangNodeVisitor {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY =
new CompilerContext.Key<>();
private static final String TABLE_TNAME = "table";
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private Types types;
private BLangDiagnosticLog dlog;
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;
/**
* Expected types or inherited types.
*/
private BType expType;
private BType resultType;
private DiagnosticCode diagCode;
/** Returns the per-{@code CompilerContext} type checker, creating it on first use. */
public static TypeChecker getInstance(CompilerContext context) {
    // The constructor registers itself in the context, so a cache miss here
    // allocates at most once per CompilerContext.
    TypeChecker cached = context.get(TYPE_CHECKER_KEY);
    return cached != null ? cached : new TypeChecker(context);
}
/** Creates the type checker for {@code context} and resolves its collaborators. */
public TypeChecker(CompilerContext context) {
// Register first so any getInstance(context) lookups triggered while the
// collaborators below are constructed resolve to this instance.
context.put(TYPE_CHECKER_KEY, this);
this.names = Names.getInstance(context);
this.symTable = SymbolTable.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.types = Types.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.typeNarrower = TypeNarrower.getInstance(context);
this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
}
/** Type checks {@code expr} in {@code env} with no particular expected type. */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
return checkExpr(expr, env, symTable.noType);
}
/** Type checks {@code expr} against {@code expType}, reporting mismatches with the default incompatible-types diagnostic. */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES);
}
/**
 * Type checks every expression in {@code exprs} against the same expected type.
 *
 * @param exprs   expressions to analyze
 * @param env     symbol environment to check in
 * @param expType expected type for each expression
 * @return the actual types, positionally matching {@code exprs}
 */
public List<BType> checkExprs(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> actualTypes = new ArrayList<>(exprs.size());
    for (int i = 0; i < exprs.size(); i++) {
        actualTypes.add(checkExpr(exprs.get(i), env, expType));
    }
    return actualTypes;
}
/**
 * Core type-check entry point: checks {@code expr} against {@code expType}, reporting
 * mismatches with {@code diagCode}. The checker's mutable env/expType/diagCode fields
 * are saved and restored around the visit so nested checks do not leak state.
 *
 * @return the checked (actual) type of the expression
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
if (expr.typeChecked) {
// Already checked on an earlier pass; reuse the cached type.
return expr.type;
}
// Save checker state so this call is re-entrant.
SymbolEnv prevEnv = this.env;
BType preExpType = this.expType;
DiagnosticCode preDiagCode = this.diagCode;
this.env = env;
this.diagCode = diagCode;
this.expType = expType;
this.isTypeChecked = true;
// Dispatch to the matching visit(...) overload, which sets resultType.
expr.accept(this);
expr.type = resultType;
expr.typeChecked = isTypeChecked;
// Restore the previous state.
this.env = prevEnv;
this.expType = preExpType;
this.diagCode = preDiagCode;
return resultType;
}
/**
 * Type checks a literal. Finite-type matches and semantic errors are fully handled by
 * setLiteralValueAndGetType; only the remaining cases need the ordinary type check.
 */
public void visit(BLangLiteral literalExpr) {
    BType inferredType = setLiteralValueAndGetType(literalExpr, expType);
    boolean alreadyHandled = inferredType == symTable.semanticError || literalExpr.isFiniteContext;
    if (!alreadyHandled) {
        resultType = types.checkType(literalExpr, inferredType, expType);
    }
}
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
BType literalType = symTable.getTypeFromTag(literalExpr.type.tag);
Object literalValue = literalExpr.value;
literalExpr.isJSONContext = types.isJSONContext(expType);
if (literalType.tag == TypeTags.INT) {
if (expType.tag == TypeTags.FLOAT) {
literalType = symTable.floatType;
literalExpr.value = ((Long) literalValue).doubleValue();
} else if (expType.tag == TypeTags.DECIMAL) {
literalType = symTable.decimalType;
literalExpr.value = String.valueOf(literalValue);
} else if (expType.tag == TypeTags.BYTE) {
if (!types.isByteLiteralValue((Long) literalValue)) {
dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType);
resultType = symTable.semanticError;
return resultType;
}
literalType = symTable.byteType;
} else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
if (memberTypes.stream()
.anyMatch(memType -> memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) {
return setLiteralValueAndGetType(literalExpr, symTable.intType);
}
BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
return setLiteralValueAndGetType(literalExpr, symTable.byteType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
return setLiteralValueAndGetType(literalExpr, symTable.floatType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
}
finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
}
} else if (literalType.tag == TypeTags.FLOAT) {
String literal = String.valueOf(literalValue);
String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);
if (expType.tag == TypeTags.DECIMAL) {
if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.floatType);
resultType = symTable.semanticError;
return resultType;
}
literalType = symTable.decimalType;
literalExpr.value = numericLiteral;
} else if (expType.tag == TypeTags.FLOAT) {
literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
} else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
} else if (!isDiscriminatedFloat
&& literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
if (unionMember != symTable.noType) {
return unionMember;
}
}
} else if (literalType.tag == TypeTags.DECIMAL) {
return decimalLiteral(literalValue, literalExpr, expType);
} else {
if (this.expType.tag == TypeTags.FINITE) {
boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType);
return literalType;
}
} else if (this.expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) this.expType;
boolean foundMember = unionType.getMemberTypes()
.stream()
.anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
if (foundMember) {
setLiteralValueForFiniteType(literalExpr, literalType);
return literalType;
}
}
}
if (literalExpr.type.tag == TypeTags.BYTE_ARRAY) {
literalType = new BArrayType(symTable.byteType);
}
return literalType;
}
/**
 * Tries to match {@code literalExpr} against a member of the union {@code expType}:
 * first a member of the desired tag (or json/anydata/any), then finite-type members
 * holding float values, then decimal members, then finite-type members holding decimal
 * values. Returns symTable.noType when nothing matched.
 */
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
Set<BType> memberTypes = expType.getMemberTypes();
if (memberTypes.stream()
.anyMatch(memType -> memType.tag == desiredType.tag
|| memType.tag == TypeTags.JSON
|| memType.tag == TypeTags.ANYDATA
|| memType.tag == TypeTags.ANY)) {
return setLiteralValueAndGetType(literalExpr, desiredType);
}
// NOTE(review): this first finite-type probe is hard-coded to float even though
// desiredType is a parameter — presumably deliberate float-before-decimal
// precedence, but confirm it is not a copy-paste slip.
BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
}
finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
if (finiteType != symTable.semanticError) {
BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
if (literalExpr.isFiniteContext) {
return setType;
}
}
return symTable.noType;
}
/**
 * Returns true if the finite type contains at least one value of the target tag to
 * which {@code literalExpr} is assignable.
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression memberValue : finiteType.valueSpace) {
        if (memberValue.type.tag == targetMemberTypeTag
                && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) memberValue, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Types a decimal literal against the expected type: rejects decimal-discriminated
 * literals where a float is expected, matches finite and union expected types, and
 * otherwise defaults to decimal with the discriminator suffix stripped from the value.
 */
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
String literal = String.valueOf(literalValue);
if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
// e.g. a `d`-suffixed literal can never be a float.
dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType);
resultType = symTable.semanticError;
return resultType;
}
if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
BFiniteType finiteType = (BFiniteType) expType;
if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
setLiteralValueForFiniteType(literalExpr, valueType);
return valueType;
}
} else if (expType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) expType;
BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
if (unionMember != symTable.noType) {
return unionMember;
}
}
// Default: treat as a plain decimal; drop any discriminator suffix from the stored value.
literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
resultType = symTable.decimalType;
return symTable.decimalType;
}
/**
 * Marks {@code literalExpr} as matched against a finite-type value: installs an
 * implicit cast to the surrounding expected type, records {@code type} as the result,
 * and flags the literal as belonging to a finite-type context.
 */
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
types.setImplicitCastExpr(literalExpr, type, this.expType);
this.resultType = type;
literalExpr.isFiniteContext = true;
}
/**
 * Collects, from the finite members of {@code unionType}, every value whose type tag
 * matches {@code matchType}, and wraps them in a new unnamed finite type. Returns
 * symTable.semanticError when the union has no finite members or no values match.
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
List<BFiniteType> finiteTypeMembers = unionType.getMemberTypes().stream()
.filter(memType -> memType.tag == TypeTags.FINITE)
.map(memFiniteType -> (BFiniteType) memFiniteType)
.collect(Collectors.toList());
if (finiteTypeMembers.isEmpty()) {
return symTable.semanticError;
}
int tag = matchType.tag;
Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
for (BFiniteType finiteType : finiteTypeMembers) {
// NOTE(review): Collectors.toSet() is unordered, so value order within each finite
// member is not preserved before insertion into the LinkedHashSet — confirm that
// value-space ordering is not significant.
matchedValueSpace.addAll(finiteType.valueSpace.stream()
.filter(expression -> expression.type.tag == tag)
.collect(Collectors.toSet()));
}
if (matchedValueSpace.isEmpty()) {
return symTable.semanticError;
}
return new BFiniteType(null, matchedValueSpace);
}
/**
 * Type checks a table literal: validates its columns against the expected table's
 * constraint record and checks each data row against that constraint.
 * NOTE(review): expType is cast to BTableType unconditionally once it is not a
 * semantic error — presumably callers only reach here with a table expected type; a
 * non-table expType would throw a ClassCastException. Confirm against call sites.
 */
public void visit(BLangTableLiteral tableLiteral) {
if (expType.tag == symTable.semanticError.tag) {
return;
}
BType tableConstraint = ((BTableType) expType).getConstraint();
if (tableConstraint.tag == TypeTags.NONE) {
// Unconstrained tables cannot be constructed from a literal.
dlog.error(tableLiteral.pos, DiagnosticCode.TABLE_CANNOT_BE_CREATED_WITHOUT_CONSTRAINT);
return;
}
validateTableColumns(tableConstraint, tableLiteral);
// Each data row must conform to the constraint record type.
checkExprs(tableLiteral.tableDataRows, this.env, tableConstraint);
resultType = types.checkType(tableLiteral, expType, symTable.noType);
}
/**
 * Validates a table literal's columns against its constraint record: every record
 * field must have a table-compatible type (with an extra restriction on array element
 * types), every declared column must exist in the record, and primary-key columns must
 * be int or string.
 */
private void validateTableColumns(BType tableConstraint, BLangTableLiteral tableLiteral) {
if (tableConstraint.tag != TypeTags.SEMANTIC_ERROR) {
List<String> columnNames = new ArrayList<>();
for (BField field : ((BRecordType) tableConstraint).fields) {
columnNames.add(field.getName().getValue());
// Only these scalar/structured types may back a table column.
if (!(field.type.tag == TypeTags.INT || field.type.tag == TypeTags.STRING ||
field.type.tag == TypeTags.FLOAT || field.type.tag == TypeTags.DECIMAL ||
field.type.tag == TypeTags.XML || field.type.tag == TypeTags.JSON ||
field.type.tag == TypeTags.BOOLEAN || field.type.tag == TypeTags.ARRAY)) {
dlog.error(tableLiteral.pos, DiagnosticCode.FIELD_NOT_ALLOWED_WITH_TABLE_COLUMN,
field.name.value, field.type);
}
// Array columns are further restricted to simple element types.
if (field.type.tag == TypeTags.ARRAY) {
BType arrayType = ((BArrayType) field.type).eType;
if (!(arrayType.tag == TypeTags.INT || arrayType.tag == TypeTags.FLOAT ||
arrayType.tag == TypeTags.DECIMAL || arrayType.tag == TypeTags.STRING ||
arrayType.tag == TypeTags.BOOLEAN || arrayType.tag == TypeTags.BYTE)) {
dlog.error(tableLiteral.pos, DiagnosticCode.FIELD_NOT_ALLOWED_WITH_TABLE_COLUMN,
field.name.value, field.type);
}
}
}
for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
// Every declared column must map to a field of the constraint record.
boolean contains = columnNames.contains(column.columnName);
if (!contains) {
dlog.error(column.pos, DiagnosticCode.UNDEFINED_TABLE_COLUMN, column.columnName, tableConstraint);
}
// Primary keys are limited to int and string fields.
if (column.flagSet.contains(TableColumnFlag.PRIMARYKEY)) {
for (BField field : ((BRecordType) tableConstraint).fields) {
if (field.name.value.equals(column.columnName)) {
if (!(field.type.tag == TypeTags.INT || field.type.tag == TypeTags.STRING)) {
dlog.error(column.pos, DiagnosticCode.TYPE_NOT_ALLOWED_WITH_PRIMARYKEY,
column.columnName, field.type);
}
break;
}
}
}
}
}
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
BType actualType = symTable.semanticError;
if ((expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA || expType.tag == TypeTags.NONE)
&& listConstructor.exprs.isEmpty()) {
dlog.error(listConstructor.pos, DiagnosticCode.INVALID_LIST_CONSTRUCTOR, expType);
resultType = symTable.semanticError;
return;
}
int expTypeTag = expType.tag;
if (expTypeTag == TypeTags.JSON) {
checkExprs(listConstructor.exprs, this.env, expType);
actualType = expType;
} else if (expTypeTag == TypeTags.ARRAY) {
BArrayType arrayType = (BArrayType) expType;
if (arrayType.state == BArrayState.OPEN_SEALED) {
arrayType.size = listConstructor.exprs.size();
arrayType.state = BArrayState.CLOSED_SEALED;
} else if (arrayType.state != BArrayState.UNSEALED && arrayType.size != listConstructor.exprs.size()) {
dlog.error(listConstructor.pos,
DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size());
resultType = symTable.semanticError;
return;
}
checkExprs(listConstructor.exprs, this.env, arrayType.eType);
actualType = arrayType;
} else if (expTypeTag == TypeTags.UNION) {
Set<BType> expTypes = ((BUnionType) expType).getMemberTypes();
List<BType> matchedTypeList = expTypes.stream()
.filter(type -> type.tag == TypeTags.ARRAY || type.tag == TypeTags.TUPLE)
.collect(Collectors.toList());
if (matchedTypeList.isEmpty()) {
dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
} else if (matchedTypeList.size() == 1) {
actualType = matchedTypeList.get(0);
if (actualType.tag == TypeTags.ARRAY) {
checkExprs(listConstructor.exprs, this.env, ((BArrayType) actualType).eType);
} else {
BTupleType tupleType = (BTupleType) actualType;
List<BType> results = new ArrayList<>();
BType restType = null;
for (int i = 0; i < listConstructor.exprs.size(); i++) {
BType expType, actType;
if (i < tupleType.tupleTypes.size()) {
expType = tupleType.tupleTypes.get(i);
actType = checkExpr(listConstructor.exprs.get(i), env, expType);
results.add(expType.tag != TypeTags.NONE ? expType : actType);
} else {
restType = checkExpr(listConstructor.exprs.get(i), env, tupleType.restType);
}
}
actualType = new BTupleType(results);
((BTupleType) actualType).restType = restType;
}
} else {
actualType = checkArrayLiteralExpr(listConstructor);
}
} else if (expTypeTag == TypeTags.TYPEDESC) {
List<BType> results = new ArrayList<>();
listConstructor.isTypedescExpr = true;
for (int i = 0; i < listConstructor.exprs.size(); i++) {
results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
}
List<BType> actualTypes = new ArrayList<>();
for (int i = 0; i < listConstructor.exprs.size(); i++) {
final BLangExpression expr = listConstructor.exprs.get(i);
if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
} else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
} else {
actualTypes.add(results.get(i));
}
}
if (actualTypes.size() == 1) {
listConstructor.typedescType = actualTypes.get(0);
} else {
listConstructor.typedescType = new BTupleType(actualTypes);
}
resultType = new BTypedescType(listConstructor.typedescType, null);
return;
} else if (expTypeTag == TypeTags.TUPLE) {
BTupleType tupleType = (BTupleType) this.expType;
if ((tupleType.restType != null && (tupleType.tupleTypes.size() > listConstructor.exprs.size()))
|| (tupleType.restType == null && tupleType.tupleTypes.size() != listConstructor.exprs.size())) {
dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR,
"tuple and expression size does not match");
return;
}
List<BType> expTypes = tupleType.tupleTypes;
List<BType> results = new ArrayList<>();
BType restType = null;
for (int i = 0; i < listConstructor.exprs.size(); i++) {
BType expType, actType;
if (i < expTypes.size()) {
expType = expTypes.get(i);
actType = checkExpr(listConstructor.exprs.get(i), env, expType);
results.add(expType.tag != TypeTags.NONE ? expType : actType);
} else {
restType = checkExpr(listConstructor.exprs.get(i), env, tupleType.restType);
}
}
actualType = new BTupleType(results);
((BTupleType) actualType).restType = restType;
} else if (listConstructor.exprs.size() > 1) {
List<BType> narrowTypes = new ArrayList<>();
for (int i = 0; i < listConstructor.exprs.size(); i++) {
narrowTypes.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
}
Set<BType> narrowTypesSet = new LinkedHashSet<>(narrowTypes);
LinkedHashSet<BType> broadTypesSet = new LinkedHashSet<>();
BType[] uniqueNarrowTypes = narrowTypesSet.toArray(new BType[0]);
BType broadType;
for (BType t1 : uniqueNarrowTypes) {
broadType = t1;
for (BType t2 : uniqueNarrowTypes) {
if (types.isAssignable(t2, t1)) {
broadType = t1;
} else if (types.isAssignable(t1, t2)) {
broadType = t2;
}
}
broadTypesSet.add(broadType);
}
BType eType;
if (broadTypesSet.size() > 1) {
eType = BUnionType.create(null, broadTypesSet);
} else {
eType = broadTypesSet.toArray(new BType[0])[0];
}
BArrayType arrayType = new BArrayType(eType);
checkExprs(listConstructor.exprs, this.env, arrayType.eType);
actualType = arrayType;
} else if (expTypeTag != TypeTags.SEMANTIC_ERROR) {
actualType = checkArrayLiteralExpr(listConstructor);
}
resultType = types.checkType(listConstructor, actualType, expType);
}
private BType checkArrayLiteralExpr(BLangListConstructorExpr listConstructorExpr) {
Set<BType> expTypes;
if (expType.tag == TypeTags.UNION) {
expTypes = ((BUnionType) expType).getMemberTypes();
} else {
expTypes = new LinkedHashSet<>();
expTypes.add(expType);
}
BType actualType = symTable.noType;
List<BType> listCompatibleTypes = new ArrayList<>();
for (BType type : expTypes) {
if (type.tag == TypeTags.TUPLE) {
BTupleType tupleType = (BTupleType) type;
if (checkTupleType(listConstructorExpr, tupleType)) {
listCompatibleTypes.add(tupleType);
}
} else {
BType[] uniqueExprTypes = checkArrayExpr(listConstructorExpr, this.env);
BType arrayLiteralType;
if (uniqueExprTypes.length == 0) {
arrayLiteralType = symTable.anyType;
} else if (uniqueExprTypes.length == 1) {
arrayLiteralType = uniqueExprTypes[0];
} else {
BType superType = uniqueExprTypes[0];
for (int i = 1; i < uniqueExprTypes.length; i++) {
if (types.isAssignable(superType, uniqueExprTypes[i])) {
superType = uniqueExprTypes[i];
} else if (!types.isAssignable(uniqueExprTypes[i], superType)) {
superType = symTable.anyType;
break;
}
}
arrayLiteralType = superType;
}
if (arrayLiteralType.tag != TypeTags.SEMANTIC_ERROR) {
if (type.tag == TypeTags.ARRAY && ((BArrayType) type).state != BArrayState.UNSEALED) {
actualType = new BArrayType(arrayLiteralType, null,
((BArrayType) type).state == BArrayState.CLOSED_SEALED
? listConstructorExpr.exprs.size() : ((BArrayType) type).size,
((BArrayType) type).state);
} else {
if (type.tag == TypeTags.ARRAY
&& types.isAssignable(arrayLiteralType, ((BArrayType) type).eType)) {
arrayLiteralType = ((BArrayType) type).eType;
}
actualType = new BArrayType(arrayLiteralType);
}
listCompatibleTypes.addAll(getListCompatibleTypes(type, actualType));
}
}
}
if (listCompatibleTypes.isEmpty()) {
dlog.error(listConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
actualType = symTable.semanticError;
} else if (listCompatibleTypes.size() > 1) {
dlog.error(listConstructorExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
actualType = symTable.semanticError;
} else if (listCompatibleTypes.get(0).tag == TypeTags.ANY) {
dlog.error(listConstructorExpr.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
actualType = symTable.semanticError;
} else if (listCompatibleTypes.get(0).tag == TypeTags.ARRAY) {
checkExpr(listConstructorExpr, this.env, listCompatibleTypes.get(0));
} else if (listCompatibleTypes.get(0).tag == TypeTags.TUPLE) {
actualType = listCompatibleTypes.get(0);
setTupleType(listConstructorExpr, actualType);
}
return actualType;
}
/**
 * Type checks every member of the list constructor with no expected type and returns
 * the distinct member types in first-seen order.
 */
private BType[] checkArrayExpr(BLangListConstructorExpr expr, SymbolEnv env) {
List<BType> types = new ArrayList<>();
// Temporarily switch the checker to the given env with no expected type,
// restoring the previous state afterwards.
SymbolEnv prevEnv = this.env;
BType preExpType = this.expType;
this.env = env;
this.expType = symTable.noType;
for (BLangExpression e : expr.exprs) {
e.accept(this);
types.add(resultType);
}
this.env = prevEnv;
this.expType = preExpType;
// De-duplicate while preserving insertion order.
LinkedHashSet<BType> typesSet = new LinkedHashSet<>(types);
return typesSet.toArray(new BType[0]);
}
/**
 * Checks whether {@code expression} conforms to {@code type}, recursing member-wise
 * when the expected type is a tuple and the expression is a list/tuple literal.
 * Non-tuple cases fall back to a regular type check of the expression.
 *
 * @param expression expression to validate
 * @param type       expected type
 * @return true if the expression conforms to the expected type
 */
private boolean checkTupleType(BLangExpression expression, BType type) {
    // Fix: parenthesize the expression-kind checks. Previously `&&` bound tighter than
    // `||`, so any TUPLE_LITERAL_EXPR entered this branch even when `type` was not a
    // tuple, and the `(BTupleType) type` cast below threw a ClassCastException.
    if (type.tag == TypeTags.TUPLE
            && (expression.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR
                    || expression.getKind() == NodeKind.TUPLE_LITERAL_EXPR)) {
        BTupleType tupleType = (BTupleType) type;
        BLangListConstructorExpr tupleExpr = (BLangListConstructorExpr) expression;
        // Without a rest type the member counts must match exactly.
        if (tupleType.restType == null && tupleType.tupleTypes.size() != tupleExpr.exprs.size()) {
            return false;
        }
        for (int i = 0; i < tupleExpr.exprs.size(); i++) {
            BLangExpression expr = tupleExpr.exprs.get(i);
            if (i < tupleType.tupleTypes.size()) {
                // Positional members are checked against the corresponding member type.
                if (!checkTupleType(expr, tupleType.tupleTypes.get(i))) {
                    return false;
                }
            } else {
                // Surplus members are only allowed when a rest type exists.
                if (tupleType.restType == null) {
                    return false;
                }
                if (!checkTupleType(expr, tupleType.restType)) {
                    return false;
                }
            }
        }
        return true;
    } else {
        return types.isAssignable(checkExpr(expression, env), type);
    }
}
/**
 * Records {@code type} on a list/tuple literal and recursively sets the member types;
 * any other expression is simply type checked.
 */
private void setTupleType(BLangExpression expression, BType type) {
    // Fix: parenthesize the expression-kind checks (same `&&`/`||` precedence defect as
    // checkTupleType). Previously a TUPLE_LITERAL_EXPR with a non-tuple expected type
    // entered this branch and the `(BTupleType) type` cast threw a ClassCastException.
    if (type.tag == TypeTags.TUPLE
            && (expression.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR
                    || expression.getKind() == NodeKind.TUPLE_LITERAL_EXPR)) {
        BTupleType tupleType = (BTupleType) type;
        BLangListConstructorExpr tupleExpr = (BLangListConstructorExpr) expression;
        tupleExpr.type = type;
        // Member types are only propagated when the arity matches exactly.
        if (tupleType.tupleTypes.size() == tupleExpr.exprs.size()) {
            for (int i = 0; i < tupleExpr.exprs.size(); i++) {
                setTupleType(tupleExpr.exprs.get(i), tupleType.tupleTypes.get(i));
            }
        }
    } else {
        checkExpr(expression, env);
    }
}
/**
 * Type checks a record literal: finds the single record/map type in the expected type
 * that the literal can conform to, checks each key-value pair against it, and finally
 * verifies that no required fields are missing.
 */
public void visit(BLangRecordLiteral recordLiteral) {
BType actualType = symTable.semanticError;
int expTypeTag = expType.tag;
BType originalExpType = expType;
if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.ANY) {
// With no usable expected type, fall back to checking against map.
// (For ANY this remap is moot — the branch below reports an error.)
expType = symTable.mapType;
}
if (expTypeTag == TypeTags.ANY || expTypeTag == TypeTags.ANYDATA || expTypeTag == TypeTags.OBJECT) {
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, originalExpType);
resultType = symTable.semanticError;
return;
}
List<BType> matchedTypeList = getRecordCompatibleType(expType, recordLiteral);
if (matchedTypeList.isEmpty()) {
// No candidate matched; still check the pairs (against errorType) so nested
// diagnostics are reported.
dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
recordLiteral.keyValuePairs
.forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, symTable.errorType));
} else if (matchedTypeList.size() > 1) {
// More than one candidate matched; the literal is ambiguous.
dlog.error(recordLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
recordLiteral.keyValuePairs
.forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, symTable.errorType));
} else {
recordLiteral.keyValuePairs
.forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, matchedTypeList.get(0)));
actualType = matchedTypeList.get(0);
}
resultType = types.checkType(recordLiteral, actualType, expType);
if (recordLiteral.type.tag == TypeTags.RECORD) {
// Closed/required fields must all be present in the literal.
checkMissingRequiredFields((BRecordType) recordLiteral.type, recordLiteral.keyValuePairs,
recordLiteral.pos);
}
}
/**
 * Collects the map/record candidate types from {@code bType} that the given record literal could
 * belong to. For a union, sealed records must structurally accept the literal; a JSON member (with
 * no map&lt;json&gt; member present) contributes a synthetic map&lt;json&gt; candidate.
 */
private List<BType> getRecordCompatibleType(BType bType, BLangRecordLiteral recordLiteral) {
    if (bType.tag == TypeTags.UNION) {
        Set<BType> expTypes = ((BUnionType) bType).getMemberTypes();
        List<BType> possibleTypes =
                expTypes.stream()
                        .filter(type -> type.tag == TypeTags.MAP ||
                                (type.tag == TypeTags.RECORD &&
                                        (!((BRecordType) type).sealed ||
                                                isCompatibleClosedRecordLiteral((BRecordType) type,
                                                        recordLiteral))))
                        .collect(Collectors.toList());
        if (expTypes.stream().anyMatch(type -> type.tag == TypeTags.JSON) &&
                expTypes.stream().noneMatch(type -> type.tag == TypeTags.MAP &&
                        ((BMapType) type).constraint.tag == TypeTags.JSON)) {
            possibleTypes.add(new BMapType(TypeTags.MAP, symTable.jsonType, null));
        }
        return possibleTypes;
    }
    // Bug fix: switch on the parameter rather than the checker's 'expType' field, so the method is
    // correct for any argument (previously it only behaved as intended when callers passed expType).
    switch (bType.tag) {
        case TypeTags.JSON:
            return Collections.singletonList(new BMapType(TypeTags.MAP, symTable.jsonType, null));
        case TypeTags.MAP:
        case TypeTags.RECORD:
            return Collections.singletonList(bType);
        default:
            return Collections.emptyList();
    }
}
/**
 * Returns true when the record literal fits a sealed record type: all required fields of the
 * record are present, and every literal key names some field of the record.
 */
private boolean isCompatibleClosedRecordLiteral(BRecordType bRecordType, BLangRecordLiteral recordLiteral) {
    if (!hasRequiredRecordFields(recordLiteral.getKeyValuePairs(), bRecordType)) {
        return false;
    }
    for (BLangRecordKeyValue keyValue : recordLiteral.getKeyValuePairs()) {
        String literalKey = getFieldName(keyValue.key);
        boolean known = false;
        for (BField recordField : bRecordType.getFields()) {
            if (recordField.getName().getValue().equals(literalKey)) {
                known = true;
                break;
            }
        }
        if (!known) {
            // A sealed record cannot accept a key it does not declare.
            return false;
        }
    }
    return true;
}
/**
 * Logs an error for every required field of {@code type} that does not appear among the record
 * literal's key-value pairs.
 */
private void checkMissingRequiredFields(BRecordType type, List<BLangRecordKeyValue> keyValuePairs,
                                        DiagnosticPos pos) {
    for (BField field : type.fields) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean present = false;
        for (BLangRecordKeyValue keyValue : keyValuePairs) {
            if (field.name.value.equals(getFieldName(keyValue.key))) {
                present = true;
                break;
            }
        }
        if (!present) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Extracts the textual field name from a record key, or null for computed keys, non-string
 * literal keys and unsupported key expression kinds.
 */
private String getFieldName(BLangRecordKey key) {
    if (key.computedKey) {
        // Computed keys have no statically known name.
        return null;
    }
    BLangExpression keyExpr = key.expr;
    NodeKind keyKind = keyExpr.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpr).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        BLangLiteral literal = (BLangLiteral) keyExpr;
        // Only string literals denote field names.
        return literal.type.tag == TypeTags.STRING ? (String) literal.value : null;
    }
    return null;
}
/**
 * Returns true when every required field of {@code targetRecType} is named (via a simple var-ref
 * key) among the given key-value pairs.
 */
private boolean hasRequiredRecordFields(List<BLangRecordKeyValue> keyValuePairs, BRecordType targetRecType) {
    for (BField field : targetRecType.fields) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean present = false;
        for (BLangRecordKeyValue keyValue : keyValuePairs) {
            BLangExpression keyExpr = keyValue.key.expr;
            // Only simple variable-reference keys can satisfy a required field here.
            if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                    && field.name.value.equals(((BLangSimpleVarRef) keyExpr).variableName.value)) {
                present = true;
                break;
            }
        }
        if (!present) {
            return false;
        }
    }
    return true;
}
/**
 * Filters the expected type (or its union members) down to those that the actual type can be
 * assigned to, keeping NONE/ANY members as always-compatible.
 */
private List<BType> getListCompatibleTypes(BType expType, BType actualType) {
    Set<BType> candidates;
    if (expType.tag == TypeTags.UNION) {
        candidates = ((BUnionType) expType).getMemberTypes();
    } else {
        candidates = new LinkedHashSet<>();
        candidates.add(expType);
    }
    List<BType> compatible = new ArrayList<>();
    for (BType candidate : candidates) {
        if (types.isAssignable(actualType, candidate)
                || candidate.tag == TypeTags.NONE
                || candidate.tag == TypeTags.ANY) {
            compatible.add(candidate);
        }
    }
    return compatible;
}
/**
 * Type-checks a worker flush expression; a named flush target must refer to a visible worker,
 * and the expression evaluates to {@code error?}.
 */
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String targetWorker = workerFlushExpr.workerIdentifier.getValue();
        if (!workerExists(this.env, targetWorker)) {
            dlog.error(workerFlushExpr.pos, DiagnosticCode.UNDEFINED_WORKER, targetWorker);
        }
    }
    // flush yields error|().
    BType flushResultType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, flushResultType, expType);
}
/**
 * Type-checks a synchronous worker send: the target worker must exist, and the sent value must
 * be anydata. The expression evaluates to error|() unless the context imposes a type.
 */
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol workerSymbol = symResolver.lookupSymbol(env, names.fromIdNode(syncSendExpr.workerIdentifier),
            SymTag.VARIABLE);
    syncSendExpr.workerType = symTable.notFoundSymbol.equals(workerSymbol)
            ? symTable.semanticError
            : workerSymbol.type;
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);
    // Only anydata values may cross worker boundaries.
    if (!types.isAnydata(syncSendExpr.expr.type)) {
        dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.type);
    }
    String targetWorker = syncSendExpr.workerIdentifier.getValue();
    if (!workerExists(this.env, targetWorker)) {
        dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, targetWorker);
    }
    resultType = expType == symTable.noType
            ? BUnionType.create(null, symTable.errorType, symTable.nilType)
            : expType;
}
/**
 * Type-checks a worker receive; channel receives are rejected, and the receive takes whatever
 * type the surrounding context expects (a contextless receive is an error).
 */
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    BSymbol workerSymbol = symResolver.lookupSymbol(env, names.fromIdNode(workerReceiveExpr.workerIdentifier),
            SymTag.VARIABLE);
    if (workerReceiveExpr.isChannel) {
        dlog.error(workerReceiveExpr.pos, DiagnosticCode.UNDEFINED_ACTION);
        return;
    }
    workerReceiveExpr.env = this.env;
    workerReceiveExpr.workerType = symTable.notFoundSymbol.equals(workerSymbol)
            ? symTable.semanticError
            : workerSymbol.type;
    // A receive has no standalone type; it must be used where a type is expected.
    if (symTable.noType == this.expType) {
        dlog.error(workerReceiveExpr.pos, DiagnosticCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.type = this.expType;
    resultType = this.expType;
}
/**
 * Returns true when {@code workerName} is the implicit default worker or resolves to a
 * worker-derived future variable in scope.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        // The default worker always exists.
        return true;
    }
    BSymbol candidate = symResolver.lookupSymbol(env, new Name(workerName), SymTag.VARIABLE);
    if (candidate == symTable.notFoundSymbol || candidate.type.tag != TypeTags.FUTURE) {
        return false;
    }
    // Workers are represented as futures flagged as worker-derived.
    return ((BFutureType) candidate.type).workerDerivative;
}
/**
 * Type-checks a simple variable reference. Handles the ignore variable '_', XMLNS-qualified refs,
 * variables (including closure capture for lambdas and arrow functions), bare type references
 * (which evaluate to typedesc values) and constants.
 */
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // '_' may only appear on the LHS; as an RHS value it is an error.
        if (varRefExpr.lhsVar) {
            varRefExpr.type = this.symTable.anyType;
        } else {
            varRefExpr.type = this.symTable.semanticError;
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
        }
        varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, varRefExpr.type, env.scope.owner);
        resultType = varRefExpr.type;
        return;
    }
    varRefExpr.pkgSymbol = symResolver.resolveImportSymbol(varRefExpr.pos,
            env, names.fromIdNode(varRefExpr.pkgAlias));
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // An XML namespace prefix evaluates to a string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName, SymTag.VARIABLE_NAME);
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            // Not a variable: fall back to attached functions of the enclosing object type.
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.type.tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.type.tsymbol);
        }
        if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSefReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            BLangInvokableNode encInvokable = env.enclInvokable;
            // A variable referenced inside a lambda but declared outside it (and not a parameter
            // or package-level symbol) is captured as a closure variable.
            if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) &&
                    !(symbol.owner instanceof BPackageSymbol) &&
                    !isFunctionArgument(varSym, encInvokable.requiredParams)) {
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv,
                        symbol.name, SymTag.VARIABLE);
                if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
                    resolvedSymbol.closure = true;
                    ((BLangFunction) encInvokable).closureVarSymbols.add(
                            new ClosureVarSymbol(resolvedSymbol, varRefExpr.pos));
                }
            }
            // Arrow functions capture outer variables the same way.
            if (env.node.getKind() == NodeKind.ARROW_EXPR && !(symbol.owner instanceof BPackageSymbol)) {
                if (!isFunctionArgument(varSym, ((BLangArrowFunction) env.node).params)) {
                    SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                    BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                            SymTag.VARIABLE);
                    if (resolvedSymbol != symTable.notFoundSymbol) {
                        resolvedSymbol.closure = true;
                        ((BLangArrowFunction) env.node).closureVarSymbols.add(
                                new ClosureVarSymbol(resolvedSymbol, varRefExpr.pos));
                    }
                }
            }
        } else if ((symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            // A bare type name evaluates to a typedesc value.
            actualType = new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            varRefExpr.symbol = symbol;
            BType symbolType = symbol.type;
            // When a finite type is expected (directly or via a union member), prefer the
            // constant's declared type over its narrower literal type.
            if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                    (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = ((BConstantSymbol) symbol).literalType;
            }
            // Constants are read-only; any assignment form is rejected.
            if (varRefExpr.lhsVar || varRefExpr.compoundAssignmentLhsVar) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_SYMBOL, varName.toString());
        }
    }
    // A var-ref cannot supply the inferred size of an open-sealed array type.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Type-checks a record binding-pattern variable reference (record destructuring LHS),
 * synthesizing a record type whose fields mirror the referenced variables. The synthesized
 * record is sealed when no rest pattern is present.
 */
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    List<BField> fields = new ArrayList<>();
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID,
            null, env.scope.owner);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        ((BLangVariableReference) recordRefField.variableReference).lhsVar = true;
        checkExpr(recordRefField.variableReference, env);
        if (((BLangVariableReference) recordRefField.variableReference).symbol == null ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) ((BLangVariableReference) recordRefField.variableReference).symbol;
        fields.add(new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID,
                        bVarSymbol.type, recordSymbol)));
    }
    if (varRefExpr.restParam != null) {
        BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
        checkExpr(restParam, env);
        // Bug fix: accumulate instead of overwrite, so a valid rest parameter does not mask an
        // earlier unresolved field reference (matches the accumulation used for error var-refs).
        unresolvedReference = unresolvedReference || !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner);
    if (varRefExpr.restParam == null) {
        // No rest pattern: the synthesized record is closed.
        bRecordType.sealed = true;
    } else {
        bRecordType.restFieldType = symTable.mapType;
    }
    resultType = bRecordType;
}
/**
 * Type-checks an error binding-pattern variable reference (error destructuring LHS): validates
 * the reason reference, the named detail references and the optional rest reference, then
 * synthesizes a matching error type.
 */
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    if (varRefExpr.reason != null) {
        varRefExpr.reason.lhsVar = true;
        checkExpr(varRefExpr.reason, env);
    }
    BErrorTypeSymbol errorTSymbol = Symbols.createErrorSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID,
            null, env.scope.owner);
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.lhsVar = true;
        checkExpr(refItem, env);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Detail bindings must be simple variable references; field/index access is disallowed.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
            dlog.error(detailItem.pos, DiagnosticCode.ERROR_BINDING_PATTERN_DOES_NOT_SUPPORT_FIELD_ACCESS);
            unresolvedReference = true;
            continue;
        } else if (refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(detailItem.pos, DiagnosticCode.ERROR_BINDING_PATTERN_DOES_NOT_SUPPORT_INDEX_ACCESS);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
            continue;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.lhsVar = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        } else if (varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            unresolvedReference = checkErrorRestParamVarRef(varRefExpr, unresolvedReference);
        }
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    // Determine the rest-field type for the synthesized detail record.
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.pureType;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        errorRefRestFieldType = symTable.pureType;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.type;
    } else if (varRefExpr.restVar.type.tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.type).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.type, symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = getCompatibleDetailType(errorRefRestFieldType);
    // NOTE(review): 'reason' is null-guarded above but dereferenced unconditionally here —
    // confirm reason can never be null on this path.
    resultType = new BErrorType(errorTSymbol, varRefExpr.reason.type, errorDetailType);
}
/**
 * Builds the synthetic detail record type used by error binding patterns: optional 'message'
 * (string) and 'cause' (error) fields, the given rest-field type, and a generated no-arg
 * initializer function.
 */
private BRecordType getCompatibleDetailType(BType errorRefRestFieldType) {
    PackageID packageID = env.enclPkg.packageID;
    BRecordTypeSymbol detailSymbol = new BRecordTypeSymbol(SymTag.RECORD, Flags.PUBLIC, Names.EMPTY,
            packageID, null, env.scope.owner);
    detailSymbol.scope = new Scope(env.scope.owner);
    BRecordType detailType = new BRecordType(detailSymbol);
    // Both well-known detail fields are optional and public.
    int flags = Flags.asMask(new HashSet<>(Lists.of(Flag.OPTIONAL, Flag.PUBLIC)));
    BField messageField = new BField(Names.DETAIL_MESSAGE, null,
            new BVarSymbol(flags, Names.DETAIL_MESSAGE, packageID, symTable.stringType, detailSymbol));
    detailType.fields.add(messageField);
    detailSymbol.scope.define(Names.DETAIL_MESSAGE, messageField.symbol);
    BField causeField = new BField(Names.DETAIL_CAUSE, null,
            new BVarSymbol(flags, Names.DETAIL_CAUSE, packageID, symTable.errorType, detailSymbol));
    detailType.fields.add(causeField);
    detailSymbol.scope.define(Names.DETAIL_CAUSE, causeField.symbol);
    detailType.restFieldType = errorRefRestFieldType;
    // Attach a synthesized nil-returning initializer so the record behaves like a declared one.
    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initSymbol = Symbols.createFunctionSymbol(0, Names.INIT_FUNCTION_SUFFIX, packageID,
            invokableType, detailSymbol, false);
    detailSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initSymbol, invokableType);
    detailSymbol.scope.define(initSymbol.name, initSymbol);
    return detailType;
}
/**
 * Resolves the type of an error rest binding expressed as a field/index access, updating
 * {@code varRefExpr.restVar.type}, and returns the (possibly updated) unresolved-reference flag.
 */
private boolean checkErrorRestParamVarRef(BLangErrorVarRef varRefExpr, boolean unresolvedReference) {
    BLangAccessExpression accessExpression = (BLangAccessExpression) varRefExpr.restVar;
    Name exprName = names.fromIdNode(((BLangSimpleVarRef) accessExpression.expr).variableName);
    BSymbol fSym = symResolver.lookupSymbol(env, exprName, SymTag.VARIABLE);
    // NOTE(review): lookupSymbol conventionally reports 'not found' via symTable.notFoundSymbol
    // rather than null — confirm this null check actually catches unresolved symbols.
    if (fSym != null) {
        if (fSym.type.getKind() == TypeKind.MAP) {
            BType constraint = ((BMapType) fSym.type).constraint;
            // Narrow to the map's constraint only when it fits within pure (anydata|error).
            if (types.isAssignable(constraint, symTable.pureType)) {
                varRefExpr.restVar.type = constraint;
            } else {
                varRefExpr.restVar.type = symTable.pureType;
            }
        } else {
            throw new UnsupportedOperationException("rec field base access");
        }
    } else {
        unresolvedReference = true;
    }
    return unresolvedReference;
}
/**
 * This method will recursively check if a multidimensional array has at least one open sealed dimension.
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
// Fix: the Javadoc was placed between @Override and the method, where doc tools do not
// associate it with the declaration; Javadoc must precede all modifiers and annotations.
@Override
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    if (arrayType.state == BArrayState.OPEN_SEALED) {
        return true;
    }
    // Recurse into the element type to cover nested array dimensions.
    if (arrayType.eType.tag == TypeTags.ARRAY) {
        return isArrayOpenSealedType((BArrayType) arrayType.eType);
    }
    return false;
}
/**
 * Walks outward from {@code env} until it leaves the scope of the given invokable (typically a
 * lambda), returning the first environment outside it — the place where closure variables must
 * be looked up. An arrow-expression enclosing environment short-circuits the walk.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
    SymbolEnv outerEnv = env.enclEnv;
    if (outerEnv.node != null && outerEnv.node.getKind() == NodeKind.ARROW_EXPR) {
        return outerEnv;
    }
    if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
        // Still inside the lambda's own scope chain; keep climbing.
        return findEnclosingInvokableEnv(outerEnv, encInvokable);
    }
    return env;
}
/**
 * Returns true when {@code symbol} matches one of the declared parameters by both name and
 * type tag.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.type.tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type-checks a field access expression (a.b / a?.b): propagates LHS flags to the receiver and
 * dispatches between plain and optional field access.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    ((BLangVariableReference) fieldAccessExpr.expr).lhsVar = fieldAccessExpr.lhsVar;
    ((BLangVariableReference) fieldAccessExpr.expr).compoundAssignmentLhsVar =
            fieldAccessExpr.compoundAssignmentLhsVar;
    BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr);
    BType actualType;
    // Accessing all fields ('a.*') is only meaningful on XML values.
    if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag != TypeTags.XML) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType);
        actualType = symTable.semanticError;
    } else {
        if (fieldAccessExpr.optionalFieldAccess) {
            // Optional access produces a value; it can never stand on the LHS of an assignment.
            if (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar) {
                dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
                resultType = symTable.semanticError;
                return;
            }
            actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
                    names.fromIdNode(fieldAccessExpr.field));
        } else {
            actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
        }
    }
    resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an index access expression (a[k]): propagates LHS flags to the container,
 * resolves the member type, and skips expected-type conversion for LHS positions.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
    BLangVariableReference containerRef = (BLangVariableReference) indexBasedAccessExpr.expr;
    containerRef.lhsVar = indexBasedAccessExpr.lhsVar;
    containerRef.compoundAssignmentLhsVar = indexBasedAccessExpr.compoundAssignmentLhsVar;
    checkExpr(indexBasedAccessExpr.expr, this.env, symTable.noType);
    BType memberType = checkIndexAccessExpr(indexBasedAccessExpr);
    if (indexBasedAccessExpr.lhsVar) {
        // On the LHS the access keeps its member type as-is; no conversion to the expected type.
        indexBasedAccessExpr.originalType = memberType;
        indexBasedAccessExpr.type = memberType;
        resultType = memberType;
        return;
    }
    this.resultType = this.types.checkType(indexBasedAccessExpr, memberType, this.expType);
}
/**
 * Type-checks an invocation: dispatches between plain function calls, action invocations,
 * attached object methods, record fields holding function pointers, and lang-library methods.
 */
public void visit(BLangInvocation iExpr) {
    // No receiver expression: an ordinary function call.
    if (iExpr.expr == null) {
        checkFunctionInvocationExpr(iExpr);
        return;
    }
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    if (pkgAlias != Names.EMPTY) {
        dlog.error(iExpr.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE);
        return;
    }
    final BType exprType = checkExpr(iExpr.expr, this.env, symTable.noType);
    if (iExpr.actionInvocation) {
        checkActionInvocationExpr(iExpr, exprType);
        return;
    }
    BType varRefType = iExpr.expr.type;
    switch (varRefType.tag) {
        case TypeTags.OBJECT:
            checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
            break;
        case TypeTags.RECORD:
            // Prefer a function-typed field of the record; fall back to lang-lib methods.
            boolean methodFound = checkFieldFunctionPointer(iExpr);
            if (!methodFound) {
                checkInLangLib(iExpr, varRefType);
            }
            break;
        case TypeTags.NONE:
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name);
            break;
        case TypeTags.SEMANTIC_ERROR:
            // The receiver already failed to type-check; avoid cascading errors.
            break;
        default:
            checkInLangLib(iExpr, varRefType);
    }
}
/**
 * Falls back to the lang-library for a method invocation on {@code varRefType}; reports an
 * undefined-function error when no lang-lib method matches either.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    if (checkLangLibMethodInvocationExpr(iExpr, varRefType)) {
        return;
    }
    dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name.value);
    resultType = symTable.semanticError;
}
/**
 * Attempts to resolve the invocation target as a function-typed field of the receiver's type.
 * Returns true (and marks the invocation as a function-pointer call) on success.
 */
private boolean checkFieldFunctionPointer(BLangInvocation iExpr) {
    BType receiverType = checkExpr(iExpr.expr, this.env);
    if (receiverType == symTable.semanticError) {
        return false;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(iExpr.name),
            receiverType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        return false;
    }
    iExpr.symbol = fieldSymbol;
    iExpr.type = ((BInvokableSymbol) fieldSymbol).retType;
    checkInvocationParamAndReturnType(iExpr);
    iExpr.functionPointerInvocation = true;
    return true;
}
/**
 * Type-checks a 'new' expression. The constructed type may be explicit or inferred from the
 * expected type; objects must be non-abstract and their initializer arguments must match. For a
 * union expected type, exactly one object member's initializer must accept the arguments.
 */
public void visit(BLangTypeInit cIExpr) {
    // 'new' with no explicit type cannot be inferred from 'any', and records are never 'new'-able.
    if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
        dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType);
        resultType = symTable.semanticError;
        return;
    }
    BType actualType;
    if (cIExpr.userDefinedType != null) {
        actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
    } else {
        // No explicit type: construct whatever the context expects.
        actualType = expType;
    }
    if (actualType == symTable.semanticError) {
        resultType = symTable.semanticError;
        return;
    }
    switch (actualType.tag) {
        case TypeTags.OBJECT:
            // Abstract objects cannot be instantiated; still check args to surface nested errors.
            if ((actualType.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
                dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol);
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                resultType = symTable.semanticError;
                return;
            }
            if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation);
                cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType;
            } else if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                // No initializer declared, so no arguments can be accepted.
                dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.exprSymbol);
                cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
                resultType = symTable.semanticError;
                return;
            }
            break;
        case TypeTags.STREAM:
            // Stream construction takes no arguments.
            if (!cIExpr.initInvocation.argExprs.isEmpty()) {
                dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name);
                resultType = symTable.semanticError;
                return;
            }
            break;
        case TypeTags.UNION:
            // Pick the single union member whose initializer accepts the given arguments.
            List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
            BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
            cIExpr.initInvocation.type = symTable.nilType;
            if (matchedType.tag == TypeTags.OBJECT
                    && ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
                cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
                checkInvocationParam(cIExpr.initInvocation);
                cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType;
                actualType = matchedType;
                break;
            }
            types.checkType(cIExpr, matchedType, expType);
            cIExpr.type = matchedType;
            resultType = matchedType;
            return;
        default:
            dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
            resultType = symTable.semanticError;
            return;
    }
    if (cIExpr.initInvocation.type == null) {
        cIExpr.initInvocation.type = symTable.nilType;
    }
    // 'new T(...)' yields T, or T|error... when the initializer can return errors.
    BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.type);
    resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
/**
 * Computes the type of a 'new' expression from the object type and the initializer's return
 * type: the object type itself for nil-returning initializers, or a union of the object type
 * and the initializer's non-nil return members.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.NIL) {
        // __init returns (): 'new' yields exactly the object type.
        return objType;
    }
    if (initRetType.tag == TypeTags.UNION) {
        // __init may return errors: 'new' yields objType | (members minus nil).
        LinkedHashSet<BType> members = new LinkedHashSet<>();
        members.add(objType);
        members.addAll(((BUnionType) initRetType).getMemberTypes());
        members.remove(symTable.nilType);
        BUnionType ctorReturnType = BUnionType.create(null, members);
        ctorReturnType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0,
                Names.EMPTY, env.enclPkg.symbol.pkgID, ctorReturnType,
                env.scope.owner);
        return ctorReturnType;
    }
    return symTable.semanticError;
}
/**
 * Returns the object members of {@code lhsUnionType} whose initializer accepts the 'new'
 * expression's arguments; abstract object members are reported as errors along the way.
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
    // Type-check the constructor arguments once up front; matching below relies on their types.
    for (BLangExpression argExpr : cIExpr.initInvocation.argExprs) {
        checkExpr(argExpr, env, symTable.noType);
    }
    List<BType> compatibleMembers = new ArrayList<>();
    for (BType member : lhsUnionType.getMemberTypes()) {
        if (member.tag != TypeTags.OBJECT) {
            continue;
        }
        if ((member.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
            dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol);
        }
        BAttachedFunction initFunc = ((BObjectTypeSymbol) member.tsymbol).initializerFunc;
        if (isArgsMatchesFunction(cIExpr.argsExpr, initFunc)) {
            compatibleMembers.add(member);
        }
    }
    return compatibleMembers;
}
/**
 * Picks the single matching member for an inferred 'new' over a union: exactly one match is
 * required; zero or many is reported as an error and yields semanticError.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    if (matchingLhsMembers.size() == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    DiagnosticCode code = matchingLhsMembers.isEmpty()
            ? DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, code, lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Checks whether the given invocation arguments could invoke {@code function}: named arguments
 * must match defaultable parameters, positional arguments must satisfy the required parameters,
 * and any surplus must fit the rest parameter.
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
    // No initializer at all: only a zero-argument invocation can match.
    if (function == null) {
        return invocationArguments.isEmpty();
    }
    if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
        return true;
    }
    // Split the arguments into named and positional groups.
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    List<BLangExpression> unnamedArgs = new ArrayList<>();
    for (BLangExpression argument : invocationArguments) {
        if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            namedArgs.add((BLangNamedArgsExpression) argument);
        } else {
            unnamedArgs.add(argument);
        }
    }
    if (!matchDefaultableParameters(function, namedArgs)) {
        return false;
    }
    List<BVarSymbol> requiredParams = function.symbol.params.stream()
            .filter(param -> !param.defaultableParam).collect(Collectors.toList());
    // Positional arity: too few is always a mismatch; too many only fits a rest parameter.
    if (requiredParams.size() > unnamedArgs.size()) {
        return false;
    }
    if (function.symbol.restParam == null && requiredParams.size() != unnamedArgs.size()) {
        return false;
    }
    if (function.symbol.restParam != null) {
        BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
        if (!restArgTypesMatch(unnamedArgs, requiredParams.size(), restParamType)) {
            return false;
        }
    }
    // Each positional argument must be assignable to its required parameter.
    for (int i = 0, paramsSize = requiredParams.size(); i < paramsSize; i++) {
        BVarSymbol param = requiredParams.get(i);
        BLangExpression argument = unnamedArgs.get(i);
        if (!types.isAssignable(argument.type, param.type)) {
            return false;
        }
    }
    return true;
}
/**
 * Verifies that every positional argument beyond the required parameters is assignable to the
 * rest parameter's element type.
 */
private boolean restArgTypesMatch(List<BLangExpression> unnamedArgs, int requiredParamCount, BType restParamType) {
    for (int i = requiredParamCount; i < unnamedArgs.size(); i++) {
        if (!types.isAssignable(unnamedArgs.get(i).type, restParamType)) {
            return false;
        }
    }
    // No surplus arguments (or all of them fit the rest type).
    return true;
}
/**
 * Checks that every named argument matches a distinct defaultable parameter of {@code function}
 * with a compatible type.
 */
private boolean matchDefaultableParameters(BAttachedFunction function, List<BLangNamedArgsExpression> namedArgs) {
    List<BVarSymbol> defaultableParams = new ArrayList<>();
    for (BVarSymbol param : function.symbol.params) {
        if (param.defaultableParam) {
            defaultableParams.add(param);
        }
    }
    // More named arguments than defaultable parameters can never all match.
    if (defaultableParams.size() < namedArgs.size()) {
        return false;
    }
    int matchedParameterCount = 0;
    for (BVarSymbol defaultableParam : defaultableParams) {
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            if (!namedArg.name.value.equals(defaultableParam.name.value)) {
                continue;
            }
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // Assignability is checked param -> arg here, mirroring the original logic.
            if (!types.isAssignable(defaultableParam.type, namedArgExprType)) {
                return false;
            }
            matchedParameterCount++;
        }
    }
    return namedArgs.size() == matchedParameterCount;
}
/**
 * Type-checks a multiple-wait expression 'wait {a: f1, b: f2, ...}'. With a record expected type
 * the fields are matched individually; otherwise the result is a map whose constraint is the
 * union of the waited-for value types.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
    switch (expType.tag) {
        case TypeTags.RECORD:
            checkTypesForRecords(waitForAllExpr);
            break;
        case TypeTags.MAP:
            // Each member must produce a value assignable to the map's constraint.
            checkTypesForMap(waitForAllExpr.keyValuePairs, ((BMapType) expType).constraint);
            LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypesForMap.size() == 1) {
                resultType = new BMapType(TypeTags.MAP,
                        memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
            resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
            break;
        case TypeTags.NONE:
        case TypeTags.ANY:
            // No constraining type: the result is a map of the union of all member types.
            checkTypesForMap(waitForAllExpr.keyValuePairs, expType);
            LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
            if (memberTypes.size() == 1) {
                resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
                break;
            }
            BUnionType constraintType = BUnionType.create(null, memberTypes);
            resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
            break;
        default:
            dlog.error(waitForAllExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
            resultType = symTable.semanticError;
            break;
    }
    waitForAllExpr.type = resultType;
    if (resultType != null && resultType != symTable.semanticError) {
        types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.type, expType);
    }
}
/**
 * Collects the distinct value types produced by the wait key-value pairs; waiting on a future
 * yields the future's constraint type rather than the future itself.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        BType exprType = keyVal.keyExpr != null ? keyVal.keyExpr.type : keyVal.valueExpr.type;
        memberTypes.add(exprType.tag == TypeTags.FUTURE
                ? ((BFutureType) exprType).constraint
                : exprType);
    }
    return memberTypes;
}
/**
 * Checks each wait key-value pair against the map constraint type {@code expType}.
 */
private void checkTypesForMap(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyValuePair : keyValuePairs) {
        checkWaitKeyValExpr(keyValuePair, expType);
    }
}
/**
 * Type-checks a wait-for-all expression against an expected record type:
 * each provided field is checked against the matching declared field type,
 * extra fields are rejected for sealed records (or checked against the rest
 * field type for open ones), and missing required fields are reported.
 * On success {@code resultType} becomes the expected record type.
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
    BRecordType expRecordType = (BRecordType) expType;
    List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
    Map<String, BType> lhsFields = new HashMap<>();
    expRecordType.getFields().forEach(field -> lhsFields.put(field.name.value, field.type));
    // A sealed record cannot accept more fields than it declares.
    if (expRecordType.sealed && rhsFields.size() > lhsFields.size()) {
        dlog.error(waitExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
        resultType = symTable.semanticError;
        return;
    }
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
        String key = keyVal.key.value;
        if (lhsFields.containsKey(key)) {
            checkWaitKeyValExpr(keyVal, lhsFields.get(key));
        } else if (expRecordType.sealed) {
            dlog.error(waitExpr.pos, DiagnosticCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
            resultType = symTable.semanticError;
        } else {
            // Open record: unknown fields are checked against the rest field type.
            checkWaitKeyValExpr(keyVal, expRecordType.restFieldType);
        }
    }
    checkMissingReqFieldsForWait(expRecordType, rhsFields, waitExpr.pos);
    if (symTable.semanticError != resultType) {
        resultType = expType;
    }
}
/**
 * Reports an error for every required field of the expected record type that
 * has no matching key among the provided wait key-value pairs.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          DiagnosticPos pos) {
    type.fields.forEach(field -> {
        boolean provided = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                provided = true;
                break;
            }
        }
        // Only REQUIRED fields must be present; optional ones may be omitted.
        if (!provided && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    });
}
/**
 * Type-checks a single wait key-value pair: the relevant expression must be a
 * future whose constraint is the given type. For the shorthand form (key only)
 * the referenced variable is resolved and its type stamped onto the key expr.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
    BLangExpression exprToCheck;
    if (keyVal.keyExpr == null) {
        exprToCheck = keyVal.valueExpr;
    } else {
        // Shorthand form: look up the referenced variable and record its type.
        BSymbol symbol = symResolver.lookupSymbol(env, names.fromIdNode
                (((BLangSimpleVarRef) keyVal.keyExpr).variableName), SymTag.VARIABLE);
        keyVal.keyExpr.type = symbol.type;
        exprToCheck = keyVal.keyExpr;
    }
    // The checked expression must produce future<type>.
    checkExpr(exprToCheck, env, new BFutureType(TypeTags.FUTURE, type, null));
}
/**
 * Type-checks a ternary expression {@code cond ? thenExpr : elseExpr}.
 * The condition is checked against boolean, and each branch is checked in an
 * environment narrowed by the condition's truth/falsity. In a `var` context
 * (no expected type) the result is whichever branch type can hold the other;
 * otherwise the expected type is used directly.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
// Narrow types in the then-branch assuming the condition holds.
SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
// Narrow types in the else-branch assuming the condition fails.
SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
elseType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
// `var` context: pick the more general of the two branch types.
if (types.isAssignable(elseType, thenType)) {
resultType = thenType;
} else if (types.isAssignable(thenType, elseType)) {
resultType = elseType;
} else {
dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, thenType, elseType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}
/**
 * Type-checks a {@code wait expr} expression. The expected type is wrapped in
 * a future before checking the inner expression; afterwards the future
 * constraint(s) are unwrapped to form the wait expression's type.
 * NOTE: this method deliberately reassigns the {@code expType} field.
 */
public void visit(BLangWaitExpr waitExpr) {
expType = new BFutureType(TypeTags.FUTURE, expType, null);
checkExpr(waitExpr.getExpression(), env, expType);
// A union result (e.g. `wait f1|f2`) is flattened to the futures' constraints.
if (resultType.tag == TypeTags.UNION) {
LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
if (memberTypes.size() == 1) {
resultType = memberTypes.toArray(new BType[0])[0];
} else {
resultType = BUnionType.create(null, memberTypes);
}
} else if (resultType != symTable.semanticError) {
// A single future: unwrap to its constraint type.
resultType = ((BFutureType) resultType).constraint;
}
waitExpr.type = resultType;
if (resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(waitExpr, waitExpr.type, ((BFutureType) expType).constraint);
}
}
/**
 * Adds each member of the union into the accumulator set, unwrapping
 * future-typed members to their constraint types.
 *
 * @return the same accumulator set, for call-chaining convenience
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType member : unionType.getMemberTypes()) {
        memberTypes.add(member.tag == TypeTags.FUTURE ? ((BFutureType) member).constraint : member);
    }
    return memberTypes;
}
/**
 * Type-checks a {@code trap expr} expression: the result is the trapped
 * expression's type unioned with error. Worker-receive operands follow a
 * two-pass protocol — the first visit defers (isTypeChecked = false) and the
 * second reuses the types recorded on the nodes during the first pass.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
// True only on the first pass for worker-receive operands (type not yet set).
boolean firstVisit = trapExpr.expr.type == null;
BType actualType;
BType exprType = checkExpr(trapExpr.expr, env, expType);
boolean definedWithVar = expType == symTable.noType;
if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
if (firstVisit) {
// Defer; the second pass will complete the check.
isTypeChecked = false;
resultType = expType;
return;
} else {
expType = trapExpr.type;
exprType = trapExpr.expr.type;
}
}
if (expType == symTable.semanticError) {
actualType = symTable.semanticError;
} else {
// trap's type = (members of the trapped type) | error.
LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
if (exprType.tag == TypeTags.UNION) {
resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
} else {
resultTypes.add(exprType);
}
resultTypes.add(symTable.errorType);
actualType = BUnionType.create(null, resultTypes);
}
resultType = types.checkType(trapExpr, actualType, expType);
if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.type, resultType);
}
}
/**
 * Type-checks a binary expression. Special cases:
 * - `f1 | f2` in a future context (multiple-wait) yields a union of the sides;
 * - `&&` / `||` narrow the RHS environment using the LHS truth value;
 * - arithmetic under an expected decimal pushes that expectation onto both operands.
 * Otherwise the binary operator symbol is resolved for the operand types and
 * its return type becomes the actual type.
 */
public void visit(BLangBinaryExpr binaryExpr) {
// Multiple-wait alternative: future | future.
if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
resultType = symTable.semanticError;
return;
}
resultType = BUnionType.create(null, lhsResultType, rhsResultType);
return;
}
checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
SymbolEnv rhsExprEnv;
BType lhsType = checkExpr(binaryExpr.lhsExpr, env);
// Short-circuit operators narrow the RHS env by the LHS outcome.
if (binaryExpr.opKind == OperatorKind.AND) {
rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
} else if (binaryExpr.opKind == OperatorKind.OR) {
rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
} else {
rhsExprEnv = env;
}
BType rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
BType actualType = symTable.semanticError;
if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
if (opSymbol == symTable.notFoundSymbol) {
// Fall back to equality over type sets (e.g. unions) before giving up.
opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr);
}
if (opSymbol == symTable.notFoundSymbol) {
dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES,
binaryExpr.opKind, lhsType, rhsType);
} else {
// Equality over values that could hold tables is not yet supported.
if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) &&
(couldHoldTableValues(lhsType, new ArrayList<>()) &&
couldHoldTableValues(rhsType, new ArrayList<>()))) {
dlog.error(binaryExpr.pos, DiagnosticCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
actualType = opSymbol.type.getReturnType();
}
}
resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * When the surrounding context expects a decimal and the operator is basic
 * arithmetic, re-checks both operands with the decimal expectation so numeric
 * literals are typed as decimal rather than int/float.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind opKind = binaryExpr.opKind;
    boolean isBasicArithmeticOp = opKind == OperatorKind.ADD || opKind == OperatorKind.SUB
            || opKind == OperatorKind.MUL || opKind == OperatorKind.DIV;
    if (isBasicArithmeticOp) {
        // Push the decimal expectation down onto both operands.
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression {@code lhs ?: rhs}. The LHS must be a
 * nilable union; its non-nil members form the LHS contribution. In a `var`
 * context both sides must have the same type; otherwise the expected type wins.
 */
public void visit(BLangElvisExpr elvisExpr) {
BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
BType actualType = symTable.semanticError;
if (lhsType != symTable.semanticError) {
if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
// Strip nil from the LHS union; that is what the elvis LHS can yield.
BUnionType unionType = (BUnionType) lhsType;
LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
.filter(type -> type.tag != TypeTags.NIL)
.collect(Collectors.toCollection(LinkedHashSet::new));
if (memberTypes.size() == 1) {
actualType = memberTypes.toArray(new BType[0])[0];
} else {
actualType = BUnionType.create(null, memberTypes);
}
} else {
// Elvis is only meaningful on a nilable LHS.
dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED,
OperatorKind.ELVIS, lhsType);
}
}
BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
DiagnosticCode.INCOMPATIBLE_TYPES);
if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
// `var` context: both sides must agree exactly.
if (types.isSameType(rhsReturnType, lhsReturnType)) {
resultType = lhsReturnType;
} else {
dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}
/**
 * A parenthesized expression has exactly the type of the expression it wraps.
 */
@Override
public void visit(BLangGroupExpr groupExpr) {
    BType innerType = checkExpr(groupExpr.expression, env, expType);
    resultType = innerType;
}
/**
 * Type-checks a typedesc expression: the resolved type is wrapped in
 * {@code typedesc<T>} unless it is already a typedesc or unresolved (NONE).
 */
public void visit(BLangTypedescExpr accessExpr) {
    accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    int resolvedTag = accessExpr.resolvedType.tag;
    BType actualType = (resolvedTag == TypeTags.TYPEDESC || resolvedTag == TypeTags.NONE)
            ? accessExpr.resolvedType
            : new BTypedescType(accessExpr.resolvedType, null);
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression. `untaint` preserves the operand type,
 * `typeof` yields typedesc of the operand type, and all other operators are
 * resolved through the unary operator table (unary `+` additionally propagates
 * the expected type to the operand).
 */
public void visit(BLangUnaryExpr unaryExpr) {
BType exprType;
BType actualType = symTable.semanticError;
if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
// untaint does not change the value's type.
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = exprType;
}
} else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
// typeof x : typedesc<type-of-x>.
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = new BTypedescType(exprType, null);
}
} else {
// Unary + passes the expected type through (e.g. decimal literals).
exprType = OperatorKind.ADD.equals(unaryExpr.operator) ? checkExpr(unaryExpr.expr, env, expType) :
checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
}
resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a type-cast/conversion expression {@code <T> expr}. A missing
 * type node with annotation attachments means an annotated expression — the
 * inner expression is simply checked against the current expected type.
 * Otherwise the cast operator from the source to the target type is resolved.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
BType actualType = symTable.semanticError;
// Annotated expression form: no target type, only annotations.
if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
BType expType = checkExpr(conversionExpr.expr, env, this.expType);
resultType = expType;
return;
}
BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env);
conversionExpr.targetType = targetType;
// Record literals are checked directly against the target type; other
// expressions are typed first, then cast-compatibility is checked.
BType expType = conversionExpr.expr.getKind() == NodeKind.RECORD_LITERAL_EXPR ? targetType : symTable.noType;
BType sourceType = checkExpr(conversionExpr.expr, env, expType);
if (targetType.tag == TypeTags.FUTURE) {
dlog.error(conversionExpr.pos, DiagnosticCode.TYPE_CAST_NOT_YET_SUPPORTED, targetType);
} else {
BSymbol symbol = symResolver.resolveTypeCastOperator(conversionExpr.expr, sourceType, targetType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CAST, sourceType, targetType);
} else {
conversionExpr.conversionSymbol = (BOperatorSymbol) symbol;
actualType = targetType;
}
}
resultType = types.checkType(conversionExpr, actualType, this.expType);
}
/**
 * Type-checks a lambda: its type is the wrapped function's type. A clone of
 * the current environment is cached for later closure handling, and the lambda
 * is registered on the enclosing package.
 */
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    BType lambdaType = bLangLambdaFunction.function.symbol.type;
    bLangLambdaFunction.type = lambdaType;
    bLangLambdaFunction.cachedEnv = env.createClone();
    env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    resultType = types.checkType(bLangLambdaFunction, lambdaType, expType);
}
/**
 * Type-checks an arrow function. The expected type must be (or, for a union,
 * contain exactly one) invokable type; parameter types are taken from it and
 * the body expression checked against its return type. An unspecified (NONE)
 * return type is inferred from the body.
 */
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BType expectedType = expType;
    if (expectedType.tag == TypeTags.UNION) {
        // Use the union's invokable member only when it is unique.
        BType soleInvokable = null;
        int invokableCount = 0;
        for (BType memberType : ((BUnionType) expectedType).getMemberTypes()) {
            if (memberType.tag == TypeTags.INVOKABLE) {
                invokableCount++;
                soleInvokable = memberType;
            }
        }
        if (invokableCount == 1) {
            expectedType = soleInvokable;
        }
    }
    if (expectedType.tag != TypeTags.INVOKABLE) {
        dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
        resultType = symTable.semanticError;
        return;
    }
    BInvokableType expectedInvocation = (BInvokableType) expectedType;
    populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
    bLangArrowFunction.expression.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType);
    // Infer an unspecified return type from the arrow body.
    if (expectedInvocation.retType.tag == TypeTags.NONE) {
        expectedInvocation.retType = bLangArrowFunction.expression.type;
    }
    resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name. Inside an XML attribute, both the default
 * namespace declaration (`xmlns`) and the prefixed form (`xmlns:foo`) mark the
 * attribute as a namespace declaration. Elsewhere, `xmlns` may not be used as
 * a prefix, and any non-empty prefix must resolve to an in-scope namespace.
 */
public void visit(BLangXMLQName bLangXMLQName) {
    String prefix = bLangXMLQName.prefix.value;
    resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
    // Namespace-declaration attribute: `xmlns` (default) or `xmlns:foo`.
    if (env.node.getKind() == NodeKind.XML_ATTRIBUTE
            && ((prefix.isEmpty() && bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE))
            || prefix.equals(XMLConstants.XMLNS_ATTRIBUTE))) {
        ((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
        return;
    }
    // Outside a declaration, `xmlns` is not a legal prefix.
    if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
        dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix);
        bLangXMLQName.type = symTable.semanticError;
        return;
    }
    BSymbol xmlnsSymbol = symResolver.lookupSymbol(env, names.fromIdNode(bLangXMLQName.prefix), SymTag.XMLNS);
    if (xmlnsSymbol == symTable.notFoundSymbol) {
        // An empty prefix with no default namespace is fine; a named prefix must resolve.
        if (!prefix.isEmpty()) {
            dlog.error(bLangXMLQName.pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix);
            bLangXMLQName.type = symTable.semanticError;
        }
        return;
    }
    bLangXMLQName.namespaceURI = ((BXMLNSSymbol) xmlnsSymbol).namespaceURI;
    bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
}
/**
 * Type-checks an XML attribute: both the name and the value must be strings,
 * checked in a dedicated attribute environment; the attribute (e.g. a
 * namespace declaration) is then defined in the enclosing scope.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
    SymbolEnv attributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
    checkExpr(bLangXMLAttribute.name, attributeEnv, symTable.stringType);
    checkExpr(bLangXMLAttribute.value, attributeEnv, symTable.stringType);
    symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal. Namespace-declaration attributes are
 * checked first (so later lookups can see them), then the remaining
 * attributes; in-scope namespaces are recorded, tags validated, and adjacent
 * children of the same kind concatenated. The literal's type is xml.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
    SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
    // Pass 1: xmlns declarations only.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        boolean isXmlnsDecl = attribute.name.getKind() == NodeKind.XML_QNAME
                && ((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
        if (isXmlnsDecl) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    // Pass 2: everything else.
    bLangXMLElementLiteral.attributes.forEach(attribute -> {
        boolean isXmlnsDecl = attribute.name.getKind() == NodeKind.XML_QNAME
                && ((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE);
        if (!isXmlnsDecl) {
            checkExpr(attribute, xmlElementEnv, symTable.noType);
        }
    });
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
    Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
    // The default namespace is kept separately from the prefixed ones.
    if (namespaces.containsKey(defaultNs)) {
        bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
    }
    bLangXMLElementLiteral.namespacesInScope.putAll(namespaces);
    validateTags(bLangXMLElementLiteral, xmlElementEnv);
    bLangXMLElementLiteral.modifiedChildren =
            concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
    resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlType, expType);
}
/**
 * Type-checks an XML text literal: interpolated fragments must be
 * string-compatible, and the literal itself is of type xml.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
checkStringTemplateExprs(bLangXMLTextLiteral.textFragments, false);
resultType = types.checkType(bLangXMLTextLiteral, symTable.xmlType, expType);
}
/**
 * Type-checks an XML comment literal: interpolated fragments must be
 * string-compatible, and the literal itself is of type xml.
 */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments, false);
resultType = types.checkType(bLangXMLCommentLiteral, symTable.xmlType, expType);
}
/**
 * Type-checks an XML processing-instruction literal: the target must be a
 * string, the data fragments string-compatible, and the result is of type xml.
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments, false);
resultType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlType, expType);
}
/**
 * Type-checks a quoted string inside XML (attribute value): fragments must be
 * string-compatible and — unlike the xml-typed literals — the result is string.
 */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
checkStringTemplateExprs(bLangXMLQuotedString.textFragments, false);
resultType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
}
/**
 * Type-checks an XML attribute access {@code x@} / {@code x@[index]}.
 * Without an index the whole attribute map (map&lt;string&gt;?) is produced —
 * but only on the RHS; updating the whole map is disallowed. With a string
 * index the result is string (LHS) or string? (RHS).
 */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
BType actualType = symTable.semanticError;
checkExpr(xmlAttributeAccessExpr.expr, env, symTable.xmlType);
BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr;
if (indexExpr == null) {
// Whole-map access: read-only.
if (xmlAttributeAccessExpr.lhsVar) {
dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED);
} else {
actualType = BUnionType.create(null, symTable.mapStringType, symTable.nilType);
}
resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
return;
}
checkExpr(indexExpr, env, symTable.stringType);
if (indexExpr.type.tag == TypeTags.STRING) {
// LHS access yields string; RHS access may be absent, hence string?.
if (xmlAttributeAccessExpr.lhsVar) {
actualType = symTable.stringType;
} else {
actualType = BUnionType.create(null, symTable.stringType, symTable.nilType);
}
}
xmlAttributeAccessExpr.namespaces.putAll(symResolver.resolveAllNamespaces(env));
resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
}
/**
 * Type-checks a string template literal: each interpolated expression must be
 * string-compatible, and the literal's type is string.
 */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
checkStringTemplateExprs(stringTemplateLiteral.exprs, false);
resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
/**
 * Type-checks an integer range expression {@code a ... b}: both bounds must be
 * ints, and the range itself is treated as an int array.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
checkExpr(intRangeExpression.startExpr, env, symTable.intType);
checkExpr(intRangeExpression.endExpr, env, symTable.intType);
resultType = new BArrayType(symTable.intType);
}
/**
 * Type-checks a table query expression. The surrounding context must expect a
 * table type; the query's streaming/join inputs are then analyzed.
 */
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    BType actualType = symTable.semanticError;
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.TABLE) {
        actualType = expType;
    } else if (expTypeTag != TypeTags.SEMANTIC_ERROR) {
        // Anything other than a table expectation (or an earlier error) is invalid.
        dlog.error(tableQueryExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, expType);
    }
    ((BLangTableQuery) tableQueryExpression.getTableQuery()).accept(this);
    resultType = types.checkType(tableQueryExpression, actualType, expType);
}
/**
 * Analyzes a table query: its streaming input always, and its join streaming
 * input when present.
 */
@Override
public void visit(BLangTableQuery tableQuery) {
    ((BLangStreamingInput) tableQuery.getStreamingInput()).accept(this);
    BLangJoinStreamingInput joinInput = (BLangJoinStreamingInput) tableQuery.getJoinStreamingInput();
    if (joinInput != null) {
        joinInput.accept(this);
    }
}
/**
 * Analyzes a select clause: every select expression, followed by the optional
 * group-by and having clauses.
 */
@Override
public void visit(BLangSelectClause selectClause) {
    for (SelectExpressionNode selectExpr : selectClause.getSelectExpressions()) {
        ((BLangSelectExpression) selectExpr).accept(this);
    }
    BLangGroupBy groupBy = (BLangGroupBy) selectClause.getGroupBy();
    if (groupBy != null) {
        groupBy.accept(this);
    }
    BLangHaving having = (BLangHaving) selectClause.getHaving();
    if (having != null) {
        having.accept(this);
    }
}
/**
 * Analyzes a single select expression by visiting its underlying expression.
 */
@Override
public void visit(BLangSelectExpression selectExpression) {
    ((BLangExpression) selectExpression.getExpression()).accept(this);
}
/**
 * Analyzes each grouping variable expression of a group-by clause.
 */
@Override
public void visit(BLangGroupBy groupBy) {
    groupBy.getVariables().forEach(varExpr -> ((BLangExpression) varExpr).accept(this));
}
/**
 * Analyzes the condition expression of a having clause.
 */
@Override
public void visit(BLangHaving having) {
    ((BLangExpression) having.getExpression()).accept(this);
}
/**
 * Analyzes every order-by variable of an order-by clause.
 */
@Override
public void visit(BLangOrderBy orderBy) {
    orderBy.getVariables().forEach(variable -> ((BLangOrderByVariable) variable).accept(this));
}
/**
 * Analyzes an order-by variable by visiting its variable reference.
 */
@Override
public void visit(BLangOrderByVariable orderByVariable) {
    ((BLangExpression) orderByVariable.getVariableReference()).accept(this);
}
/**
 * Analyzes the streaming input side of a join streaming input.
 */
@Override
public void visit(BLangJoinStreamingInput joinStreamingInput) {
    ((BLangStreamingInput) joinStreamingInput.getStreamingInput()).accept(this);
}
/**
 * Analyzes a streaming input by visiting its stream reference expression.
 */
@Override
public void visit(BLangStreamingInput streamingInput) {
    ((BLangExpression) streamingInput.getStreamReference()).accept(this);
}
/**
 * Type-checks a rest-argument expression ({@code ...expr}) by checking the
 * wrapped expression against the current expected type.
 */
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/**
 * Type-checks a named-argument expression ({@code name = expr}); the named
 * arg's type is the type of its value expression.
 */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
bLangNamedArgsExpression.type = bLangNamedArgsExpression.expr.type;
}
/**
 * Type-checks a match expression {@code expr but { pattern => value, ... }}.
 * Pattern variables are defined in a fresh block env; each pattern's value
 * expression is checked against the expected type, and the overall type is the
 * union of all possible result types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
checkExpr(bLangMatchExpression.expr, matchExprEnv);
bLangMatchExpression.patternClauses.forEach(pattern -> {
// `_`-suffixed names are ignore-patterns and are not defined as symbols.
if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
symbolEnter.defineNode(pattern.variable, matchExprEnv);
}
checkExpr(pattern.expr, matchExprEnv, expType);
pattern.variable.type = symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv);
});
LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
BType actualType;
if (matchExprTypes.contains(symTable.semanticError)) {
actualType = symTable.semanticError;
} else if (matchExprTypes.size() == 1) {
actualType = matchExprTypes.toArray(new BType[0])[0];
} else {
actualType = BUnionType.create(null, matchExprTypes);
}
resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
/**
 * Type-checks a {@code check} expression; shares its logic with
 * {@code checkpanic} via {@link #visitCheckAndCheckPanicExpr}.
 */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a {@code checkpanic} expression; shares its logic with
 * {@code check} via {@link #visitCheckAndCheckPanicExpr}.
 */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Common type-checking for `check` and `checkpanic`. The operand must be a
 * union containing at least one error-compatible member AND at least one
 * non-error member; the expression's type is the union of the non-error
 * members. Worker-receive operands are handled in two passes, like trap.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
// First pass for worker-receive operands has no type on the expr yet.
boolean firstVisit = checkedExpr.expr.type == null;
BType exprExpType;
if (expType == symTable.noType) {
exprExpType = symTable.noType;
} else {
// The operand may produce the expected type or an error.
exprExpType = BUnionType.create(null, expType, symTable.errorType);
}
BType exprType = checkExpr(checkedExpr.expr, env, exprExpType);
if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
if (firstVisit) {
// Defer; a second visit completes the check with recorded types.
isTypeChecked = false;
resultType = expType;
return;
} else {
expType = checkedExpr.type;
exprType = checkedExpr.expr.type;
}
}
if (exprType.tag != TypeTags.UNION) {
// A non-union operand is either all-error or has no error at all — both invalid.
if (types.isAssignable(exprType, symTable.errorType)) {
dlog.error(checkedExpr.expr.pos,
DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType);
} else if (exprType != symTable.semanticError) {
dlog.error(checkedExpr.expr.pos,
DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
}
checkedExpr.type = symTable.semanticError;
return;
}
// Partition the union's members into error-compatible (true) and others (false).
BUnionType unionType = (BUnionType) exprType;
Map<Boolean, List<BType>> resultTypeMap = unionType.getMemberTypes().stream()
.collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType)));
checkedExpr.equivalentErrorTypeList = resultTypeMap.get(true);
if (checkedExpr.equivalentErrorTypeList == null ||
checkedExpr.equivalentErrorTypeList.size() == 0) {
dlog.error(checkedExpr.expr.pos,
DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
checkedExpr.type = symTable.semanticError;
return;
}
List<BType> nonErrorTypeList = resultTypeMap.get(false);
if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) {
dlog.error(checkedExpr.expr.pos,
DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType);
checkedExpr.type = symTable.semanticError;
return;
}
// The check expression yields the non-error part of the union.
BType actualType;
if (nonErrorTypeList.size() == 1) {
actualType = nonErrorTypeList.get(0);
} else {
actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypeList));
}
resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * A service constructor expression's type is the type already recorded on the
 * corresponding service node's symbol.
 */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
/**
 * Type-checks a type-test expression {@code expr is T}: the tested type is
 * resolved, the operand checked, and the result is always boolean.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
typeTestExpr.typeNode.type = symResolver.resolveTypeNode(typeTestExpr.typeNode, env);
checkExpr(typeTestExpr.expr, env);
resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
/**
 * Type-checks an annotation access expression {@code typedesc.@annot}. The
 * accessed expression must be a typedesc; the annotation is resolved by
 * package alias and name, and the result is the annotation's attached type
 * (or `true` for value-less annotations) unioned with nil for the absent case.
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BType actualType = symTable.semanticError;
    BSymbol symbol = this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
            names.fromString(annotAccessExpr.pkgAlias.getValue()),
            names.fromString(annotAccessExpr.annotationName.getValue()));
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        BAnnotationSymbol annotationSymbol = (BAnnotationSymbol) symbol;
        annotAccessExpr.annotationSymbol = annotationSymbol;
        // No attached type means the annotation holds just `true` when present.
        BType annotType = annotationSymbol.attachedType == null ? symTable.trueType
                : annotationSymbol.attachedType.type;
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Returns true when the expression is one of the variable-reference kinds
 * allowed on the LHS of a binding; otherwise logs an invalid-binding-pattern
 * error and returns false.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
switch (varRef.getKind()) {
case SIMPLE_VARIABLE_REF:
case RECORD_VARIABLE_REF:
case TUPLE_VARIABLE_REF:
case ERROR_VARIABLE_REF:
case FIELD_BASED_ACCESS_EXPR:
case INDEX_BASED_ACCESS_EXPR:
case XML_ATTRIBUTE_ACCESS_EXPR:
return true;
default:
dlog.error(varRef.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, varRef.type);
return false;
}
}
/**
 * Defines the arrow function's parameters in a dedicated environment and
 * type-checks its body expression against the expected return type.
 *
 * @return the checked type of the arrow function's body expression
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    for (BLangSimpleVariable param : bLangArrowFunction.params) {
        symbolEnter.defineNode(param, arrowFunctionEnv);
    }
    return checkExpr(bLangArrowFunction.expression, arrowFunctionEnv, expectedRetType);
}
/**
 * Stamps the expected parameter types onto the arrow function's parameters.
 * On an arity mismatch an error is logged and all parameters are marked as
 * semantic errors.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    int expectedParamCount = paramTypes.size();
    int actualParamCount = bLangArrowFunction.params.size();
    if (expectedParamCount != actualParamCount) {
        dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                expectedParamCount, actualParamCount);
        resultType = symTable.semanticError;
        for (BLangSimpleVariable param : bLangArrowFunction.params) {
            param.type = symTable.semanticError;
        }
        return;
    }
    for (int i = 0; i < actualParamCount; i++) {
        BLangSimpleVariable param = bLangArrowFunction.params.get(i);
        BType expectedParamType = paramTypes.get(i);
        // Synthesize a type node so the parameter carries an explicit type downstream.
        BLangValueType valueTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        valueTypeNode.setTypeKind(expectedParamType.getKind());
        param.setTypeNode(valueTypeNode);
        param.type = expectedParamType;
    }
}
/**
 * Reports a self-reference error when the variable being initialized is
 * referenced inside its own initializer.
 * NOTE(review): the method name is a long-standing typo for
 * "checkSelfReferences"; renaming would require touching all call sites.
 */
private void checkSefReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) {
if (env.enclVarSym == varSymbol) {
dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name);
}
}
/**
 * Produces a list of {@code count} semantic-error placeholder types, used as
 * the result when an invocation cannot be typed (e.g. arity mismatch).
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    while (errorTypes.size() < count) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
/**
 * Type-checks a function invocation. Resolution order: (1) an attached
 * function of the enclosing type (unqualified calls inside an object), then
 * (2) a variable/function in the target package, then (3) an (error)
 * constructor. Error constructors branch off to dedicated handling; remote
 * and resource functions may not be called with plain invocation syntax.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
Name funcName = names.fromIdNode(iExpr.name);
Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
BSymbol funcSymbol = symTable.notFoundSymbol;
// (1) Unqualified call inside a type: try the enclosing type's attached functions.
if (pkgAlias == Names.EMPTY && env.enclType != null) {
Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(
env.enclType.type.tsymbol.name.value, funcName.value));
funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName,
env.enclType.type.tsymbol);
if (funcSymbol != symTable.notFoundSymbol) {
// Attached function call implies a `self` receiver.
iExpr.exprSymbol = symResolver.lookupSymbol(env, Names.SELF, SymTag.VARIABLE);
}
}
// (2)/(3) Package-level lookup: variable first, then constructor.
if (symResolver.resolvePkgSymbol(iExpr.pos, env, pkgAlias) != symTable.notFoundSymbol) {
if (funcSymbol == symTable.notFoundSymbol) {
funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.VARIABLE);
}
if (funcSymbol == symTable.notFoundSymbol) {
funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.CONSTRUCTOR);
}
}
// Error constructors (direct or indirect) get dedicated handling.
if ((funcSymbol.tag & SymTag.ERROR) == SymTag.ERROR
|| ((funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR && funcSymbol.type.tag == TypeTags.ERROR)) {
iExpr.symbol = funcSymbol;
iExpr.type = funcSymbol.type;
checkErrorConstructorInvocation(iExpr);
return;
} else if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
// Still check the arguments so their own errors are reported.
iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
resultType = symTable.semanticError;
return;
}
if (isFunctionPointer(funcSymbol)) {
iExpr.functionPointerInvocation = true;
}
// remote/resource functions cannot be invoked with plain call syntax.
if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX);
}
if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
}
iExpr.symbol = funcSymbol;
checkInvocationParamAndReturnType(iExpr);
}
/**
 * Returns true when the symbol is neither a declared function nor a
 * function-pointer variable, i.e. it cannot be invoked.
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    boolean isDeclaredFunction = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION;
    return !isDeclaredFunction && !isFunctionPointer(funcSymbol);
}
/**
 * Returns true when the symbol is a function pointer: a non-native variable
 * symbol of kind FUNCTION (declared functions are excluded).
 */
private boolean isFunctionPointer(BSymbol funcSymbol) {
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        return false;
    }
    boolean isVariableSymbol = (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE;
    boolean isNative = (funcSymbol.flags & Flags.NATIVE) == Flags.NATIVE;
    return isVariableSymbol && funcSymbol.kind == SymbolKind.FUNCTION && !isNative;
}
/**
 * Type-checks an error constructor invocation (direct `error(...)` or an
 * indirect user-defined error constructor). Determines the expected error
 * type, validates the reason argument and the detail arguments against the
 * constructor's detail record/map type, and normalizes detail args to named
 * args. NOTE: may reassign the {@code expType} field for `var` contexts.
 */
private void checkErrorConstructorInvocation(BLangInvocation iExpr) {
if (!types.isAssignable(expType, symTable.errorType)) {
if (expType != symTable.noType) {
// Context expects a non-error type: cannot infer the error type.
dlog.error(iExpr.pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType);
resultType = symTable.semanticError;
return;
} else if ((iExpr.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
// Indirect constructor in `var` context: its own type drives checking.
expType = iExpr.type;
} else {
expType = symTable.errorType;
}
}
BErrorType expectedError = getExpectedErrorType(iExpr.pos, expType, iExpr.symbol);
if (expectedError == null) {
return;
}
BErrorType ctorType = (BErrorType) expectedError.ctorSymbol.type;
// Zero-arg invocation is only valid when the reason is a single const value.
if (iExpr.argExprs.isEmpty() && checkNoArgErrorCtorInvocation(ctorType, iExpr.pos)) {
return;
}
// Indirect constructors carry the reason implicitly; a positional reason is illegal.
if (nonNamedArgsGiven(iExpr) && (iExpr.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INDIRECT_ERROR_CTOR_REASON_NOT_ALLOWED);
resultType = symTable.semanticError;
return;
}
boolean reasonArgGiven = checkErrorReasonArg(iExpr, ctorType);
if (ctorType.detailType.tag == TypeTags.RECORD) {
// Record-typed detail: build a record from the named args and check assignability.
BRecordType targetErrorDetailRec = (BRecordType) ctorType.detailType;
BRecordType recordType = createErrorDetailRecordType(iExpr, reasonArgGiven, targetErrorDetailRec);
if (resultType == symTable.semanticError) {
return;
}
if (!types.isAssignable(recordType, targetErrorDetailRec)) {
dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_CONSTRUCTOR_DETAIL, iExpr);
resultType = symTable.semanticError;
return;
}
} else {
// Map-typed detail: each named arg must match the map constraint.
BMapType targetErrorDetailMap = (BMapType) ctorType.detailType;
List<BLangNamedArgsExpression> providedErrorDetails = getProvidedErrorDetails(iExpr, reasonArgGiven);
if (providedErrorDetails == null) {
return;
}
for (BLangNamedArgsExpression errorDetailArg : providedErrorDetails) {
checkExpr(errorDetailArg, env, targetErrorDetailMap.constraint);
}
}
setErrorReasonParam(iExpr, reasonArgGiven, ctorType);
setErrorDetailArgsToNamedArgsList(iExpr);
resultType = expectedError;
iExpr.symbol = expectedError.ctorSymbol;
}
/**
 * Determines the concrete error type an error constructor should produce.
 * For the generic {@code error(...)} constructor with a union expected type,
 * exactly one union member must be assignable-from-error; otherwise the target
 * type is ambiguous and null is returned after logging a diagnostic.
 */
private BErrorType getExpectedErrorType(DiagnosticPos pos, BType expType, BSymbol iExprSymbol) {
    if (iExprSymbol == symTable.errorType.tsymbol) {
        if (expType.tag == TypeTags.UNION) {
            List<BType> matchedErrors = ((BUnionType) expType).getMemberTypes().stream()
                    .filter(m -> types.isAssignable(m, iExprSymbol.type))
                    .collect(Collectors.toList());
            if (matchedErrors.size() == 1) {
                return (BErrorType) matchedErrors.get(0);
            } else {
                // Zero or several matching members: cannot infer which error to construct.
                dlog.error(pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType);
                resultType = symTable.semanticError;
                return null;
            }
        }
        return (BErrorType) expType;
    } else {
        // Indirect constructor: the symbol itself carries the target error type.
        return (BErrorType) iExprSymbol.type;
    }
}
/**
 * Whether the invocation contains at least one positional argument
 * (i.e. an argument that is not of the form {@code name = value}).
 */
private boolean nonNamedArgsGiven(BLangInvocation iExpr) {
    for (BLangExpression argExpr : iExpr.argExprs) {
        if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            return true;
        }
    }
    return false;
}
/**
 * Detects whether the first argument is a positional error reason and, if so,
 * type-checks it against the constructor's reason type.
 *
 * @return true when a positional reason argument was present and checked
 */
private boolean checkErrorReasonArg(BLangInvocation iExpr, BErrorType ctorType) {
    if (iExpr.argExprs.isEmpty()) {
        return false;
    }
    BLangExpression candidate = iExpr.argExprs.get(0);
    if (candidate.getKind() == NodeKind.NAMED_ARGS_EXPR) {
        // Named first argument: no positional reason was given.
        return false;
    }
    checkExpr(candidate, env, ctorType.reasonType, DiagnosticCode.INVALID_ERROR_REASON_TYPE);
    return true;
}
/**
 * Validates a no-argument error constructor call. Such a call is only legal
 * when the reason type is a finite type with exactly one value, since the
 * reason must then be inferable without an explicit argument.
 *
 * @return true when a diagnostic was reported (caller should stop), false otherwise
 */
private boolean checkNoArgErrorCtorInvocation(BErrorType errorType, DiagnosticPos pos) {
    if (errorType.reasonType.tag != TypeTags.FINITE) {
        dlog.error(pos, DiagnosticCode.INDIRECT_ERROR_CTOR_NOT_ALLOWED_ON_NON_CONST_REASON,
                errorType.ctorSymbol.name);
        resultType = symTable.semanticError;
        return true;
    } else {
        BFiniteType finiteType = (BFiniteType) errorType.reasonType;
        if (finiteType.valueSpace.size() != 1) {
            // More than one possible reason value: the reason cannot be defaulted.
            if (errorType == symTable.errorType) {
                dlog.error(pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType.tsymbol.name);
            } else {
                dlog.error(pos, DiagnosticCode.INDIRECT_ERROR_CTOR_NOT_ALLOWED_ON_NON_CONST_REASON,
                        expType.tsymbol.name);
            }
            resultType = symTable.semanticError;
            return true;
        }
    }
    return false;
}
/**
 * Moves every named error-detail argument into the required-args list; any
 * positional argument remaining at this point is reported as a semantic error.
 */
private void setErrorDetailArgsToNamedArgsList(BLangInvocation iExpr) {
    List<BLangExpression> movedNamedArgs = new ArrayList<>(iExpr.argExprs.size());
    for (BLangExpression argExpr : iExpr.argExprs) {
        checkExpr(argExpr, env, symTable.pureType);
        if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            dlog.error(argExpr.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG);
            resultType = symTable.semanticError;
        } else {
            iExpr.requiredArgs.add(argExpr);
            movedNamedArgs.add(argExpr);
        }
    }
    // Remove the moved args only after iteration to avoid concurrent modification.
    for (BLangExpression moved : movedNamedArgs) {
        iExpr.argExprs.remove(moved);
    }
}
/**
 * Ensures the error reason becomes the first required argument. When no reason
 * was given explicitly and the reason type is a finite (const) type, its single
 * value is injected; otherwise the first positional argument is promoted.
 */
private void setErrorReasonParam(BLangInvocation iExpr, boolean reasonArgGiven, BErrorType ctorType) {
    if (!reasonArgGiven && ctorType.reasonType.getKind() == TypeKind.FINITE) {
        BFiniteType finiteType = (BFiniteType) ctorType.reasonType;
        // Single-member value space guaranteed by earlier validation.
        BLangExpression reasonExpr = (BLangExpression) finiteType.valueSpace.toArray()[0];
        iExpr.requiredArgs.add(reasonExpr);
        return;
    }
    // NOTE(review): assumes argExprs is non-empty here — callers must have
    // validated the presence of a positional reason argument beforehand.
    iExpr.requiredArgs.add(iExpr.argExprs.get(0));
    iExpr.argExprs.remove(0);
}
/**
 * Create a error detail record using all metadata from {@code targetErrorDetailsType} and put actual error details
 * from {@code iExpr} expression.
 *
 * @param iExpr error constructor invocation
 * @param reasonArgGiven error reason is provided as first argument
 * @param targetErrorDetailsType target error details type to extract metadata such as pkgId from
 * @return error detail record, or null when a detail argument was not a named argument
 */
private BRecordType createErrorDetailRecordType(BLangInvocation iExpr, boolean reasonArgGiven,
                                                BRecordType targetErrorDetailsType) {
    List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr, reasonArgGiven);
    if (namedArgs == null) {
        // Diagnostics already logged for a malformed (non-named) detail arg.
        return null;
    }
    BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD, targetErrorDetailsType.tsymbol.flags, Names.EMPTY, targetErrorDetailsType.tsymbol.pkgID,
            symTable.recordType, null);
    BRecordType recordType = new BRecordType(recordTypeSymbol);
    // Mirror openness so assignability against the target behaves correctly.
    recordType.sealed = targetErrorDetailsType.sealed;
    recordType.restFieldType = targetErrorDetailsType.restFieldType;
    Set<Name> availableErrorDetailFields = new HashSet<>();
    for (BLangNamedArgsExpression arg : namedArgs) {
        // One synthetic field per provided named argument.
        Name fieldName = names.fromIdNode(arg.name);
        BField field = new BField(fieldName, arg.pos, new BVarSymbol(0, fieldName, null, arg.type, null));
        recordType.fields.add(field);
        availableErrorDetailFields.add(fieldName);
    }
    for (BField field : targetErrorDetailsType.fields) {
        // Carry over target fields that are not required and were not provided,
        // so the synthetic record stays assignable to the target type.
        boolean notRequired = (field.symbol.flags & Flags.REQUIRED) != Flags.REQUIRED;
        if (notRequired && !availableErrorDetailFields.contains(field.name)) {
            BField defaultableField = new BField(field.name, iExpr.pos,
                    new BVarSymbol(field.symbol.flags, field.name, null, field.type, null));
            recordType.fields.add(defaultableField);
        }
    }
    return recordType;
}
/**
 * Collects the named error-detail arguments of an error constructor call.
 *
 * @param reasonArgGiven when true, the first argument is the reason and is skipped
 * @return the named detail args, or null when a positional detail arg was found
 */
private List<BLangNamedArgsExpression> getProvidedErrorDetails(BLangInvocation iExpr, boolean reasonArgGiven) {
    int firstDetailIndex = reasonArgGiven ? 1 : 0;
    List<BLangNamedArgsExpression> details = new ArrayList<>();
    for (int idx = firstDetailIndex; idx < iExpr.argExprs.size(); idx++) {
        BLangExpression detailArg = iExpr.argExprs.get(idx);
        checkExpr(detailArg, env);
        if (detailArg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            details.add((BLangNamedArgsExpression) detailArg);
            continue;
        }
        // Error details may only be given as named arguments.
        dlog.error(detailArg.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG);
        resultType = symTable.semanticError;
        return null;
    }
    return details;
}
/**
 * Type-checks a method invocation on an object type. Falls back to lang-lib
 * method resolution when no attached function matches.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
    // Attached functions are registered under a mangled `<type>.<func>` name.
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_OBJECT, iExpr.name.value, objectType);
            resultType = symTable.semanticError;
        }
        // Fix: a successful lang-lib invocation has already set iExpr.symbol and
        // type-checked the call; falling through here would overwrite the symbol
        // with `notFoundSymbol` and re-run the invocation check.
        return;
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        // Remote functions require the `->` action-invocation syntax.
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Type-checks a remote action invocation ({@code ep->action(...)}). The receiver
 * must be an endpoint of an object type declaring a {@code remote} function with
 * the invoked name.
 */
private void checkActionInvocationExpr(BLangInvocation iExpr, BType epType) {
    BType actualType = symTable.semanticError;
    if (epType == symTable.semanticError || epType.tag != TypeTags.OBJECT
            || ((BLangVariableReference) iExpr.expr).symbol.tag != SymTag.ENDPOINT) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
        resultType = actualType;
        return;
    }
    final BVarSymbol epSymbol = (BVarSymbol) ((BLangVariableReference) iExpr.expr).symbol;
    // Remote functions are registered under a mangled `<type>.<name>` symbol.
    Name remoteFuncQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(epType.tsymbol.name.value, iExpr.name.value));
    Name actionName = names.fromIdNode(iExpr.name);
    BSymbol remoteFuncSymbol = symResolver
            .lookupMemberSymbol(iExpr.pos, ((BObjectTypeSymbol) epSymbol.type.tsymbol).methodScope, env,
                    remoteFuncQName, SymTag.FUNCTION);
    if (remoteFuncSymbol == symTable.notFoundSymbol || !Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE)) {
        // Either undefined, or defined but not marked `remote`.
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_ACTION, actionName, epSymbol.type.tsymbol.name);
        resultType = actualType;
        return;
    }
    iExpr.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Attempts to resolve {@code iExpr} as a lang-lib method call on {@code bType}
 * (e.g. {@code x.length()}). On success the receiver expression is rewritten to
 * be the first argument and the invocation is fully type-checked.
 *
 * @return true when a matching lang-lib method was found and checked
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);
    if (funcSymbol == symTable.notFoundSymbol) {
        return false;
    }
    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    // Check in a dedicated invocation env; restore the previous env afterwards.
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    // The receiver becomes the implicit first argument of the lang-lib function.
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;
    return true;
}
/**
 * Validates the argument list, then reconciles the invocation's actual
 * return type with the currently expected type.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    resultType = types.checkType(iExpr, checkInvocationParam(iExpr), this.expType);
}
/**
 * Splits the invocation's argument expressions into required args and rest args,
 * validating named/positional/rest ordering along the way, then delegates the
 * per-argument type checking to {@link #checkInvocationArgs}.
 *
 * @return the invocation's actual (return) type, or noType on a non-invokable symbol
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes();
    Map<String, BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params
            .stream().collect(Collectors.toMap(a -> a.name.getValue(), a -> a));
    int parameterCount;
    if (iExpr.symbol.tag == SymTag.VARIABLE) {
        // Function-pointer call: parameter symbols are unavailable, use the type.
        parameterCount = paramTypes.size();
    } else {
        parameterCount = ((BInvokableSymbol) iExpr.symbol).params.size();
    }
    iExpr.requiredArgs = new ArrayList<>();
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                BVarSymbol varSymbol = params.get(((BLangNamedArgsExpression) expr).name.value);
                // Fix: `varSymbol` is null when the named arg matches no declared
                // parameter. The undefined-parameter diagnostic is reported later
                // in checkNonRestArgs; dereferencing flags here would NPE for
                // cross-package invocations.
                if (varSymbol != null && !env.enclPkg.packageID.equals(iExpr.symbol.pkgID)
                        && !Symbols.isFlagOn(varSymbol.flags, Flags.PUBLIC)) {
                    dlog.error(expr.pos, DiagnosticCode.NON_PUBLIC_ARG_ACCESSED_WITH_NAMED_ARG,
                            ((BLangNamedArgsExpression) expr).name.value, iExpr.toString());
                }
                foundNamedArg = true;
                if (i < parameterCount) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                if (foundNamedArg) {
                    // Rest args may not follow named args.
                    dlog.error(expr.pos, DiagnosticCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default: // positional argument
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCount) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    // Surplus positional args become rest args.
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg);
}
/**
 * Checks non-rest and rest arguments against the invokable's signature and
 * computes the invocation's result type (wrapped in a future for async calls).
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg) {
    BType actualType = symTable.semanticError;
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableSymbol.params);
    checkNonRestArgs(nonRestParams, iExpr, paramTypes);
    // Surplus args without a rest parameter to absorb them is an arity error.
    if (invokableSymbol.restParam == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return actualType;
    }
    checkRestArgs(iExpr.restArgs, vararg, invokableSymbol.restParam);
    BType retType = typeParamAnalyzer.getReturnTypeParams(env, invokableSymbol.type.getReturnType());
    if (iExpr.async) {
        // `start f(...)` evaluates to future<retType>.
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Wraps {@code retType} in a future type for an async invocation. Futures
 * created for worker lambdas are tagged so later phases can distinguish them
 * from futures of ordinary async calls.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    boolean workerDerivative = invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX);
    return new BFutureType(TypeTags.FUTURE, retType, null, workerDerivative);
}
/**
 * Type-checks the non-rest (positional and named) arguments of an invocation
 * against the corresponding parameters, reporting arity problems, duplicate
 * named args, undefined parameters, and missing required parameters.
 */
private void checkNonRestArgs(List<BVarSymbol> nonRestParams, BLangInvocation iExpr, List<BType> paramTypes) {
    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> requiredParams = nonRestParams.stream()
            .filter(param -> !param.defaultableParam)
            .collect(Collectors.toList());
    if (nonRestArgs.size() < requiredParams.size()) {
        // More specific diagnostics may still follow below.
        dlog.error(iExpr.pos, DiagnosticCode.NOT_ENOUGH_ARGS_FUNC_CALL, iExpr.name.value);
    }
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();
    for (int i = 0; i < nonRestArgs.size(); i++) {
        BLangExpression arg = nonRestArgs.get(i);
        BType expectedType = paramTypes.get(i);
        // A lang-lib call re-uses the already-checked receiver as the first arg.
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            types.checkType(arg.pos, arg.type, expectedType, DiagnosticCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.type, expectedType);
        }
        if (iExpr.symbol.tag == SymTag.VARIABLE) {
            // Function-pointer call: only positional matching by type is possible.
            if (i < paramTypes.size()) {
                checkExpr(arg, this.env, paramTypes.get(i));
                typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, expectedType);
                continue;
            }
            dlog.error(arg.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            return;
        }
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // Positional argument: match against the parameter in the same slot.
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkExpr(arg, this.env, param.type);
                typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, expectedType);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            dlog.error(arg.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            return;
        }
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            // Named argument: find the matching parameter by name.
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = nonRestParams.stream()
                    .filter(param -> param.getName().value.equals(argName.value))
                    .findAny()
                    .orElse(null);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkExpr(arg, this.env, varSym.type);
            typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, varSym.type);
            valueProvidedParams.add(varSym);
        }
    }
    // Anything still in requiredParams received no value at all.
    for (BVarSymbol reqParam : requiredParams) {
        dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_PARAMETER, reqParam.name, iExpr.name.value);
    }
}
/**
 * Type-checks the rest arguments of an invocation. An explicit vararg
 * expression and individual rest args are mutually exclusive.
 */
private void checkRestArgs(List<BLangExpression> restArgExprs, BLangExpression vararg, BVarSymbol restParam) {
    if (vararg != null) {
        if (!restArgExprs.isEmpty()) {
            dlog.error(vararg.pos, DiagnosticCode.INVALID_REST_ARGS);
            return;
        }
        // The vararg must match the rest parameter's (array) type as a whole.
        checkExpr(vararg, this.env, restParam.type);
        restArgExprs.add(vararg);
        return;
    }
    // Each individual rest argument must match the rest parameter's element type.
    // (The cast stays inside the loop: restParam may be null when there are no rest args.)
    for (BLangExpression restArg : restArgExprs) {
        BType elemType = ((BArrayType) restParam.type).eType;
        checkExpr(restArg, this.env, elemType);
        typeParamAnalyzer.checkForTypeParamsInArg(restArg.type, env, elemType);
    }
}
/**
 * Type-checks one key-value pair of a mapping constructor against the
 * containing type (record, map, json, or error detail).
 */
private void checkRecLiteralKeyValue(BLangRecordKeyValue keyValuePair, BType recType) {
    BType fieldType = symTable.semanticError;
    BLangExpression valueExpr = keyValuePair.valueExpr;
    switch (recType.tag) {
        case TypeTags.RECORD:
            fieldType = checkRecordLiteralKeyExpr(keyValuePair.key, (BRecordType) recType);
            break;
        case TypeTags.MAP:
            fieldType = checkValidJsonOrMapLiteralKeyExpr(keyValuePair.key) ? ((BMapType) recType).constraint :
                    symTable.semanticError;
            break;
        case TypeTags.JSON:
            fieldType = checkValidJsonOrMapLiteralKeyExpr(keyValuePair.key) ? symTable.jsonType :
                    symTable.semanticError;
            // JSON values are handled fully here (including implicit conversions)
            // and the method returns without reaching the shared check below.
            checkExpr(valueExpr, this.env, fieldType);
            if (valueExpr.impConversionExpr == null) {
                types.checkTypes(valueExpr, Lists.of(valueExpr.type), Lists.of(symTable.jsonType));
            } else {
                // Validate the converted type but keep the original expression type.
                BType valueType = valueExpr.type;
                types.checkType(valueExpr, valueExpr.impConversionExpr.type, symTable.jsonType);
                valueExpr.type = valueType;
            }
            resultType = valueExpr.type;
            return;
        case TypeTags.ERROR:
            // NOTE(review): this case has no break, so valueExpr is checked here
            // and then again by the shared call below — confirm the double
            // checkExpr / fall-through is intended.
            checkExpr(valueExpr, this.env, fieldType);
    }
    checkExpr(valueExpr, this.env, fieldType);
}
/**
 * Resolves the type of a record-literal key. Computed keys yield the union of
 * all possible field types; identifier/string keys resolve to the named field's
 * type, or to the rest-field type for open records.
 */
private BType checkRecordLiteralKeyExpr(BLangRecordKey key, BRecordType recordType) {
    Name fieldName;
    BLangExpression keyExpr = key.expr;
    if (key.computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.type == symTable.semanticError) {
            return symTable.semanticError;
        }
        // The key is only known at runtime: the value may target any field.
        LinkedHashSet<BType> fieldTypes = recordType.fields.stream()
                .map(field -> field.type)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }
        return BUnionType.create(null, fieldTypes);
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
        return symTable.semanticError;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env,
            fieldName, recordType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        if (recordType.sealed) {
            // Closed record: unknown keys are errors.
            dlog.error(keyExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                    recordType.tsymbol.type.getKind().typeName(), recordType.tsymbol);
            return symTable.semanticError;
        }
        // Open record: the key lands in the rest fields.
        return recordType.restFieldType;
    }
    return fieldSymbol.type;
}
/**
 * Whether {@code key} is a valid key for a json/map literal: a computed key
 * that type-checks as a string, an identifier, or a string literal.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangRecordKey key) {
    BLangExpression keyExpr = key.expr;
    if (key.computedKey) {
        // A computed key is valid as long as it type-checks as a string.
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.type != symTable.semanticError;
    }
    boolean identifierKey = keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF;
    boolean stringLiteralKey = keyExpr.getKind() == NodeKind.LITERAL
            && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING;
    if (identifierKey || stringLiteralKey) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Types a record-literal key expression: identifier keys are implicitly
 * strings; anything else is type-checked against string.
 */
private BType checkRecLiteralKeyExpr(BLangExpression keyExpr) {
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        keyExpr.type = symTable.stringType;
        return keyExpr.type;
    }
    return checkExpr(keyExpr, this.env, symTable.stringType);
}
/**
 * Validates the index expression of an index-based object field access.
 * Only (numeric or plain) literals are permitted.
 */
private BType checkIndexExprForObjectFieldAccess(BLangExpression indexExpr) {
    boolean literalIndex = indexExpr.getKind() == NodeKind.LITERAL
            || indexExpr.getKind() == NodeKind.NUMERIC_LITERAL;
    if (literalIndex) {
        return checkExpr(indexExpr, this.env, symTable.stringType);
    }
    indexExpr.type = symTable.semanticError;
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_STRUCT_FIELD_ACCESS);
    return indexExpr.type;
}
/**
 * Widens a non-nillable member type with nil (index-based access can miss);
 * already-nillable types are returned untouched.
 */
private BType addNilForNillableIndexBasedAccess(BType actualType) {
    return actualType.isNullable() ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves {@code fieldName} on a record only when it exists and is a required
 * (non-optional) field; otherwise returns semanticError.
 */
private BType checkRecordRequiredFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrOptional = fieldSymbol == symTable.notFoundSymbol || Symbols.isOptional(fieldSymbol);
    if (missingOrOptional) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves {@code fieldName} on a record only when it exists and is declared
 * optional; otherwise returns semanticError.
 */
private BType checkRecordOptionalFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                             BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean missingOrRequired = fieldSymbol == symTable.notFoundSymbol || !Symbols.isOptional(fieldSymbol);
    if (missingOrRequired) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves an access that targets a record's rest fields: the name must NOT be
 * a declared field and the record must be open (non-sealed).
 */
private BType checkRecordRestFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
                                         BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol || recordType.sealed) {
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}
/**
 * Resolves a field access on an object type. Falls back to looking up an
 * attached function of the same name when no field matches.
 */
private BType checkObjectFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BObjectType objectType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, objectType.tsymbol);
    if (fieldSymbol != symTable.notFoundSymbol) {
        varReferExpr.symbol = fieldSymbol;
        return fieldSymbol.type;
    }
    // No field: try a method reference via the mangled attached-function name.
    Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value,
            fieldName.value));
    fieldSymbol = symResolver.resolveObjectField(varReferExpr.pos, env, objFuncName, objectType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Returns the member type at {@code indexValue} in a tuple. Indices past the
 * fixed members fall back to the rest type when one exists; anything else out
 * of range yields semanticError.
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType bTupleType = (BTupleType) tupleType;
    int memberCount = bTupleType.tupleTypes.size();
    if (indexValue >= memberCount && bTupleType.restType != null) {
        return bTupleType.restType;
    }
    if (indexValue < 0 || indexValue >= memberCount) {
        return symTable.semanticError;
    }
    return bTupleType.tupleTypes.get(indexValue);
}
/**
 * Validates the start and end tag names of an XML element literal: both must be
 * string-valued, and statically-known (QName) tags must match each other.
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);
    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName != null) {
        checkExpr(endTagName, xmlElementEnv, symTable.stringType);
    }
    if (endTagName == null) {
        // Self-closing element: nothing to match.
        return;
    }
    // Fix: the original compared startTagName's kind twice in both conditions,
    // so the end tag's kind was never actually inspected.
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME
            && startTagName.equals(endTagName)) {
        return;
    }
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        // Both sides are dynamic expressions; matching is deferred to runtime.
        return;
    }
    dlog.error(startTagName.pos, DiagnosticCode.XML_TAGS_MISMATCH);
}
/**
 * Checks the interpolated expressions of a string (or XML) template. Only the
 * simple basic types — plus xml when {@code allowXml} is set — may be
 * interpolated.
 */
private void checkStringTemplateExprs(List<BLangExpression> exprs, boolean allowXml) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);
        BType type = expr.type;
        if (type == symTable.semanticError) {
            continue;
        }
        // Tag ordering is used as a proxy for "simple basic type": anything with
        // a tag at or above JSON needs further vetting.
        if (type.tag >= TypeTags.JSON) {
            if (allowXml) {
                if (type.tag != TypeTags.XML) {
                    dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                            BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
                                    symTable.stringType, symTable.booleanType, symTable.xmlType),
                            type);
                }
                continue;
            }
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                    BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
                            symTable.stringType, symTable.booleanType), type);
        }
    }
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv symbol environment used to type-check each child
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
    List<BLangExpression> newChildren = new ArrayList<>();
    List<BLangExpression> tempConcatExpressions = new ArrayList<>();
    for (BLangExpression expr : exprs) {
        BType exprType = checkExpr(expr, xmlElementEnv);
        if (exprType == symTable.xmlType) {
            // An xml child flushes any pending run of text fragments first.
            if (!tempConcatExpressions.isEmpty()) {
                newChildren.add(getXMLTextLiteral(tempConcatExpressions));
                tempConcatExpressions = new ArrayList<>();
            }
            newChildren.add(expr);
            continue;
        }
        BType type = expr.type;
        // Tag ordering is used as a proxy for "simple basic type": anything at or
        // above JSON (other than xml, handled above) cannot be interpolated.
        if (type.tag >= TypeTags.JSON) {
            if (type != symTable.semanticError) {
                dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                        BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
                                symTable.stringType, symTable.booleanType, symTable.xmlType),
                        type);
            }
            continue;
        }
        tempConcatExpressions.add(expr);
    }
    // Flush a trailing run of text fragments.
    if (!tempConcatExpressions.isEmpty()) {
        newChildren.add(getXMLTextLiteral(tempConcatExpressions));
    }
    return newChildren;
}
/**
 * Builds a synthetic {@code lExpr + rExpr} node (used when folding template
 * fragments) and checks that its result is usable where a string is expected.
 */
private BLangExpression getBinaryAddExpr(BLangExpression lExpr, BLangExpression rExpr, BSymbol opSymbol) {
    BLangBinaryExpr addExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    addExpr.lhsExpr = lExpr;
    addExpr.rhsExpr = rExpr;
    addExpr.pos = rExpr.pos;
    addExpr.opKind = OperatorKind.ADD;
    if (opSymbol == symTable.notFoundSymbol) {
        // No `+` operator for these operand types.
        addExpr.type = symTable.semanticError;
    } else {
        addExpr.type = opSymbol.type.getReturnType();
        addExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    types.checkType(addExpr, addExpr.type, symTable.stringType);
    return addExpr;
}
/**
 * Wraps a run of concatenable fragments into a single XML text node,
 * anchored at the position of the first fragment.
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textNode = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textNode.textFragments = exprs;
    textNode.pos = exprs.get(0).pos;
    textNode.type = symTable.xmlType;
    return textNode;
}
/**
 * Type-checks the accessed expression without any expected type and reports
 * the type inferred for it.
 */
private BType getTypeOfExprInFieldAccess(BLangExpression expr) {
    checkExpr(expr, this.env, symTable.noType);
    return expr.type;
}
/**
 * Computes the final static type of a (possibly safe-navigated) access
 * expression: the actual member type, widened with nil when the access can miss
 * and with error when error-safe navigation applies.
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    // Remember the un-widened type; later passes rely on it.
    accessExpr.originalType = actualType;
    BUnionType unionType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        unionType.add(symTable.nilType);
    }
    BType parentType = accessExpr.expr.type;
    if (accessExpr.errorSafeNavigation
            && (parentType.tag == TypeTags.SEMANTIC_ERROR || (parentType.tag == TypeTags.UNION
            && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType)))) {
        unionType.add(symTable.errorType);
    }
    // Collapse a single-member union back to the plain type.
    if (unionType.getMemberTypes().size() == 1) {
        return unionType.getMemberTypes().toArray(new BType[0])[0];
    }
    return unionType;
}
/**
 * Whether accessing a member of {@code accessExpr.expr} can evaluate to nil
 * (so the access result must be widened with nil).
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.type;
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    // Of the non-nillable parents, only map member access can produce nil.
    if (parentType.tag != TypeTags.MAP) {
        return false;
    }
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR && accessExpr.expr.type.tag == TypeTags.MAP) {
        BType constraintType = ((BMapType) accessExpr.expr.type).constraint;
        // `any`/`json` constrained maps already cover nil within the member type.
        return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
    }
    return false;
}
/**
 * Resolves a field access on an object type or a union of object types. For a
 * union, the field must resolve in every member; the result is the union of the
 * per-member field types (collapsed when they all agree).
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }
    LinkedHashSet<BType> resolvedTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (memberFieldType == symTable.semanticError) {
            // One member lacking the field fails the whole access.
            return memberFieldType;
        }
        resolvedTypes.add(memberFieldType);
    }
    return resolvedTypes.size() == 1 ? resolvedTypes.iterator().next()
            : BUnionType.create(null, resolvedTypes);
}
/**
 * Resolves a required-field access on a record type or a union of record
 * types. For a union, the field must resolve in every member; the result is
 * the union of the per-member field types (collapsed when they all agree).
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> resolvedTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (memberFieldType == symTable.semanticError) {
            // One member lacking the field fails the whole access.
            return memberFieldType;
        }
        resolvedTypes.add(memberFieldType);
    }
    return resolvedTypes.size() == 1 ? resolvedTypes.iterator().next()
            : BUnionType.create(null, resolvedTypes);
}
/**
 * Resolves a field access appearing on the LHS of an assignment: both required
 * and optional fields are valid targets, tried in that order. For a union of
 * records, every member must expose the field.
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                            Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName,
                (BRecordType) varRefType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }
    LinkedHashSet<BType> resolvedTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType memberFieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (memberFieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        resolvedTypes.add(memberFieldType);
    }
    return resolvedTypes.size() == 1 ? resolvedTypes.iterator().next()
            : BUnionType.create(null, resolvedTypes);
}
/**
 * Resolves the type of an optional field access ({@code r?.f}) on a record or a
 * union of records. Unlike required access, union members where the field does
 * not resolve are skipped rather than failing the whole access.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                                 Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType != symTable.semanticError) {
            // Required field: always present, no nil widening needed.
            return fieldType;
        }
        fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        // Optional field: may be absent, so the result includes nil.
        return BUnionType.create(null, fieldType, symTable.nilType);
    }
    Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
    LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
    for (BType memType : memberTypes) {
        BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
        if (individualFieldType == symTable.semanticError) {
            // Skip members without the field; the access simply cannot target them.
            continue;
        }
        fieldTypeMembers.add(individualFieldType);
    }
    if (fieldTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    if (fieldTypeMembers.size() == 1) {
        return fieldTypeMembers.iterator().next();
    }
    return BUnionType.create(null, fieldTypeMembers);
}
/**
 * Central dispatcher for field access ({@code expr.field}): picks the resolution
 * strategy based on the static type of the accessed expression (object, record,
 * lax types like json/map, xml, stream/table) and reports unsupported cases.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    BType actualType = symTable.semanticError;
    if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
        actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
    } else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
        actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
        if (actualType != symTable.semanticError) {
            fieldAccessExpr.originalType = actualType;
            return actualType;
        }
        if (!fieldAccessExpr.lhsVar) {
            // RHS access requires a required field; optional fields need `?.`.
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType,
                    fieldName);
            return actualType;
        }
        // LHS access additionally admits optional fields.
        actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
        fieldAccessExpr.originalType = actualType;
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                    varRefType.tsymbol.type.getKind().typeName(), varRefType);
        }
    } else if (types.isLax(varRefType)) {
        if (fieldAccessExpr.lhsVar) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType);
            return symTable.semanticError;
        }
        // Lax access can fail at runtime, so error is added to the result type.
        BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained lax access: propagate laxness from the inner access expression.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
    } else if (varRefType.tag == TypeTags.XML) {
        if (fieldAccessExpr.lhsVar) {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
        }
        actualType = symTable.xmlType;
        fieldAccessExpr.originalType = actualType;
    } else if (varRefType.tag == TypeTags.STREAM || varRefType.tag == TypeTags.TABLE) {
        // Stream/table field access reaches into the record constraint type.
        BType constraint = (fieldAccessExpr.expr.type.tag == TypeTags.STREAM ?
                ((BStreamType) fieldAccessExpr.expr.type).constraint :
                ((BTableType) fieldAccessExpr.expr.type).constraint);
        if (constraint.tag != TypeTags.RECORD) {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType);
            return symTable.semanticError;
        }
        Optional<BField> fieldType =
                ((BRecordType) constraint).fields.stream().filter(field -> field.name.value.equals(fieldName.value))
                        .findFirst();
        if (fieldType.isPresent()) {
            actualType = fieldType.get().type;
        } else {
            dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                    varRefType.tsymbol.type.getKind().typeName(), varRefType);
            return symTable.semanticError;
        }
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType);
    }
    return actualType;
}
/**
 * Whether a previous pass recorded an original type for this access and that
 * type is lax (json / lax map).
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Resolves the member-access result type for a lax type: json for json, the constraint for
 * a map, and for a union the (possibly collapsed) union of the per-member results. Any
 * other type yields a semantic error.
 */
private BType getLaxFieldAccessType(BType exprType) {
    if (exprType.tag == TypeTags.JSON) {
        return symTable.jsonType;
    }
    if (exprType.tag == TypeTags.MAP) {
        return ((BMapType) exprType).constraint;
    }
    if (exprType.tag == TypeTags.UNION) {
        LinkedHashSet<BType> accessTypes = new LinkedHashSet<>();
        for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
            accessTypes.add(getLaxFieldAccessType(memberType));
        }
        if (accessTypes.size() == 1) {
            return accessTypes.iterator().next();
        }
        return BUnionType.create(null, accessTypes);
    }
    return symTable.semanticError;
}
/**
 * Type checks an optional field access expression ({@code expr?.field}).
 * Supports record subtypes and lax types (including a chained access whose previous step
 * was lax); sets the safe-navigation flags and {@code originalType} on the expression,
 * and re-adds nil to the result when the LHS union contained nil.
 *
 * @return the resolved access type, or {@code symTable.semanticError}
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
                                           Name fieldName) {
    BType actualType = symTable.semanticError;

    boolean nillableExprType = false;
    BType effectiveType = varRefType;

    // Lift nil out of a union LHS; remember it so it can be re-added to the result below.
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                    BUnionType.create(null, nilRemovedSet);
        }
    }

    if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
        actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
        if (actualType == symTable.semanticError) {
            dlog.error(fieldAccessExpr.pos,
                    DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
                    varRefType, fieldName);
        }
        fieldAccessExpr.nilSafeNavigation = nillableExprType;
        fieldAccessExpr.originalType = getSafeType(actualType, fieldAccessExpr);
    } else if (types.isLax(effectiveType)) {
        // Lax access: the result is always nilable, and may also be an error when the
        // value could be a non-mapping json.
        BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
        actualType = couldHoldNonMappingJson(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
            hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
        // Chained access where the previous step resolved against a lax type: base the
        // access type on that step's original (lax) type and mark error-safe navigation.
        BType laxFieldAccessType =
                getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
        actualType = couldHoldNonMappingJson(effectiveType) ?
                BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
        fieldAccessExpr.errorSafeNavigation = true;
        fieldAccessExpr.originalType = laxFieldAccessType;
        fieldAccessExpr.nilSafeNavigation = true;
        nillableExprType = true;
    } else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS,
                varRefType);
    }

    // Re-add nil when the LHS was nilable or the access itself may produce nil.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Determines whether a value of the given lax type could be a json value that is not a
 * mapping (in which case member access may fail with an error at runtime).
 */
private boolean couldHoldNonMappingJson(BType type) {
    switch (type.tag) {
        case TypeTags.JSON:
            return true;
        case TypeTags.MAP:
            return false;
        default:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldNonMappingJson(memberType)) {
                    return true;
                }
            }
            return false;
    }
}
/**
 * Type checks an index-based access expression ({@code expr[index]}).
 * Handles mapping, list, string and xml LHS types, validating the index expression's type
 * (string for mappings, int for lists/strings) and constant index values, and reporting
 * the appropriate diagnostic on failure.
 *
 * @return the resolved member type (nil-extended when the LHS union contained nil),
 *         or {@code symTable.semanticError}
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
    BType varRefType = indexBasedAccessExpr.expr.type;

    boolean nillableExprType = false;
    // Lift nil out of a union LHS; a nil-lifted index access is only allowed on mappings
    // and never as an assignment LHS.
    if (varRefType.tag == TypeTags.UNION) {
        Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();

        if (memTypes.contains(symTable.nilType)) {
            LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
            for (BType bType : memTypes) {
                if (bType != symTable.nilType) {
                    nilRemovedSet.add(bType);
                } else {
                    nillableExprType = true;
                }
            }

            if (nillableExprType) {
                varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
                        BUnionType.create(null, nilRemovedSet);

                if (!types.isSubTypeOfMapping(varRefType)) {
                    dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                            indexBasedAccessExpr.expr.type);
                    return symTable.semanticError;
                }

                if (indexBasedAccessExpr.lhsVar) {
                    dlog.error(indexBasedAccessExpr.pos,
                            DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
                            indexBasedAccessExpr.expr.type);
                    return symTable.semanticError;
                }
            }
        }
    }

    BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
    BType actualType = symTable.semanticError;

    if (types.isSubTypeOfMapping(varRefType)) {
        // Mapping access: the index must be a string.
        checkExpr(indexExpr, this.env, symTable.stringType);

        if (indexExpr.type == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);

        if (actualType == symTable.semanticError) {
            // A constant string index lets us report the precise missing field name.
            if (indexExpr.type.tag == TypeTags.STRING && isConst(indexExpr)) {
                String fieldName = getConstFieldName(indexExpr);
                dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD,
                        fieldName, indexBasedAccessExpr.expr.type);
                return actualType;
            }

            dlog.error(indexExpr.pos, DiagnosticCode.INVALID_RECORD_INDEX_EXPR, indexExpr.type);
            return actualType;
        }

        indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
        indexBasedAccessExpr.originalType = getSafeType(actualType, indexBasedAccessExpr);
    } else if (types.isSubTypeOfList(varRefType)) {
        // List (array/tuple) access: the index must be an int.
        checkExpr(indexExpr, this.env, symTable.intType);

        if (indexExpr.type == symTable.semanticError) {
            return symTable.semanticError;
        }

        actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
        indexBasedAccessExpr.originalType = actualType;

        if (actualType == symTable.semanticError) {
            // A constant int index lets us report an out-of-range error instead.
            if (indexExpr.type.tag == TypeTags.INT && isConst(indexExpr)) {
                dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.LIST_INDEX_OUT_OF_RANGE,
                        getConstIndex(indexExpr));
                return actualType;
            }
            dlog.error(indexExpr.pos, DiagnosticCode.INVALID_LIST_INDEX_EXPR, indexExpr.type);
            return actualType;
        }
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        // String member access is read-only.
        if (indexBasedAccessExpr.lhsVar) {
            dlog.error(indexBasedAccessExpr.pos,
                    DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
                    indexBasedAccessExpr.expr.type);
            return symTable.semanticError;
        }

        checkExpr(indexExpr, this.env, symTable.intType);

        if (indexExpr.type == symTable.semanticError) {
            return symTable.semanticError;
        }

        indexBasedAccessExpr.originalType = symTable.stringType;
        actualType = symTable.stringType;
    } else if (varRefType.tag == TypeTags.XML) {
        // XML sequences cannot be updated via index access.
        if (indexBasedAccessExpr.lhsVar) {
            indexExpr.type = symTable.semanticError;
            dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
            return actualType;
        }

        checkExpr(indexExpr, this.env);
        actualType = symTable.xmlType;
        indexBasedAccessExpr.originalType = actualType;
    } else if (varRefType == symTable.semanticError) {
        indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
        return symTable.semanticError;
    } else {
        indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
        dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
                indexBasedAccessExpr.expr.type);
        return symTable.semanticError;
    }

    // Re-add nil when the original LHS union contained nil.
    if (nillableExprType && !actualType.isNullable()) {
        actualType = BUnionType.create(null, actualType, symTable.nilType);
    }

    return actualType;
}
/**
 * Extracts the constant integer index value from either an int literal or a reference to
 * an int constant. Callers must have established constness via {@code isConst}.
 */
private Long getConstIndex(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
        return (Long) ((BLangLiteral) indexExpr).value;
    }
    return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Extracts the constant field name from either a string literal or a reference to a string
 * constant. Callers must have established constness via {@code isConst}.
 */
private String getConstFieldName(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) indexExpr).value;
    }
    return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Resolves the element type for index access on an array, based on the static type of the
 * index expression: int, a finite type of int values, or a union containing finite types.
 * Returns {@code symTable.semanticError} when a constant/finite index is provably out of
 * range for a sealed array.
 */
private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
                                         BArrayType arrayType) {
    BType actualType = symTable.semanticError;
    switch (indexExprType.tag) {
        case TypeTags.INT:
            BLangExpression indexExpr = indexBasedAccess.indexExpr;
            // Non-constant indices and unsealed arrays are always accepted statically.
            if (!isConst(indexExpr) || arrayType.state == BArrayState.UNSEALED) {
                actualType = arrayType.eType;
                break;
            }
            // NOTE(review): unlike the FINITE case below, a negative constant index is not
            // rejected here — confirm whether negatives are caught elsewhere.
            actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
            break;
        case TypeTags.FINITE:
            // Valid if at least one value in the finite value space is an in-range index.
            BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
            boolean validIndexExists = false;
            for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                if (indexValue >= 0 &&
                        (arrayType.state == BArrayState.UNSEALED || indexValue < arrayType.size)) {
                    validIndexExists = true;
                    break;
                }
            }
            if (!validIndexExists) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
            break;
        case TypeTags.UNION:
            // Merge all finite members of the union into a single finite type and re-check.
            List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
                    .filter(memType -> memType.tag == TypeTags.FINITE)
                    .map(matchedType -> (BFiniteType) matchedType)
                    .collect(Collectors.toList());

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
            if (elementType == symTable.semanticError) {
                return symTable.semanticError;
            }
            actualType = arrayType.eType;
    }
    return actualType;
}
/**
 * Resolves the result type of index-based access on a list-like type: an array, a tuple,
 * or a union of such types. For a union the per-member results are combined; a semantic
 * error is returned only when no member yields a valid type.
 */
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
            return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.type, (BArrayType) type);
        case TypeTags.TUPLE:
            return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.type);
    }

    // Union of lists: collect the valid per-member access types, preserving order.
    LinkedHashSet<BType> memberResults = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType memberResult = checkListIndexBasedAccess(accessExpr, memberType);
        if (memberResult != symTable.semanticError) {
            memberResults.add(memberResult);
        }
    }

    if (memberResults.isEmpty()) {
        return symTable.semanticError;
    }
    if (memberResults.size() == 1) {
        return memberResults.iterator().next();
    }
    return BUnionType.create(null, memberResults);
}
/**
 * Resolves the member type for index access on a tuple, based on the index expression's
 * type: a constant int selects a single member; a general int yields the union of all
 * member types; a finite type yields the union of the members selected by its value
 * space; a union index combines the results of its members (finite members merged first).
 */
private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.INT:
            if (isConst(indexExpr)) {
                actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
            } else {
                // Non-constant index: any member may be selected at runtime.
                BTupleType tupleExpr = (BTupleType) accessExpr.expr.type;
                LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
                actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
                        tupleTypes);
            }
            break;
        case TypeTags.FINITE:
            // Union of the member types selected by each in-range value in the value space.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
                int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
                BType fieldType = checkTupleFieldType(tuple, indexValue);
                if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
                    possibleTypes.add(fieldType);
                }
            }
            if (possibleTypes.size() == 0) {
                return symTable.semanticError;
            }
            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    // Finite members are merged and handled together below.
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }

            // Any failing member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
    }
    return actualType;
}
/**
 * Accumulates the member types of a tuple into {@code memberTypes}, flattening any
 * union-typed member into its constituents. Returns the same (mutated) set.
 */
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : tupleType.tupleTypes) {
        if (memberType.tag == TypeTags.UNION) {
            collectMemberTypes((BUnionType) memberType, memberTypes);
        } else {
            memberTypes.add(memberType);
        }
    }
    return memberTypes;
}
/**
 * Resolves the result type of index-based access on a mapping type: a map, a record, or a
 * union of mappings. When some union member cannot produce the field, nil is added to the
 * combined result since the access may yield () at runtime.
 */
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    if (type.tag == TypeTags.MAP) {
        BType constraint = ((BMapType) type).constraint;
        // Reads are nilable (the key may be absent); assignment targets are not.
        if (accessExpr.lhsVar) {
            return constraint;
        }
        return addNilForNillableIndexBasedAccess(constraint);
    }

    if (type.tag == TypeTags.RECORD) {
        return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.type);
    }

    // Union of mappings: combine the valid per-member results.
    boolean hasNonMatchedMember = false;
    LinkedHashSet<BType> memberResults = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType memberResult = checkMappingIndexBasedAccess(accessExpr, memberType);
        if (memberResult == symTable.semanticError) {
            hasNonMatchedMember = true;
        } else {
            memberResults.add(memberResult);
        }
    }

    if (memberResults.isEmpty()) {
        return symTable.semanticError;
    }

    BType fieldType = memberResults.size() == 1 ? memberResults.iterator().next()
            : BUnionType.create(null, memberResults);
    return hasNonMatchedMember ? addNilForNillableIndexBasedAccess(fieldType) : fieldType;
}
/**
 * Resolves the field type for index access on a record, based on the index expression's
 * type: a constant string selects a single field (required as-is; optional/rest fields are
 * nil-extended, except an optional field on an assignment LHS); a general string yields
 * the union of all field types plus nil; a finite type yields the union of the fields
 * named in its value space; a union index combines the results of its members.
 */
private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
    BType actualType = symTable.semanticError;
    BLangExpression indexExpr = accessExpr.indexExpr;
    switch (currentType.tag) {
        case TypeTags.STRING:
            if (isConst(indexExpr)) {
                String fieldName = getConstFieldName(indexExpr);
                // Required field: the field type as-is.
                actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType != symTable.semanticError) {
                    return actualType;
                }

                // Fall back to optional, then rest fields — both may be absent at runtime.
                actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (actualType == symTable.semanticError) {
                    actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (actualType == symTable.semanticError) {
                        return actualType;
                    }
                    return addNilForNillableIndexBasedAccess(actualType);
                }

                // An optional-field assignment target keeps the plain field type.
                if (accessExpr.lhsVar) {
                    return actualType;
                }
                return addNilForNillableIndexBasedAccess(actualType);
            }

            // Non-constant string index: any field (and the rest field) may be selected.
            LinkedHashSet<BType> fieldTypes = record.fields.stream()
                    .map(field -> field.type)
                    .collect(Collectors.toCollection(LinkedHashSet::new));

            if (record.restFieldType.tag != TypeTags.NONE) {
                fieldTypes.add(record.restFieldType);
            }

            if (fieldTypes.stream().noneMatch(BType::isNullable)) {
                fieldTypes.add(symTable.nilType);
            }

            actualType = BUnionType.create(null, fieldTypes);
            break;
        case TypeTags.FINITE:
            // Union of the field types selected by each name in the value space.
            BFiniteType finiteIndexExpr = (BFiniteType) currentType;
            LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
            for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
                String fieldName = (String) ((BLangLiteral) finiteMember).value;
                BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
                if (fieldType == symTable.semanticError) {
                    fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
                    if (fieldType == symTable.semanticError) {
                        fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
                    }

                    if (fieldType != symTable.semanticError) {
                        fieldType = addNilForNillableIndexBasedAccess(fieldType);
                    }
                }

                if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
                    continue;
                }
                possibleTypes.add(fieldType);
            }

            if (possibleTypes.isEmpty()) {
                return symTable.semanticError;
            }

            if (possibleTypes.stream().noneMatch(BType::isNullable)) {
                possibleTypes.add(symTable.nilType);
            }

            actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
                    BUnionType.create(null, possibleTypes);
            break;
        case TypeTags.UNION:
            LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
            List<BFiniteType> finiteTypes = new ArrayList<>();
            ((BUnionType) currentType).getMemberTypes().forEach(memType -> {
                if (memType.tag == TypeTags.FINITE) {
                    // Finite members are merged and handled together below.
                    finiteTypes.add((BFiniteType) memType);
                } else {
                    BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
                    if (possibleType.tag == TypeTags.UNION) {
                        possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
                    } else {
                        possibleTypesByMember.add(possibleType);
                    }
                }
            });

            BFiniteType finiteType;
            if (finiteTypes.size() == 1) {
                finiteType = finiteTypes.get(0);
            } else {
                Set<BLangExpression> valueSpace = new LinkedHashSet<>();
                finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
                finiteType = new BFiniteType(null, valueSpace);
            }

            BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
            if (possibleType.tag == TypeTags.UNION) {
                possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
            } else {
                possibleTypesByMember.add(possibleType);
            }

            // Any failing member invalidates the whole access.
            if (possibleTypesByMember.contains(symTable.semanticError)) {
                return symTable.semanticError;
            }
            actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
                    BUnionType.create(null, possibleTypesByMember);
    }
    return actualType;
}
/**
 * Computes the type an access expression produces once safe navigation is applied:
 * error members are removed for error-lifting navigation and nil members for nil-lifting
 * navigation. Reports an error (and returns a semantic error) when error-lifting is used
 * on a type that cannot contain an error, or on one that contains nothing else.
 */
private BType getSafeType(BType type, BLangAccessExpression accessExpr) {
    if (type.tag != TypeTags.UNION) {
        return type;
    }

    List<BType> memberTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());

    if (accessExpr.errorSafeNavigation) {
        if (!memberTypes.contains(symTable.errorType)) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }

        memberTypes.removeIf(memberType -> memberType == symTable.errorType);

        if (memberTypes.isEmpty()) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }
    }

    if (accessExpr.nilSafeNavigation) {
        memberTypes.removeIf(memberType -> memberType == symTable.nilType);
    }

    if (memberTypes.size() == 1) {
        return memberTypes.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(memberTypes));
}
/**
 * Flattens a type into a list: the member types of a union, or a singleton list for any
 * other type.
 */
private List<BType> getTypesList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    return new ArrayList<>(((BUnionType) type).getMemberTypes());
}
/**
 * Computes the possible result types of a match expression: the types of every pattern
 * expression, plus any source type that no pattern variable can accept (such a value flows
 * through as-is). A semantic error anywhere collapses the result to a singleton error set.
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> sourceTypes = getTypesList(bLangMatchExpression.expr.type);
    LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
    for (BType sourceType : sourceTypes) {
        boolean matched = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.type;

            // Every pattern expression's type is a possible result type.
            resultTypes.addAll(getTypesList(patternExprType));

            if (sourceType.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                LinkedHashSet<BType> errorTypes = new LinkedHashSet<>();
                errorTypes.add(symTable.semanticError);
                return errorTypes;
            }

            matched = this.types.isAssignable(sourceType, pattern.variable.type);
            if (matched) {
                break;
            }
        }

        // A source type handled by no pattern flows through unchanged.
        if (!matched) {
            resultTypes.add(sourceType);
        }
    }
    return resultTypes;
}
/**
 * Resolves the invocation symbol for built-in methods whose return type depends on the
 * receiver: clone, freeze, isFrozen, stamp, convert, call and detail. Any other method
 * resolves to {@code symTable.notFoundSymbol}.
 */
private BSymbol getSymbolForBuiltinMethodWithDynamicRetType(BLangInvocation iExpr, BLangBuiltInMethod function) {
    switch (function) {
        case CLONE:
        case FREEZE:
            return getSymbolForAnydataReturningBuiltinMethods(iExpr);
        case IS_FROZEN:
            return getSymbolForIsFrozenBuiltinMethod(iExpr);
        case STAMP:
            // Type check the arguments before resolving the operator symbol.
            List<BLangExpression> functionArgList = iExpr.argExprs;
            for (BLangExpression expression : functionArgList) {
                checkExpr(expression, env, symTable.noType);
            }
            return symResolver.createSymbolForStampOperator(iExpr.pos, new Name(function.getName()),
                    functionArgList, iExpr.expr);
        case CONVERT:
            // Note: reuses the functionArgList variable declared in the STAMP case
            // (Java switch cases share one scope).
            functionArgList = iExpr.argExprs;
            for (BLangExpression expression : functionArgList) {
                checkExpr(expression, env, symTable.noType);
            }
            return symResolver.createSymbolForConvertOperator(iExpr.pos, new Name(function.getName()),
                    functionArgList, iExpr.expr);
        case CALL:
            return getFunctionPointerCallSymbol(iExpr);
        case DETAIL:
            return symResolver.createSymbolForDetailBuiltInMethod(iExpr.name, iExpr.expr.type);
        default:
            return symTable.notFoundSymbol;
    }
}
/**
 * Resolves the callee symbol for a function-pointer {@code .call(...)} invocation.
 * Returns {@code symTable.notFoundSymbol} unless the receiver is a variable reference with
 * an invokable type; a non-function symbol is wrapped in a function-kind invokable symbol.
 */
private BSymbol getFunctionPointerCallSymbol(BLangInvocation iExpr) {
    BLangExpression callableExpr = iExpr.expr;
    if (callableExpr == null) {
        return symTable.notFoundSymbol;
    }

    BSymbol funcPointerSymbol = ((BLangVariableReference) callableExpr).symbol;
    if (funcPointerSymbol == null) {
        return symTable.notFoundSymbol;
    }

    BType funcPointerType = funcPointerSymbol.type;
    if (funcPointerType.tag != TypeTags.INVOKABLE) {
        return symTable.notFoundSymbol;
    }

    if (funcPointerSymbol.kind != SymbolKind.FUNCTION) {
        // Wrap plain variable symbols so downstream code sees a callable function symbol.
        funcPointerSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, funcPointerSymbol.name,
                env.enclPkg.symbol.pkgID, funcPointerType, env.scope.owner);
        funcPointerSymbol.kind = SymbolKind.FUNCTION;
    }

    iExpr.symbol = funcPointerSymbol;
    return funcPointerSymbol;
}
/**
 * Resolves the symbol for clone/freeze invocations. The return type is the receiver type
 * itself when it is anydata, otherwise receiver-type|error. Returns
 * {@code symTable.notFoundSymbol} when the receiver is not anydata-like.
 */
private BSymbol getSymbolForAnydataReturningBuiltinMethods(BLangInvocation iExpr) {
    BType type = iExpr.expr.type;
    if (!types.isLikeAnydataOrNotNil(type)) {
        return symTable.notFoundSymbol;
    }

    BType retType;
    if (types.isAnydata(type)) {
        retType = type;
    } else {
        retType = BUnionType.create(null, type, symTable.errorType);
    }
    // NOTE(review): the symbol is always created with the FREEZE name, even when this is
    // reached via CLONE — confirm the method name is not significant downstream.
    return symResolver.createBuiltinMethodSymbol(BLangBuiltInMethod.FREEZE, type, retType);
}
/**
 * Resolves the symbol for an {@code isFrozen} invocation: boolean-returning, valid only
 * when the receiver is anydata-like.
 */
private BSymbol getSymbolForIsFrozenBuiltinMethod(BLangInvocation iExpr) {
    BType receiverType = iExpr.expr.type;
    if (types.isLikeAnydataOrNotNil(receiverType)) {
        return symResolver.createBuiltinMethodSymbol(BLangBuiltInMethod.IS_FROZEN, receiverType,
                symTable.booleanType);
    }
    return symTable.notFoundSymbol;
}
/**
 * Validates a use of error-lifting navigation: it is redundant (and reported as an error)
 * on any non-union LHS type other than an already-failed (semantic error) one.
 */
private boolean isSafeNavigable(BLangAccessExpression fieldAccessExpr, BType varRefType) {
    if (!fieldAccessExpr.errorSafeNavigation) {
        return true;
    }
    if (varRefType.tag == TypeTags.UNION || varRefType == symTable.semanticError) {
        return true;
    }
    dlog.error(fieldAccessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, varRefType);
    return false;
}
/**
 * Determines whether a value of the given type could (transitively) contain a table value.
 * {@code encounteredTypes} tracks visited types so recursive type definitions terminate.
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    if (encounteredTypes.contains(type)) {
        return false;
    }
    encounteredTypes.add(type);

    switch (type.tag) {
        case TypeTags.TABLE:
            return true;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(memberType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields) {
                if (couldHoldTableValues(field.type, encounteredTypes)) {
                    return true;
                }
            }
            // An open record may also hold tables through its rest field.
            return !recordType.sealed && couldHoldTableValues(recordType.restFieldType, encounteredTypes);
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType tupleMemberType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(tupleMemberType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
    }
    return false;
}
/**
 * Checks whether an expression is a compile-time constant: either a valid constant
 * expression or a reference to a declared constant symbol.
 */
private boolean isConst(BLangExpression expression) {
    if (symbolEnter.isValidConstantExpression(expression)) {
        return true;
    }
    if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol symbol = ((BLangSimpleVarRef) expression).symbol;
        return (symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    return false;
}
} | class TypeChecker extends BLangNodeVisitor {
private static final CompilerContext.Key<TypeChecker> TYPE_CHECKER_KEY =
        new CompilerContext.Key<>();

// Built-in name of the table type (see checkIndexAccessExpr and related checks).
private static final String TABLE_TNAME = "table";

// Collaborating compiler singletons, resolved from the CompilerContext in the constructor.
private Names names;
private SymbolTable symTable;
private SymbolEnter symbolEnter;
private SymbolResolver symResolver;
private Types types;
private BLangDiagnosticLog dlog;

// Per-checkExpr mutable state, saved and restored around each nested check.
private SymbolEnv env;
private boolean isTypeChecked;
private TypeNarrower typeNarrower;
private TypeParamAnalyzer typeParamAnalyzer;

/**
 * Expected types or inherited types.
 */
private BType expType;
// The actual type computed by the most recent visit; read back by checkExpr.
private BType resultType;
// Diagnostic code to report on an expected/actual type mismatch.
private DiagnosticCode diagCode;
/**
 * Returns the type checker registered against the given compiler context, creating and
 * registering one on first use (the constructor registers itself in the context).
 */
public static TypeChecker getInstance(CompilerContext context) {
    TypeChecker typeChecker = context.get(TYPE_CHECKER_KEY);
    return typeChecker != null ? typeChecker : new TypeChecker(context);
}
/**
 * Creates the type checker and registers it against the given compiler context, then
 * resolves the collaborating compiler singletons from the same context.
 */
public TypeChecker(CompilerContext context) {
    // Make this instance available via the context before resolving collaborators.
    context.put(TYPE_CHECKER_KEY, this);

    this.names = Names.getInstance(context);
    this.symTable = SymbolTable.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.types = Types.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.typeNarrower = TypeNarrower.getInstance(context);
    this.typeParamAnalyzer = TypeParamAnalyzer.getInstance(context);
}
/**
 * Type checks an expression with no particular expected type.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env) {
    return checkExpr(expr, env, symTable.noType);
}
/**
 * Type checks an expression against the expected type, reporting the default
 * incompatible-types diagnostic on mismatch.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType) {
    return checkExpr(expr, env, expType, DiagnosticCode.INCOMPATIBLE_TYPES);
}
/**
 * Type checks each expression in the given list against the same expected type.
 *
 * @param exprs expressions to type check
 * @param env current symbol environment
 * @param expType type each expression is expected to conform to
 * @return the actual types resolved for the expressions, in order
 */
public List<BType> checkExprs(List<BLangExpression> exprs, SymbolEnv env, BType expType) {
    List<BType> actualTypes = new ArrayList<>(exprs.size());
    for (int i = 0; i < exprs.size(); i++) {
        actualTypes.add(checkExpr(exprs.get(i), env, expType));
    }
    return actualTypes;
}
/**
 * Type checks an expression against the expected type, reporting the given diagnostic
 * code on mismatch. Results are cached on the expression via {@code typeChecked}; the
 * checker's env/expType/diagCode state is saved and restored around the visit so nested,
 * re-entrant checks do not leak state.
 */
public BType checkExpr(BLangExpression expr, SymbolEnv env, BType expType, DiagnosticCode diagCode) {
    if (expr.typeChecked) {
        return expr.type;
    }

    // Save the checker state; visiting the expression may re-enter checkExpr.
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    this.expType = expType;
    this.isTypeChecked = true;
    expr.accept(this);

    // The visit communicates its result through the resultType field.
    expr.type = resultType;
    expr.typeChecked = isTypeChecked;

    // Restore the previous checker state.
    this.env = prevEnv;
    this.expType = preExpType;
    this.diagCode = preDiagCode;
    return resultType;
}
/**
 * Type checks a literal expression: resolves its effective type and checks it against the
 * expected type, unless resolution failed or the literal was matched against a finite
 * type's value space (in which case the result is already set).
 */
public void visit(BLangLiteral literalExpr) {
    BType literalType = setLiteralValueAndGetType(literalExpr, expType);
    if (literalType != symTable.semanticError && !literalExpr.isFiniteContext) {
        resultType = types.checkType(literalExpr, literalType, expType);
    }
}
/**
 * Resolves the effective type of a literal against the expected type, converting the
 * stored value where required (e.g. an int literal used in a float or decimal context).
 * Recurses for finite and union expected types; sets {@code isFiniteContext} when the
 * literal is matched against a finite type's value space. On an incompatible literal,
 * reports the error, sets {@code resultType} and returns {@code symTable.semanticError}.
 */
private BType setLiteralValueAndGetType(BLangLiteral literalExpr, BType expType) {
    BType literalType = symTable.getTypeFromTag(literalExpr.type.tag);
    Object literalValue = literalExpr.value;
    literalExpr.isJSONContext = types.isJSONContext(expType);

    if (literalType.tag == TypeTags.INT) {
        if (expType.tag == TypeTags.FLOAT) {
            // Int literal in a float context: convert the stored value to double.
            literalType = symTable.floatType;
            literalExpr.value = ((Long) literalValue).doubleValue();
        } else if (expType.tag == TypeTags.DECIMAL) {
            literalType = symTable.decimalType;
            literalExpr.value = String.valueOf(literalValue);
        } else if (expType.tag == TypeTags.BYTE) {
            if (!types.isByteLiteralValue((Long) literalValue)) {
                dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, literalType);
                resultType = symTable.semanticError;
                return resultType;
            }
            literalType = symTable.byteType;
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            // Try the numeric basic types in order: int, byte, float, decimal.
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.INT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.intType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.BYTE)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.byteType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            // For a union: prefer a member accepting int as-is (or json/anydata/any),
            // then finite-of-int members, then byte, float and decimal analogues.
            Set<BType> memberTypes = ((BUnionType) expType).getMemberTypes();
            if (memberTypes.stream()
                    .anyMatch(memType -> memType.tag == TypeTags.INT || memType.tag == TypeTags.JSON ||
                            memType.tag == TypeTags.ANYDATA || memType.tag == TypeTags.ANY)) {
                return setLiteralValueAndGetType(literalExpr, symTable.intType);
            }

            BType finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.intType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.BYTE)) {
                return setLiteralValueAndGetType(literalExpr, symTable.byteType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.byteType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.FLOAT)) {
                return setLiteralValueAndGetType(literalExpr, symTable.floatType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.floatType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }

            if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
                return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            }

            finiteType = getFiniteTypeWithValuesOfSingleType((BUnionType) expType, symTable.decimalType);
            if (finiteType != symTable.semanticError) {
                BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
                if (literalExpr.isFiniteContext) {
                    return setType;
                }
            }
        }
    } else if (literalType.tag == TypeTags.FLOAT) {
        String literal = String.valueOf(literalValue);
        String numericLiteral = NumericLiteralSupport.stripDiscriminator(literal);
        boolean isDiscriminatedFloat = NumericLiteralSupport.isFloatDiscriminated(literal);

        if (expType.tag == TypeTags.DECIMAL) {
            // A float-discriminated or hex literal can never be a decimal.
            if (isDiscriminatedFloat || NumericLiteralSupport.isHexLiteral(numericLiteral)) {
                dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.floatType);
                resultType = symTable.semanticError;
                return resultType;
            }
            literalType = symTable.decimalType;
            literalExpr.value = numericLiteral;
        } else if (expType.tag == TypeTags.FLOAT) {
            literalExpr.value = Double.parseDouble(String.valueOf(numericLiteral));
        } else if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
            // Try float first, then decimal (unless float-discriminated).
            BFiniteType finiteType = (BFiniteType) expType;
            if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.FLOAT)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.floatType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            } else if (!isDiscriminatedFloat
                    && literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
                BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
                setLiteralValueForFiniteType(literalExpr, valueType);
                return valueType;
            }
        } else if (expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) expType;
            BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.floatType);
            if (unionMember != symTable.noType) {
                return unionMember;
            }
        }
    } else if (literalType.tag == TypeTags.DECIMAL) {
        return decimalLiteral(literalValue, literalExpr, expType);
    } else {
        // Non-numeric literals: only finite/union expected types need special handling.
        if (this.expType.tag == TypeTags.FINITE) {
            boolean foundMember = types.isAssignableToFiniteType(this.expType, literalExpr);
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        } else if (this.expType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) this.expType;
            boolean foundMember = unionType.getMemberTypes()
                    .stream()
                    .anyMatch(memberType -> types.isAssignableToFiniteType(memberType, literalExpr));
            if (foundMember) {
                setLiteralValueForFiniteType(literalExpr, literalType);
                return literalType;
            }
        }
    }

    if (literalExpr.type.tag == TypeTags.BYTE_ARRAY) {
        // Byte array literals (base16/base64) are typed as byte[].
        literalType = new BArrayType(symTable.byteType);
    }

    return literalType;
}
// Resolves a numeric literal against a union expected type. Order of preference:
// (1) a member that directly accepts the desired basic type (or json/anydata/any),
// (2) finite-type members whose value space contains float values,
// (3) a decimal member, (4) finite members with decimal values.
// Returns symTable.noType when no member matches.
// NOTE(review): steps (2)-(4) are hard-coded to float-then-decimal rather than being
// driven by `desiredType` — confirm this ordering is intentional for decimal callers.
private BType getAndSetAssignableUnionMember(BLangLiteral literalExpr, BUnionType expType, BType desiredType) {
    Set<BType> memberTypes = expType.getMemberTypes();
    if (memberTypes.stream()
            .anyMatch(memType -> memType.tag == desiredType.tag
                    || memType.tag == TypeTags.JSON
                    || memType.tag == TypeTags.ANYDATA
                    || memType.tag == TypeTags.ANY)) {
        return setLiteralValueAndGetType(literalExpr, desiredType);
    }
    // Try finite members restricted to float values.
    BType finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.floatType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            // The literal matched a value in the finite type's value space.
            return setType;
        }
    }
    if (memberTypes.stream().anyMatch(memType -> memType.tag == TypeTags.DECIMAL)) {
        return setLiteralValueAndGetType(literalExpr, symTable.decimalType);
    }
    // Finally, try finite members restricted to decimal values.
    finiteType = getFiniteTypeWithValuesOfSingleType(expType, symTable.decimalType);
    if (finiteType != symTable.semanticError) {
        BType setType = setLiteralValueAndGetType(literalExpr, finiteType);
        if (literalExpr.isFiniteContext) {
            return setType;
        }
    }
    return symTable.noType;
}
/**
 * Checks whether the given literal can be assigned to some member of a finite
 * type's value space that carries the target basic type tag.
 */
private boolean literalAssignableToFiniteType(BLangLiteral literalExpr, BFiniteType finiteType,
                                              int targetMemberTypeTag) {
    for (BLangExpression valueExpr : finiteType.valueSpace) {
        if (valueExpr.type.tag == targetMemberTypeTag
                && types.checkLiteralAssignabilityBasedOnType((BLangLiteral) valueExpr, literalExpr)) {
            return true;
        }
    }
    return false;
}
// Resolves a decimal numeric literal against the expected type: rejects
// decimal-discriminated (d/D-suffixed) literals where a float is expected, handles
// finite and union expected types, and otherwise strips the discriminator suffix
// and yields the decimal type. Sets this.resultType as a side effect on the
// error and default paths.
private BType decimalLiteral(Object literalValue, BLangLiteral literalExpr, BType expType) {
    String literal = String.valueOf(literalValue);
    // A literal explicitly marked as decimal can never be a float.
    if (expType.tag == TypeTags.FLOAT && NumericLiteralSupport.isDecimalDiscriminated(literal)) {
        dlog.error(literalExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, symTable.decimalType);
        resultType = symTable.semanticError;
        return resultType;
    }
    if (expType.tag == TypeTags.FINITE && types.isAssignableToFiniteType(expType, literalExpr)) {
        BFiniteType finiteType = (BFiniteType) expType;
        if (literalAssignableToFiniteType(literalExpr, finiteType, TypeTags.DECIMAL)) {
            BType valueType = setLiteralValueAndGetType(literalExpr, symTable.decimalType);
            setLiteralValueForFiniteType(literalExpr, valueType);
            return valueType;
        }
    } else if (expType.tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) expType;
        BType unionMember = getAndSetAssignableUnionMember(literalExpr, unionType, symTable.decimalType);
        if (unionMember != symTable.noType) {
            return unionMember;
        }
    }
    // Default: treat as a plain decimal value without the d/D suffix.
    literalExpr.value = NumericLiteralSupport.stripDiscriminator(literal);
    resultType = symTable.decimalType;
    return symTable.decimalType;
}
// Records that the literal was resolved in a finite-type context: inserts an
// implicit cast up to the expected type, sets the checker's result type, and
// flags the literal as finite-context.
private void setLiteralValueForFiniteType(BLangLiteral literalExpr, BType type) {
    types.setImplicitCastExpr(literalExpr, type, this.expType);
    this.resultType = type;
    literalExpr.isFiniteContext = true;
}
/**
 * Builds an unnamed finite type containing exactly those values, drawn from the
 * finite-type members of the given union, whose type matches {@code matchType}.
 * Returns {@code symTable.semanticError} when the union has no finite members or
 * no matching values.
 */
private BType getFiniteTypeWithValuesOfSingleType(BUnionType unionType, BType matchType) {
    // Gather every finite-type member of the union.
    List<BFiniteType> finiteTypeMembers = new ArrayList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.FINITE) {
            finiteTypeMembers.add((BFiniteType) memberType);
        }
    }
    if (finiteTypeMembers.isEmpty()) {
        return symTable.semanticError;
    }
    int tag = matchType.tag;
    Set<BLangExpression> matchedValueSpace = new LinkedHashSet<>();
    for (BFiniteType finiteType : finiteTypeMembers) {
        // Collect per-member into a HashSet first, mirroring the original
        // stream-collect behavior, then merge in member order.
        Set<BLangExpression> matchedValues = new HashSet<>();
        for (BLangExpression expression : finiteType.valueSpace) {
            if (expression.type.tag == tag) {
                matchedValues.add(expression);
            }
        }
        matchedValueSpace.addAll(matchedValues);
    }
    if (matchedValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    return new BFiniteType(null, matchedValueSpace);
}
// Type-checks a table literal: the expected type must be a constrained table;
// declared columns are validated against the constraint record, and each data row
// is checked against the constraint type.
// NOTE(review): expType is cast to BTableType without checking its tag — confirm
// callers guarantee a table expected type on this path.
public void visit(BLangTableLiteral tableLiteral) {
    if (expType.tag == symTable.semanticError.tag) {
        return;
    }
    BType tableConstraint = ((BTableType) expType).getConstraint();
    // A table literal requires an explicit constraint to type its rows.
    if (tableConstraint.tag == TypeTags.NONE) {
        dlog.error(tableLiteral.pos, DiagnosticCode.TABLE_CANNOT_BE_CREATED_WITHOUT_CONSTRAINT);
        return;
    }
    validateTableColumns(tableConstraint, tableLiteral);
    checkExprs(tableLiteral.tableDataRows, this.env, tableConstraint);
    resultType = types.checkType(tableLiteral, expType, symTable.noType);
}
// Validates a table literal's columns against the table's constraint record:
// every record field must have a column-eligible type, array-typed fields must
// have primitive element types, every declared column must name a record field,
// and primary-key columns must be int or string.
private void validateTableColumns(BType tableConstraint, BLangTableLiteral tableLiteral) {
    if (tableConstraint.tag != TypeTags.SEMANTIC_ERROR) {
        List<String> columnNames = new ArrayList<>();
        for (BField field : ((BRecordType) tableConstraint).fields) {
            columnNames.add(field.getName().getValue());
            // Only these basic types may back a table column.
            if (!(field.type.tag == TypeTags.INT || field.type.tag == TypeTags.STRING ||
                    field.type.tag == TypeTags.FLOAT || field.type.tag == TypeTags.DECIMAL ||
                    field.type.tag == TypeTags.XML || field.type.tag == TypeTags.JSON ||
                    field.type.tag == TypeTags.BOOLEAN || field.type.tag == TypeTags.ARRAY)) {
                dlog.error(tableLiteral.pos, DiagnosticCode.FIELD_NOT_ALLOWED_WITH_TABLE_COLUMN,
                        field.name.value, field.type);
            }
            // Array columns are further restricted to primitive element types.
            if (field.type.tag == TypeTags.ARRAY) {
                BType arrayType = ((BArrayType) field.type).eType;
                if (!(arrayType.tag == TypeTags.INT || arrayType.tag == TypeTags.FLOAT ||
                        arrayType.tag == TypeTags.DECIMAL || arrayType.tag == TypeTags.STRING ||
                        arrayType.tag == TypeTags.BOOLEAN || arrayType.tag == TypeTags.BYTE)) {
                    dlog.error(tableLiteral.pos, DiagnosticCode.FIELD_NOT_ALLOWED_WITH_TABLE_COLUMN,
                            field.name.value, field.type);
                }
            }
        }
        for (BLangTableLiteral.BLangTableColumn column : tableLiteral.columns) {
            boolean contains = columnNames.contains(column.columnName);
            if (!contains) {
                dlog.error(column.pos, DiagnosticCode.UNDEFINED_TABLE_COLUMN, column.columnName, tableConstraint);
            }
            // A primary-key column must map to an int or string field.
            if (column.flagSet.contains(TableColumnFlag.PRIMARYKEY)) {
                for (BField field : ((BRecordType) tableConstraint).fields) {
                    if (field.name.value.equals(column.columnName)) {
                        if (!(field.type.tag == TypeTags.INT || field.type.tag == TypeTags.STRING)) {
                            dlog.error(column.pos, DiagnosticCode.TYPE_NOT_ALLOWED_WITH_PRIMARYKEY,
                                    column.columnName, field.type);
                        }
                        break;
                    }
                }
            }
        }
    }
}
// Type-checks a list constructor expression ([e1, e2, ...]) against the expected
// type, which may be json, an array, a tuple, a union of those, a typedesc, or
// inferred from the members when no usable expected type is available.
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    BType actualType = symTable.semanticError;
    // An empty list has no member to infer from, so it cannot be typed as
    // any/anydata or with no expected type at all.
    if ((expType.tag == TypeTags.ANY || expType.tag == TypeTags.ANYDATA || expType.tag == TypeTags.NONE)
            && listConstructor.exprs.isEmpty()) {
        dlog.error(listConstructor.pos, DiagnosticCode.INVALID_LIST_CONSTRUCTOR, expType);
        resultType = symTable.semanticError;
        return;
    }
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.JSON) {
        // Every member must itself be json.
        checkExprs(listConstructor.exprs, this.env, expType);
        actualType = expType;
    } else if (expTypeTag == TypeTags.ARRAY) {
        BArrayType arrayType = (BArrayType) expType;
        if (arrayType.state == BArrayState.OPEN_SEALED) {
            // [*] syntax: infer the sealed size from the literal length.
            arrayType.size = listConstructor.exprs.size();
            arrayType.state = BArrayState.CLOSED_SEALED;
        } else if (arrayType.state != BArrayState.UNSEALED && arrayType.size != listConstructor.exprs.size()) {
            dlog.error(listConstructor.pos,
                    DiagnosticCode.MISMATCHING_ARRAY_LITERAL_VALUES, arrayType.size, listConstructor.exprs.size());
            resultType = symTable.semanticError;
            return;
        }
        checkExprs(listConstructor.exprs, this.env, arrayType.eType);
        actualType = arrayType;
    } else if (expTypeTag == TypeTags.UNION) {
        // Consider only the list-shaped members of the union.
        Set<BType> expTypes = ((BUnionType) expType).getMemberTypes();
        List<BType> matchedTypeList = expTypes.stream()
                .filter(type -> type.tag == TypeTags.ARRAY || type.tag == TypeTags.TUPLE)
                .collect(Collectors.toList());
        if (matchedTypeList.isEmpty()) {
            dlog.error(listConstructor.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
        } else if (matchedTypeList.size() == 1) {
            // Exactly one candidate: check the members against it directly.
            actualType = matchedTypeList.get(0);
            if (actualType.tag == TypeTags.ARRAY) {
                checkExprs(listConstructor.exprs, this.env, ((BArrayType) actualType).eType);
            } else {
                // Tuple candidate: positional members use the declared slot type,
                // overflow members use the rest type.
                BTupleType tupleType = (BTupleType) actualType;
                List<BType> results = new ArrayList<>();
                BType restType = null;
                for (int i = 0; i < listConstructor.exprs.size(); i++) {
                    BType expType, actType;
                    if (i < tupleType.tupleTypes.size()) {
                        expType = tupleType.tupleTypes.get(i);
                        actType = checkExpr(listConstructor.exprs.get(i), env, expType);
                        // A `none` slot type means "use the member's own type".
                        results.add(expType.tag != TypeTags.NONE ? expType : actType);
                    } else {
                        restType = checkExpr(listConstructor.exprs.get(i), env, tupleType.restType);
                    }
                }
                actualType = new BTupleType(results);
                ((BTupleType) actualType).restType = restType;
            }
        } else {
            // Multiple candidates: fall back to literal-driven resolution.
            actualType = checkArrayLiteralExpr(listConstructor);
        }
    } else if (expTypeTag == TypeTags.TYPEDESC) {
        // The literal denotes a type; build a typedesc of the member types
        // (a tuple typedesc when there is more than one member).
        List<BType> results = new ArrayList<>();
        listConstructor.isTypedescExpr = true;
        for (int i = 0; i < listConstructor.exprs.size(); i++) {
            results.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
        }
        List<BType> actualTypes = new ArrayList<>();
        for (int i = 0; i < listConstructor.exprs.size(); i++) {
            final BLangExpression expr = listConstructor.exprs.get(i);
            if (expr.getKind() == NodeKind.TYPEDESC_EXPRESSION) {
                actualTypes.add(((BLangTypedescExpr) expr).resolvedType);
            } else if (expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
                actualTypes.add(((BLangSimpleVarRef) expr).symbol.type);
            } else {
                actualTypes.add(results.get(i));
            }
        }
        if (actualTypes.size() == 1) {
            listConstructor.typedescType = actualTypes.get(0);
        } else {
            listConstructor.typedescType = new BTupleType(actualTypes);
        }
        resultType = new BTypedescType(listConstructor.typedescType, null);
        return;
    } else if (expTypeTag == TypeTags.TUPLE) {
        BTupleType tupleType = (BTupleType) this.expType;
        // Member count must cover all fixed slots; extra members require a rest type.
        if ((tupleType.restType != null && (tupleType.tupleTypes.size() > listConstructor.exprs.size()))
                || (tupleType.restType == null && tupleType.tupleTypes.size() != listConstructor.exprs.size())) {
            dlog.error(listConstructor.pos, DiagnosticCode.SYNTAX_ERROR,
                    "tuple and expression size does not match");
            return;
        }
        List<BType> expTypes = tupleType.tupleTypes;
        List<BType> results = new ArrayList<>();
        BType restType = null;
        for (int i = 0; i < listConstructor.exprs.size(); i++) {
            BType expType, actType;
            if (i < expTypes.size()) {
                expType = expTypes.get(i);
                actType = checkExpr(listConstructor.exprs.get(i), env, expType);
                // A `none` slot type means "use the member's own type".
                results.add(expType.tag != TypeTags.NONE ? expType : actType);
            } else {
                restType = checkExpr(listConstructor.exprs.get(i), env, tupleType.restType);
            }
        }
        actualType = new BTupleType(results);
        ((BTupleType) actualType).restType = restType;
    } else if (listConstructor.exprs.size() > 1) {
        // No usable expected type: infer the element type by collapsing the member
        // types into their broadest mutually-assignable representatives.
        List<BType> narrowTypes = new ArrayList<>();
        for (int i = 0; i < listConstructor.exprs.size(); i++) {
            narrowTypes.add(checkExpr(listConstructor.exprs.get(i), env, symTable.noType));
        }
        Set<BType> narrowTypesSet = new LinkedHashSet<>(narrowTypes);
        LinkedHashSet<BType> broadTypesSet = new LinkedHashSet<>();
        BType[] uniqueNarrowTypes = narrowTypesSet.toArray(new BType[0]);
        BType broadType;
        for (BType t1 : uniqueNarrowTypes) {
            broadType = t1;
            for (BType t2 : uniqueNarrowTypes) {
                if (types.isAssignable(t2, t1)) {
                    broadType = t1;
                } else if (types.isAssignable(t1, t2)) {
                    broadType = t2;
                }
            }
            broadTypesSet.add(broadType);
        }
        BType eType;
        if (broadTypesSet.size() > 1) {
            // Members of unrelated types: the element type is their union.
            eType = BUnionType.create(null, broadTypesSet);
        } else {
            eType = broadTypesSet.toArray(new BType[0])[0];
        }
        BArrayType arrayType = new BArrayType(eType);
        checkExprs(listConstructor.exprs, this.env, arrayType.eType);
        actualType = arrayType;
    } else if (expTypeTag != TypeTags.SEMANTIC_ERROR) {
        actualType = checkArrayLiteralExpr(listConstructor);
    }
    resultType = types.checkType(listConstructor, actualType, expType);
}
// Resolves a list literal when the expected type is ambiguous (e.g. a union with
// several list-shaped members). Gathers every expected-type candidate compatible
// with the literal and succeeds only when exactly one non-`any` candidate remains.
private BType checkArrayLiteralExpr(BLangListConstructorExpr listConstructorExpr) {
    Set<BType> expTypes;
    if (expType.tag == TypeTags.UNION) {
        expTypes = ((BUnionType) expType).getMemberTypes();
    } else {
        expTypes = new LinkedHashSet<>();
        expTypes.add(expType);
    }
    BType actualType = symTable.noType;
    List<BType> listCompatibleTypes = new ArrayList<>();
    for (BType type : expTypes) {
        if (type.tag == TypeTags.TUPLE) {
            // A tuple candidate is compatible when the literal matches it shape-wise.
            BTupleType tupleType = (BTupleType) type;
            if (checkTupleType(listConstructorExpr, tupleType)) {
                listCompatibleTypes.add(tupleType);
            }
        } else {
            // Infer an element type from the member types: the narrowest type every
            // member is assignable to, defaulting to `any` on disagreement.
            BType[] uniqueExprTypes = checkArrayExpr(listConstructorExpr, this.env);
            BType arrayLiteralType;
            if (uniqueExprTypes.length == 0) {
                arrayLiteralType = symTable.anyType;
            } else if (uniqueExprTypes.length == 1) {
                arrayLiteralType = uniqueExprTypes[0];
            } else {
                BType superType = uniqueExprTypes[0];
                for (int i = 1; i < uniqueExprTypes.length; i++) {
                    if (types.isAssignable(superType, uniqueExprTypes[i])) {
                        superType = uniqueExprTypes[i];
                    } else if (!types.isAssignable(uniqueExprTypes[i], superType)) {
                        superType = symTable.anyType;
                        break;
                    }
                }
                arrayLiteralType = superType;
            }
            if (arrayLiteralType.tag != TypeTags.SEMANTIC_ERROR) {
                if (type.tag == TypeTags.ARRAY && ((BArrayType) type).state != BArrayState.UNSEALED) {
                    // Preserve sealedness; for open-sealed ([*]) arrays infer the
                    // size from the literal length.
                    actualType = new BArrayType(arrayLiteralType, null,
                            ((BArrayType) type).state == BArrayState.CLOSED_SEALED
                                    ? listConstructorExpr.exprs.size() : ((BArrayType) type).size,
                            ((BArrayType) type).state);
                } else {
                    // Prefer the candidate's declared element type when the inferred
                    // one is assignable to it.
                    if (type.tag == TypeTags.ARRAY
                            && types.isAssignable(arrayLiteralType, ((BArrayType) type).eType)) {
                        arrayLiteralType = ((BArrayType) type).eType;
                    }
                    actualType = new BArrayType(arrayLiteralType);
                }
                listCompatibleTypes.addAll(getListCompatibleTypes(type, actualType));
            }
        }
    }
    // Exactly one compatible candidate is required; `any` alone is not a valid
    // inferred array type.
    if (listCompatibleTypes.isEmpty()) {
        dlog.error(listConstructorExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, expType, actualType);
        actualType = symTable.semanticError;
    } else if (listCompatibleTypes.size() > 1) {
        dlog.error(listConstructorExpr.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
        actualType = symTable.semanticError;
    } else if (listCompatibleTypes.get(0).tag == TypeTags.ANY) {
        dlog.error(listConstructorExpr.pos, DiagnosticCode.INVALID_ARRAY_LITERAL, expType);
        actualType = symTable.semanticError;
    } else if (listCompatibleTypes.get(0).tag == TypeTags.ARRAY) {
        checkExpr(listConstructorExpr, this.env, listCompatibleTypes.get(0));
    } else if (listCompatibleTypes.get(0).tag == TypeTags.TUPLE) {
        actualType = listCompatibleTypes.get(0);
        setTupleType(listConstructorExpr, actualType);
    }
    return actualType;
}
/**
 * Type-checks each member of the list literal with no expected type and returns the
 * distinct member types in encounter order.
 *
 * <p>Fix: the checker's {@code env}/{@code expType} state is now restored in a
 * {@code finally} block, so a throwing member check no longer leaves the visitor in
 * a corrupted state.
 *
 * @param expr the list constructor whose members are checked
 * @param env  the environment to check the members under
 * @return the unique member types, in first-seen order
 */
private BType[] checkArrayExpr(BLangListConstructorExpr expr, SymbolEnv env) {
    List<BType> types = new ArrayList<>();
    SymbolEnv prevEnv = this.env;
    BType preExpType = this.expType;
    this.env = env;
    this.expType = symTable.noType;
    try {
        for (BLangExpression e : expr.exprs) {
            e.accept(this);
            types.add(resultType);
        }
    } finally {
        // Restore checker state even if a member check throws.
        this.env = prevEnv;
        this.expType = preExpType;
    }
    // LinkedHashSet deduplicates while preserving first-seen order.
    LinkedHashSet<BType> typesSet = new LinkedHashSet<>(types);
    return typesSet.toArray(new BType[0]);
}
// Recursively checks whether an expression (typically a list literal) matches a
// tuple type's shape, using the rest type for overflow members. Non-list
// expressions fall back to a plain assignability check.
// NOTE(review): by operator precedence the guard reads
// (TUPLE && LIST_CONSTRUCTOR) || TUPLE_LITERAL — a tuple-literal expression with a
// non-tuple `type` would reach the cast below; confirm callers never pass that.
private boolean checkTupleType(BLangExpression expression, BType type) {
    if (type.tag == TypeTags.TUPLE && expression.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR
            || expression.getKind() == NodeKind.TUPLE_LITERAL_EXPR) {
        BTupleType tupleType = (BTupleType) type;
        BLangListConstructorExpr tupleExpr = (BLangListConstructorExpr) expression;
        // Without a rest type the member counts must match exactly.
        if (tupleType.restType == null && tupleType.tupleTypes.size() != tupleExpr.exprs.size()) {
            return false;
        }
        for (int i = 0; i < tupleExpr.exprs.size(); i++) {
            BLangExpression expr = tupleExpr.exprs.get(i);
            if (i < tupleType.tupleTypes.size()) {
                // Positional member against the declared slot type.
                if (!checkTupleType(expr, tupleType.tupleTypes.get(i))) {
                    return false;
                }
            } else {
                // Overflow member against the rest type, if any.
                if (tupleType.restType == null || !checkTupleType(expr, tupleType.restType)) {
                    return false;
                }
            }
        }
        return true;
    } else {
        return types.isAssignable(checkExpr(expression, env), type);
    }
}
// Recursively stamps a tuple type onto a (nested) list literal after it has been
// validated by checkTupleType; non-list expressions are simply type-checked.
// NOTE(review): same operator-precedence pattern as checkTupleType — a
// tuple-literal expression with a non-tuple `type` would hit the cast; confirm
// this is only called after checkTupleType succeeded.
private void setTupleType(BLangExpression expression, BType type) {
    if (type.tag == TypeTags.TUPLE && expression.getKind() == NodeKind.LIST_CONSTRUCTOR_EXPR
            || expression.getKind() == NodeKind.TUPLE_LITERAL_EXPR) {
        BTupleType tupleType = (BTupleType) type;
        BLangListConstructorExpr tupleExpr = (BLangListConstructorExpr) expression;
        tupleExpr.type = type;
        // Only descend when sizes match; rest-typed overflow members keep their
        // own checked types.
        if (tupleType.tupleTypes.size() == tupleExpr.exprs.size()) {
            for (int i = 0; i < tupleExpr.exprs.size(); i++) {
                setTupleType(tupleExpr.exprs.get(i), tupleType.tupleTypes.get(i));
            }
        }
    } else {
        checkExpr(expression, env);
    }
}
// Type-checks a record literal against the expected type. Exactly one compatible
// map/record candidate must be found; when the resolved type is a record, all of
// its required fields must appear in the literal.
// NOTE(review): expTypeTag is captured before the NONE/ANY remap below, so for ANY
// the remap executes and then INVALID_RECORD_LITERAL is still reported — confirm
// the ANY remap is intentionally unreachable.
public void visit(BLangRecordLiteral recordLiteral) {
    BType actualType = symTable.semanticError;
    int expTypeTag = expType.tag;
    BType originalExpType = expType;
    if (expTypeTag == TypeTags.NONE || expTypeTag == TypeTags.ANY) {
        // Change the expected type to map.
        expType = symTable.mapType;
    }
    if (expTypeTag == TypeTags.ANY || expTypeTag == TypeTags.ANYDATA || expTypeTag == TypeTags.OBJECT) {
        dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_RECORD_LITERAL, originalExpType);
        resultType = symTable.semanticError;
        return;
    }
    List<BType> matchedTypeList = getRecordCompatibleType(expType, recordLiteral);
    if (matchedTypeList.isEmpty()) {
        // No candidate: still check the key-values to surface nested errors.
        dlog.error(recordLiteral.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
        recordLiteral.keyValuePairs
                .forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, symTable.errorType));
    } else if (matchedTypeList.size() > 1) {
        dlog.error(recordLiteral.pos, DiagnosticCode.AMBIGUOUS_TYPES, expType);
        recordLiteral.keyValuePairs
                .forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, symTable.errorType));
    } else {
        recordLiteral.keyValuePairs
                .forEach(keyValuePair -> checkRecLiteralKeyValue(keyValuePair, matchedTypeList.get(0)));
        actualType = matchedTypeList.get(0);
    }
    resultType = types.checkType(recordLiteral, actualType, expType);
    // Records must have all required fields supplied by the literal.
    if (recordLiteral.type.tag == TypeTags.RECORD) {
        checkMissingRequiredFields((BRecordType) recordLiteral.type, recordLiteral.keyValuePairs,
                recordLiteral.pos);
    }
}
/**
 * Collects the members of the given expected type that a record literal could take.
 * For a union, candidates are map members and record members that are open or that
 * the literal satisfies as a closed record; a json member contributes a map&lt;json&gt;
 * candidate unless an equivalent map member already exists. For a non-union type,
 * json maps to map&lt;json&gt;, and map/record types are themselves the single candidate.
 *
 * <p>Fix: the trailing switch now dispatches on the parameter {@code bType} instead
 * of the field {@code expType}. Current callers pass {@code expType}, so behavior is
 * unchanged, but the method now honors its own argument.
 *
 * @param bType         the expected type to search for candidates
 * @param recordLiteral the literal being checked
 * @return the compatible candidate types (possibly empty)
 */
private List<BType> getRecordCompatibleType(BType bType, BLangRecordLiteral recordLiteral) {
    if (bType.tag == TypeTags.UNION) {
        Set<BType> expTypes = ((BUnionType) bType).getMemberTypes();
        List<BType> possibleTypes =
                expTypes.stream()
                        .filter(type -> type.tag == TypeTags.MAP ||
                                (type.tag == TypeTags.RECORD &&
                                         (!((BRecordType) type).sealed ||
                                                  isCompatibleClosedRecordLiteral((BRecordType) type,
                                                                                  recordLiteral))))
                        .collect(Collectors.toList());
        // A json member implies a map<json> candidate, unless one is already present.
        if (expTypes.stream().anyMatch(type -> type.tag == TypeTags.JSON) &&
                expTypes.stream().noneMatch(type -> type.tag == TypeTags.MAP &&
                        ((BMapType) type).constraint.tag == TypeTags.JSON)) {
            possibleTypes.add(new BMapType(TypeTags.MAP, symTable.jsonType, null));
        }
        return possibleTypes;
    }
    switch (bType.tag) {
        case TypeTags.JSON:
            return Collections.singletonList(new BMapType(TypeTags.MAP, symTable.jsonType, null));
        case TypeTags.MAP:
        case TypeTags.RECORD:
            return Collections.singletonList(bType);
        default:
            return Collections.emptyList();
    }
}
/**
 * Decides whether a record literal fits a closed (sealed) record type: all required
 * fields of the record must be present, and every key in the literal must name a
 * field of the record.
 */
private boolean isCompatibleClosedRecordLiteral(BRecordType bRecordType, BLangRecordLiteral recordLiteral) {
    if (!hasRequiredRecordFields(recordLiteral.getKeyValuePairs(), bRecordType)) {
        return false;
    }
    for (BLangRecordKeyValue literalKeyValuePair : recordLiteral.getKeyValuePairs()) {
        String literalKeyName = getFieldName(literalKeyValuePair.key);
        boolean matched = false;
        for (BField field : bRecordType.getFields()) {
            if (field.getName().getValue().equals(literalKeyName)) {
                matched = true;
                break;
            }
        }
        if (!matched) {
            // A key with no corresponding field rules the closed record out.
            return false;
        }
    }
    return true;
}
/**
 * Reports an error at {@code pos} for every required field of the record type that
 * does not appear as a key in the given literal key-value pairs.
 */
private void checkMissingRequiredFields(BRecordType type, List<BLangRecordKeyValue> keyValuePairs,
                                        DiagnosticPos pos) {
    for (BField field : type.fields) {
        boolean hasField = false;
        for (BLangRecordKeyValue keyVal : keyValuePairs) {
            if (field.name.value.equals(getFieldName(keyVal.key))) {
                hasField = true;
                break;
            }
        }
        if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Extracts the field name from a record key. Returns {@code null} when the key is
 * computed, or is neither a plain identifier nor a string literal.
 */
private String getFieldName(BLangRecordKey key) {
    if (key.computedKey) {
        return null;
    }
    BLangExpression keyExpression = key.expr;
    NodeKind keyKind = keyExpression.getKind();
    if (keyKind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ((BLangSimpleVarRef) keyExpression).variableName.value;
    }
    if (keyKind == NodeKind.LITERAL) {
        BLangLiteral literal = (BLangLiteral) keyExpression;
        // Only string-typed literal keys carry a usable field name.
        return literal.type.tag == TypeTags.STRING ? (String) literal.value : null;
    }
    return null;
}
/**
 * Returns true only if every required field of the target record type appears (as a
 * simple identifier key) among the given literal key-value pairs.
 */
private boolean hasRequiredRecordFields(List<BLangRecordKeyValue> keyValuePairs, BRecordType targetRecType) {
    for (BField field : targetRecType.fields) {
        boolean hasField = false;
        for (BLangRecordKeyValue keyVal : keyValuePairs) {
            // Only identifier keys are considered, matching the original behavior.
            if (keyVal.key.expr.getKind() == NodeKind.SIMPLE_VARIABLE_REF
                    && field.name.value.equals(((BLangSimpleVarRef) keyVal.key.expr).variableName.value)) {
                hasField = true;
                break;
            }
        }
        if (!hasField && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            return false;
        }
    }
    return true;
}
/**
 * Collects the expected types (union members, or the single expected type itself)
 * that the actual list type can be assigned to; {@code none} and {@code any}
 * members always match.
 *
 * <p>Fix: replaces double-brace initialization (which created an anonymous
 * {@code LinkedHashSet} subclass holding a reference to the enclosing instance)
 * with a plain construction — behavior is unchanged.
 *
 * @param expType    the expected type (possibly a union)
 * @param actualType the resolved list type
 * @return the compatible expected types
 */
private List<BType> getListCompatibleTypes(BType expType, BType actualType) {
    Set<BType> expTypes;
    if (expType.tag == TypeTags.UNION) {
        expTypes = ((BUnionType) expType).getMemberTypes();
    } else {
        expTypes = new LinkedHashSet<>();
        expTypes.add(expType);
    }
    return expTypes.stream()
            .filter(type -> types.isAssignable(actualType, type) ||
                    type.tag == TypeTags.NONE ||
                    type.tag == TypeTags.ANY)
            .collect(Collectors.toList());
}
// Type-checks a flush action. When a worker name is given it must refer to an
// existing worker in scope; the action's type is error?.
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    if (workerFlushExpr.workerIdentifier != null) {
        String workerName = workerFlushExpr.workerIdentifier.getValue();
        if (!this.workerExists(this.env, workerName)) {
            this.dlog.error(workerFlushExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
        }
    }
    // flush evaluates to error|() — error when a send to the worker failed.
    BType actualType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    resultType = types.checkType(workerFlushExpr, actualType, expType);
}
// Type-checks a synchronous send action (expr ->> worker): the sent value must be
// anydata and the target worker must exist; the action evaluates to error|() when
// no expected type is imposed, otherwise to the expected type.
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    BSymbol symbol = symResolver.lookupSymbol(env, names.fromIdNode(syncSendExpr.workerIdentifier),
            SymTag.VARIABLE);
    if (symTable.notFoundSymbol.equals(symbol)) {
        syncSendExpr.workerType = symTable.semanticError;
    } else {
        syncSendExpr.workerType = symbol.type;
    }
    syncSendExpr.env = this.env;
    checkExpr(syncSendExpr.expr, this.env);
    // Only anydata values may cross a worker boundary.
    if (!types.isAnydata(syncSendExpr.expr.type)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.INVALID_TYPE_FOR_SEND, syncSendExpr.expr.type);
    }
    String workerName = syncSendExpr.workerIdentifier.getValue();
    if (!this.workerExists(this.env, workerName)) {
        this.dlog.error(syncSendExpr.pos, DiagnosticCode.UNDEFINED_WORKER, workerName);
    }
    if (expType == symTable.noType) {
        resultType = BUnionType.create(null, symTable.errorType, symTable.nilType);
    } else {
        resultType = expType;
    }
}
// Type-checks a worker receive expression (<- worker). A receive must appear in a
// context that dictates an expected type; channel receives are not supported here.
@Override
public void visit(BLangWorkerReceive workerReceiveExpr) {
    BSymbol symbol = symResolver.lookupSymbol(env, names.fromIdNode(workerReceiveExpr.workerIdentifier),
            SymTag.VARIABLE);
    if (workerReceiveExpr.isChannel) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.UNDEFINED_ACTION);
        return;
    }
    workerReceiveExpr.env = this.env;
    if (symTable.notFoundSymbol.equals(symbol)) {
        workerReceiveExpr.workerType = symTable.semanticError;
    } else {
        workerReceiveExpr.workerType = symbol.type;
    }
    // The receive's type comes entirely from context; with no expected type the
    // expression is unusable.
    if (symTable.noType == this.expType) {
        this.dlog.error(workerReceiveExpr.pos, DiagnosticCode.INVALID_USAGE_OF_RECEIVE_EXPRESSION);
    }
    workerReceiveExpr.type = this.expType;
    resultType = this.expType;
}
/**
 * Reports whether a worker with the given name is visible from {@code env}: either
 * the default worker, or a variable whose type is a worker-derived future.
 */
private boolean workerExists(SymbolEnv env, String workerName) {
    if (workerName.equals(DEFAULT_WORKER_NAME)) {
        return true;
    }
    BSymbol symbol = this.symResolver.lookupSymbol(env, new Name(workerName), SymTag.VARIABLE);
    if (symbol == this.symTable.notFoundSymbol || symbol.type.tag != TypeTags.FUTURE) {
        return false;
    }
    // Worker references are modeled as futures flagged as worker-derived.
    return ((BFutureType) symbol.type).workerDerivative;
}
// Type-checks a simple variable reference. Handles `_` (write-only), XMLNS-qualified
// names, ordinary variables (including closure capture into lambdas and arrow
// functions), bare type names (typedesc values), and constants.
public void visit(BLangSimpleVarRef varRefExpr) {
    BType actualType = symTable.semanticError;
    Name varName = names.fromIdNode(varRefExpr.variableName);
    if (varName == Names.IGNORE) {
        // `_` may only be assigned to, never read.
        if (varRefExpr.lhsVar) {
            varRefExpr.type = this.symTable.anyType;
        } else {
            varRefExpr.type = this.symTable.semanticError;
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDERSCORE_NOT_ALLOWED);
        }
        varRefExpr.symbol = new BVarSymbol(0, varName, env.enclPkg.symbol.pkgID, varRefExpr.type, env.scope.owner);
        resultType = varRefExpr.type;
        return;
    }
    varRefExpr.pkgSymbol = symResolver.resolveImportSymbol(varRefExpr.pos,
            env, names.fromIdNode(varRefExpr.pkgAlias));
    if (varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
        // A namespace-qualified reference evaluates to the namespace URI string.
        actualType = symTable.stringType;
    } else if (varRefExpr.pkgSymbol != symTable.notFoundSymbol) {
        BSymbol symbol = symResolver.lookupSymbolInPackage(varRefExpr.pos, env,
                names.fromIdNode(varRefExpr.pkgAlias), varName, SymTag.VARIABLE_NAME);
        if (symbol == symTable.notFoundSymbol && env.enclType != null) {
            // Fall back to an attached function of the enclosing object type.
            Name objFuncName = names.fromString(Symbols
                    .getAttachedFuncSymbolName(env.enclType.type.tsymbol.name.value, varName.value));
            symbol = symResolver.resolveStructField(varRefExpr.pos, env, objFuncName,
                    env.enclType.type.tsymbol);
        }
        if ((symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
            BVarSymbol varSym = (BVarSymbol) symbol;
            checkSefReferences(varRefExpr.pos, env, varSym);
            varRefExpr.symbol = varSym;
            actualType = varSym.type;
            BLangInvokableNode encInvokable = env.enclInvokable;
            // A non-parameter, non-module variable referenced inside a lambda
            // becomes a closure variable of that lambda.
            if (encInvokable != null && encInvokable.flagSet.contains(Flag.LAMBDA) &&
                    !(symbol.owner instanceof BPackageSymbol) &&
                    !isFunctionArgument(varSym, encInvokable.requiredParams)) {
                SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv,
                        symbol.name, SymTag.VARIABLE);
                if (resolvedSymbol != symTable.notFoundSymbol && !encInvokable.flagSet.contains(Flag.ATTACHED)) {
                    resolvedSymbol.closure = true;
                    ((BLangFunction) encInvokable).closureVarSymbols.add(
                            new ClosureVarSymbol(resolvedSymbol, varRefExpr.pos));
                }
            }
            // Same closure-capture logic for arrow expressions.
            if (env.node.getKind() == NodeKind.ARROW_EXPR && !(symbol.owner instanceof BPackageSymbol)) {
                if (!isFunctionArgument(varSym, ((BLangArrowFunction) env.node).params)) {
                    SymbolEnv encInvokableEnv = findEnclosingInvokableEnv(env, encInvokable);
                    BSymbol resolvedSymbol = symResolver.lookupClosureVarSymbol(encInvokableEnv, symbol.name,
                            SymTag.VARIABLE);
                    if (resolvedSymbol != symTable.notFoundSymbol) {
                        resolvedSymbol.closure = true;
                        ((BLangArrowFunction) env.node).closureVarSymbols.add(
                                new ClosureVarSymbol(resolvedSymbol, varRefExpr.pos));
                    }
                }
            }
        } else if ((symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            // A bare type name denotes a typedesc value.
            actualType = new BTypedescType(symbol.type, null);
            varRefExpr.symbol = symbol;
        } else if ((symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
            varRefExpr.symbol = symbol;
            BType symbolType = symbol.type;
            // In finite-type contexts the constant keeps its declared type;
            // otherwise the type of its literal value is used.
            if (symbolType != symTable.noType && expType.tag == TypeTags.FINITE ||
                    (expType.tag == TypeTags.UNION && ((BUnionType) expType).getMemberTypes().stream()
                            .anyMatch(memType -> memType.tag == TypeTags.FINITE &&
                                    types.isAssignable(symbolType, memType)))) {
                actualType = symbolType;
            } else {
                actualType = ((BConstantSymbol) symbol).literalType;
            }
            // Constants can never appear on the left-hand side.
            if (varRefExpr.lhsVar || varRefExpr.compoundAssignmentLhsVar) {
                actualType = symTable.semanticError;
                dlog.error(varRefExpr.pos, DiagnosticCode.CANNOT_UPDATE_CONSTANT_VALUE);
            }
        } else {
            dlog.error(varRefExpr.pos, DiagnosticCode.UNDEFINED_SYMBOL, varName.toString());
        }
    }
    // A sealed array with an uninferred size ([*]) cannot take its size from a
    // variable reference.
    if (expType.tag == TypeTags.ARRAY && isArrayOpenSealedType((BArrayType) expType)) {
        dlog.error(varRefExpr.pos, DiagnosticCode.SEALED_ARRAY_TYPE_CAN_NOT_INFER_SIZE);
        return;
    }
    resultType = types.checkType(varRefExpr, actualType, expType);
}
/**
 * Type-checks a record binding pattern used as a variable reference on the LHS of a
 * destructuring assignment. Builds a fresh anonymous record type whose fields mirror
 * the referenced variables; the record is sealed unless a rest parameter is present.
 *
 * <p>Fix: the rest-parameter validity check now accumulates into
 * {@code unresolvedReference} instead of overwriting it, so a previously detected
 * unresolved field reference is no longer masked by a valid rest parameter.
 */
@Override
public void visit(BLangRecordVarRef varRefExpr) {
    List<BField> fields = new ArrayList<>();
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID,
            null, env.scope.owner);
    boolean unresolvedReference = false;
    for (BLangRecordVarRef.BLangRecordVarRefKeyValue recordRefField : varRefExpr.recordRefFields) {
        // Each field reference is an assignment target.
        ((BLangVariableReference) recordRefField.variableReference).lhsVar = true;
        checkExpr(recordRefField.variableReference, env);
        if (((BLangVariableReference) recordRefField.variableReference).symbol == null ||
                !isValidVariableReference(recordRefField.variableReference)) {
            unresolvedReference = true;
            continue;
        }
        BVarSymbol bVarSymbol = (BVarSymbol) ((BLangVariableReference) recordRefField.variableReference).symbol;
        fields.add(new BField(names.fromIdNode(recordRefField.variableName), varRefExpr.pos,
                new BVarSymbol(0, names.fromIdNode(recordRefField.variableName), env.enclPkg.symbol.pkgID,
                        bVarSymbol.type, recordSymbol)));
    }
    if (varRefExpr.restParam != null) {
        BLangExpression restParam = (BLangExpression) varRefExpr.restParam;
        checkExpr(restParam, env);
        // Accumulate instead of overwrite (bug fix).
        unresolvedReference = unresolvedReference || !isValidVariableReference(restParam);
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    BRecordType bRecordType = new BRecordType(recordSymbol);
    bRecordType.fields = fields;
    recordSymbol.type = bRecordType;
    varRefExpr.symbol = new BVarSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID, bRecordType, env.scope.owner);
    // No rest parameter: the synthesized record is closed; otherwise it is open
    // with a map rest-field type.
    if (varRefExpr.restParam == null) {
        bRecordType.sealed = true;
    } else {
        bRecordType.restFieldType = symTable.mapType;
    }
    resultType = bRecordType;
}
// Type-checks an error binding pattern used as a variable reference (error
// destructuring): resolves the reason and named detail references, validates the
// rest variable, and synthesizes a matching BErrorType as the result.
@Override
public void visit(BLangErrorVarRef varRefExpr) {
    if (varRefExpr.reason != null) {
        varRefExpr.reason.lhsVar = true;
        checkExpr(varRefExpr.reason, env);
    }
    BErrorTypeSymbol errorTSymbol = Symbols.createErrorSymbol(0, Names.EMPTY, env.enclPkg.symbol.pkgID,
            null, env.scope.owner);
    boolean unresolvedReference = false;
    for (BLangNamedArgsExpression detailItem : varRefExpr.detail) {
        BLangVariableReference refItem = (BLangVariableReference) detailItem.expr;
        refItem.lhsVar = true;
        checkExpr(refItem, env);
        if (!isValidVariableReference(refItem)) {
            unresolvedReference = true;
            continue;
        }
        // Detail bindings must be simple variable references, not field/index access.
        if (refItem.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
            dlog.error(detailItem.pos, DiagnosticCode.ERROR_BINDING_PATTERN_DOES_NOT_SUPPORT_FIELD_ACCESS);
            unresolvedReference = true;
            continue;
        } else if (refItem.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            dlog.error(detailItem.pos, DiagnosticCode.ERROR_BINDING_PATTERN_DOES_NOT_SUPPORT_INDEX_ACCESS);
            unresolvedReference = true;
            continue;
        }
        if (refItem.symbol == null) {
            unresolvedReference = true;
            continue;
        }
    }
    if (varRefExpr.restVar != null) {
        varRefExpr.restVar.lhsVar = true;
        if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            checkExpr(varRefExpr.restVar, env);
            unresolvedReference = unresolvedReference
                    || varRefExpr.restVar.symbol == null
                    || !isValidVariableReference(varRefExpr.restVar);
        } else if (varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR
                || varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
            unresolvedReference = checkErrorRestParamVarRef(varRefExpr, unresolvedReference);
        }
    }
    if (unresolvedReference) {
        resultType = symTable.semanticError;
        return;
    }
    // Determine the rest-field type of the synthesized detail record.
    BType errorRefRestFieldType;
    if (varRefExpr.restVar == null) {
        errorRefRestFieldType = symTable.pureType;
    } else if (varRefExpr.restVar.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) varRefExpr.restVar).variableName.value.equals(Names.IGNORE.value)) {
        // `_` as the rest variable: anything pure may remain.
        errorRefRestFieldType = symTable.pureType;
    } else if (varRefExpr.restVar.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR
            || varRefExpr.restVar.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        errorRefRestFieldType = varRefExpr.restVar.type;
    } else if (varRefExpr.restVar.type.tag == TypeTags.MAP) {
        errorRefRestFieldType = ((BMapType) varRefExpr.restVar.type).constraint;
    } else {
        dlog.error(varRefExpr.restVar.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                varRefExpr.restVar.type, symTable.detailType);
        resultType = symTable.semanticError;
        return;
    }
    BType errorDetailType = getCompatibleDetailType(errorRefRestFieldType);
    // NOTE(review): varRefExpr.reason was null-checked at the top but is
    // dereferenced unconditionally here — confirm reason is always non-null.
    resultType = new BErrorType(errorTSymbol, varRefExpr.reason.type, errorDetailType);
}
// Builds an anonymous open record type to serve as an error's detail type: optional
// `message` (string) and `cause` (error) fields, the given rest-field type, and a
// no-op initializer function so the record behaves like a declared one.
private BRecordType getCompatibleDetailType(BType errorRefRestFieldType) {
    PackageID packageID = env.enclPkg.packageID;
    BRecordTypeSymbol detailSymbol = new BRecordTypeSymbol(SymTag.RECORD, Flags.PUBLIC, Names.EMPTY,
            packageID, null, env.scope.owner);
    detailSymbol.scope = new Scope(env.scope.owner);
    BRecordType detailType = new BRecordType(detailSymbol);
    // Both standard detail fields are public and optional.
    int flags = Flags.asMask(new HashSet<>(Lists.of(Flag.OPTIONAL, Flag.PUBLIC)));
    BField messageField = new BField(Names.DETAIL_MESSAGE, null,
            new BVarSymbol(flags, Names.DETAIL_MESSAGE, packageID, symTable.stringType, detailSymbol));
    detailType.fields.add(messageField);
    detailSymbol.scope.define(Names.DETAIL_MESSAGE, messageField.symbol);
    BField causeField = new BField(Names.DETAIL_CAUSE, null,
            new BVarSymbol(flags, Names.DETAIL_CAUSE, packageID, symTable.errorType, detailSymbol));
    detailType.fields.add(causeField);
    detailSymbol.scope.define(Names.DETAIL_CAUSE, causeField.symbol);
    // Additional detail entries are constrained by the caller-supplied rest type.
    detailType.restFieldType = errorRefRestFieldType;
    // Synthesize an empty init function so the record type is well-formed.
    BInvokableType invokableType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initSymbol = Symbols.createFunctionSymbol(0, Names.INIT_FUNCTION_SUFFIX, packageID,
            invokableType, detailSymbol, false);
    detailSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initSymbol, invokableType);
    detailSymbol.scope.define(initSymbol.name, initSymbol);
    return detailType;
}
/**
 * Resolves the type of the rest-variable reference of an error destructuring expression
 * when that reference is an index/field based access. Sets {@code varRefExpr.restVar.type}
 * from the referenced map variable's constraint.
 *
 * @param varRefExpr          error var-ref whose rest variable is being checked
 * @param unresolvedReference incoming unresolved-reference flag
 * @return the (possibly updated) unresolved-reference flag
 */
private boolean checkErrorRestParamVarRef(BLangErrorVarRef varRefExpr, boolean unresolvedReference) {
BLangAccessExpression accessExpression = (BLangAccessExpression) varRefExpr.restVar;
Name exprName = names.fromIdNode(((BLangSimpleVarRef) accessExpression.expr).variableName);
BSymbol fSym = symResolver.lookupSymbol(env, exprName, SymTag.VARIABLE);
// NOTE(review): other call sites compare lookups against symTable.notFoundSymbol rather
// than null — confirm lookupSymbol can actually return null here.
if (fSym != null) {
if (fSym.type.getKind() == TypeKind.MAP) {
BType constraint = ((BMapType) fSym.type).constraint;
// Narrow to the map's constraint only when it is a pure type; otherwise widen to pure.
if (types.isAssignable(constraint, symTable.pureType)) {
varRefExpr.restVar.type = constraint;
} else {
varRefExpr.restVar.type = symTable.pureType;
}
} else {
// Rest refs through record-field access are not supported yet.
throw new UnsupportedOperationException("rec field base access");
}
} else {
unresolvedReference = true;
}
return unresolvedReference;
}
/**
 * This method will recursively check if a multidimensional array has at least one open sealed dimension.
 *
 * @param arrayType array to check if open sealed
 * @return true if at least one dimension is open sealed
 */
@Override
public boolean isArrayOpenSealedType(BArrayType arrayType) {
    // Walk the dimensions iteratively instead of recursing.
    BType current = arrayType;
    while (current.tag == TypeTags.ARRAY) {
        BArrayType dimension = (BArrayType) current;
        if (dimension.state == BArrayState.OPEN_SEALED) {
            return true;
        }
        current = dimension.eType;
    }
    return false;
}
/**
 * This method will recursively traverse and find the symbol environment of a lambda node (which is given as the
 * enclosing invokable node) which is needed to lookup closure variables. The variable lookup will start from the
 * enclosing invokable node's environment, which are outside of the scope of a lambda function.
 */
private SymbolEnv findEnclosingInvokableEnv(SymbolEnv env, BLangInvokableNode encInvokable) {
// An arrow expression introduces its own enclosing environment; stop there.
if (env.enclEnv.node != null && env.enclEnv.node.getKind() == NodeKind.ARROW_EXPR) {
return env.enclEnv;
}
// NOTE(review): assumes env.enclEnv stays non-null while still inside encInvokable — confirm.
if (env.enclInvokable != null && env.enclInvokable == encInvokable) {
return findEnclosingInvokableEnv(env.enclEnv, encInvokable);
}
return env;
}
/**
 * Checks whether the given symbol corresponds to one of the listed function parameters,
 * matching by name and by type tag.
 */
private boolean isFunctionArgument(BSymbol symbol, List<BLangSimpleVariable> params) {
    for (BLangSimpleVariable param : params) {
        if (param.symbol.name.equals(symbol.name) && param.type.tag == symbol.type.tag) {
            return true;
        }
    }
    return false;
}
/**
 * Type-checks a field access expression ({@code expr.field} / {@code expr?.field} / {@code expr.*}).
 * Rejects {@code .*} on non-XML values and optional field access on assignment targets.
 */
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
// Propagate lhs/compound-assignment context down to the accessed expression.
((BLangVariableReference) fieldAccessExpr.expr).lhsVar = fieldAccessExpr.lhsVar;
((BLangVariableReference) fieldAccessExpr.expr).compoundAssignmentLhsVar =
fieldAccessExpr.compoundAssignmentLhsVar;
BType varRefType = getTypeOfExprInFieldAccess(fieldAccessExpr.expr);
BType actualType;
// `x.*` (field-kind ALL) is only meaningful on XML values.
if (fieldAccessExpr.fieldKind == FieldKind.ALL && varRefType.tag != TypeTags.XML) {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_GET_ALL_FIELDS, varRefType);
actualType = symTable.semanticError;
} else {
if (fieldAccessExpr.optionalFieldAccess) {
// `?.` cannot appear on the left-hand side of an assignment.
if (fieldAccessExpr.lhsVar || fieldAccessExpr.compoundAssignmentLhsVar) {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPTIONAL_FIELD_ACCESS_NOT_REQUIRED_ON_LHS);
resultType = symTable.semanticError;
return;
}
actualType = checkOptionalFieldAccessExpr(fieldAccessExpr, varRefType,
names.fromIdNode(fieldAccessExpr.field));
} else {
actualType = checkFieldAccessExpr(fieldAccessExpr, varRefType, names.fromIdNode(fieldAccessExpr.field));
}
}
resultType = types.checkType(fieldAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an index access expression ({@code expr[index]}).
 * On the left-hand side the computed member type is used directly without
 * checking against the expected type.
 */
public void visit(BLangIndexBasedAccess indexBasedAccessExpr) {
// Propagate lhs/compound-assignment context down to the accessed expression.
((BLangVariableReference) indexBasedAccessExpr.expr).lhsVar = indexBasedAccessExpr.lhsVar;
((BLangVariableReference) indexBasedAccessExpr.expr).compoundAssignmentLhsVar =
indexBasedAccessExpr.compoundAssignmentLhsVar;
checkExpr(indexBasedAccessExpr.expr, this.env, symTable.noType);
BType actualType = checkIndexAccessExpr(indexBasedAccessExpr);
if (indexBasedAccessExpr.lhsVar) {
indexBasedAccessExpr.originalType = actualType;
indexBasedAccessExpr.type = actualType;
resultType = actualType;
return;
}
this.resultType = this.types.checkType(indexBasedAccessExpr, actualType, this.expType);
}
/**
 * Type-checks an invocation. Plain function calls (no attached expression) are handled
 * by {@link #checkFunctionInvocationExpr}; otherwise the receiver expression is checked
 * first and the call is dispatched based on the receiver's type (object method, record
 * field function pointer, action invocation, or lang-lib method).
 */
public void visit(BLangInvocation iExpr) {
if (iExpr.expr == null) {
checkFunctionInvocationExpr(iExpr);
return;
}
// A method call on an expression must not carry a package qualifier.
Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
if (pkgAlias != Names.EMPTY) {
dlog.error(iExpr.pos, DiagnosticCode.PKG_ALIAS_NOT_ALLOWED_HERE);
// NOTE(review): this early return leaves resultType unset — confirm that is intended.
return;
}
final BType exprType = checkExpr(iExpr.expr, this.env, symTable.noType);
if (iExpr.actionInvocation) {
checkActionInvocationExpr(iExpr, exprType);
return;
}
BType varRefType = iExpr.expr.type;
switch (varRefType.tag) {
case TypeTags.OBJECT:
checkObjectFunctionInvocationExpr(iExpr, (BObjectType) varRefType);
break;
case TypeTags.RECORD:
// Prefer a function-pointer field of the record; fall back to lang-lib methods.
boolean methodFound = checkFieldFunctionPointer(iExpr);
if (!methodFound) {
checkInLangLib(iExpr, varRefType);
}
break;
case TypeTags.NONE:
dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name);
break;
case TypeTags.SEMANTIC_ERROR:
// Receiver already failed; avoid cascading errors.
break;
default:
checkInLangLib(iExpr, varRefType);
}
}
/**
 * Attempts to resolve the invocation as a lang-lib method on the receiver type;
 * reports an undefined-function error when no such method exists.
 */
private void checkInLangLib(BLangInvocation iExpr, BType varRefType) {
    if (checkLangLibMethodInvocationExpr(iExpr, varRefType)) {
        return;
    }
    dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, iExpr.name.value);
    resultType = symTable.semanticError;
}
/**
 * Tries to resolve the invocation as a call through a function-pointer field of the
 * receiver's type. Returns false when the receiver failed to check or no such field exists.
 */
private boolean checkFieldFunctionPointer(BLangInvocation iExpr) {
// NOTE(review): the receiver was already checked by the caller; this re-check appears
// redundant — confirm checkExpr caches the result.
BType type = checkExpr(iExpr.expr, this.env);
if (type == symTable.semanticError) {
return false;
}
BSymbol funcSymbol = symResolver.resolveStructField(iExpr.pos, env, names.fromIdNode(iExpr.name), type.tsymbol);
if (funcSymbol == symTable.notFoundSymbol) {
return false;
}
iExpr.symbol = funcSymbol;
iExpr.type = ((BInvokableSymbol) funcSymbol).retType;
checkInvocationParamAndReturnType(iExpr);
iExpr.functionPointerInvocation = true;
return true;
}
/**
 * Type-checks a {@code new} expression. The constructed type is either the explicit
 * user-defined type or inferred from the expected type. Supports objects (non-abstract,
 * with init-argument checking), streams (no init args allowed) and unions (picks the
 * unique object member whose init function matches the arguments).
 */
public void visit(BLangTypeInit cIExpr) {
// `new` without a type name cannot be inferred from `any`, and records are never `new`-able.
if ((expType.tag == TypeTags.ANY && cIExpr.userDefinedType == null) || expType.tag == TypeTags.RECORD) {
dlog.error(cIExpr.pos, DiagnosticCode.INVALID_TYPE_NEW_LITERAL, expType);
resultType = symTable.semanticError;
return;
}
BType actualType;
if (cIExpr.userDefinedType != null) {
actualType = symResolver.resolveTypeNode(cIExpr.userDefinedType, env);
} else {
actualType = expType;
}
if (actualType == symTable.semanticError) {
resultType = symTable.semanticError;
return;
}
switch (actualType.tag) {
case TypeTags.OBJECT:
// Abstract objects cannot be instantiated; still check args to surface their errors.
if ((actualType.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, actualType.tsymbol);
cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
resultType = symTable.semanticError;
return;
}
if (((BObjectTypeSymbol) actualType.tsymbol).initializerFunc != null) {
cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) actualType.tsymbol).initializerFunc.symbol;
checkInvocationParam(cIExpr.initInvocation);
cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType;
} else if (!cIExpr.initInvocation.argExprs.isEmpty()) {
// No init function, yet arguments were supplied.
dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.exprSymbol);
cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
resultType = symTable.semanticError;
return;
}
break;
case TypeTags.STREAM:
if (!cIExpr.initInvocation.argExprs.isEmpty()) {
dlog.error(cIExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, cIExpr.initInvocation.name);
resultType = symTable.semanticError;
return;
}
break;
case TypeTags.UNION:
// Pick the unique object member of the union whose init function matches the args.
List<BType> matchingMembers = findMembersWithMatchingInitFunc(cIExpr, (BUnionType) actualType);
BType matchedType = getMatchingType(matchingMembers, cIExpr, actualType);
cIExpr.initInvocation.type = symTable.nilType;
if (matchedType.tag == TypeTags.OBJECT
&& ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc != null) {
cIExpr.initInvocation.symbol = ((BObjectTypeSymbol) matchedType.tsymbol).initializerFunc.symbol;
checkInvocationParam(cIExpr.initInvocation);
cIExpr.initInvocation.type = ((BInvokableSymbol) cIExpr.initInvocation.symbol).retType;
actualType = matchedType;
break;
}
types.checkType(cIExpr, matchedType, expType);
cIExpr.type = matchedType;
resultType = matchedType;
return;
default:
dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS, actualType);
resultType = symTable.semanticError;
return;
}
if (cIExpr.initInvocation.type == null) {
cIExpr.initInvocation.type = symTable.nilType;
}
// The overall type folds the init function's error returns into the object type.
BType actualTypeInitType = getObjectConstructorReturnType(actualType, cIExpr.initInvocation.type);
resultType = types.checkType(cIExpr, actualTypeInitType, expType);
}
/**
 * Computes the type of a {@code new T(...)} expression from the object type and the
 * init function's return type: nil return yields the object type itself; a union
 * return replaces its nil member with the object type; anything else is an error.
 */
private BType getObjectConstructorReturnType(BType objType, BType initRetType) {
    if (initRetType.tag == TypeTags.NIL) {
        return objType;
    }
    if (initRetType.tag == TypeTags.UNION) {
        // Substitute the object type for nil in the init return union.
        LinkedHashSet<BType> retTypeMembers = new LinkedHashSet<>();
        retTypeMembers.add(objType);
        retTypeMembers.addAll(((BUnionType) initRetType).getMemberTypes());
        retTypeMembers.remove(symTable.nilType);
        BUnionType unionType = BUnionType.create(null, retTypeMembers);
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, 0, Names.EMPTY,
                env.enclPkg.symbol.pkgID, unionType, env.scope.owner);
        return unionType;
    }
    return symTable.semanticError;
}
/**
 * Collects the object members of the expected union type whose init function is
 * compatible with the supplied constructor arguments. Argument expressions are
 * type-checked once up front so their types are available for matching.
 */
private List<BType> findMembersWithMatchingInitFunc(BLangTypeInit cIExpr, BUnionType lhsUnionType) {
cIExpr.initInvocation.argExprs.forEach(expr -> checkExpr(expr, env, symTable.noType));
List<BType> matchingLhsMemberTypes = new ArrayList<>();
for (BType memberType : lhsUnionType.getMemberTypes()) {
// Only object members can be constructed with `new`.
if (memberType.tag != TypeTags.OBJECT) {
continue;
}
if ((memberType.tsymbol.flags & Flags.ABSTRACT) == Flags.ABSTRACT) {
dlog.error(cIExpr.pos, DiagnosticCode.CANNOT_INITIALIZE_ABSTRACT_OBJECT, lhsUnionType.tsymbol);
}
BAttachedFunction initializerFunc = ((BObjectTypeSymbol) memberType.tsymbol).initializerFunc;
if (isArgsMatchesFunction(cIExpr.argsExpr, initializerFunc)) {
matchingLhsMemberTypes.add(memberType);
}
}
return matchingLhsMemberTypes;
}
/**
 * Selects the single matching member type for a `new` expression over a union.
 * Zero matches is an inference failure; more than one is ambiguous — both report
 * an error and yield the semantic-error type.
 */
private BType getMatchingType(List<BType> matchingLhsMembers, BLangTypeInit cIExpr, BType lhsUnion) {
    if (matchingLhsMembers.size() == 1) {
        return matchingLhsMembers.get(0).tsymbol.type;
    }
    DiagnosticCode code = matchingLhsMembers.isEmpty()
            ? DiagnosticCode.CANNOT_INFER_OBJECT_TYPE_FROM_LHS
            : DiagnosticCode.AMBIGUOUS_TYPES;
    dlog.error(cIExpr.pos, code, lhsUnion);
    resultType = symTable.semanticError;
    return symTable.semanticError;
}
/**
 * Determines whether the given invocation arguments are compatible with the attached
 * function's signature: named args must match defaultable params, positional args must
 * cover all required params, and surplus positional args must fit the rest param.
 */
private boolean isArgsMatchesFunction(List<BLangExpression> invocationArguments, BAttachedFunction function) {
// No init function: only a no-arg invocation matches.
if (function == null) {
return invocationArguments.isEmpty();
}
if (function.symbol.params.isEmpty() && invocationArguments.isEmpty()) {
return true;
}
// Split arguments into named and positional.
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
List<BLangExpression> unnamedArgs = new ArrayList<>();
for (BLangExpression argument : invocationArguments) {
if (argument.getKind() == NodeKind.NAMED_ARGS_EXPR) {
namedArgs.add((BLangNamedArgsExpression) argument);
} else {
unnamedArgs.add(argument);
}
}
if (!matchDefaultableParameters(function, namedArgs)) {
return false;
}
List<BVarSymbol> requiredParams = function.symbol.params.stream()
.filter(param -> !param.defaultableParam).collect(Collectors.toList());
// Too few positional args for the required params.
if (requiredParams.size() > unnamedArgs.size()) {
return false;
}
// Without a rest param, positional args must match required params exactly.
if (function.symbol.restParam == null && requiredParams.size() != unnamedArgs.size()) {
return false;
}
if (function.symbol.restParam != null) {
BType restParamType = ((BArrayType) function.symbol.restParam.type).eType;
if (!restArgTypesMatch(unnamedArgs, requiredParams.size(), restParamType)) {
return false;
}
}
// Each positional arg must be assignable to its required param.
for (int i = 0, paramsSize = requiredParams.size(); i < paramsSize; i++) {
BVarSymbol param = requiredParams.get(i);
BLangExpression argument = unnamedArgs.get(i);
if (!types.isAssignable(argument.type, param.type)) {
return false;
}
}
return true;
}
/**
 * Checks that every positional argument beyond the required-parameter count is
 * assignable to the rest parameter's element type.
 */
private boolean restArgTypesMatch(List<BLangExpression> unnamedArgs, int requiredParamCount, BType restParamType) {
    // Iterate the surplus arguments in place instead of taking a subList view.
    for (int i = requiredParamCount; i < unnamedArgs.size(); i++) {
        if (!types.isAssignable(unnamedArgs.get(i).type, restParamType)) {
            return false;
        }
    }
    return true;
}
/**
 * Matches the named arguments of an object-initializer invocation against the
 * function's defaultable parameters. Every named argument must name a defaultable
 * parameter and its expression's type must be assignable to that parameter's type.
 *
 * @param function  attached init function whose defaultable params are matched
 * @param namedArgs named arguments supplied at the call site
 * @return true when every named argument matched a defaultable parameter
 */
private boolean matchDefaultableParameters(BAttachedFunction function, List<BLangNamedArgsExpression> namedArgs) {
    List<BVarSymbol> defaultableParams = function.symbol.params.stream()
            .filter(param -> param.defaultableParam)
            .collect(Collectors.toList());
    // More named args than defaultable params can never match.
    if (defaultableParams.size() < namedArgs.size()) {
        return false;
    }
    int matchedParameterCount = 0;
    for (BVarSymbol defaultableParam : defaultableParams) {
        for (BLangNamedArgsExpression namedArg : namedArgs) {
            if (!namedArg.name.value.equals(defaultableParam.name.value)) {
                continue;
            }
            BType namedArgExprType = checkExpr(namedArg.expr, env);
            // Fix: the argument's type must be assignable to the parameter's type,
            // matching the direction used for positional/rest args above
            // (the operands were previously reversed).
            if (types.isAssignable(namedArgExprType, defaultableParam.type)) {
                matchedParameterCount++;
            } else {
                return false;
            }
        }
    }
    // Every named argument must have found its defaultable parameter.
    return namedArgs.size() == matchedParameterCount;
}
/**
 * Type-checks a multi-wait expression ({@code wait {a: f1, b: f2}}). Against a record
 * the fields are matched individually; against a map (or no expected type) the result
 * is a map whose constraint is the union of the waited futures' constraints.
 */
public void visit(BLangWaitForAllExpr waitForAllExpr) {
switch (expType.tag) {
case TypeTags.RECORD:
checkTypesForRecords(waitForAllExpr);
break;
case TypeTags.MAP:
// Check each entry against the map constraint, then rebuild the precise map type.
checkTypesForMap(waitForAllExpr.keyValuePairs, ((BMapType) expType).constraint);
LinkedHashSet<BType> memberTypesForMap = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
if (memberTypesForMap.size() == 1) {
resultType = new BMapType(TypeTags.MAP,
memberTypesForMap.iterator().next(), symTable.mapType.tsymbol);
break;
}
BUnionType constraintTypeForMap = BUnionType.create(null, memberTypesForMap);
resultType = new BMapType(TypeTags.MAP, constraintTypeForMap, symTable.mapType.tsymbol);
break;
case TypeTags.NONE:
case TypeTags.ANY:
// No concrete expected type: infer a map type from the entries.
checkTypesForMap(waitForAllExpr.keyValuePairs, expType);
LinkedHashSet<BType> memberTypes = collectWaitExprTypes(waitForAllExpr.keyValuePairs);
if (memberTypes.size() == 1) {
resultType = new BMapType(TypeTags.MAP, memberTypes.iterator().next(), symTable.mapType.tsymbol);
break;
}
BUnionType constraintType = BUnionType.create(null, memberTypes);
resultType = new BMapType(TypeTags.MAP, constraintType, symTable.mapType.tsymbol);
break;
default:
dlog.error(waitForAllExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
resultType = symTable.semanticError;
break;
}
waitForAllExpr.type = resultType;
if (resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(waitForAllExpr, waitForAllExpr.type, expType);
}
}
/**
 * Collects the result types of the wait entries, unwrapping {@code future<T>} to
 * {@code T}. Insertion order is preserved for deterministic union construction.
 */
private LinkedHashSet<BType> collectWaitExprTypes(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyVals) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyVals) {
        // An entry's type comes from its key expression when present, else its value expression.
        BType entryType = keyVal.keyExpr == null ? keyVal.valueExpr.type : keyVal.keyExpr.type;
        memberTypes.add(entryType.tag == TypeTags.FUTURE ? ((BFutureType) entryType).constraint : entryType);
    }
    return memberTypes;
}
/**
 * Checks every wait entry against the expected member type (the map's constraint).
 */
private void checkTypesForMap(List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValuePairs, BType expType) {
    for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValuePairs) {
        checkWaitKeyValExpr(keyVal, expType);
    }
}
/**
 * Checks a multi-wait expression against an expected record type: each entry is
 * matched to the corresponding record field (or the rest-field type for open
 * records), and missing required fields are reported.
 */
private void checkTypesForRecords(BLangWaitForAllExpr waitExpr) {
List<BLangWaitForAllExpr.BLangWaitKeyValue> rhsFields = waitExpr.getKeyValuePairs();
Map<String, BType> lhsFields = new HashMap<>();
((BRecordType) expType).getFields().forEach(field -> lhsFields.put(field.name.value, field.type));
// A sealed record cannot absorb more entries than it has fields.
if (((BRecordType) expType).sealed && rhsFields.size() > lhsFields.size()) {
dlog.error(waitExpr.pos, DiagnosticCode.INVALID_LITERAL_FOR_TYPE, expType);
resultType = symTable.semanticError;
return;
}
for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : rhsFields) {
String key = keyVal.key.value;
if (!lhsFields.containsKey(key)) {
// Unknown key: error for sealed records, rest-field type otherwise.
if (((BRecordType) expType).sealed) {
dlog.error(waitExpr.pos, DiagnosticCode.INVALID_FIELD_NAME_RECORD_LITERAL, key, expType);
resultType = symTable.semanticError;
} else {
BType restFieldType = ((BRecordType) expType).restFieldType;
checkWaitKeyValExpr(keyVal, restFieldType);
}
} else {
checkWaitKeyValExpr(keyVal, lhsFields.get(key));
}
}
checkMissingReqFieldsForWait(((BRecordType) expType), rhsFields, waitExpr.pos);
if (symTable.semanticError != resultType) {
resultType = expType;
}
}
/**
 * Reports an error for each required field of the record type that has no
 * corresponding entry in the multi-wait expression.
 */
private void checkMissingReqFieldsForWait(BRecordType type, List<BLangWaitForAllExpr.BLangWaitKeyValue> keyValPairs,
                                          DiagnosticPos pos) {
    for (BField field : type.fields) {
        if (!Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            continue;
        }
        boolean hasField = false;
        for (BLangWaitForAllExpr.BLangWaitKeyValue keyVal : keyValPairs) {
            if (field.name.value.equals(keyVal.key.value)) {
                hasField = true;
                break;
            }
        }
        if (!hasField) {
            dlog.error(pos, DiagnosticCode.MISSING_REQUIRED_RECORD_FIELD, field.name);
        }
    }
}
/**
 * Checks one wait entry: the waited expression (the key reference or the explicit
 * value expression) must be a {@code future} of the expected member type.
 */
private void checkWaitKeyValExpr(BLangWaitForAllExpr.BLangWaitKeyValue keyVal, BType type) {
BLangExpression expr;
if (keyVal.keyExpr != null) {
// Shorthand entry `{f}`: resolve the key as a variable reference.
BSymbol symbol = symResolver.lookupSymbol(env, names.fromIdNode
(((BLangSimpleVarRef) keyVal.keyExpr).variableName), SymTag.VARIABLE);
keyVal.keyExpr.type = symbol.type;
expr = keyVal.keyExpr;
} else {
expr = keyVal.valueExpr;
}
BFutureType futureType = new BFutureType(TypeTags.FUTURE, type, null);
checkExpr(expr, env, futureType);
}
/**
 * Type-checks a ternary conditional. The condition narrows types in both branches
 * (truth in the then-branch, falsity in the else-branch). With no expected type the
 * result is whichever branch type the other is assignable to.
 */
public void visit(BLangTernaryExpr ternaryExpr) {
BType condExprType = checkExpr(ternaryExpr.expr, env, this.symTable.booleanType);
// Each branch is checked in a narrowed environment derived from the condition.
SymbolEnv thenEnv = typeNarrower.evaluateTruth(ternaryExpr.expr, ternaryExpr.thenExpr, env);
BType thenType = checkExpr(ternaryExpr.thenExpr, thenEnv, expType);
SymbolEnv elseEnv = typeNarrower.evaluateFalsity(ternaryExpr.expr, ternaryExpr.elseExpr, env);
BType elseType = checkExpr(ternaryExpr.elseExpr, elseEnv, expType);
if (condExprType == symTable.semanticError || thenType == symTable.semanticError ||
elseType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
// No expected type: pick the more general of the two branch types.
if (types.isAssignable(elseType, thenType)) {
resultType = thenType;
} else if (types.isAssignable(thenType, elseType)) {
resultType = elseType;
} else {
dlog.error(ternaryExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, thenType, elseType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}
/**
 * Type-checks a single/alternate wait expression. The waited expression must be a
 * future of the expected type; the result is the future's constraint (a union of
 * constraints for alternate waits).
 */
public void visit(BLangWaitExpr waitExpr) {
// NOTE: deliberately overwrites the visitor's expType field with future<expType>.
expType = new BFutureType(TypeTags.FUTURE, expType, null);
checkExpr(waitExpr.getExpression(), env, expType);
if (resultType.tag == TypeTags.UNION) {
// Alternate wait: unwrap each future member's constraint.
LinkedHashSet<BType> memberTypes = collectMemberTypes((BUnionType) resultType, new LinkedHashSet<>());
if (memberTypes.size() == 1) {
resultType = memberTypes.toArray(new BType[0])[0];
} else {
resultType = BUnionType.create(null, memberTypes);
}
} else if (resultType != symTable.semanticError) {
resultType = ((BFutureType) resultType).constraint;
}
waitExpr.type = resultType;
if (resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(waitExpr, waitExpr.type, ((BFutureType) expType).constraint);
}
}
/**
 * Accumulates the union's member types into the given set, unwrapping
 * {@code future<T>} members to {@code T}; returns the same set.
 */
private LinkedHashSet<BType> collectMemberTypes(BUnionType unionType, LinkedHashSet<BType> memberTypes) {
    for (BType member : unionType.getMemberTypes()) {
        memberTypes.add(member.tag == TypeTags.FUTURE ? ((BFutureType) member).constraint : member);
    }
    return memberTypes;
}
@Override
/**
 * Type-checks a {@code trap} expression: the result type is the trapped expression's
 * type unioned with {@code error}. Worker-receive operands need a second visit
 * (the first pass defers until the receive's type is known).
 */
public void visit(BLangTrapExpr trapExpr) {
boolean firstVisit = trapExpr.expr.type == null;
BType actualType;
BType exprType = checkExpr(trapExpr.expr, env, expType);
boolean definedWithVar = expType == symTable.noType;
if (trapExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
if (firstVisit) {
// Defer: the receive's type is resolved on a later pass.
isTypeChecked = false;
resultType = expType;
return;
} else {
expType = trapExpr.type;
exprType = trapExpr.expr.type;
}
}
if (expType == symTable.semanticError) {
actualType = symTable.semanticError;
} else {
// Result is the expression's type (flattened if a union) plus error.
LinkedHashSet<BType> resultTypes = new LinkedHashSet<>();
if (exprType.tag == TypeTags.UNION) {
resultTypes.addAll(((BUnionType) exprType).getMemberTypes());
} else {
resultTypes.add(exprType);
}
resultTypes.add(symTable.errorType);
actualType = BUnionType.create(null, resultTypes);
}
resultType = types.checkType(trapExpr, actualType, expType);
if (definedWithVar && resultType != null && resultType != symTable.semanticError) {
types.setImplicitCastExpr(trapExpr.expr, trapExpr.expr.type, resultType);
}
}
/**
 * Type-checks a binary expression. Handles the special {@code f1 | f2} alternate-wait
 * form when a future is expected, applies decimal contagion for arithmetic literals,
 * narrows the RHS environment for {@code &&}/{@code ||}, and resolves the operator
 * symbol for the operand types.
 */
public void visit(BLangBinaryExpr binaryExpr) {
// `|` between futures inside a wait is an alternate-wait, not bitwise-or.
if (expType.tag == TypeTags.FUTURE && binaryExpr.opKind == OperatorKind.BITWISE_OR) {
BType lhsResultType = checkExpr(binaryExpr.lhsExpr, env, expType);
BType rhsResultType = checkExpr(binaryExpr.rhsExpr, env, expType);
if (lhsResultType == symTable.semanticError || rhsResultType == symTable.semanticError) {
resultType = symTable.semanticError;
return;
}
resultType = BUnionType.create(null, lhsResultType, rhsResultType);
return;
}
checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(binaryExpr);
SymbolEnv rhsExprEnv;
BType lhsType = checkExpr(binaryExpr.lhsExpr, env);
// Short-circuit operators narrow types on the RHS based on the LHS outcome.
if (binaryExpr.opKind == OperatorKind.AND) {
rhsExprEnv = typeNarrower.evaluateTruth(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
} else if (binaryExpr.opKind == OperatorKind.OR) {
rhsExprEnv = typeNarrower.evaluateFalsity(binaryExpr.lhsExpr, binaryExpr.rhsExpr, env);
} else {
rhsExprEnv = env;
}
BType rhsType = checkExpr(binaryExpr.rhsExpr, rhsExprEnv);
BType actualType = symTable.semanticError;
if (lhsType != symTable.semanticError && rhsType != symTable.semanticError) {
BSymbol opSymbol = symResolver.resolveBinaryOperator(binaryExpr.opKind, lhsType, rhsType);
if (opSymbol == symTable.notFoundSymbol) {
// Fall back to set-based equality resolution for ==/!=.
opSymbol = symResolver.getBinaryEqualityForTypeSets(binaryExpr.opKind, lhsType, rhsType, binaryExpr);
}
if (opSymbol == symTable.notFoundSymbol) {
dlog.error(binaryExpr.pos, DiagnosticCode.BINARY_OP_INCOMPATIBLE_TYPES,
binaryExpr.opKind, lhsType, rhsType);
} else {
// Equality over values that may contain tables is not supported yet.
if ((binaryExpr.opKind == OperatorKind.EQUAL || binaryExpr.opKind == OperatorKind.NOT_EQUAL) &&
(couldHoldTableValues(lhsType, new ArrayList<>()) &&
couldHoldTableValues(rhsType, new ArrayList<>()))) {
dlog.error(binaryExpr.pos, DiagnosticCode.EQUALITY_NOT_YET_SUPPORTED, TABLE_TNAME);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
actualType = opSymbol.type.getReturnType();
}
}
resultType = types.checkType(binaryExpr, actualType, expType);
}
/**
 * When a decimal result is expected from {@code + - * /}, pre-checks both operands
 * against the decimal type so numeric literals are typed as decimal rather than
 * defaulting to int/float.
 */
private void checkDecimalCompatibilityForBinaryArithmeticOverLiteralValues(BLangBinaryExpr binaryExpr) {
    if (expType.tag != TypeTags.DECIMAL) {
        return;
    }
    OperatorKind op = binaryExpr.opKind;
    boolean isArithmetic = op == OperatorKind.ADD || op == OperatorKind.SUB
            || op == OperatorKind.MUL || op == OperatorKind.DIV;
    if (isArithmetic) {
        checkExpr(binaryExpr.lhsExpr, env, expType);
        checkExpr(binaryExpr.rhsExpr, env, expType);
    }
}
/**
 * Type-checks an elvis expression ({@code lhs ?: rhs}). The LHS must be nullable;
 * its nil member is stripped to form the LHS contribution, and with no expected
 * type both sides must agree exactly.
 */
public void visit(BLangElvisExpr elvisExpr) {
BType lhsType = checkExpr(elvisExpr.lhsExpr, env);
BType actualType = symTable.semanticError;
if (lhsType != symTable.semanticError) {
if (lhsType.tag == TypeTags.UNION && lhsType.isNullable()) {
// Strip nil from the LHS union; a single remaining member collapses the union.
BUnionType unionType = (BUnionType) lhsType;
LinkedHashSet<BType> memberTypes = unionType.getMemberTypes().stream()
.filter(type -> type.tag != TypeTags.NIL)
.collect(Collectors.toCollection(LinkedHashSet::new));
if (memberTypes.size() == 1) {
actualType = memberTypes.toArray(new BType[0])[0];
} else {
actualType = BUnionType.create(null, memberTypes);
}
} else {
// Non-nullable LHS: elvis is meaningless.
dlog.error(elvisExpr.pos, DiagnosticCode.OPERATOR_NOT_SUPPORTED,
OperatorKind.ELVIS, lhsType);
}
}
BType rhsReturnType = checkExpr(elvisExpr.rhsExpr, env, expType);
BType lhsReturnType = types.checkType(elvisExpr.lhsExpr.pos, actualType, expType,
DiagnosticCode.INCOMPATIBLE_TYPES);
if (rhsReturnType == symTable.semanticError || lhsReturnType == symTable.semanticError) {
resultType = symTable.semanticError;
} else if (expType == symTable.noType) {
if (types.isSameType(rhsReturnType, lhsReturnType)) {
resultType = lhsReturnType;
} else {
dlog.error(elvisExpr.rhsExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES, lhsReturnType, rhsReturnType);
resultType = symTable.semanticError;
}
} else {
resultType = expType;
}
}
@Override
public void visit(BLangGroupExpr groupExpr) {
    // A parenthesized expression has exactly the type of its inner expression.
    BType innerType = checkExpr(groupExpr.expression, env, expType);
    resultType = innerType;
}
/**
 * Type-checks a type-access expression: resolves the referenced type and wraps it
 * in a {@code typedesc} unless it already is one (or failed to resolve).
 */
public void visit(BLangTypedescExpr accessExpr) {
    accessExpr.resolvedType = symResolver.resolveTypeNode(accessExpr.typeNode, env);
    int resolvedTag = accessExpr.resolvedType.tag;
    BType actualType = (resolvedTag == TypeTags.TYPEDESC || resolvedTag == TypeTags.NONE)
            ? accessExpr.resolvedType
            : new BTypedescType(accessExpr.resolvedType, null);
    resultType = types.checkType(accessExpr, actualType, expType);
}
/**
 * Type-checks a unary expression. {@code untaint} preserves the operand type,
 * {@code typeof} yields a typedesc of the operand type, and the remaining
 * operators are resolved through the operator table.
 */
public void visit(BLangUnaryExpr unaryExpr) {
BType exprType;
BType actualType = symTable.semanticError;
if (OperatorKind.UNTAINT.equals(unaryExpr.operator)) {
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = exprType;
}
} else if (OperatorKind.TYPEOF.equals(unaryExpr.operator)) {
exprType = checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
actualType = new BTypedescType(exprType, null);
}
} else {
// Unary + propagates the expected type so numeric literals keep their context.
exprType = OperatorKind.ADD.equals(unaryExpr.operator) ? checkExpr(unaryExpr.expr, env, expType) :
checkExpr(unaryExpr.expr, env);
if (exprType != symTable.semanticError) {
BSymbol symbol = symResolver.resolveUnaryOperator(unaryExpr.pos, unaryExpr.operator, exprType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(unaryExpr.pos, DiagnosticCode.UNARY_OP_INCOMPATIBLE_TYPES,
unaryExpr.operator, exprType);
} else {
unaryExpr.opSymbol = (BOperatorSymbol) symbol;
actualType = symbol.type.getReturnType();
}
}
}
resultType = types.checkType(unaryExpr, actualType, expType);
}
/**
 * Type-checks a type-cast expression. A missing type node with annotation
 * attachments is an annotated expression (no conversion). Otherwise the cast
 * operator from source to target type is resolved; casts to future are rejected.
 */
public void visit(BLangTypeConversionExpr conversionExpr) {
BType actualType = symTable.semanticError;
// Annotation-only form: `@ann expr` — just check the inner expression.
if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
BType expType = checkExpr(conversionExpr.expr, env, this.expType);
resultType = expType;
return;
}
BType targetType = symResolver.resolveTypeNode(conversionExpr.typeNode, env);
conversionExpr.targetType = targetType;
// Record literals are checked directly against the target type.
BType expType = conversionExpr.expr.getKind() == NodeKind.RECORD_LITERAL_EXPR ? targetType : symTable.noType;
BType sourceType = checkExpr(conversionExpr.expr, env, expType);
if (targetType.tag == TypeTags.FUTURE) {
dlog.error(conversionExpr.pos, DiagnosticCode.TYPE_CAST_NOT_YET_SUPPORTED, targetType);
} else {
BSymbol symbol = symResolver.resolveTypeCastOperator(conversionExpr.expr, sourceType, targetType);
if (symbol == symTable.notFoundSymbol) {
dlog.error(conversionExpr.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CAST, sourceType, targetType);
} else {
conversionExpr.conversionSymbol = (BOperatorSymbol) symbol;
actualType = targetType;
}
}
resultType = types.checkType(conversionExpr, actualType, this.expType);
}
@Override
/**
 * Type-checks a lambda: its type is the enclosed function's symbol type. The current
 * environment is cloned and cached for later closure-variable resolution, and the
 * lambda is registered on the enclosing package.
 */
public void visit(BLangLambdaFunction bLangLambdaFunction) {
bLangLambdaFunction.type = bLangLambdaFunction.function.symbol.type;
// Clone the env now so closure lookups later see the scope as it is at this point.
bLangLambdaFunction.cachedEnv = env.createClone();
env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
resultType = types.checkType(bLangLambdaFunction, bLangLambdaFunction.type, expType);
}
@Override
/**
 * Type-checks an arrow function. The parameter and return types are inferred from the
 * expected (invokable) type; if the expected type is a union it must contain exactly
 * one invokable member.
 */
public void visit(BLangArrowFunction bLangArrowFunction) {
BType expectedType = expType;
if (expectedType.tag == TypeTags.UNION) {
// Accept a union only when it contains exactly one invokable member.
BUnionType unionType = (BUnionType) expectedType;
BType invokableType = unionType.getMemberTypes().stream().filter(type -> type.tag == TypeTags.INVOKABLE)
.collect(Collectors.collectingAndThen(Collectors.toList(), list -> {
if (list.size() != 1) {
return null;
}
return list.get(0);
}
));
if (invokableType != null) {
expectedType = invokableType;
}
}
if (expectedType.tag != TypeTags.INVOKABLE) {
dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_CANNOT_INFER_TYPE_FROM_LHS);
resultType = symTable.semanticError;
return;
}
BInvokableType expectedInvocation = (BInvokableType) expectedType;
populateArrowExprParamTypes(bLangArrowFunction, expectedInvocation.paramTypes);
bLangArrowFunction.expression.type = populateArrowExprReturn(bLangArrowFunction, expectedInvocation.retType);
// NOTE(review): this mutates the expected invokable type's retType in place — confirm
// the expected type instance is not shared.
if (expectedInvocation.retType.tag == TypeTags.NONE) {
expectedInvocation.retType = bLangArrowFunction.expression.type;
}
resultType = bLangArrowFunction.funcType = expectedInvocation;
}
/**
 * Type-checks an XML qualified name. Marks namespace declarations on XML attributes,
 * rejects use of the reserved {@code xmlns} prefix elsewhere, and binds a non-empty
 * prefix to its in-scope namespace symbol.
 */
public void visit(BLangXMLQName bLangXMLQName) {
String prefix = bLangXMLQName.prefix.value;
resultType = types.checkType(bLangXMLQName, symTable.stringType, expType);
// `xmlns` as the local name of an unprefixed attribute declares the default namespace.
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.isEmpty()
&& bLangXMLQName.localname.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
// `xmlns:foo=...` declares a prefixed namespace.
if (env.node.getKind() == NodeKind.XML_ATTRIBUTE && prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
((BLangXMLAttribute) env.node).isNamespaceDeclr = true;
return;
}
// Outside attribute declarations, `xmlns` is a reserved prefix.
if (prefix.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
dlog.error(bLangXMLQName.pos, DiagnosticCode.INVALID_NAMESPACE_PREFIX, prefix);
bLangXMLQName.type = symTable.semanticError;
return;
}
BSymbol xmlnsSymbol = symResolver.lookupSymbol(env, names.fromIdNode(bLangXMLQName.prefix), SymTag.XMLNS);
// No prefix and no default namespace in scope: nothing to bind.
if (prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
return;
}
if (!prefix.isEmpty() && xmlnsSymbol == symTable.notFoundSymbol) {
dlog.error(bLangXMLQName.pos, DiagnosticCode.UNDEFINED_SYMBOL, prefix);
bLangXMLQName.type = symTable.semanticError;
return;
}
bLangXMLQName.namespaceURI = ((BXMLNSSymbol) xmlnsSymbol).namespaceURI;
bLangXMLQName.nsSymbol = (BXMLNSSymbol) xmlnsSymbol;
}
/**
 * Type-checks an XML attribute: both the name and the value must be strings, then
 * the attribute (possibly a namespace declaration) is defined in the symbol table.
 */
public void visit(BLangXMLAttribute bLangXMLAttribute) {
SymbolEnv xmlAttributeEnv = SymbolEnv.getXMLAttributeEnv(bLangXMLAttribute, env);
// Checking the name may mark this attribute as a namespace declaration.
checkExpr(bLangXMLAttribute.name, xmlAttributeEnv, symTable.stringType);
checkExpr(bLangXMLAttribute.value, xmlAttributeEnv, symTable.stringType);
symbolEnter.defineNode(bLangXMLAttribute, env);
}
/**
 * Type-checks an XML element literal. Namespace-declaring attributes are checked
 * first so the namespaces they introduce are in scope for the remaining attributes;
 * then tags are validated and adjacent same-kind children are concatenated.
 */
public void visit(BLangXMLElementLiteral bLangXMLElementLiteral) {
SymbolEnv xmlElementEnv = SymbolEnv.getXMLElementEnv(bLangXMLElementLiteral, env);
// First pass: namespace declarations (xmlns:... attributes) only.
bLangXMLElementLiteral.attributes.forEach(attribute -> {
if (attribute.name.getKind() == NodeKind.XML_QNAME
&& ((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
});
// Second pass: all remaining attributes, with namespaces now in scope.
bLangXMLElementLiteral.attributes.forEach(attribute -> {
if (attribute.name.getKind() != NodeKind.XML_QNAME
|| !((BLangXMLQName) attribute.name).prefix.value.equals(XMLConstants.XMLNS_ATTRIBUTE)) {
checkExpr(attribute, xmlElementEnv, symTable.noType);
}
});
Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(xmlElementEnv);
Name defaultNs = names.fromString(XMLConstants.DEFAULT_NS_PREFIX);
// The default namespace is tracked separately from prefixed ones.
if (namespaces.containsKey(defaultNs)) {
bLangXMLElementLiteral.defaultNsSymbol = namespaces.remove(defaultNs);
}
bLangXMLElementLiteral.namespacesInScope.putAll(namespaces);
validateTags(bLangXMLElementLiteral, xmlElementEnv);
bLangXMLElementLiteral.modifiedChildren =
concatSimilarKindXMLNodes(bLangXMLElementLiteral.children, xmlElementEnv);
resultType = types.checkType(bLangXMLElementLiteral, symTable.xmlType, expType);
}
/**
 * Type-checks an XML text literal: validates each interpolated fragment, then
 * checks the literal as an XML value against the expected type.
 */
public void visit(BLangXMLTextLiteral bLangXMLTextLiteral) {
    checkStringTemplateExprs(bLangXMLTextLiteral.textFragments, false);
    BType checkedType = types.checkType(bLangXMLTextLiteral, symTable.xmlType, expType);
    resultType = checkedType;
}
/**
 * Type-checks an XML comment literal: validates each interpolated fragment, then
 * checks the literal as an XML value against the expected type.
 */
public void visit(BLangXMLCommentLiteral bLangXMLCommentLiteral) {
    checkStringTemplateExprs(bLangXMLCommentLiteral.textFragments, false);
    BType checkedType = types.checkType(bLangXMLCommentLiteral, symTable.xmlType, expType);
    resultType = checkedType;
}
/**
 * Type-checks an XML processing-instruction literal: the target must be a string,
 * the data fragments are validated, and the literal is an XML value.
 */
public void visit(BLangXMLProcInsLiteral bLangXMLProcInsLiteral) {
    checkExpr(bLangXMLProcInsLiteral.target, env, symTable.stringType);
    checkStringTemplateExprs(bLangXMLProcInsLiteral.dataFragments, false);
    BType checkedType = types.checkType(bLangXMLProcInsLiteral, symTable.xmlType, expType);
    resultType = checkedType;
}
/**
 * Type-checks a quoted string inside an XML literal: validates each interpolated
 * fragment and types the whole as a string.
 */
public void visit(BLangXMLQuotedString bLangXMLQuotedString) {
    checkStringTemplateExprs(bLangXMLQuotedString.textFragments, false);
    BType checkedType = types.checkType(bLangXMLQuotedString, symTable.stringType, expType);
    resultType = checkedType;
}
/**
 * Type-checks an XML attribute access expression ({@code x@} or {@code x@[k]}).
 * Without an index the whole attribute map is read (map&lt;string&gt;|() on the
 * RHS; updating the whole map is disallowed). With a string index the result
 * is {@code string} on the LHS and {@code string|()} on the RHS.
 */
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    BType actualType = symTable.semanticError;
    checkExpr(xmlAttributeAccessExpr.expr, env, symTable.xmlType);
    BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr;
    if (indexExpr == null) {
        // No index: accessing the full attribute map.
        if (xmlAttributeAccessExpr.lhsVar) {
            dlog.error(xmlAttributeAccessExpr.pos, DiagnosticCode.XML_ATTRIBUTE_MAP_UPDATE_NOT_ALLOWED);
        } else {
            actualType = BUnionType.create(null, symTable.mapStringType, symTable.nilType);
        }
        resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
        return;
    }
    checkExpr(indexExpr, env, symTable.stringType);
    if (indexExpr.type.tag == TypeTags.STRING) {
        // Reads may see an absent attribute, hence the nil in the RHS type.
        if (xmlAttributeAccessExpr.lhsVar) {
            actualType = symTable.stringType;
        } else {
            actualType = BUnionType.create(null, symTable.stringType, symTable.nilType);
        }
    }
    // Capture all in-scope namespaces so the attribute name can be expanded later.
    xmlAttributeAccessExpr.namespaces.putAll(symResolver.resolveAllNamespaces(env));
    resultType = types.checkType(xmlAttributeAccessExpr, actualType, expType);
}
/**
 * Type-checks a string template literal: validates each interpolation and
 * types the whole template as {@code string}.
 */
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    checkStringTemplateExprs(stringTemplateLiteral.exprs, false);
    resultType = types.checkType(stringTemplateLiteral, symTable.stringType, expType);
}
/**
 * Type-checks an integer range expression {@code a ... b}: both bounds must be
 * ints and the result is an {@code int[]}.
 */
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    checkExpr(intRangeExpression.startExpr, env, symTable.intType);
    checkExpr(intRangeExpression.endExpr, env, symTable.intType);
    resultType = new BArrayType(symTable.intType);
}
/**
 * Type-checks a table query expression. The expected type must already be a
 * table type (the result type cannot be inferred from the query alone); the
 * nested query clauses are then visited for their own checks.
 */
@Override
public void visit(BLangTableQueryExpression tableQueryExpression) {
    BType actualType = symTable.semanticError;
    int expTypeTag = expType.tag;
    if (expTypeTag == TypeTags.TABLE) {
        actualType = expType;
    } else if (expTypeTag != TypeTags.SEMANTIC_ERROR) {
        dlog.error(tableQueryExpression.pos, DiagnosticCode.INCOMPATIBLE_TYPES_CONVERSION, expType);
    }
    BLangTableQuery tableQuery = (BLangTableQuery) tableQueryExpression.getTableQuery();
    tableQuery.accept(this);
    resultType = types.checkType(tableQueryExpression, actualType, expType);
}
/**
 * Visits the parts of a table query: the mandatory streaming input first,
 * followed by the optional join clause.
 */
@Override
public void visit(BLangTableQuery tableQuery) {
    ((BLangStreamingInput) tableQuery.getStreamingInput()).accept(this);
    BLangJoinStreamingInput joinClause = (BLangJoinStreamingInput) tableQuery.getJoinStreamingInput();
    if (joinClause == null) {
        return;
    }
    joinClause.accept(this);
}
/**
 * Visits a select clause: each select expression, then the optional group-by
 * and having clauses.
 */
@Override
public void visit(BLangSelectClause selectClause) {
    for (SelectExpressionNode selectExprNode : selectClause.getSelectExpressions()) {
        ((BLangSelectExpression) selectExprNode).accept(this);
    }
    BLangGroupBy groupByClause = (BLangGroupBy) selectClause.getGroupBy();
    if (groupByClause != null) {
        groupByClause.accept(this);
    }
    BLangHaving havingClause = (BLangHaving) selectClause.getHaving();
    if (havingClause != null) {
        havingClause.accept(this);
    }
}
/**
 * Visits a select expression by delegating directly to the wrapped expression.
 */
@Override
public void visit(BLangSelectExpression selectExpression) {
    ((BLangExpression) selectExpression.getExpression()).accept(this);
}
/**
 * Visits each grouping variable expression of a group-by clause.
 */
@Override
public void visit(BLangGroupBy groupBy) {
    for (Object groupingVar : groupBy.getVariables()) {
        ((BLangExpression) groupingVar).accept(this);
    }
}
/**
 * Visits a having clause by delegating to its predicate expression.
 */
@Override
public void visit(BLangHaving having) {
    ((BLangExpression) having.getExpression()).accept(this);
}
/**
 * Visits every order-by variable in declaration order.
 */
@Override
public void visit(BLangOrderBy orderBy) {
    orderBy.getVariables().forEach(orderByVar -> ((BLangOrderByVariable) orderByVar).accept(this));
}
/**
 * Visits an order-by variable by delegating to its variable reference.
 */
@Override
public void visit(BLangOrderByVariable orderByVariable) {
    ((BLangExpression) orderByVariable.getVariableReference()).accept(this);
}
/**
 * Visits a join streaming input by delegating to its inner streaming input.
 */
@Override
public void visit(BLangJoinStreamingInput joinStreamingInput) {
    ((BLangStreamingInput) joinStreamingInput.getStreamingInput()).accept(this);
}
/**
 * Visits a streaming input by delegating to its stream reference expression.
 */
@Override
public void visit(BLangStreamingInput streamingInput) {
    ((BLangExpression) streamingInput.getStreamReference()).accept(this);
}
/**
 * Type-checks a rest-argument (spread) expression; the expected type is
 * forwarded unchanged to the wrapped expression.
 */
@Override
public void visit(BLangRestArgsExpression bLangRestArgExpression) {
    resultType = checkExpr(bLangRestArgExpression.expr, env, expType);
}
/**
 * Type-checks a named argument; the named-arg node takes the type of its
 * underlying value expression.
 */
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    resultType = checkExpr(bLangNamedArgsExpression.expr, env, expType);
    bLangNamedArgsExpression.type = bLangNamedArgsExpression.expr.type;
}
/**
 * Type-checks a {@code but} match expression. Pattern variables are defined in
 * a fresh block env (names ending with '_' are ignored placeholders), each
 * pattern body is checked against the surrounding expected type, and the
 * overall type is the union of all possible result types.
 */
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    SymbolEnv matchExprEnv = SymbolEnv.createBlockEnv((BLangBlockStmt) TreeBuilder.createBlockNode(), env);
    checkExpr(bLangMatchExpression.expr, matchExprEnv);
    bLangMatchExpression.patternClauses.forEach(pattern -> {
        // Placeholder variables ("_") are not entered into the symbol scope.
        if (!pattern.variable.name.value.endsWith(Names.IGNORE.value)) {
            symbolEnter.defineNode(pattern.variable, matchExprEnv);
        }
        checkExpr(pattern.expr, matchExprEnv, expType);
        pattern.variable.type = symResolver.resolveTypeNode(pattern.variable.typeNode, matchExprEnv);
    });
    LinkedHashSet<BType> matchExprTypes = getMatchExpressionTypes(bLangMatchExpression);
    BType actualType;
    if (matchExprTypes.contains(symTable.semanticError)) {
        actualType = symTable.semanticError;
    } else if (matchExprTypes.size() == 1) {
        // Single possible type: no union wrapper needed.
        actualType = matchExprTypes.toArray(new BType[0])[0];
    } else {
        actualType = BUnionType.create(null, matchExprTypes);
    }
    resultType = types.checkType(bLangMatchExpression, actualType, expType);
}
/**
 * Type-checks a {@code check} expression; shares its logic with
 * {@code checkpanic} via {@link #visitCheckAndCheckPanicExpr}.
 */
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Type-checks a {@code checkpanic} expression; shares its logic with
 * {@code check} via {@link #visitCheckAndCheckPanicExpr}.
 */
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr);
}
/**
 * Common analysis for {@code check}/{@code checkpanic}. The sub-expression is
 * checked against {@code expType|error}; its type must be a union containing
 * at least one error member (what is filtered out) and at least one non-error
 * member (what the expression evaluates to). Worker receives need a second
 * visit, so the first pass only records state and defers the result.
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr) {
    String operatorType = checkedExpr.getKind() == NodeKind.CHECK_EXPR ? "check" : "checkpanic";
    // A null expr type means this is the first time we see this node.
    boolean firstVisit = checkedExpr.expr.type == null;
    BType exprExpType;
    if (expType == symTable.noType) {
        exprExpType = symTable.noType;
    } else {
        // The operand may legitimately produce an error in addition to expType.
        exprExpType = BUnionType.create(null, expType, symTable.errorType);
    }
    BType exprType = checkExpr(checkedExpr.expr, env, exprExpType);
    if (checkedExpr.expr.getKind() == NodeKind.WORKER_RECEIVE) {
        if (firstVisit) {
            // Defer: worker receives get their type on a later pass.
            isTypeChecked = false;
            resultType = expType;
            return;
        } else {
            expType = checkedExpr.type;
            exprType = checkedExpr.expr.type;
        }
    }
    if (exprType.tag != TypeTags.UNION) {
        // A non-union operand is either all-error or has no error at all —
        // both make check/checkpanic meaningless.
        if (types.isAssignable(exprType, symTable.errorType)) {
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType);
        } else if (exprType != symTable.semanticError) {
            dlog.error(checkedExpr.expr.pos,
                    DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        }
        checkedExpr.type = symTable.semanticError;
        return;
    }
    // Split the union members into error-like (true) and non-error (false).
    BUnionType unionType = (BUnionType) exprType;
    Map<Boolean, List<BType>> resultTypeMap = unionType.getMemberTypes().stream()
            .collect(Collectors.groupingBy(memberType -> types.isAssignable(memberType, symTable.errorType)));
    checkedExpr.equivalentErrorTypeList = resultTypeMap.get(true);
    if (checkedExpr.equivalentErrorTypeList == null ||
            checkedExpr.equivalentErrorTypeList.size() == 0) {
        dlog.error(checkedExpr.expr.pos,
                DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_NO_ERROR_TYPE_IN_RHS, operatorType);
        checkedExpr.type = symTable.semanticError;
        return;
    }
    List<BType> nonErrorTypeList = resultTypeMap.get(false);
    if (nonErrorTypeList == null || nonErrorTypeList.size() == 0) {
        dlog.error(checkedExpr.expr.pos,
                DiagnosticCode.CHECKED_EXPR_INVALID_USAGE_ALL_ERROR_TYPES_IN_RHS, operatorType);
        checkedExpr.type = symTable.semanticError;
        return;
    }
    // The expression's value type is the union of the non-error members.
    BType actualType;
    if (nonErrorTypeList.size() == 1) {
        actualType = nonErrorTypeList.get(0);
    } else {
        actualType = BUnionType.create(null, new LinkedHashSet<>(nonErrorTypeList));
    }
    resultType = types.checkType(checkedExpr, actualType, expType);
}
/**
 * Types a service constructor expression with the type of the already-analyzed
 * service declaration it refers to.
 */
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    resultType = serviceConstructorExpr.serviceNode.symbol.type;
}
/**
 * Type-checks an {@code is} type-test expression: resolves the tested type
 * node, checks the operand, and types the expression as boolean.
 */
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    typeTestExpr.typeNode.type = symResolver.resolveTypeNode(typeTestExpr.typeNode, env);
    checkExpr(typeTestExpr.expr, env);
    resultType = types.checkType(typeTestExpr, symTable.booleanType, expType);
}
/**
 * Type-checks an annotation access expression ({@code T.@a}). The subject must
 * be a typedesc; the annotation symbol is resolved by package alias and name,
 * and the result is {@code attachedType|()} (or {@code true|()} for
 * annotations with no attached value type).
 */
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    checkExpr(annotAccessExpr.expr, this.env, symTable.typeDesc);
    BType actualType = symTable.semanticError;
    BSymbol symbol =
            this.symResolver.resolveAnnotation(annotAccessExpr.pos, env,
                    names.fromString(annotAccessExpr.pkgAlias.getValue()),
                    names.fromString (annotAccessExpr.annotationName.getValue()));
    if (symbol == this.symTable.notFoundSymbol) {
        this.dlog.error(annotAccessExpr.pos, DiagnosticCode.UNDEFINED_ANNOTATION,
                annotAccessExpr.annotationName.getValue());
    } else {
        annotAccessExpr.annotationSymbol = (BAnnotationSymbol) symbol;
        // No attached type means the annotation value is simply `true`.
        BType annotType = ((BAnnotationSymbol) symbol).attachedType == null ? symTable.trueType :
                ((BAnnotationSymbol) symbol).attachedType.type;
        actualType = BUnionType.create(null, annotType, symTable.nilType);
    }
    this.resultType = this.types.checkType(annotAccessExpr, actualType, this.expType);
}
/**
 * Returns true when {@code varRef} is a node kind that can legally appear as a
 * binding-pattern variable reference; logs an invalid-record-binding-pattern
 * error and returns false otherwise.
 */
private boolean isValidVariableReference(BLangExpression varRef) {
    switch (varRef.getKind()) {
        case SIMPLE_VARIABLE_REF:
        case RECORD_VARIABLE_REF:
        case TUPLE_VARIABLE_REF:
        case ERROR_VARIABLE_REF:
        case FIELD_BASED_ACCESS_EXPR:
        case INDEX_BASED_ACCESS_EXPR:
        case XML_ATTRIBUTE_ACCESS_EXPR:
            return true;
        default:
            dlog.error(varRef.pos, DiagnosticCode.INVALID_RECORD_BINDING_PATTERN, varRef.type);
            return false;
    }
}
/**
 * Checks an arrow function's body expression against the expected return type,
 * defining the parameters in a dedicated arrow-function env first.
 *
 * @return the type of the body expression
 */
private BType populateArrowExprReturn(BLangArrowFunction bLangArrowFunction, BType expectedRetType) {
    SymbolEnv arrowFunctionEnv = SymbolEnv.createArrowFunctionSymbolEnv(bLangArrowFunction, env);
    bLangArrowFunction.params.forEach(param -> symbolEnter.defineNode(param, arrowFunctionEnv));
    return checkExpr(bLangArrowFunction.expression, arrowFunctionEnv, expectedRetType);
}
/**
 * Attaches the inferred parameter types to an arrow function's parameters.
 * On an arity mismatch every parameter is flagged as erroneous and the
 * overall result becomes a semantic error.
 */
private void populateArrowExprParamTypes(BLangArrowFunction bLangArrowFunction, List<BType> paramTypes) {
    int inferredCount = paramTypes.size();
    int declaredCount = bLangArrowFunction.params.size();
    if (inferredCount != declaredCount) {
        dlog.error(bLangArrowFunction.pos, DiagnosticCode.ARROW_EXPRESSION_MISMATCHED_PARAMETER_LENGTH,
                inferredCount, declaredCount);
        resultType = symTable.semanticError;
        bLangArrowFunction.params.forEach(param -> param.type = symTable.semanticError);
        return;
    }
    for (int index = 0; index < declaredCount; index++) {
        BLangSimpleVariable param = bLangArrowFunction.params.get(index);
        BType inferredType = paramTypes.get(index);
        // Each parameter gets a synthetic type node carrying the inferred kind.
        BLangValueType syntheticTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
        syntheticTypeNode.setTypeKind(inferredType.getKind());
        param.setTypeNode(syntheticTypeNode);
        param.type = inferredType;
    }
}
// NOTE(review): "Sef" looks like a typo for "Self", but renaming would break
// callers elsewhere in this file — keep the name, fix only with a wider refactor.
/**
 * Reports a self-reference error when a variable's initializer refers to the
 * variable being defined (tracked via {@code env.enclVarSym}).
 */
private void checkSefReferences(DiagnosticPos pos, SymbolEnv env, BVarSymbol varSymbol) {
    if (env.enclVarSym == varSymbol) {
        dlog.error(pos, DiagnosticCode.SELF_REFERENCE_VAR, varSymbol.name);
    }
}
/**
 * Builds a list containing {@code count} semantic-error placeholder types,
 * used when an expression list could not be type-checked.
 */
public List<BType> getListWithErrorTypes(int count) {
    List<BType> errorTypes = new ArrayList<>(count);
    for (int remaining = count; remaining > 0; remaining--) {
        errorTypes.add(symTable.semanticError);
    }
    return errorTypes;
}
/**
 * Resolves and type-checks a plain function invocation. Resolution order:
 * (1) an attached function of the enclosing object when no package alias is
 * given, (2) a function-typed variable in the package, (3) an error
 * constructor. Error constructors branch to their own checking path; anything
 * else is validated as a normal call.
 */
private void checkFunctionInvocationExpr(BLangInvocation iExpr) {
    Name funcName = names.fromIdNode(iExpr.name);
    Name pkgAlias = names.fromIdNode(iExpr.pkgAlias);
    BSymbol funcSymbol = symTable.notFoundSymbol;
    if (pkgAlias == Names.EMPTY && env.enclType != null) {
        // Inside an object: try the mangled attached-function name first.
        Name objFuncName = names.fromString(Symbols.getAttachedFuncSymbolName(
                env.enclType.type.tsymbol.name.value, funcName.value));
        funcSymbol = symResolver.resolveStructField(iExpr.pos, env, objFuncName,
                env.enclType.type.tsymbol);
        if (funcSymbol != symTable.notFoundSymbol) {
            // Attached call: the implicit receiver is `self`.
            iExpr.exprSymbol = symResolver.lookupSymbol(env, Names.SELF, SymTag.VARIABLE);
        }
    }
    if (symResolver.resolvePkgSymbol(iExpr.pos, env, pkgAlias) != symTable.notFoundSymbol) {
        if (funcSymbol == symTable.notFoundSymbol) {
            funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.VARIABLE);
        }
        if (funcSymbol == symTable.notFoundSymbol) {
            funcSymbol = symResolver.lookupSymbolInPackage(iExpr.pos, env, pkgAlias, funcName, SymTag.CONSTRUCTOR);
        }
    }
    if ((funcSymbol.tag & SymTag.ERROR) == SymTag.ERROR
        || ((funcSymbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR && funcSymbol.type.tag == TypeTags.ERROR)) {
        // `error(...)` or a user-defined error constructor — separate path.
        iExpr.symbol = funcSymbol;
        iExpr.type = funcSymbol.type;
        checkErrorConstructorInvocation(iExpr);
        return;
    } else if (funcSymbol == symTable.notFoundSymbol || isNotFunction(funcSymbol)) {
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION, funcName);
        // Still check the args so nested errors are reported.
        iExpr.argExprs.forEach(arg -> checkExpr(arg, env));
        resultType = symTable.semanticError;
        return;
    }
    if (isFunctionPointer(funcSymbol)) {
        iExpr.functionPointerInvocation = true;
    }
    // remote/resource functions require their dedicated invocation syntax.
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Returns true when {@code funcSymbol} is neither a function symbol proper
 * nor a function-pointer variable, i.e. it cannot be invoked as a function.
 */
private boolean isNotFunction(BSymbol funcSymbol) {
    return (funcSymbol.tag & SymTag.FUNCTION) != SymTag.FUNCTION && !isFunctionPointer(funcSymbol);
}
/**
 * Returns true when {@code funcSymbol} is a non-native function-typed variable
 * (a function pointer) rather than a function definition.
 */
private boolean isFunctionPointer(BSymbol funcSymbol) {
    if ((funcSymbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION) {
        return false;
    }
    // NOTE(review): comparing the FUNCTION-masked tag against SymTag.VARIABLE
    // presumably works because VARIABLE's bits are a subset of FUNCTION's —
    // verify against the SymTag bit layout before touching this condition.
    return (funcSymbol.tag & SymTag.FUNCTION) == SymTag.VARIABLE
            && funcSymbol.kind == SymbolKind.FUNCTION
            && (funcSymbol.flags & Flags.NATIVE) != Flags.NATIVE;
}
/**
 * Type-checks an error constructor call ({@code error(...)} or an indirect
 * user-defined error constructor). Determines the concrete expected error
 * type, validates the reason argument and the detail arguments against the
 * constructor's detail record/map type, and finally rewrites the argument
 * lists into the form later phases expect.
 */
private void checkErrorConstructorInvocation(BLangInvocation iExpr) {
    if (!types.isAssignable(expType, symTable.errorType)) {
        if (expType != symTable.noType) {
            // Expected type exists but is not an error — cannot infer.
            dlog.error(iExpr.pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType);
            resultType = symTable.semanticError;
            return;
        } else if ((iExpr.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
            // Indirect constructor: its own declared type is the expectation.
            expType = iExpr.type;
        } else {
            expType = symTable.errorType;
        }
    }
    BErrorType expectedError = getExpectedErrorType(iExpr.pos, expType, iExpr.symbol);
    if (expectedError == null) {
        return;
    }
    BErrorType ctorType = (BErrorType) expectedError.ctorSymbol.type;
    if (iExpr.argExprs.isEmpty() && checkNoArgErrorCtorInvocation(ctorType, iExpr.pos)) {
        return;
    }
    // Indirect constructors carry their reason implicitly; a positional
    // reason argument is therefore not allowed.
    if (nonNamedArgsGiven(iExpr) && (iExpr.symbol.tag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
        dlog.error(iExpr.argExprs.get(0).pos, DiagnosticCode.INDIRECT_ERROR_CTOR_REASON_NOT_ALLOWED);
        resultType = symTable.semanticError;
        return;
    }
    boolean reasonArgGiven = checkErrorReasonArg(iExpr, ctorType);
    if (ctorType.detailType.tag == TypeTags.RECORD) {
        BRecordType targetErrorDetailRec = (BRecordType) ctorType.detailType;
        BRecordType recordType = createErrorDetailRecordType(iExpr, reasonArgGiven, targetErrorDetailRec);
        if (resultType == symTable.semanticError) {
            return;
        }
        if (!types.isAssignable(recordType, targetErrorDetailRec)) {
            dlog.error(iExpr.pos, DiagnosticCode.INVALID_ERROR_CONSTRUCTOR_DETAIL, iExpr);
            resultType = symTable.semanticError;
            return;
        }
    } else {
        // Map-typed detail: every named arg must fit the map's constraint.
        BMapType targetErrorDetailMap = (BMapType) ctorType.detailType;
        List<BLangNamedArgsExpression> providedErrorDetails = getProvidedErrorDetails(iExpr, reasonArgGiven);
        if (providedErrorDetails == null) {
            return;
        }
        for (BLangNamedArgsExpression errorDetailArg : providedErrorDetails) {
            checkExpr(errorDetailArg, env, targetErrorDetailMap.constraint);
        }
    }
    setErrorReasonParam(iExpr, reasonArgGiven, ctorType);
    setErrorDetailArgsToNamedArgsList(iExpr);
    resultType = expectedError;
    iExpr.symbol = expectedError.ctorSymbol;
}
/**
 * Determines the concrete error type an error constructor call should
 * produce. For the generic {@code error} constructor with a union expected
 * type, exactly one union member must match — anything else is ambiguous.
 * For an indirect constructor the symbol's own type is used.
 *
 * @return the expected error type, or null when inference fails (an error has
 *         been logged and {@code resultType} set)
 */
private BErrorType getExpectedErrorType(DiagnosticPos pos, BType expType, BSymbol iExprSymbol) {
    if (iExprSymbol == symTable.errorType.tsymbol) {
        if (expType.tag == TypeTags.UNION) {
            List<BType> matchedErrors = ((BUnionType) expType).getMemberTypes().stream()
                    .filter(m -> types.isAssignable(m, iExprSymbol.type))
                    .collect(Collectors.toList());
            if (matchedErrors.size() == 1) {
                return (BErrorType) matchedErrors.get(0);
            } else {
                // Zero or several candidates — ambiguous, cannot infer.
                dlog.error(pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType);
                resultType = symTable.semanticError;
                return null;
            }
        }
        return (BErrorType) expType;
    } else {
        return (BErrorType) iExprSymbol.type;
    }
}
/**
 * Returns true when the invocation has at least one argument that is not a
 * named argument (i.e. a positional or rest argument).
 */
private boolean nonNamedArgsGiven(BLangInvocation iExpr) {
    for (BLangExpression arg : iExpr.argExprs) {
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            return true;
        }
    }
    return false;
}
/**
 * If the first argument of the error constructor is positional, checks it
 * against the constructor's reason type.
 *
 * @return true when a positional reason argument was present and checked
 */
private boolean checkErrorReasonArg(BLangInvocation iExpr, BErrorType ctorType) {
    if (iExpr.argExprs.isEmpty()) {
        return false;
    }
    BLangExpression firstErrorArg = iExpr.argExprs.get(0);
    if (firstErrorArg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
        // Positional first argument is the reason string.
        checkExpr(firstErrorArg, env, ctorType.reasonType, DiagnosticCode.INVALID_ERROR_REASON_TYPE);
        return true;
    }
    return false;
}
/**
 * Validates a zero-argument error constructor call. Allowed only when the
 * reason type is a finite type with exactly one value, so the reason can be
 * filled in implicitly.
 *
 * @return true when an error was reported (caller must stop), false otherwise
 */
private boolean checkNoArgErrorCtorInvocation(BErrorType errorType, DiagnosticPos pos) {
    if (errorType.reasonType.tag != TypeTags.FINITE) {
        dlog.error(pos, DiagnosticCode.INDIRECT_ERROR_CTOR_NOT_ALLOWED_ON_NON_CONST_REASON,
                errorType.ctorSymbol.name);
        resultType = symTable.semanticError;
        return true;
    } else {
        BFiniteType finiteType = (BFiniteType) errorType.reasonType;
        if (finiteType.valueSpace.size() != 1) {
            // Several possible reason values — cannot pick one implicitly.
            if (errorType == symTable.errorType) {
                dlog.error(pos, DiagnosticCode.CANNOT_INFER_ERROR_TYPE, expType.tsymbol.name);
            } else {
                dlog.error(pos, DiagnosticCode.INDIRECT_ERROR_CTOR_NOT_ALLOWED_ON_NON_CONST_REASON,
                        expType.tsymbol.name);
            }
            resultType = symTable.semanticError;
            return true;
        }
    }
    return false;
}
/**
 * Moves every named detail argument of an error constructor from
 * {@code argExprs} to {@code requiredArgs}; any remaining positional argument
 * at this stage is reported as an error.
 */
private void setErrorDetailArgsToNamedArgsList(BLangInvocation iExpr) {
    List<BLangExpression> movedNamedArgs = new ArrayList<>(iExpr.argExprs.size());
    for (BLangExpression argExpr : iExpr.argExprs) {
        checkExpr(argExpr, env, symTable.pureType);
        if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            dlog.error(argExpr.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG);
            resultType = symTable.semanticError;
            continue;
        }
        iExpr.requiredArgs.add(argExpr);
        movedNamedArgs.add(argExpr);
    }
    // Expression nodes use identity equality, so removeAll removes exactly
    // the instances collected above.
    iExpr.argExprs.removeAll(movedNamedArgs);
}
/**
 * Places the error reason into the invocation's required-args list: either the
 * single value of a finite reason type (implicit reason) or the explicitly
 * given first argument, which is removed from {@code argExprs}.
 */
private void setErrorReasonParam(BLangInvocation iExpr, boolean reasonArgGiven, BErrorType ctorType) {
    if (!reasonArgGiven && ctorType.reasonType.getKind() == TypeKind.FINITE) {
        // No explicit reason: use the unique value of the finite reason type.
        BFiniteType finiteType = (BFiniteType) ctorType.reasonType;
        BLangExpression reasonExpr = (BLangExpression) finiteType.valueSpace.toArray()[0];
        iExpr.requiredArgs.add(reasonExpr);
        return;
    }
    iExpr.requiredArgs.add(iExpr.argExprs.get(0));
    iExpr.argExprs.remove(0);
}
/**
 * Create a error detail record using all metadata from {@code targetErrorDetailsType} and put actual error details
 * from {@code iExpr} expression.
 *
 * @param iExpr error constructor invocation
 * @param reasonArgGiven error reason is provided as first argument
 * @param targetErrorDetailsType target error details type to extract metadata such as pkgId from
 * @return error detail record; null when the provided details were invalid (an
 *         error has been logged and {@code resultType} set)
 */
private BRecordType createErrorDetailRecordType(BLangInvocation iExpr, boolean reasonArgGiven,
                                                BRecordType targetErrorDetailsType) {
    List<BLangNamedArgsExpression> namedArgs = getProvidedErrorDetails(iExpr, reasonArgGiven);
    if (namedArgs == null) {
        return null;
    }
    BRecordTypeSymbol recordTypeSymbol = new BRecordTypeSymbol(
            SymTag.RECORD, targetErrorDetailsType.tsymbol.flags, Names.EMPTY, targetErrorDetailsType.tsymbol.pkgID,
            symTable.recordType, null);
    BRecordType recordType = new BRecordType(recordTypeSymbol);
    recordType.sealed = targetErrorDetailsType.sealed;
    recordType.restFieldType = targetErrorDetailsType.restFieldType;
    // One field per explicitly provided named detail argument.
    Set<Name> availableErrorDetailFields = new HashSet<>();
    for (BLangNamedArgsExpression arg : namedArgs) {
        Name fieldName = names.fromIdNode(arg.name);
        BField field = new BField(fieldName, arg.pos, new BVarSymbol(0, fieldName, null, arg.type, null));
        recordType.fields.add(field);
        availableErrorDetailFields.add(fieldName);
    }
    // Carry over the target's non-required fields that were not supplied, so
    // assignability against the target detail record can succeed.
    for (BField field : targetErrorDetailsType.fields) {
        boolean notRequired = (field.symbol.flags & Flags.REQUIRED) != Flags.REQUIRED;
        if (notRequired && !availableErrorDetailFields.contains(field.name)) {
            BField defaultableField = new BField(field.name, iExpr.pos,
                    new BVarSymbol(field.symbol.flags, field.name, null, field.type, null));
            recordType.fields.add(defaultableField);
        }
    }
    return recordType;
}
/**
 * Collects and type-checks the named detail arguments of an error constructor
 * call, skipping the first argument when it was consumed as the reason.
 *
 * @return the named detail arguments, or null when a positional argument was
 *         found among them (an error has been logged and resultType set)
 */
private List<BLangNamedArgsExpression> getProvidedErrorDetails(BLangInvocation iExpr, boolean reasonArgGiven) {
    int firstDetailIndex = reasonArgGiven ? 1 : 0;
    List<BLangNamedArgsExpression> namedDetailArgs = new ArrayList<>();
    for (BLangExpression argExpr : iExpr.argExprs.subList(firstDetailIndex, iExpr.argExprs.size())) {
        checkExpr(argExpr, env);
        if (argExpr.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            dlog.error(argExpr.pos, DiagnosticCode.ERROR_DETAIL_ARG_IS_NOT_NAMED_ARG);
            resultType = symTable.semanticError;
            return null;
        }
        namedDetailArgs.add((BLangNamedArgsExpression) argExpr);
    }
    return namedDetailArgs;
}
/**
 * Type-checks a method call on an object value. Resolves the attached method
 * by its mangled name; when not found, falls back to langlib methods on the
 * object type. Remote/resource methods require their dedicated call syntax.
 */
private void checkObjectFunctionInvocationExpr(BLangInvocation iExpr, BObjectType objectType) {
    // Attached functions are stored under "<objectName>.<funcName>".
    Name funcName =
            names.fromString(Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, iExpr.name.value));
    BSymbol funcSymbol =
            symResolver.resolveObjectMethod(iExpr.pos, env, funcName, (BObjectTypeSymbol) objectType.tsymbol);
    if (funcSymbol == symTable.notFoundSymbol || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        // Not an attached method — a langlib method may still apply.
        if (!checkLangLibMethodInvocationExpr(iExpr, objectType)) {
            dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_FUNCTION_IN_OBJECT, iExpr.name.value, objectType);
            resultType = symTable.semanticError;
            return;
        }
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION_SYNTAX);
    }
    if (Symbols.isFlagOn(funcSymbol.flags, Flags.RESOURCE)) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_RESOURCE_FUNCTION_INVOCATION);
    }
    iExpr.symbol = funcSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Type-checks a remote action invocation ({@code ep->action(...)}). The
 * receiver must be an endpoint-tagged object; the resolved method must carry
 * the REMOTE flag.
 */
private void checkActionInvocationExpr(BLangInvocation iExpr, BType epType) {
    BType actualType = symTable.semanticError;
    if (epType == symTable.semanticError || epType.tag != TypeTags.OBJECT
            || ((BLangVariableReference) iExpr.expr).symbol.tag != SymTag.ENDPOINT) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_ACTION_INVOCATION);
        resultType = actualType;
        return;
    }
    final BVarSymbol epSymbol = (BVarSymbol) ((BLangVariableReference) iExpr.expr).symbol;
    // Remote functions live in the object's method scope under a mangled name.
    Name remoteFuncQName = names
            .fromString(Symbols.getAttachedFuncSymbolName(epType.tsymbol.name.value, iExpr.name.value));
    Name actionName = names.fromIdNode(iExpr.name);
    BSymbol remoteFuncSymbol = symResolver
            .lookupMemberSymbol(iExpr.pos, ((BObjectTypeSymbol) epSymbol.type.tsymbol).methodScope, env,
                    remoteFuncQName, SymTag.FUNCTION);
    if (remoteFuncSymbol == symTable.notFoundSymbol || !Symbols.isFlagOn(remoteFuncSymbol.flags, Flags.REMOTE)) {
        dlog.error(iExpr.pos, DiagnosticCode.UNDEFINED_ACTION, actionName, epSymbol.type.tsymbol.name);
        resultType = actualType;
        return;
    }
    iExpr.symbol = remoteFuncSymbol;
    checkInvocationParamAndReturnType(iExpr);
}
/**
 * Attempts to resolve {@code iExpr} as a langlib method on {@code bType}.
 * On success the receiver expression is prepended as the first argument and
 * the call is checked inside a dedicated invocation env.
 *
 * @return true when a langlib method matched and was checked
 */
private boolean checkLangLibMethodInvocationExpr(BLangInvocation iExpr, BType bType) {
    Name funcName = names.fromString(iExpr.name.value);
    BSymbol funcSymbol = symResolver.lookupLangLibMethod(bType, funcName);
    if (funcSymbol == symTable.notFoundSymbol) {
        return false;
    }
    iExpr.symbol = funcSymbol;
    iExpr.langLibInvocation = true;
    // Temporarily switch to an invocation env; restored after the check.
    SymbolEnv enclEnv = this.env;
    this.env = SymbolEnv.createInvocationEnv(iExpr, this.env);
    // The receiver becomes the langlib function's first parameter.
    iExpr.argExprs.add(0, iExpr.expr);
    checkInvocationParamAndReturnType(iExpr);
    this.env = enclEnv;
    return true;
}
/**
 * Checks the invocation's arguments and then matches the computed return type
 * against the expected type of the surrounding context.
 */
private void checkInvocationParamAndReturnType(BLangInvocation iExpr) {
    resultType = types.checkType(iExpr, checkInvocationParam(iExpr), this.expType);
}
/**
 * Partitions the invocation's arguments into required args, rest args and an
 * optional spread vararg (enforcing named-arg ordering rules), then delegates
 * the actual type checking to {@code checkInvocationArgs}.
 *
 * @return the invocation's return type, or {@code noType} when the callee is
 *         not invokable
 */
private BType checkInvocationParam(BLangInvocation iExpr) {
    if (iExpr.symbol.type.tag != TypeTags.INVOKABLE) {
        dlog.error(iExpr.pos, DiagnosticCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
        return symTable.noType;
    }
    List<BType> paramTypes = ((BInvokableType) iExpr.symbol.type).getParameterTypes();
    Map<String, BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params
            .stream().collect(Collectors.toMap(a -> a.name.getValue(), a -> a));
    // Function pointers (SymTag.VARIABLE) only carry parameter types, no
    // parameter symbols, so the count comes from the type instead.
    int parameterCount;
    if (iExpr.symbol.tag == SymTag.VARIABLE) {
        parameterCount = paramTypes.size();
    } else {
        parameterCount = ((BInvokableSymbol) iExpr.symbol).params.size();
    }
    iExpr.requiredArgs = new ArrayList<>();
    int i = 0;
    BLangExpression vararg = null;
    boolean foundNamedArg = false;
    for (BLangExpression expr : iExpr.argExprs) {
        switch (expr.getKind()) {
            case NAMED_ARGS_EXPR:
                BVarSymbol varSymbol = params.get(((BLangNamedArgsExpression) expr).name.value);
                // BUG FIX: varSymbol is null when the named argument matches no
                // parameter; previously varSymbol.flags was dereferenced and
                // threw an NPE. The undefined-parameter diagnostic is reported
                // later in checkNonRestArgs, so only run the visibility check
                // when the parameter actually exists.
                if (varSymbol != null && !env.enclPkg.packageID.equals(iExpr.symbol.pkgID)
                        && !Symbols.isFlagOn(varSymbol.flags, Flags.PUBLIC)) {
                    dlog.error(expr.pos, DiagnosticCode.NON_PUBLIC_ARG_ACCESSED_WITH_NAMED_ARG,
                            ((BLangNamedArgsExpression) expr).name.value, iExpr.toString());
                }
                foundNamedArg = true;
                if (i < parameterCount) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    dlog.error(expr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
                }
                i++;
                break;
            case REST_ARGS_EXPR:
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
                    continue;
                }
                vararg = expr;
                break;
            default: // positional argument
                if (foundNamedArg) {
                    dlog.error(expr.pos, DiagnosticCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
                }
                if (i < parameterCount) {
                    iExpr.requiredArgs.add(expr);
                } else {
                    iExpr.restArgs.add(expr);
                }
                i++;
                break;
        }
    }
    return checkInvocationArgs(iExpr, paramTypes, vararg);
}
/**
 * Type-checks an invocation's partitioned arguments against the callee's
 * parameters (non-rest first, then rest/vararg) and computes the return type,
 * wrapped in a future for async ({@code start}) invocations.
 */
private BType checkInvocationArgs(BLangInvocation iExpr, List<BType> paramTypes, BLangExpression vararg) {
    BType actualType = symTable.semanticError;
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) iExpr.symbol;
    List<BVarSymbol> nonRestParams = new ArrayList<>(invokableSymbol.params);
    checkNonRestArgs(nonRestParams, iExpr, paramTypes);
    // Rest args are only valid when the callee actually declares a rest param.
    if (invokableSymbol.restParam == null && (vararg != null || !iExpr.restArgs.isEmpty())) {
        dlog.error(iExpr.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
        return actualType;
    }
    checkRestArgs(iExpr.restArgs, vararg, invokableSymbol.restParam);
    BType retType = typeParamAnalyzer.getReturnTypeParams(env, invokableSymbol.type.getReturnType());
    if (iExpr.async) {
        return this.generateFutureType(invokableSymbol, retType);
    } else {
        return retType;
    }
}
/**
 * Wraps a return type in a future type for an async invocation; futures for
 * worker lambdas are tagged so worker starts can be distinguished later.
 */
private BFutureType generateFutureType(BInvokableSymbol invocableSymbol, BType retType) {
    return new BFutureType(TypeTags.FUTURE, retType, null,
            invocableSymbol.name.value.startsWith(WORKER_LAMBDA_VAR_PREFIX));
}
/**
 * Type-checks the non-rest arguments of an invocation. Positional arguments
 * are matched to parameters in order; named arguments are matched by name
 * (with duplicate and undefined-name diagnostics). Function-pointer callees
 * only have parameter types, so they take a simpler positional-only path.
 * Any required parameter left without a value is reported at the end.
 */
private void checkNonRestArgs(List<BVarSymbol> nonRestParams, BLangInvocation iExpr, List<BType> paramTypes) {
    List<BLangExpression> nonRestArgs = iExpr.requiredArgs;
    List<BVarSymbol> requiredParams = nonRestParams.stream()
            .filter(param -> !param.defaultableParam)
            .collect(Collectors.toList());
    if (nonRestArgs.size() < requiredParams.size()) {
        dlog.error(iExpr.pos, DiagnosticCode.NOT_ENOUGH_ARGS_FUNC_CALL, iExpr.name.value);
    }
    List<BVarSymbol> valueProvidedParams = new ArrayList<>();
    for (int i = 0; i < nonRestArgs.size(); i++) {
        BLangExpression arg = nonRestArgs.get(i);
        BType expectedType = paramTypes.get(i);
        // A langlib receiver arg was already type-checked; only re-validate it.
        if (i == 0 && arg.typeChecked && iExpr.expr != null && iExpr.expr == arg) {
            types.checkType(arg.pos, arg.type, expectedType, DiagnosticCode.INCOMPATIBLE_TYPES);
            types.setImplicitCastExpr(arg, arg.type, expectedType);
        }
        // Function pointers: purely positional, matched against paramTypes.
        if (iExpr.symbol.tag == SymTag.VARIABLE) {
            if (i < paramTypes.size()) {
                checkExpr(arg, this.env, paramTypes.get(i));
                typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, expectedType);
                continue;
            }
            dlog.error(arg.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            return;
        }
        // Positional argument: consume the next parameter in declaration order.
        if (arg.getKind() != NodeKind.NAMED_ARGS_EXPR) {
            if (i < nonRestParams.size()) {
                BVarSymbol param = nonRestParams.get(i);
                checkExpr(arg, this.env, param.type);
                typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, expectedType);
                valueProvidedParams.add(param);
                requiredParams.remove(param);
                continue;
            }
            dlog.error(arg.pos, DiagnosticCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
            return;
        }
        // Named argument: match by name, guarding against duplicates.
        if (arg.getKind() == NodeKind.NAMED_ARGS_EXPR) {
            BLangIdentifier argName = ((NamedArgNode) arg).getName();
            BVarSymbol varSym = nonRestParams.stream()
                    .filter(param -> param.getName().value.equals(argName.value))
                    .findAny()
                    .orElse(null);
            if (varSym == null) {
                dlog.error(arg.pos, DiagnosticCode.UNDEFINED_PARAMETER, argName);
                break;
            }
            requiredParams.remove(varSym);
            if (valueProvidedParams.contains(varSym)) {
                dlog.error(arg.pos, DiagnosticCode.DUPLICATE_NAMED_ARGS, varSym.name.value);
                continue;
            }
            checkExpr(arg, this.env, varSym.type);
            typeParamAnalyzer.checkForTypeParamsInArg(arg.type, this.env, varSym.type);
            valueProvidedParams.add(varSym);
        }
    }
    for (BVarSymbol reqParam : requiredParams) {
        dlog.error(iExpr.pos, DiagnosticCode.MISSING_REQUIRED_PARAMETER, reqParam.name, iExpr.name.value);
    }
}
/**
 * Type-checks the rest portion of an invocation: either a single spread
 * vararg (checked against the whole rest-param type) or individual rest
 * arguments (each checked against the rest array's element type). Mixing
 * both is an error.
 */
private void checkRestArgs(List<BLangExpression> restArgExprs, BLangExpression vararg, BVarSymbol restParam) {
    if (vararg != null) {
        if (!restArgExprs.isEmpty()) {
            dlog.error(vararg.pos, DiagnosticCode.INVALID_REST_ARGS);
            return;
        }
        checkExpr(vararg, this.env, restParam.type);
        restArgExprs.add(vararg);
        return;
    }
    for (BLangExpression restArg : restArgExprs) {
        // Cast stays inside the loop: restParam may not be an array type when
        // the list is empty, and the original only cast per-iteration.
        BType elementType = ((BArrayType) restParam.type).eType;
        checkExpr(restArg, this.env, elementType);
        typeParamAnalyzer.checkForTypeParamsInArg(restArg.type, env, elementType);
    }
}
/**
 * Type-checks one key-value pair of a record/map/json literal: derives the
 * expected value type from the containing type's shape, then checks the value
 * expression against it. JSON literals have extra handling for implicit
 * conversions and return early.
 */
private void checkRecLiteralKeyValue(BLangRecordKeyValue keyValuePair, BType recType) {
    BType fieldType = symTable.semanticError;
    BLangExpression valueExpr = keyValuePair.valueExpr;
    switch (recType.tag) {
        case TypeTags.RECORD:
            fieldType = checkRecordLiteralKeyExpr(keyValuePair.key, (BRecordType) recType);
            break;
        case TypeTags.MAP:
            fieldType = checkValidJsonOrMapLiteralKeyExpr(keyValuePair.key) ? ((BMapType) recType).constraint :
                    symTable.semanticError;
            break;
        case TypeTags.JSON:
            fieldType = checkValidJsonOrMapLiteralKeyExpr(keyValuePair.key) ? symTable.jsonType :
                    symTable.semanticError;
            checkExpr(valueExpr, this.env, fieldType);
            // A value carrying an implicit conversion is validated against json
            // via its converted type, while keeping the original value type.
            if (valueExpr.impConversionExpr == null) {
                types.checkTypes(valueExpr, Lists.of(valueExpr.type), Lists.of(symTable.jsonType));
            } else {
                BType valueType = valueExpr.type;
                types.checkType(valueExpr, valueExpr.impConversionExpr.type, symTable.jsonType);
                valueExpr.type = valueType;
            }
            resultType = valueExpr.type;
            return;
        // BUG FIX: the former TypeTags.ERROR case called checkExpr and then
        // fell through to the common checkExpr below, type-checking the value
        // twice and potentially emitting duplicate diagnostics. Error types
        // now take the common path (fieldType stays semanticError), which is
        // the same single check the duplicated pair effectively performed.
    }
    checkExpr(valueExpr, this.env, fieldType);
}
/**
 * Determines the expected value type for a record-literal key. Computed keys
 * can map to any field, so the result is the union of all field types (plus
 * the rest-field type for open records). Identifier and string-literal keys
 * resolve to the named field's type, falling back to the rest-field type for
 * open records; unknown fields on sealed records are errors.
 */
private BType checkRecordLiteralKeyExpr(BLangRecordKey key, BRecordType recordType) {
    Name fieldName;
    BLangExpression keyExpr = key.expr;
    if (key.computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        if (keyExpr.type == symTable.semanticError) {
            return symTable.semanticError;
        }
        // A computed key could address any field: union all possibilities.
        LinkedHashSet<BType> fieldTypes = recordType.fields.stream()
                .map(field -> field.type)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        if (recordType.restFieldType.tag != TypeTags.NONE) {
            fieldTypes.add(recordType.restFieldType);
        }
        return BUnionType.create(null, fieldTypes);
    } else if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef varRef = (BLangSimpleVarRef) keyExpr;
        fieldName = names.fromIdNode(varRef.variableName);
    } else if (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING) {
        fieldName = names.fromString((String) ((BLangLiteral) keyExpr).value);
    } else {
        dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
        return symTable.semanticError;
    }
    BSymbol fieldSymbol = symResolver.resolveStructField(keyExpr.pos, this.env,
            fieldName, recordType.tsymbol);
    if (fieldSymbol == symTable.notFoundSymbol) {
        if (recordType.sealed) {
            dlog.error(keyExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                    recordType.tsymbol.type.getKind().typeName(), recordType.tsymbol);
            return symTable.semanticError;
        }
        // Open record: unknown keys fall under the rest-field type.
        return recordType.restFieldType;
    }
    return fieldSymbol.type;
}
/**
 * Validates a json/map literal key: computed keys must type-check as string;
 * otherwise only bare identifiers and string literals are accepted.
 */
private boolean checkValidJsonOrMapLiteralKeyExpr(BLangRecordKey key) {
    BLangExpression keyExpr = key.expr;
    if (key.computedKey) {
        checkExpr(keyExpr, this.env, symTable.stringType);
        return keyExpr.type != symTable.semanticError;
    }
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        return true;
    }
    if (keyExpr.getKind() == NodeKind.LITERAL && ((BLangLiteral) keyExpr).type.tag == TypeTags.STRING) {
        return true;
    }
    dlog.error(keyExpr.pos, DiagnosticCode.INVALID_RECORD_LITERAL_KEY);
    return false;
}
/**
 * Assigns the type of a record-literal key expression. Identifier keys are implicitly strings;
 * any other key expression must type-check against {@code string}.
 *
 * @param keyExpr the key expression
 * @return the key expression's type
 */
private BType checkRecLiteralKeyExpr(BLangExpression keyExpr) {
    if (keyExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        // An identifier key is treated as a string literal; no further checking needed.
        keyExpr.type = symTable.stringType;
        return keyExpr.type;
    }
    return checkExpr(keyExpr, this.env, symTable.stringType);
}
/**
 * Validates the index expression of an index-based access on an object. Only (string) literal
 * indexes are permitted; anything else is flagged as an invalid index expression.
 *
 * @param indexExpr the index expression
 * @return the checked type, or {@code symTable.semanticError} for a non-literal index
 */
private BType checkIndexExprForObjectFieldAccess(BLangExpression indexExpr) {
    NodeKind kind = indexExpr.getKind();
    if (kind == NodeKind.LITERAL || kind == NodeKind.NUMERIC_LITERAL) {
        return checkExpr(indexExpr, this.env, symTable.stringType);
    }
    // Object member access requires a compile-time-known literal index.
    indexExpr.type = symTable.semanticError;
    dlog.error(indexExpr.pos, DiagnosticCode.INVALID_INDEX_EXPR_STRUCT_FIELD_ACCESS);
    return indexExpr.type;
}
/**
 * Lifts a type with nil for index-based accesses that may miss; a type that is already nillable
 * is returned unchanged.
 *
 * @param actualType the member access result type
 * @return {@code actualType} or {@code actualType|()} when it was not already nillable
 */
private BType addNilForNillableIndexBasedAccess(BType actualType) {
    return actualType.isNullable() ? actualType
            : BUnionType.create(null, actualType, symTable.nilType);
}
/**
 * Resolves {@code fieldName} as a required (non-optional) field of {@code recordType}.
 * On success the resolved symbol is attached to the reference node.
 *
 * @return the field type, or {@code symTable.semanticError} when the field is absent or optional
 */
private BType checkRecordRequiredFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean isRequiredField = fieldSymbol != symTable.notFoundSymbol && !Symbols.isOptional(fieldSymbol);
    if (!isRequiredField) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves {@code fieldName} as an optional field of {@code recordType}.
 * On success the resolved symbol is attached to the reference node.
 *
 * @return the field type, or {@code symTable.semanticError} when the field is absent or required
 */
private BType checkRecordOptionalFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    boolean isOptionalField = fieldSymbol != symTable.notFoundSymbol && Symbols.isOptional(fieldSymbol);
    if (!isOptionalField) {
        return symTable.semanticError;
    }
    varReferExpr.symbol = fieldSymbol;
    return fieldSymbol.type;
}
/**
 * Resolves {@code fieldName} through the rest field of an open record: the name must NOT match
 * any declared field, and the record must not be sealed.
 *
 * @return the rest field type, or {@code symTable.semanticError} otherwise
 */
private BType checkRecordRestFieldAccess(BLangVariableReference varReferExpr, Name fieldName,
BRecordType recordType) {
    BSymbol fieldSymbol = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, recordType.tsymbol);
    // A declared field takes precedence over the rest field, and sealed records have no rest field.
    if (fieldSymbol != symTable.notFoundSymbol || recordType.sealed) {
        return symTable.semanticError;
    }
    return recordType.restFieldType;
}
/**
 * Resolves {@code fieldName} on an object type: first as a plain field, then as an attached
 * function (using the mangled attached-function symbol name). Logs an undefined-field error when
 * neither exists.
 *
 * @return the field/function type, or {@code symTable.semanticError} when unresolved
 */
private BType checkObjectFieldAccess(BLangVariableReference varReferExpr, Name fieldName, BObjectType objectType) {
    BSymbol resolved = symResolver.resolveStructField(varReferExpr.pos, this.env, fieldName, objectType.tsymbol);
    if (resolved == symTable.notFoundSymbol) {
        // Not a field: fall back to an attached function with the mangled name.
        Name attachedFuncName = names.fromString(
                Symbols.getAttachedFuncSymbolName(objectType.tsymbol.name.value, fieldName.value));
        resolved = symResolver.resolveObjectField(varReferExpr.pos, env, attachedFuncName, objectType.tsymbol);
        if (resolved == symTable.notFoundSymbol) {
            dlog.error(varReferExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
                    objectType.tsymbol.type.getKind().typeName(), objectType.tsymbol);
            return symTable.semanticError;
        }
    }
    varReferExpr.symbol = resolved;
    return resolved.type;
}
/**
 * Returns the member type of a tuple at a constant index. An index beyond the fixed members is
 * valid only when the tuple has a rest type; a negative index is always invalid.
 *
 * @param tupleType  the tuple type (must be a {@code BTupleType})
 * @param indexValue the constant index
 * @return the member type, or {@code symTable.semanticError} for an out-of-range index
 */
private BType checkTupleFieldType(BType tupleType, int indexValue) {
    BTupleType tuple = (BTupleType) tupleType;
    int fixedMemberCount = tuple.tupleTypes.size();
    if (indexValue >= fixedMemberCount) {
        // Past the fixed members: only the rest type (if any) can match.
        return tuple.restType != null ? tuple.restType : symTable.semanticError;
    }
    if (indexValue < 0) {
        return symTable.semanticError;
    }
    return tuple.tupleTypes.get(indexValue);
}
/**
 * Validates that the start and end tag names of an XML element literal agree.
 *
 * Both tag names are type-checked as strings. When both are QNames they must be equal; when
 * neither is a QName (both are interpolated expressions) the match is deferred to runtime.
 * A mix of QName and expression, or unequal QNames, is reported as a tag mismatch.
 *
 * Fix: the original compared {@code startTagName.getKind()} twice in both conditions, so the
 * end tag's kind was never inspected — the second operand now checks {@code endTagName}.
 *
 * @param bLangXMLElementLiteral the XML element literal node
 * @param xmlElementEnv          the environment the tag expressions are checked in
 */
private void validateTags(BLangXMLElementLiteral bLangXMLElementLiteral, SymbolEnv xmlElementEnv) {
    BLangExpression startTagName = bLangXMLElementLiteral.startTagName;
    checkExpr(startTagName, xmlElementEnv, symTable.stringType);

    BLangExpression endTagName = bLangXMLElementLiteral.endTagName;
    if (endTagName == null) {
        // Self-closing element: no end tag to match.
        return;
    }
    checkExpr(endTagName, xmlElementEnv, symTable.stringType);

    // Both tags are QNames: they must be identical.
    if (startTagName.getKind() == NodeKind.XML_QNAME && endTagName.getKind() == NodeKind.XML_QNAME
            && startTagName.equals(endTagName)) {
        return;
    }

    // Neither tag is a QName: both are expressions, so matching happens at runtime.
    if (startTagName.getKind() != NodeKind.XML_QNAME && endTagName.getKind() != NodeKind.XML_QNAME) {
        return;
    }

    dlog.error(startTagName.pos, DiagnosticCode.XML_TAGS_MISMATCH);
}
/**
 * Type-checks the interpolated expressions of a string template. Only simple basic types
 * (int, float, decimal, string, boolean) are permitted; when {@code allowXml} is set, xml is
 * additionally accepted. Violations are reported as incompatible-type errors.
 *
 * @param exprs    the interpolated expressions
 * @param allowXml whether xml-typed interpolations are allowed
 */
private void checkStringTemplateExprs(List<BLangExpression> exprs, boolean allowXml) {
    for (BLangExpression expr : exprs) {
        checkExpr(expr, env);

        BType exprType = expr.type;
        if (exprType == symTable.semanticError) {
            continue;
        }
        // Tags below JSON are the simple basic types, which are always allowed.
        if (exprType.tag < TypeTags.JSON) {
            continue;
        }

        if (!allowXml) {
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                    BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
                            symTable.stringType, symTable.booleanType), exprType);
            continue;
        }

        // xml is the only non-simple type accepted here.
        if (exprType.tag != TypeTags.XML) {
            dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
                    BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
                            symTable.stringType, symTable.booleanType, symTable.xmlType),
                    exprType);
        }
    }
}
/**
 * Concatenate the consecutive text type nodes, and get the reduced set of children.
 *
 * @param exprs Child nodes
 * @param xmlElementEnv The symbol environment in which the child expressions are type-checked
 * @return Reduced set of children
 */
private List<BLangExpression> concatSimilarKindXMLNodes(List<BLangExpression> exprs, SymbolEnv xmlElementEnv) {
List<BLangExpression> newChildren = new ArrayList<>();
// Buffer holding a run of consecutive simple-typed expressions awaiting merging into one text node.
List<BLangExpression> tempConcatExpressions = new ArrayList<>();
for (BLangExpression expr : exprs) {
BType exprType = checkExpr(expr, xmlElementEnv);
if (exprType == symTable.xmlType) {
// An xml child ends the current text run: flush the buffer as a single text literal first.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
tempConcatExpressions = new ArrayList<>();
}
newChildren.add(expr);
continue;
}
BType type = expr.type;
// Tags >= JSON are non-simple types; they cannot be interpolated as xml text content.
if (type.tag >= TypeTags.JSON) {
// Skip the diagnostic if checkExpr already reported an error for this expression.
if (type != symTable.semanticError) {
dlog.error(expr.pos, DiagnosticCode.INCOMPATIBLE_TYPES,
BUnionType.create(null, symTable.intType, symTable.floatType, symTable.decimalType,
symTable.stringType, symTable.booleanType, symTable.xmlType),
type);
}
continue;
}
// Simple-typed expression: accumulate into the current text run.
tempConcatExpressions.add(expr);
}
// Flush a trailing text run, if any.
if (!tempConcatExpressions.isEmpty()) {
newChildren.add(getXMLTextLiteral(tempConcatExpressions));
}
return newChildren;
}
/**
 * Synthesizes a binary {@code +} expression over the given operands, attaching the resolved
 * operator symbol when one was found, and validates that the result is assignable to string.
 *
 * @param lExpr    left operand
 * @param rExpr    right operand
 * @param opSymbol the resolved {@code +} operator symbol, or {@code symTable.notFoundSymbol}
 * @return the synthesized binary expression node
 */
private BLangExpression getBinaryAddExpr(BLangExpression lExpr, BLangExpression rExpr, BSymbol opSymbol) {
    BLangBinaryExpr addExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    addExpr.lhsExpr = lExpr;
    addExpr.rhsExpr = rExpr;
    addExpr.pos = rExpr.pos;
    addExpr.opKind = OperatorKind.ADD;

    if (opSymbol == symTable.notFoundSymbol) {
        // No matching '+' operator exists for the operand types.
        addExpr.type = symTable.semanticError;
    } else {
        addExpr.type = opSymbol.type.getReturnType();
        addExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }

    // The synthesized concatenation must ultimately produce a string.
    types.checkType(addExpr, addExpr.type, symTable.stringType);
    return addExpr;
}
/**
 * Wraps a run of expressions into a single xml text literal node, anchored at the position of
 * the first fragment.
 *
 * @param exprs the text fragments (must be non-empty)
 * @return an xml-typed text literal node containing the fragments
 */
private BLangExpression getXMLTextLiteral(List<BLangExpression> exprs) {
    BLangXMLTextLiteral textLiteral = (BLangXMLTextLiteral) TreeBuilder.createXMLTextLiteralNode();
    textLiteral.textFragments = exprs;
    textLiteral.pos = exprs.get(0).pos;
    textLiteral.type = symTable.xmlType;
    return textLiteral;
}
/**
 * Type-checks the container expression of a field access with no expected type imposed, so its
 * own type is inferred, and returns that type.
 *
 * @param expr the container expression
 * @return the inferred type of {@code expr}
 */
private BType getTypeOfExprInFieldAccess(BLangExpression expr) {
    // noType means "no expected type": let the expression determine its own type.
    checkExpr(expr, this.env, symTable.noType);
    return expr.type;
}
/**
 * Computes the final (nil/error-lifted) type of an access expression: nil is added when the
 * access may return nil, and error is added for error-safe navigation over an error-bearing
 * parent. The pre-lifting type is recorded as {@code originalType} for later desugaring.
 *
 * @param accessExpr the access expression
 * @param actualType the raw member access result type
 * @return the lifted type, collapsed to a single type when the union has one member
 */
private BType getAccessExprFinalType(BLangAccessExpression accessExpr, BType actualType) {
    // Remember the un-lifted type; later phases rely on it.
    accessExpr.originalType = actualType;

    BUnionType liftedType = BUnionType.create(null, actualType);
    if (returnsNull(accessExpr)) {
        liftedType.add(symTable.nilType);
    }

    BType parentType = accessExpr.expr.type;
    boolean parentCanBeError = parentType.tag == TypeTags.SEMANTIC_ERROR
            || (parentType.tag == TypeTags.UNION
                    && ((BUnionType) parentType).getMemberTypes().contains(symTable.errorType));
    if (accessExpr.errorSafeNavigation && parentCanBeError) {
        liftedType.add(symTable.errorType);
    }

    if (liftedType.getMemberTypes().size() == 1) {
        return liftedType.getMemberTypes().iterator().next();
    }
    return liftedType;
}
/**
 * Decides whether an access expression may evaluate to nil: either the parent type itself is
 * nillable (json excluded — it is handled by lax typing), or the access is a member access on a
 * map whose constraint does not already absorb nil (i.e. is neither {@code any} nor json).
 *
 * @param accessExpr the access expression under consideration
 * @return {@code true} when the access can produce nil
 */
private boolean returnsNull(BLangAccessExpression accessExpr) {
    BType parentType = accessExpr.expr.type;
    // json's nullability is handled through lax typing, so exclude it here.
    if (parentType.isNullable() && parentType.tag != TypeTags.JSON) {
        return true;
    }
    if (parentType.tag != TypeTags.MAP) {
        return false;
    }
    if (accessExpr.getKind() != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    // A map lookup may miss, unless the constraint (any/json) already covers nil.
    BType constraintType = ((BMapType) parentType).constraint;
    return constraintType != null && constraintType.tag != TypeTags.ANY && constraintType.tag != TypeTags.JSON;
}
/**
 * Resolves a field access on an object or a union of objects. For a union, the field must be
 * accessible on every member; the result is the (deduplicated) union of member field types.
 *
 * @return the field type, or {@code symTable.semanticError} when any member lacks the field
 */
private BType checkObjectFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.OBJECT) {
        return checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) varRefType);
    }

    // Union of objects: every member must expose the field.
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkObjectFieldAccess(fieldAccessExpr, fieldName, (BObjectType) memberType);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1 ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a (required) field access on a record or a union of records. For a union, the field
 * must be a required field on every member; the result is the union of member field types.
 *
 * @return the field type, or {@code symTable.semanticError} when any member lacks the field
 */
private BType checkRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        return checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }

    // Union of records: every member must have the field as a required field.
    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            return fieldType;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1 ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves a field access appearing on the left-hand side of an assignment, where both required
 * and optional record fields may be written to. For a union, the field must be writable on every
 * member; the result is the union of member field types.
 *
 * @return the field type, or {@code symTable.semanticError} when any member lacks the field
 */
private BType checkRecordFieldAccessLhsExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
    if (varRefType.tag == TypeTags.RECORD) {
        // On the LHS, a required field is tried first, then an optional field.
        BType requiredFieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName,
                (BRecordType) varRefType);
        if (requiredFieldType != symTable.semanticError) {
            return requiredFieldType;
        }
        return checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
    }

    LinkedHashSet<BType> memberFieldTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) varRefType).getMemberTypes()) {
        BType fieldType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, memberType, fieldName);
        if (fieldType == symTable.semanticError) {
            return symTable.semanticError;
        }
        memberFieldTypes.add(fieldType);
    }
    return memberFieldTypes.size() == 1 ? memberFieldTypes.iterator().next()
            : BUnionType.create(null, memberFieldTypes);
}
/**
 * Resolves an optional field access ({@code expr?.field}) on a record or a union of records.
 * For a single record, a required field's type is returned as-is, while an optional field's type
 * is lifted with nil. For a union, members that lack the field are skipped (rather than failing
 * the whole access); the access fails only when no member has the field.
 */
private BType checkOptionalRecordFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
if (varRefType.tag == TypeTags.RECORD) {
// Required field: always present, so no nil-lifting is needed.
BType fieldType = checkRecordRequiredFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType != symTable.semanticError) {
return fieldType;
}
// Optional field: it may be absent, so the result type includes nil.
fieldType = checkRecordOptionalFieldAccess(fieldAccessExpr, fieldName, (BRecordType) varRefType);
if (fieldType == symTable.semanticError) {
return fieldType;
}
return BUnionType.create(null, fieldType, symTable.nilType);
}
Set<BType> memberTypes = ((BUnionType) varRefType).getMemberTypes();
LinkedHashSet<BType> fieldTypeMembers = new LinkedHashSet<>();
for (BType memType : memberTypes) {
BType individualFieldType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, memType, fieldName);
// Unlike plain field access, members without the field are tolerated here.
if (individualFieldType == symTable.semanticError) {
continue;
}
fieldTypeMembers.add(individualFieldType);
}
if (fieldTypeMembers.isEmpty()) {
return symTable.semanticError;
}
if (fieldTypeMembers.size() == 1) {
return fieldTypeMembers.iterator().next();
}
return BUnionType.create(null, fieldTypeMembers);
}
/**
 * Central dispatcher for plain field access ({@code expr.field}). Delegates by the static type of
 * the container: objects, records (with LHS-specific handling of optional fields), lax types
 * (json / map of json), xml, and stream/table with a record constraint. Reports an
 * operation-not-supported error for any other type.
 */
private BType checkFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType, Name fieldName) {
BType actualType = symTable.semanticError;
if (types.isSubTypeOfBaseType(varRefType, TypeTags.OBJECT)) {
actualType = checkObjectFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
} else if (types.isSubTypeOfBaseType(varRefType, TypeTags.RECORD)) {
// First try as a required field, which is valid in any position.
actualType = checkRecordFieldAccessExpr(fieldAccessExpr, varRefType, fieldName);
if (actualType != symTable.semanticError) {
fieldAccessExpr.originalType = actualType;
return actualType;
}
// Not a required field: only an LHS (assignment target) may use field access for it.
if (!fieldAccessExpr.lhsVar) {
dlog.error(fieldAccessExpr.pos,
DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_NON_REQUIRED_FIELD, varRefType,
fieldName);
return actualType;
}
// LHS: optional fields are also assignable.
actualType = checkRecordFieldAccessLhsExpr(fieldAccessExpr, varRefType, fieldName);
fieldAccessExpr.originalType = actualType;
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
varRefType.tsymbol.type.getKind().typeName(), varRefType);
}
} else if (types.isLax(varRefType)) {
// Lax (json-like) access may fail at runtime, so the result includes error; not assignable.
if (fieldAccessExpr.lhsVar) {
dlog.error(fieldAccessExpr.pos,
DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS_FOR_ASSIGNMENT, varRefType);
return symTable.semanticError;
}
BType laxFieldAccessType = getLaxFieldAccessType(varRefType);
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained access where the inner access was lax: propagate lax semantics.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
actualType = BUnionType.create(null, laxFieldAccessType, symTable.errorType);
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
} else if (varRefType.tag == TypeTags.XML) {
if (fieldAccessExpr.lhsVar) {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
}
actualType = symTable.xmlType;
fieldAccessExpr.originalType = actualType;
} else if (varRefType.tag == TypeTags.STREAM || varRefType.tag == TypeTags.TABLE) {
// Stream/table field access resolves against the record constraint's fields.
BType constraint = (fieldAccessExpr.expr.type.tag == TypeTags.STREAM ?
((BStreamType) fieldAccessExpr.expr.type).constraint :
((BTableType) fieldAccessExpr.expr.type).constraint);
if (constraint.tag != TypeTags.RECORD) {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType);
return symTable.semanticError;
}
Optional<BField> fieldType =
((BRecordType) constraint).fields.stream().filter(field -> field.name.value.equals(fieldName.value))
.findFirst();
if (fieldType.isPresent()) {
actualType = fieldType.get().type;
} else {
dlog.error(fieldAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD_WITH_TYPE, fieldName,
varRefType.tsymbol.type.getKind().typeName(), varRefType);
return symTable.semanticError;
}
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
// Avoid cascading diagnostics when the container expression already failed.
dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_FIELD_ACCESS, varRefType);
}
return actualType;
}
/**
 * Checks whether a previous pass recorded a lax (json-like) original type for the given
 * field-based access node.
 */
private boolean hasLaxOriginalType(BLangFieldBasedAccess fieldBasedAccess) {
    BType originalType = fieldBasedAccess.originalType;
    return originalType != null && types.isLax(originalType);
}
/**
 * Computes the result type of a lax field access: json yields json, a map yields its constraint,
 * and a union yields the (deduplicated) combination of its members' lax access types. Any other
 * type is not lax-accessible.
 *
 * @param exprType the container's static type
 * @return the lax access result type, or {@code symTable.semanticError}
 */
private BType getLaxFieldAccessType(BType exprType) {
    switch (exprType.tag) {
        case TypeTags.JSON:
            return symTable.jsonType;
        case TypeTags.MAP:
            return ((BMapType) exprType).constraint;
        case TypeTags.UNION:
            // Combine the lax access type of each member, collapsing to one type when possible.
            LinkedHashSet<BType> accessTypes = new LinkedHashSet<>();
            for (BType memberType : ((BUnionType) exprType).getMemberTypes()) {
                accessTypes.add(getLaxFieldAccessType(memberType));
            }
            if (accessTypes.size() == 1) {
                return accessTypes.iterator().next();
            }
            return BUnionType.create(null, accessTypes);
        default:
            return symTable.semanticError;
    }
}
/**
 * Dispatcher for optional field access ({@code expr?.field}). Strips nil from a nillable union
 * container first (remembering that the result must then be re-lifted with nil), handles records,
 * lax types, and chained lax accesses, and reports an error for any other container type.
 */
private BType checkOptionalFieldAccessExpr(BLangFieldBasedAccess fieldAccessExpr, BType varRefType,
Name fieldName) {
BType actualType = symTable.semanticError;
// Tracks whether nil must be re-added to the final result type.
boolean nillableExprType = false;
BType effectiveType = varRefType;
if (varRefType.tag == TypeTags.UNION) {
Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
if (memTypes.contains(symTable.nilType)) {
// Remove nil for the member-wise check; remember to lift the result back with nil.
LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
for (BType bType : memTypes) {
if (bType != symTable.nilType) {
nilRemovedSet.add(bType);
} else {
nillableExprType = true;
}
}
effectiveType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
BUnionType.create(null, nilRemovedSet);
}
}
if (types.isSubTypeOfBaseType(effectiveType, TypeTags.RECORD)) {
actualType = checkOptionalRecordFieldAccessExpr(fieldAccessExpr, effectiveType, fieldName);
if (actualType == symTable.semanticError) {
dlog.error(fieldAccessExpr.pos,
DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS_FOR_FIELD,
varRefType, fieldName);
}
fieldAccessExpr.nilSafeNavigation = nillableExprType;
fieldAccessExpr.originalType = getSafeType(actualType, fieldAccessExpr);
} else if (types.isLax(effectiveType)) {
// Lax access: include error only when the container could hold a non-mapping json value.
BType laxFieldAccessType = getLaxFieldAccessType(effectiveType);
actualType = couldHoldNonMappingJson(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (fieldAccessExpr.expr.getKind() == NodeKind.FIELD_BASED_ACCESS_EXPR &&
hasLaxOriginalType(((BLangFieldBasedAccess) fieldAccessExpr.expr))) {
// Chained access where the inner access was lax: propagate lax semantics.
BType laxFieldAccessType =
getLaxFieldAccessType(((BLangFieldBasedAccess) fieldAccessExpr.expr).originalType);
actualType = couldHoldNonMappingJson(effectiveType) ?
BUnionType.create(null, laxFieldAccessType, symTable.errorType) : laxFieldAccessType;
fieldAccessExpr.errorSafeNavigation = true;
fieldAccessExpr.originalType = laxFieldAccessType;
fieldAccessExpr.nilSafeNavigation = true;
nillableExprType = true;
} else if (varRefType.tag != TypeTags.SEMANTIC_ERROR) {
// Avoid cascading diagnostics when the container expression already failed.
dlog.error(fieldAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_OPTIONAL_FIELD_ACCESS,
varRefType);
}
// Re-lift with nil when the container was nillable or the access is inherently nil-safe.
if (nillableExprType && !actualType.isNullable()) {
actualType = BUnionType.create(null, actualType, symTable.nilType);
}
return actualType;
}
/**
 * Decides whether a lax type could hold a non-mapping json value (nil, string, number, ...):
 * plain json can, a map cannot, and a union can if any member can.
 */
private boolean couldHoldNonMappingJson(BType type) {
    if (type.tag == TypeTags.JSON) {
        return true;
    }
    if (type.tag == TypeTags.MAP) {
        return false;
    }
    // Union: true as soon as one member could hold a non-mapping json value.
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (couldHoldNonMappingJson(memberType)) {
            return true;
        }
    }
    return false;
}
/**
 * Central dispatcher for index-based access ({@code expr[index]}). Strips nil from a nillable
 * union container (only mappings may be accessed nil-safely, and not on the LHS), then delegates
 * by container kind: mappings (string index), lists (int index), strings (read-only int index),
 * and xml (read-only, any index). Reports operation-not-supported for everything else.
 */
private BType checkIndexAccessExpr(BLangIndexBasedAccess indexBasedAccessExpr) {
BType varRefType = indexBasedAccessExpr.expr.type;
// Tracks whether nil must be re-added to the final result type.
boolean nillableExprType = false;
if (varRefType.tag == TypeTags.UNION) {
Set<BType> memTypes = ((BUnionType) varRefType).getMemberTypes();
if (memTypes.contains(symTable.nilType)) {
LinkedHashSet<BType> nilRemovedSet = new LinkedHashSet<>();
for (BType bType : memTypes) {
if (bType != symTable.nilType) {
nilRemovedSet.add(bType);
} else {
nillableExprType = true;
}
}
if (nillableExprType) {
varRefType = nilRemovedSet.size() == 1 ? nilRemovedSet.iterator().next() :
BUnionType.create(null, nilRemovedSet);
// Nil-safe member access is only defined for mappings.
if (!types.isSubTypeOfMapping(varRefType)) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
indexBasedAccessExpr.expr.type);
return symTable.semanticError;
}
// A possibly-nil container cannot be an assignment target.
if (indexBasedAccessExpr.lhsVar) {
dlog.error(indexBasedAccessExpr.pos,
DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
indexBasedAccessExpr.expr.type);
return symTable.semanticError;
}
}
}
}
BLangExpression indexExpr = indexBasedAccessExpr.indexExpr;
BType actualType = symTable.semanticError;
if (types.isSubTypeOfMapping(varRefType)) {
// Mappings are indexed by string keys.
checkExpr(indexExpr, this.env, symTable.stringType);
if (indexExpr.type == symTable.semanticError) {
return symTable.semanticError;
}
actualType = checkMappingIndexBasedAccess(indexBasedAccessExpr, varRefType);
if (actualType == symTable.semanticError) {
// A constant string key gives a more precise "undefined field" diagnostic.
if (indexExpr.type.tag == TypeTags.STRING && isConst(indexExpr)) {
String fieldName = getConstFieldName(indexExpr);
dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.UNDEFINED_STRUCTURE_FIELD,
fieldName, indexBasedAccessExpr.expr.type);
return actualType;
}
dlog.error(indexExpr.pos, DiagnosticCode.INVALID_RECORD_INDEX_EXPR, indexExpr.type);
return actualType;
}
indexBasedAccessExpr.nilSafeNavigation = nillableExprType;
indexBasedAccessExpr.originalType = getSafeType(actualType, indexBasedAccessExpr);
} else if (types.isSubTypeOfList(varRefType)) {
// Lists (arrays/tuples) are indexed by int.
checkExpr(indexExpr, this.env, symTable.intType);
if (indexExpr.type == symTable.semanticError) {
return symTable.semanticError;
}
actualType = checkListIndexBasedAccess(indexBasedAccessExpr, varRefType);
indexBasedAccessExpr.originalType = actualType;
if (actualType == symTable.semanticError) {
// A constant int index gives a more precise out-of-range diagnostic.
if (indexExpr.type.tag == TypeTags.INT && isConst(indexExpr)) {
dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.LIST_INDEX_OUT_OF_RANGE,
getConstIndex(indexExpr));
return actualType;
}
dlog.error(indexExpr.pos, DiagnosticCode.INVALID_LIST_INDEX_EXPR, indexExpr.type);
return actualType;
}
} else if (types.isAssignable(varRefType, symTable.stringType)) {
// Strings are read-only: member access yields a string but cannot be assigned to.
if (indexBasedAccessExpr.lhsVar) {
dlog.error(indexBasedAccessExpr.pos,
DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEX_ACCESS_FOR_ASSIGNMENT,
indexBasedAccessExpr.expr.type);
return symTable.semanticError;
}
checkExpr(indexExpr, this.env, symTable.intType);
if (indexExpr.type == symTable.semanticError) {
return symTable.semanticError;
}
indexBasedAccessExpr.originalType = symTable.stringType;
actualType = symTable.stringType;
} else if (varRefType.tag == TypeTags.XML) {
if (indexBasedAccessExpr.lhsVar) {
indexExpr.type = symTable.semanticError;
dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.CANNOT_UPDATE_XML_SEQUENCE);
return actualType;
}
// Any index type is accepted for xml; the result is always xml.
checkExpr(indexExpr, this.env);
actualType = symTable.xmlType;
indexBasedAccessExpr.originalType = actualType;
} else if (varRefType == symTable.semanticError) {
// Avoid cascading diagnostics when the container expression already failed.
indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
return symTable.semanticError;
} else {
indexBasedAccessExpr.indexExpr.type = symTable.semanticError;
dlog.error(indexBasedAccessExpr.pos, DiagnosticCode.OPERATION_DOES_NOT_SUPPORT_INDEXING,
indexBasedAccessExpr.expr.type);
return symTable.semanticError;
}
// Re-lift with nil when nil was stripped from the container type above.
if (nillableExprType && !actualType.isNullable()) {
actualType = BUnionType.create(null, actualType, symTable.nilType);
}
return actualType;
}
/**
 * Extracts the compile-time value of a constant int index: either a numeric literal or a
 * reference to an int constant. Callers must have established constness via {@code isConst}.
 */
private Long getConstIndex(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.NUMERIC_LITERAL) {
        return (Long) ((BLangLiteral) indexExpr).value;
    }
    return (Long) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Extracts the compile-time value of a constant string key: either a string literal or a
 * reference to a string constant. Callers must have established constness via {@code isConst}.
 */
private String getConstFieldName(BLangExpression indexExpr) {
    if (indexExpr.getKind() == NodeKind.LITERAL) {
        return (String) ((BLangLiteral) indexExpr).value;
    }
    return (String) ((BConstantSymbol) ((BLangSimpleVarRef) indexExpr).symbol).value.value;
}
/**
 * Computes the element type for an index access on an array, dispatching on the static type of
 * the index: a plain int, a finite int type, or a union containing finite types. For sealed
 * (fixed-size) arrays, constant indexes are bounds-checked at compile time.
 */
private BType checkArrayIndexBasedAccess(BLangIndexBasedAccess indexBasedAccess, BType indexExprType,
BArrayType arrayType) {
BType actualType = symTable.semanticError;
switch (indexExprType.tag) {
case TypeTags.INT:
BLangExpression indexExpr = indexBasedAccess.indexExpr;
// Non-constant index or unsealed array: no compile-time bounds check is possible.
if (!isConst(indexExpr) || arrayType.state == BArrayState.UNSEALED) {
actualType = arrayType.eType;
break;
}
// Sealed array with a constant index: reject indexes past the fixed size.
actualType = getConstIndex(indexExpr) >= arrayType.size ? symTable.semanticError : arrayType.eType;
break;
case TypeTags.FINITE:
// A finite index type is valid if at least one of its values is in bounds.
BFiniteType finiteIndexExpr = (BFiniteType) indexExprType;
boolean validIndexExists = false;
for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
if (indexValue >= 0 &&
(arrayType.state == BArrayState.UNSEALED || indexValue < arrayType.size)) {
validIndexExists = true;
break;
}
}
if (!validIndexExists) {
return symTable.semanticError;
}
actualType = arrayType.eType;
break;
case TypeTags.UNION:
// Merge all finite members of the union into one finite type and re-check with it.
List<BFiniteType> finiteTypes = ((BUnionType) indexExprType).getMemberTypes().stream()
.filter(memType -> memType.tag == TypeTags.FINITE)
.map(matchedType -> (BFiniteType) matchedType)
.collect(Collectors.toList());
BFiniteType finiteType;
if (finiteTypes.size() == 1) {
finiteType = finiteTypes.get(0);
} else {
Set<BLangExpression> valueSpace = new LinkedHashSet<>();
finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
finiteType = new BFiniteType(null, valueSpace);
}
BType elementType = checkArrayIndexBasedAccess(indexBasedAccess, finiteType, arrayType);
if (elementType == symTable.semanticError) {
return symTable.semanticError;
}
actualType = arrayType.eType;
}
return actualType;
}
/**
 * Computes the member type for an index access on a list type: arrays and tuples are handled
 * directly; for a union of lists, the result combines the member types of every union member the
 * index is valid for, failing only when it is valid for none.
 */
private BType checkListIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    switch (type.tag) {
        case TypeTags.ARRAY:
            return checkArrayIndexBasedAccess(accessExpr, accessExpr.indexExpr.type, (BArrayType) type);
        case TypeTags.TUPLE:
            return checkTupleIndexBasedAccess(accessExpr, (BTupleType) type, accessExpr.indexExpr.type);
    }

    // Union of lists: skip members the index is invalid for; fail only if none match.
    LinkedHashSet<BType> memberAccessTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType accessType = checkListIndexBasedAccess(accessExpr, memberType);
        if (accessType != symTable.semanticError) {
            memberAccessTypes.add(accessType);
        }
    }
    if (memberAccessTypes.isEmpty()) {
        return symTable.semanticError;
    }
    return memberAccessTypes.size() == 1 ? memberAccessTypes.iterator().next()
            : BUnionType.create(null, memberAccessTypes);
}
/**
 * Computes the member type for an index access on a tuple, dispatching on the static type of the
 * index: a plain int (constant → exact member type, otherwise the union of all member types), a
 * finite int type (union of member types at its in-range values), or a union (finite members are
 * merged into one finite type; other members are checked recursively).
 */
private BType checkTupleIndexBasedAccess(BLangIndexBasedAccess accessExpr, BTupleType tuple, BType currentType) {
BType actualType = symTable.semanticError;
BLangExpression indexExpr = accessExpr.indexExpr;
switch (currentType.tag) {
case TypeTags.INT:
if (isConst(indexExpr)) {
// Constant index: the exact member type is known.
actualType = checkTupleFieldType(tuple, getConstIndex(indexExpr).intValue());
} else {
// Unknown index: the result may be any member type of the tuple.
BTupleType tupleExpr = (BTupleType) accessExpr.expr.type;
LinkedHashSet<BType> tupleTypes = collectTupleFieldTypes(tupleExpr, new LinkedHashSet<>());
actualType = tupleTypes.size() == 1 ? tupleTypes.iterator().next() : BUnionType.create(null,
tupleTypes);
}
break;
case TypeTags.FINITE:
// Each in-range value of the finite index contributes its member type.
BFiniteType finiteIndexExpr = (BFiniteType) currentType;
LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
int indexValue = ((Long) ((BLangLiteral) finiteMember).value).intValue();
BType fieldType = checkTupleFieldType(tuple, indexValue);
if (fieldType.tag != TypeTags.SEMANTIC_ERROR) {
possibleTypes.add(fieldType);
}
}
if (possibleTypes.size() == 0) {
return symTable.semanticError;
}
actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
BUnionType.create(null, possibleTypes);
break;
case TypeTags.UNION:
// Split the union: finite members are merged into one finite type; the rest recurse.
LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
List<BFiniteType> finiteTypes = new ArrayList<>();
((BUnionType) currentType).getMemberTypes().forEach(memType -> {
if (memType.tag == TypeTags.FINITE) {
finiteTypes.add((BFiniteType) memType);
} else {
BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, memType);
if (possibleType.tag == TypeTags.UNION) {
possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
} else {
possibleTypesByMember.add(possibleType);
}
}
});
BFiniteType finiteType;
if (finiteTypes.size() == 1) {
finiteType = finiteTypes.get(0);
} else {
Set<BLangExpression> valueSpace = new LinkedHashSet<>();
finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
finiteType = new BFiniteType(null, valueSpace);
}
BType possibleType = checkTupleIndexBasedAccess(accessExpr, tuple, finiteType);
if (possibleType.tag == TypeTags.UNION) {
possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
} else {
possibleTypesByMember.add(possibleType);
}
// Unlike lists, any failing member invalidates the whole access here.
if (possibleTypesByMember.contains(symTable.semanticError)) {
return symTable.semanticError;
}
actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
BUnionType.create(null, possibleTypesByMember);
}
return actualType;
}
/**
 * Accumulates all member types of a tuple into {@code memberTypes}, flattening union members so
 * the resulting set contains only non-union types.
 *
 * @return the same {@code memberTypes} set, for chaining
 */
private LinkedHashSet<BType> collectTupleFieldTypes(BTupleType tupleType, LinkedHashSet<BType> memberTypes) {
    for (BType memberType : tupleType.tupleTypes) {
        if (memberType.tag == TypeTags.UNION) {
            // Flatten nested unions into the accumulator.
            collectMemberTypes((BUnionType) memberType, memberTypes);
        } else {
            memberTypes.add(memberType);
        }
    }
    return memberTypes;
}
/**
 * Computes the member type for an index access on a mapping type: a map yields its constraint
 * (nil-lifted for reads, since the key may be absent), a record delegates to the record-specific
 * check, and a union of mappings combines the member results — lifted with nil when some member
 * could not provide the key at all.
 */
private BType checkMappingIndexBasedAccess(BLangIndexBasedAccess accessExpr, BType type) {
    if (type.tag == TypeTags.MAP) {
        BType constraint = ((BMapType) type).constraint;
        // Reads may miss the key, so the result is lifted with nil; writes are not.
        return accessExpr.lhsVar ? constraint : addNilForNillableIndexBasedAccess(constraint);
    }
    if (type.tag == TypeTags.RECORD) {
        return checkRecordIndexBasedAccess(accessExpr, (BRecordType) type, accessExpr.indexExpr.type);
    }

    // Union of mappings: collect the access types of matching members.
    boolean hasNonMatchingMember = false;
    LinkedHashSet<BType> memberAccessTypes = new LinkedHashSet<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        BType accessType = checkMappingIndexBasedAccess(accessExpr, memberType);
        if (accessType == symTable.semanticError) {
            hasNonMatchingMember = true;
            continue;
        }
        memberAccessTypes.add(accessType);
    }
    if (memberAccessTypes.isEmpty()) {
        return symTable.semanticError;
    }

    BType accessType = memberAccessTypes.size() == 1 ? memberAccessTypes.iterator().next()
            : BUnionType.create(null, memberAccessTypes);
    // If some member lacked the key entirely, the access may also produce nil.
    return hasNonMatchingMember ? addNilForNillableIndexBasedAccess(accessType) : accessType;
}
/**
 * Computes the member type for an index access on a record, dispatching on the static type of the
 * key: a plain string (constant → layered lookup through required, optional, then rest fields;
 * otherwise the nil-lifted union of all field types), a finite string type (union of the field
 * types at its values), or a union (finite members merged into one finite type; others recurse).
 */
private BType checkRecordIndexBasedAccess(BLangIndexBasedAccess accessExpr, BRecordType record, BType currentType) {
BType actualType = symTable.semanticError;
BLangExpression indexExpr = accessExpr.indexExpr;
switch (currentType.tag) {
case TypeTags.STRING:
if (isConst(indexExpr)) {
String fieldName = getConstFieldName(indexExpr);
// Required field: always present, no nil-lifting needed.
actualType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
if (actualType != symTable.semanticError) {
return actualType;
}
actualType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
if (actualType == symTable.semanticError) {
// Neither required nor optional: fall back to the rest field (open records).
actualType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
if (actualType == symTable.semanticError) {
return actualType;
}
return addNilForNillableIndexBasedAccess(actualType);
}
// Optional field: nil-lift reads; writes get the plain field type.
if (accessExpr.lhsVar) {
return actualType;
}
return addNilForNillableIndexBasedAccess(actualType);
}
// Unknown key: any field (or the rest field) could match, and the lookup may miss.
LinkedHashSet<BType> fieldTypes = record.fields.stream()
.map(field -> field.type)
.collect(Collectors.toCollection(LinkedHashSet::new));
if (record.restFieldType.tag != TypeTags.NONE) {
fieldTypes.add(record.restFieldType);
}
if (fieldTypes.stream().noneMatch(BType::isNullable)) {
fieldTypes.add(symTable.nilType);
}
actualType = BUnionType.create(null, fieldTypes);
break;
case TypeTags.FINITE:
// Each value of the finite key type contributes its field type via the same layered lookup.
BFiniteType finiteIndexExpr = (BFiniteType) currentType;
LinkedHashSet<BType> possibleTypes = new LinkedHashSet<>();
for (BLangExpression finiteMember : finiteIndexExpr.valueSpace) {
String fieldName = (String) ((BLangLiteral) finiteMember).value;
BType fieldType = checkRecordRequiredFieldAccess(accessExpr, names.fromString(fieldName), record);
if (fieldType == symTable.semanticError) {
fieldType = checkRecordOptionalFieldAccess(accessExpr, names.fromString(fieldName), record);
if (fieldType == symTable.semanticError) {
fieldType = checkRecordRestFieldAccess(accessExpr, names.fromString(fieldName), record);
}
// Optional and rest accesses may miss, so their result is nil-lifted.
if (fieldType != symTable.semanticError) {
fieldType = addNilForNillableIndexBasedAccess(fieldType);
}
}
if (fieldType.tag == TypeTags.SEMANTIC_ERROR) {
continue;
}
possibleTypes.add(fieldType);
}
if (possibleTypes.isEmpty()) {
return symTable.semanticError;
}
if (possibleTypes.stream().noneMatch(BType::isNullable)) {
possibleTypes.add(symTable.nilType);
}
actualType = possibleTypes.size() == 1 ? possibleTypes.iterator().next() :
BUnionType.create(null, possibleTypes);
break;
case TypeTags.UNION:
// Split the union: finite members are merged into one finite type; the rest recurse.
LinkedHashSet<BType> possibleTypesByMember = new LinkedHashSet<>();
List<BFiniteType> finiteTypes = new ArrayList<>();
((BUnionType) currentType).getMemberTypes().forEach(memType -> {
if (memType.tag == TypeTags.FINITE) {
finiteTypes.add((BFiniteType) memType);
} else {
BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, memType);
if (possibleType.tag == TypeTags.UNION) {
possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
} else {
possibleTypesByMember.add(possibleType);
}
}
});
BFiniteType finiteType;
if (finiteTypes.size() == 1) {
finiteType = finiteTypes.get(0);
} else {
Set<BLangExpression> valueSpace = new LinkedHashSet<>();
finiteTypes.forEach(constituent -> valueSpace.addAll(constituent.valueSpace));
finiteType = new BFiniteType(null, valueSpace);
}
BType possibleType = checkRecordIndexBasedAccess(accessExpr, record, finiteType);
if (possibleType.tag == TypeTags.UNION) {
possibleTypesByMember.addAll(((BUnionType) possibleType).getMemberTypes());
} else {
possibleTypesByMember.add(possibleType);
}
// Any failing member invalidates the whole access.
if (possibleTypesByMember.contains(symTable.semanticError)) {
return symTable.semanticError;
}
actualType = possibleTypesByMember.size() == 1 ? possibleTypesByMember.iterator().next() :
BUnionType.create(null, possibleTypesByMember);
}
return actualType;
}
/**
 * Strips the members made unreachable by safe navigation (`!` / `?.`) from a
 * union type. Non-union types are returned untouched. Reports
 * SAFE_NAVIGATION_NOT_REQUIRED and returns semanticError when error-safe
 * navigation is used on a union that has no error member (or only errors).
 */
private BType getSafeType(BType type, BLangAccessExpression accessExpr) {
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    // Work on a mutable copy of the union's members.
    List<BType> memberTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
    if (accessExpr.errorSafeNavigation) {
        // `!`-navigation only makes sense when `error` is actually a member.
        if (!memberTypes.contains(symTable.errorType)) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }
        memberTypes.removeIf(memberType -> memberType == symTable.errorType);
        // A union of nothing but errors leaves no usable LHS type either.
        if (memberTypes.isEmpty()) {
            dlog.error(accessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, type);
            return symTable.semanticError;
        }
    }
    if (accessExpr.nilSafeNavigation) {
        memberTypes.removeIf(memberType -> memberType == symTable.nilType);
    }
    // A single survivor collapses to that type; otherwise rebuild the union.
    return memberTypes.size() == 1 ? memberTypes.get(0)
            : BUnionType.create(null, new LinkedHashSet<>(memberTypes));
}
/**
 * Flattens a type into the list of types it stands for: the member types of a
 * union, or a singleton list for any other type.
 */
private List<BType> getTypesList(BType type) {
    return type.tag == TypeTags.UNION
            ? new ArrayList<>(((BUnionType) type).getMemberTypes())
            : Lists.of(type);
}
/**
 * Derives the set of types a match expression can evaluate to: the union of
 * all pattern-expression types, plus any expression type that no pattern
 * variable can accept (such a type flows through unmatched). Returns a
 * singleton set containing semanticError as soon as any involved type is
 * already erroneous.
 *
 * @param bLangMatchExpression the match expression under analysis
 * @return the set of possible result types, in encounter order
 */
private LinkedHashSet<BType> getMatchExpressionTypes(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes = getTypesList(bLangMatchExpression.expr.type);
    LinkedHashSet<BType> matchExprTypes = new LinkedHashSet<>();
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            BType patternExprType = pattern.expr.type;
            matchExprTypes.addAll(getTypesList(patternExprType));
            if (type.tag == TypeTags.SEMANTIC_ERROR || patternExprType.tag == TypeTags.SEMANTIC_ERROR) {
                // Build the error result with a plain set instead of the previous
                // anonymous LinkedHashSet subclass (double-brace initialization),
                // which created a needless class capturing the enclosing instance.
                LinkedHashSet<BType> errorTypes = new LinkedHashSet<>();
                errorTypes.add(symTable.semanticError);
                return errorTypes;
            }
            // The first pattern whose variable can accept this type consumes it.
            assignable = this.types.isAssignable(type, pattern.variable.type);
            if (assignable) {
                break;
            }
        }
        if (!assignable) {
            // No pattern matched: the expression type itself is a possible result.
            matchExprTypes.add(type);
        }
    }
    return matchExprTypes;
}
/**
 * Resolves the symbol for builtin methods whose return type cannot be stated
 * statically but depends on the receiver/argument types: clone, freeze,
 * isFrozen, stamp, convert, call and detail. Any builtin not listed here
 * resolves to symTable.notFoundSymbol.
 *
 * @param iExpr the invocation being type-checked
 * @param function the builtin method invoked
 * @return the resolved symbol, or symTable.notFoundSymbol if unsupported
 */
private BSymbol getSymbolForBuiltinMethodWithDynamicRetType(BLangInvocation iExpr, BLangBuiltInMethod function) {
switch (function) {
case CLONE:
// clone and freeze share the same rule: T for anydata, T|error otherwise.
case FREEZE:
return getSymbolForAnydataReturningBuiltinMethods(iExpr);
case IS_FROZEN:
return getSymbolForIsFrozenBuiltinMethod(iExpr);
case STAMP:
// Type-check every argument before building the stamp operator symbol.
List<BLangExpression> functionArgList = iExpr.argExprs;
for (BLangExpression expression : functionArgList) {
checkExpr(expression, env, symTable.noType);
}
return symResolver.createSymbolForStampOperator(iExpr.pos, new Name(function.getName()),
functionArgList, iExpr.expr);
case CONVERT:
// Reuses `functionArgList` declared in the STAMP case above — legal because
// switch cases share one scope, but easy to misread.
functionArgList = iExpr.argExprs;
for (BLangExpression expression : functionArgList) {
checkExpr(expression, env, symTable.noType);
}
return symResolver.createSymbolForConvertOperator(iExpr.pos, new Name(function.getName()),
functionArgList, iExpr.expr);
case CALL:
// Function-pointer invocation: resolve against the variable's invokable type.
return getFunctionPointerCallSymbol(iExpr);
case DETAIL:
return symResolver.createSymbolForDetailBuiltInMethod(iExpr.name, iExpr.expr.type);
default:
return symTable.notFoundSymbol;
}
}
/**
 * Resolves the symbol for a function-pointer `.call(...)` invocation. The
 * receiver must be a variable reference with an invokable type; when the
 * referenced symbol is not already a function symbol, a fresh function-kinded
 * BInvokableSymbol is created to stand in for it. Side effect: the resolved
 * symbol is stored on {@code iExpr.symbol}.
 */
private BSymbol getFunctionPointerCallSymbol(BLangInvocation iExpr) {
    if (iExpr.expr == null) {
        return symTable.notFoundSymbol;
    }
    BSymbol funcSymbol = ((BLangVariableReference) iExpr.expr).symbol;
    // Reject unresolved references and non-invokable receivers alike.
    if (funcSymbol == null || funcSymbol.type.tag != TypeTags.INVOKABLE) {
        return symTable.notFoundSymbol;
    }
    if (funcSymbol.kind != SymbolKind.FUNCTION) {
        // Wrap the plain variable in a function-kinded symbol so downstream
        // invocation handling treats it uniformly.
        BSymbol wrapped = new BInvokableSymbol(SymTag.VARIABLE, 0, funcSymbol.name,
                env.enclPkg.symbol.pkgID, funcSymbol.type, env.scope.owner);
        wrapped.kind = SymbolKind.FUNCTION;
        funcSymbol = wrapped;
    }
    iExpr.symbol = funcSymbol;
    return funcSymbol;
}
/**
 * Creates the method symbol for the builtins routed here (clone/freeze), whose
 * return type depends on the receiver: a pure-anydata receiver keeps its own
 * type, while an anydata-like receiver widens to `T|error` since the operation
 * can fail at runtime.
 *
 * @param iExpr invocation of the builtin method
 * @return the created symbol, or symTable.notFoundSymbol for receivers that
 *         are not anydata-like
 */
private BSymbol getSymbolForAnydataReturningBuiltinMethods(BLangInvocation iExpr) {
BType type = iExpr.expr.type;
// Only anydata-like (and non-nil) receivers support these builtins.
if (!types.isLikeAnydataOrNotNil(type)) {
return symTable.notFoundSymbol;
}
BType retType;
if (types.isAnydata(type)) {
retType = type;
} else {
// Not pure anydata: the operation may fail, so widen to `type|error`.
retType = BUnionType.create(null, type, symTable.errorType);
}
// NOTE(review): BLangBuiltInMethod.FREEZE is passed even when this helper is
// reached for CLONE (see getSymbolForBuiltinMethodWithDynamicRetType) —
// confirm downstream code never reads the method name off this symbol.
return symResolver.createBuiltinMethodSymbol(BLangBuiltInMethod.FREEZE, type, retType);
}
/**
 * Creates the symbol for the `isFrozen` builtin, which always returns boolean;
 * receivers that are not anydata-like do not support it.
 */
private BSymbol getSymbolForIsFrozenBuiltinMethod(BLangInvocation iExpr) {
    BType receiverType = iExpr.expr.type;
    return types.isLikeAnydataOrNotNil(receiverType)
            ? symResolver.createBuiltinMethodSymbol(BLangBuiltInMethod.IS_FROZEN, receiverType,
                    symTable.booleanType)
            : symTable.notFoundSymbol;
}
/**
 * Validates a use of error-safe navigation: it is only meaningful on a union
 * type (errors aside, a non-union LHS can never carry an error member).
 * Reports SAFE_NAVIGATION_NOT_REQUIRED when misused.
 *
 * @return false (after logging) when error-safe navigation is not required,
 *         true otherwise
 */
private boolean isSafeNavigable(BLangAccessExpression fieldAccessExpr, BType varRefType) {
    boolean misusedErrorNavigation = fieldAccessExpr.errorSafeNavigation
            && varRefType.tag != TypeTags.UNION
            && varRefType != symTable.semanticError;
    if (misusedErrorNavigation) {
        dlog.error(fieldAccessExpr.pos, DiagnosticCode.SAFE_NAVIGATION_NOT_REQUIRED, varRefType);
    }
    return !misusedErrorNavigation;
}
/**
 * Checks whether a value of the given type could, directly or transitively,
 * contain a table value. Recurses through unions, maps, records, arrays and
 * tuples; {@code encounteredTypes} acts as a visited list so cyclic type
 * definitions terminate.
 */
private boolean couldHoldTableValues(BType type, List<BType> encounteredTypes) {
    // Revisiting a type already on the path cannot introduce new table members.
    if (encounteredTypes.contains(type)) {
        return false;
    }
    encounteredTypes.add(type);
    switch (type.tag) {
        case TypeTags.TABLE:
            return true;
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (couldHoldTableValues(memberType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        case TypeTags.MAP:
            return couldHoldTableValues(((BMapType) type).constraint, encounteredTypes);
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) type;
            boolean fieldHoldsTable = recordType.fields.stream()
                    .anyMatch(field -> couldHoldTableValues(field.type, encounteredTypes));
            // An open record may also pick up tables through its rest field.
            return fieldHoldsTable
                    || (!recordType.sealed
                            && couldHoldTableValues(recordType.restFieldType, encounteredTypes));
        case TypeTags.ARRAY:
            return couldHoldTableValues(((BArrayType) type).eType, encounteredTypes);
        case TypeTags.TUPLE:
            for (BType tupleMemberType : ((BTupleType) type).getTupleTypes()) {
                if (couldHoldTableValues(tupleMemberType, encounteredTypes)) {
                    return true;
                }
            }
            return false;
        default:
            return false;
    }
}
/**
 * Determines whether an expression is a compile-time constant: either a valid
 * constant expression in itself, or a simple variable reference whose symbol
 * carries the CONSTANT tag.
 */
private boolean isConst(BLangExpression expression) {
    if (symbolEnter.isValidConstantExpression(expression)) {
        return true;
    }
    if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BSymbol referencedSymbol = ((BLangSimpleVarRef) expression).symbol;
        return (referencedSymbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT;
    }
    return false;
}
} |
How is `update` different from `updateInternal`? From reading their comments, it seems they are different somehow, but the changelogger seems to be the same. Similar to other states. | public void updateInternal(List<V> valueToStore) throws Exception {
changeLogger.stateUpdated(valueToStore, getCurrentNamespace());
delegatedState.updateInternal(valueToStore);
} | changeLogger.stateUpdated(valueToStore, getCurrentNamespace()); | public void updateInternal(List<V> valueToStore) throws Exception {
delegatedState.updateInternal(valueToStore);
changeLogger.valueUpdatedInternal(valueToStore, getCurrentNamespace());
} | class ChangelogListState<K, N, V>
extends AbstractChangelogState<K, N, List<V>, InternalListState<K, N, V>>
implements InternalListState<K, N, V> {
ChangelogListState(
InternalListState<K, N, V> delegatedState,
KvStateChangeLogger<List<V>, N> changeLogger) {
super(delegatedState, changeLogger);
}
@Override
public void update(List<V> values) throws Exception {
changeLogger.stateUpdated(values, getCurrentNamespace());
delegatedState.update(values);
}
@Override
public void addAll(List<V> values) throws Exception {
changeLogger.stateAdded(values, getCurrentNamespace());
delegatedState.addAll(values);
}
@Override
@Override
public void add(V value) throws Exception {
if (getValueSerializer() instanceof ListSerializer) {
changeLogger.stateElementChanged(
w ->
((ListSerializer<V>) getValueSerializer())
.getElementSerializer()
.serialize(value, w),
getCurrentNamespace());
} else {
changeLogger.stateAdded(singletonList(value), getCurrentNamespace());
}
delegatedState.add(value);
}
@Override
public void mergeNamespaces(N target, Collection<N> sources) throws Exception {
changeLogger.stateMerged(target, sources);
delegatedState.mergeNamespaces(target, sources);
}
@Override
public List<V> getInternal() throws Exception {
return delegatedState.getInternal();
}
@Override
public Iterable<V> get() throws Exception {
return delegatedState.get();
}
@Override
public void clear() {
try {
changeLogger.stateCleared(getCurrentNamespace());
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
delegatedState.clear();
}
@SuppressWarnings("unchecked")
static <K, N, SV, S extends State, IS extends S> IS create(
InternalKvState<K, N, SV> listState, KvStateChangeLogger<SV, N> changeLogger) {
return (IS)
new ChangelogListState<>(
(InternalListState<K, N, SV>) listState,
(KvStateChangeLogger<List<SV>, N>) changeLogger);
}
} | class ChangelogListState<K, N, V>
extends AbstractChangelogState<K, N, List<V>, InternalListState<K, N, V>>
implements InternalListState<K, N, V> {
ChangelogListState(
InternalListState<K, N, V> delegatedState,
KvStateChangeLogger<List<V>, N> changeLogger) {
super(delegatedState, changeLogger);
}
@Override
public void update(List<V> values) throws Exception {
delegatedState.update(values);
changeLogger.valueUpdated(values, getCurrentNamespace());
}
@Override
public void addAll(List<V> values) throws Exception {
delegatedState.addAll(values);
changeLogger.valueAdded(values, getCurrentNamespace());
}
@Override
@Override
public void add(V value) throws Exception {
delegatedState.add(value);
changeLogger.valueElementAdded(
w ->
((ListSerializer<V>) getValueSerializer())
.getElementSerializer()
.serialize(value, w),
getCurrentNamespace());
}
@Override
public void mergeNamespaces(N target, Collection<N> sources) throws Exception {
delegatedState.mergeNamespaces(target, sources);
changeLogger.namespacesMerged(target, sources);
}
@Override
public List<V> getInternal() throws Exception {
return delegatedState.getInternal();
}
@Override
public Iterable<V> get() throws Exception {
return delegatedState.get();
}
@Override
public void clear() {
delegatedState.clear();
try {
changeLogger.valueCleared(getCurrentNamespace());
} catch (IOException e) {
ExceptionUtils.rethrow(e);
}
}
@SuppressWarnings("unchecked")
static <K, N, SV, S extends State, IS extends S> IS create(
InternalKvState<K, N, SV> listState, KvStateChangeLogger<SV, N> changeLogger) {
return (IS)
new ChangelogListState<>(
(InternalListState<K, N, SV>) listState,
(KvStateChangeLogger<List<SV>, N>) changeLogger);
}
} |
I created an issue for this. #32034 | public void visit(BLangArrayType arrayTypeNode) {
resultType = resolveTypeNode(arrayTypeNode.elemtype, env, diagCode);
if (resultType == symTable.noType) {
return;
}
boolean isError = false;
for (int i = 0; i < arrayTypeNode.dimensions; i++) {
BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.PUBLIC, Names.EMPTY,
env.enclPkg.symbol.pkgID, null, env.scope.owner,
arrayTypeNode.pos, SOURCE);
BArrayType arrType;
if (arrayTypeNode.sizes.length == 0) {
arrType = new BArrayType(resultType, arrayTypeSymbol);
} else {
BLangExpression size = arrayTypeNode.sizes[i];
if (size.getKind() == NodeKind.LITERAL || size.getKind() == NodeKind.NUMERIC_LITERAL) {
Integer sizeIndicator = (Integer) (((BLangLiteral) size).getValue());
BArrayState arrayState;
if (sizeIndicator == OPEN_ARRAY_INDICATOR) {
arrayState = BArrayState.OPEN;
} else if (sizeIndicator == INFERRED_ARRAY_INDICATOR) {
arrayState = BArrayState.INFERRED;
} else {
arrayState = BArrayState.CLOSED;
}
arrType = new BArrayType(resultType, arrayTypeSymbol, sizeIndicator, arrayState);
} else {
if (size.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
((BLangTypedescExpr) size).getTypeNode());
isError = true;
continue;
}
BLangSimpleVarRef sizeReference = (BLangSimpleVarRef) size;
Name pkgAlias = names.fromIdNode(sizeReference.pkgAlias);
Name typeName = names.fromIdNode(sizeReference.variableName);
BSymbol sizeSymbol = lookupMainSpaceSymbolInPackage(size.pos, env, pkgAlias, typeName);
if (symTable.notFoundSymbol == sizeSymbol) {
dlog.error(arrayTypeNode.pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, size);
isError = true;
continue;
}
if (sizeSymbol.tag != SymTag.CONSTANT) {
dlog.error(size.pos, DiagnosticErrorCode.INVALID_ARRAY_SIZE_REFERENCE, sizeSymbol);
isError = true;
continue;
}
BConstantSymbol sizeConstSymbol = (BConstantSymbol) sizeSymbol;
BType lengthLiteralType = sizeConstSymbol.literalType;
if (lengthLiteralType.tag != TypeTags.INT) {
dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
sizeConstSymbol.literalType);
isError = true;
continue;
}
int length;
long lengthCheck = Long.parseLong(sizeConstSymbol.type.toString());
if (lengthCheck > MAX_ARRAY_SIZE) {
length = 0;
dlog.error(size.pos,
DiagnosticErrorCode.GREATER_THAN_SIGNED_INT32_MAX_ARRAY_SIZES_NOT_YET_SUPPORTED);
} else {
length = (int) lengthCheck;
}
arrType = new BArrayType(resultType, arrayTypeSymbol, length, BArrayState.CLOSED);
}
}
arrayTypeSymbol.type = arrType;
resultType = arrayTypeSymbol.type;
markParameterizedType(arrType, arrType.eType);
}
if (isError) {
resultType = symTable.semanticError;
}
} | long lengthCheck = Long.parseLong(sizeConstSymbol.type.toString()); | public void visit(BLangArrayType arrayTypeNode) {
resultType = resolveTypeNode(arrayTypeNode.elemtype, env, diagCode);
if (resultType == symTable.noType) {
return;
}
boolean isError = false;
for (int i = 0; i < arrayTypeNode.dimensions; i++) {
BTypeSymbol arrayTypeSymbol = Symbols.createTypeSymbol(SymTag.ARRAY_TYPE, Flags.PUBLIC, Names.EMPTY,
env.enclPkg.symbol.pkgID, null, env.scope.owner,
arrayTypeNode.pos, SOURCE);
BArrayType arrType;
if (arrayTypeNode.sizes.length == 0) {
arrType = new BArrayType(resultType, arrayTypeSymbol);
} else {
BLangExpression size = arrayTypeNode.sizes[i];
if (size.getKind() == NodeKind.LITERAL || size.getKind() == NodeKind.NUMERIC_LITERAL) {
Integer sizeIndicator = (Integer) (((BLangLiteral) size).getValue());
BArrayState arrayState;
if (sizeIndicator == OPEN_ARRAY_INDICATOR) {
arrayState = BArrayState.OPEN;
} else if (sizeIndicator == INFERRED_ARRAY_INDICATOR) {
arrayState = BArrayState.INFERRED;
} else {
arrayState = BArrayState.CLOSED;
}
arrType = new BArrayType(resultType, arrayTypeSymbol, sizeIndicator, arrayState);
} else {
if (size.getKind() != NodeKind.SIMPLE_VARIABLE_REF) {
dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
((BLangTypedescExpr) size).getTypeNode());
isError = true;
continue;
}
BLangSimpleVarRef sizeReference = (BLangSimpleVarRef) size;
Name pkgAlias = names.fromIdNode(sizeReference.pkgAlias);
Name typeName = names.fromIdNode(sizeReference.variableName);
BSymbol sizeSymbol = lookupMainSpaceSymbolInPackage(size.pos, env, pkgAlias, typeName);
sizeReference.symbol = sizeSymbol;
if (symTable.notFoundSymbol == sizeSymbol) {
dlog.error(arrayTypeNode.pos, DiagnosticErrorCode.UNDEFINED_SYMBOL, size);
isError = true;
continue;
}
if (sizeSymbol.tag != SymTag.CONSTANT) {
dlog.error(size.pos, DiagnosticErrorCode.INVALID_ARRAY_SIZE_REFERENCE, sizeSymbol);
isError = true;
continue;
}
BConstantSymbol sizeConstSymbol = (BConstantSymbol) sizeSymbol;
BType lengthLiteralType = sizeConstSymbol.literalType;
if (lengthLiteralType.tag != TypeTags.INT) {
dlog.error(size.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES, symTable.intType,
sizeConstSymbol.literalType);
isError = true;
continue;
}
int length;
long lengthCheck = Long.parseLong(sizeConstSymbol.type.toString());
if (lengthCheck > MAX_ARRAY_SIZE) {
length = 0;
dlog.error(size.pos,
DiagnosticErrorCode.ARRAY_LENGTH_GREATER_THAT_2147483637_NOT_YET_SUPPORTED);
} else {
length = (int) lengthCheck;
}
arrType = new BArrayType(resultType, arrayTypeSymbol, length, BArrayState.CLOSED);
}
}
arrayTypeSymbol.type = arrType;
resultType = arrayTypeSymbol.type;
markParameterizedType(arrType, arrType.eType);
}
if (isError) {
resultType = symTable.semanticError;
}
} | class SymbolResolver extends BLangNodeVisitor {
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 10;
private static final CompilerContext.Key<SymbolResolver> SYMBOL_RESOLVER_KEY =
new CompilerContext.Key<>();
private SymbolTable symTable;
private Names names;
private BLangDiagnosticLog dlog;
private Types types;
private SymbolEnv env;
private BType resultType;
private DiagnosticCode diagCode;
private SymbolEnter symbolEnter;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
private Unifier unifier;
public static SymbolResolver getInstance(CompilerContext context) {
SymbolResolver symbolResolver = context.get(SYMBOL_RESOLVER_KEY);
if (symbolResolver == null) {
symbolResolver = new SymbolResolver(context);
}
return symbolResolver;
}
public SymbolResolver(CompilerContext context) {
context.put(SYMBOL_RESOLVER_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.names = Names.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.types = Types.getInstance(context);
this.symbolEnter = SymbolEnter.getInstance(context);
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
this.unifier = new Unifier();
}
public boolean checkForUniqueSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
BSymbol foundSym = symTable.notFoundSymbol;
int expSymTag = symbol.tag;
if ((expSymTag & SymTag.IMPORT) == SymTag.IMPORT) {
foundSym = lookupSymbolInPrefixSpace(env, symbol.name);
} else if ((expSymTag & SymTag.ANNOTATION) == SymTag.ANNOTATION) {
foundSym = lookupSymbolInAnnotationSpace(env, symbol.name);
} else if ((expSymTag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
foundSym = lookupSymbolInConstructorSpace(env, symbol.name);
} else if ((expSymTag & SymTag.MAIN) == SymTag.MAIN) {
foundSym = lookupSymbolForDecl(env, symbol.name, SymTag.MAIN);
}
if (foundSym == symTable.notFoundSymbol && symbol.tag == SymTag.FUNCTION) {
int dotPosition = symbol.name.value.indexOf('.');
if (dotPosition > 0 && dotPosition != symbol.name.value.length()) {
String funcName = symbol.name.value.substring(dotPosition + 1);
foundSym = lookupSymbolForDecl(env, names.fromString(funcName), SymTag.MAIN);
}
}
if (foundSym == symTable.notFoundSymbol) {
return true;
}
if (!isDistinctSymbol(pos, symbol, foundSym)) {
return false;
}
if (isRedeclaredSymbol(symbol, foundSym)) {
Name name = symbol.name;
if (Symbols.isRemote(symbol) && !Symbols.isRemote(foundSym)
|| !Symbols.isRemote(symbol) && Symbols.isRemote(foundSym)) {
dlog.error(pos, DiagnosticErrorCode.UNSUPPORTED_REMOTE_METHOD_NAME_IN_SCOPE, name);
return false;
}
dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, name);
return false;
}
if ((foundSym.tag & SymTag.SERVICE) == SymTag.SERVICE) {
return false;
}
return true;
}
private boolean isRedeclaredSymbol(BSymbol symbol, BSymbol foundSym) {
return hasSameOwner(symbol, foundSym) || isSymbolRedeclaredInTestPackage(symbol, foundSym);
}
public boolean checkForUniqueSymbol(SymbolEnv env, BSymbol symbol) {
BSymbol foundSym = lookupSymbolInMainSpace(env, symbol.name);
if (foundSym == symTable.notFoundSymbol) {
return true;
}
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
return !hasSameOwner(symbol, foundSym);
}
/**
* This method will check whether the given symbol that is being defined is unique by only checking its current
* environment scope.
*
* @param pos symbol pos for diagnostic purpose.
* @param env symbol environment to lookup.
* @param symbol the symbol that is being defined.
* @param expSymTag expected tag of the symbol for.
* @return true if the symbol is unique, false otherwise.
*/
public boolean checkForUniqueSymbolInCurrentScope(Location pos, SymbolEnv env, BSymbol symbol,
int expSymTag) {
BSymbol foundSym = lookupSymbolInGivenScope(env, symbol.name, expSymTag);
if (foundSym == symTable.notFoundSymbol) {
return true;
}
return isDistinctSymbol(pos, symbol, foundSym);
}
/**
* This method will check whether the symbol being defined is unique comparing it with the found symbol
* from the scope.
*
* @param pos symbol pos for diagnostic purpose.
* @param symbol symbol that is being defined.
* @param foundSym symbol that is found from the scope.
* @return true if the symbol is unique, false otherwise.
*/
private boolean isDistinctSymbol(Location pos, BSymbol symbol, BSymbol foundSym) {
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
if (isSymbolDefinedInRootPkgLvl(foundSym)) {
dlog.error(pos, DiagnosticErrorCode.REDECLARED_BUILTIN_SYMBOL, symbol.name);
return false;
}
return true;
}
/**
* This method will check whether the symbol being defined is unique comparing it with the found symbol
* from the scope.
*
* @param symbol symbol that is being defined.
* @param foundSym symbol that is found from the scope.
* @return true if the symbol is unique, false otherwise.
*/
private boolean isDistinctSymbol(BSymbol symbol, BSymbol foundSym) {
if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
return false;
}
if (isSymbolDefinedInRootPkgLvl(foundSym)) {
return false;
}
return !hasSameOwner(symbol, foundSym);
}
private boolean hasSameOwner(BSymbol symbol, BSymbol foundSym) {
if (foundSym.owner == symbol.owner) {
return true;
} else if (Symbols.isFlagOn(symbol.owner.flags, Flags.LAMBDA) &&
((foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE)) {
return true;
} else if (((symbol.owner.tag & SymTag.LET) == SymTag.LET) &&
((foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE)) {
return true;
}
return false;
}
private boolean isSymbolRedeclaredInTestPackage(BSymbol symbol, BSymbol foundSym) {
if (Symbols.isFlagOn(symbol.owner.flags, Flags.TESTABLE) &&
!Symbols.isFlagOn(foundSym.owner.flags, Flags.TESTABLE)) {
return true;
}
return false;
}
private boolean isSymbolDefinedInRootPkgLvl(BSymbol foundSym) {
return symTable.rootPkgSymbol.pkgID.equals(foundSym.pkgID) &&
(foundSym.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME;
}
/**
* Lookup the symbol using given name in the given environment scope only.
*
* @param env environment to lookup the symbol.
* @param name name of the symbol to lookup.
* @param expSymTag expected tag of the symbol.
* @return if a symbol is found return it.
*/
public BSymbol lookupSymbolInGivenScope(SymbolEnv env, Name name, int expSymTag) {
ScopeEntry entry = env.scope.lookup(name);
while (entry != NOT_FOUND_ENTRY) {
if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
(entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
return entry.symbol;
}
if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
return entry.symbol;
}
entry = entry.next;
}
return symTable.notFoundSymbol;
}
public boolean checkForUniqueMemberSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
BSymbol foundSym = lookupMemberSymbol(pos, env.scope, env, symbol.name, symbol.tag);
if (foundSym != symTable.notFoundSymbol) {
dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, symbol.name);
return false;
}
return true;
}
public BSymbol resolveBinaryOperator(OperatorKind opKind,
BType lhsType,
BType rhsType) {
return resolveOperator(names.fromString(opKind.value()), Lists.of(lhsType, rhsType));
}
BSymbol createEqualityOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BType retType = symTable.booleanType;
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
public BSymbol resolveUnaryOperator(Location pos,
OperatorKind opKind,
BType type) {
return resolveOperator(names.fromString(opKind.value()), Lists.of(type));
}
public BSymbol resolveOperator(Name name, List<BType> types) {
ScopeEntry entry = symTable.rootScope.lookup(name);
return resolveOperator(entry, types);
}
BSymbol createBinaryComparisonOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BInvokableType opType = new BInvokableType(paramTypes, symTable.booleanType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
BSymbol createBinaryOperator(OperatorKind opKind, BType lhsType, BType rhsType, BType retType) {
List<BType> paramTypes = Lists.of(lhsType, rhsType);
BInvokableType opType = new BInvokableType(paramTypes, retType, null);
return new BOperatorSymbol(names.fromString(opKind.value()), null, opType, null, symTable.builtinPos, VIRTUAL);
}
public BSymbol resolvePkgSymbol(Location pos, SymbolEnv env, Name pkgAlias) {
if (pkgAlias == Names.EMPTY) {
return env.enclPkg.symbol;
}
BSymbol pkgSymbol = lookupSymbolInPrefixSpace(env, pkgAlias);
if (pkgSymbol == symTable.notFoundSymbol) {
dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
}
return pkgSymbol;
}
public BSymbol resolvePrefixSymbol(SymbolEnv env, Name pkgAlias, Name compUnit) {
if (pkgAlias == Names.EMPTY) {
return env.enclPkg.symbol;
}
ScopeEntry entry = env.scope.lookup(pkgAlias);
while (entry != NOT_FOUND_ENTRY) {
if ((entry.symbol.tag & SymTag.XMLNS) == SymTag.XMLNS) {
return entry.symbol;
}
if ((entry.symbol.tag & SymTag.IMPORT) == SymTag.IMPORT &&
((BPackageSymbol) entry.symbol).compUnit.equals(compUnit)) {
((BPackageSymbol) entry.symbol).isUsed = true;
return entry.symbol;
}
entry = entry.next;
}
if (env.enclEnv != null) {
return resolvePrefixSymbol(env.enclEnv, pkgAlias, compUnit);
}
return symTable.notFoundSymbol;
}
public BSymbol resolveAnnotation(Location pos, SymbolEnv env, Name pkgAlias, Name annotationName) {
return this.lookupAnnotationSpaceSymbolInPackage(pos, env, pkgAlias, annotationName);
}
public BSymbol resolveStructField(Location location, SymbolEnv env, Name fieldName,
BTypeSymbol structSymbol) {
return lookupMemberSymbol(location, structSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BSymbol resolveObjectField(Location location, SymbolEnv env, Name fieldName,
BTypeSymbol objectSymbol) {
return lookupMemberSymbol(location, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BSymbol resolveObjectMethod(Location pos, SymbolEnv env, Name fieldName,
BObjectTypeSymbol objectSymbol) {
return lookupMemberSymbol(pos, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env) {
return resolveTypeNode(typeNode, env, DiagnosticErrorCode.UNKNOWN_TYPE);
}
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env, DiagnosticCode diagCode) {
SymbolEnv prevEnv = this.env;
DiagnosticCode preDiagCode = this.diagCode;
this.env = env;
this.diagCode = diagCode;
typeNode.accept(this);
this.env = prevEnv;
this.diagCode = preDiagCode;
if (this.resultType != symTable.noType) {
if (typeNode.nullable && this.resultType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) this.resultType;
unionType.add(symTable.nilType);
} else if (typeNode.nullable && resultType.tag != TypeTags.JSON && resultType.tag != TypeTags.ANY) {
this.resultType = BUnionType.create(null, resultType, symTable.nilType);
}
}
typeNode.type = resultType;
return resultType;
}
/**
* Return the symbol associated with the given name in the current package. This method first searches the symbol in
* the current scope and proceeds the enclosing scope, if it is not there in the current scope. This process
* continues until the symbol is found or the root scope is reached. This method is mainly meant for checking
* whether a given symbol is already defined in the scope hierarchy.
*
* @param env current symbol environment
* @param name symbol name
* @param expSymTag expected symbol type/tag
* @return resolved symbol
*/
private BSymbol lookupSymbolForDecl(SymbolEnv env, Name name, int expSymTag) {
ScopeEntry entry = env.scope.lookup(name);
while (entry != NOT_FOUND_ENTRY) {
if ((entry.symbol.tag & expSymTag) == expSymTag) {
return entry.symbol;
}
entry = entry.next;
}
if (env.enclEnv != null) {
return lookupSymbol(env.enclEnv, name, expSymTag);
}
return symTable.notFoundSymbol;
}
/**
* Return the symbol associated with the given name in the current package. This method first searches the symbol in
* the current scope and proceeds the enclosing scope, if it is not there in the current scope. This process
* continues until the symbol is found or the root scope is reached. This method is meant for looking up a symbol
* when they are referenced. If looking up a symbol from within a record type definition, this method ignores record
* fields. This is done so that default value expressions cannot refer to other record fields.
*
* @param env current symbol environment
* @param name symbol name
* @param expSymTag expected symbol type/tag
* @return resolved symbol
*/
private BSymbol lookupSymbol(SymbolEnv env, Name name, int expSymTag) {
ScopeEntry entry = env.scope.lookup(name);
while (entry != NOT_FOUND_ENTRY) {
if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
return entry.symbol;
}
entry = entry.next;
}
if (env.enclEnv != null) {
return lookupSymbol(env.enclEnv, name, expSymTag);
}
return symTable.notFoundSymbol;
}
/**
* Checks whether the specified symbol is a symbol of a record field and whether that field is referred to from
* within a record type definition (not necessarily the owner of the field).
*
* @param symbol symbol to be tested
* @param env the environment in which the symbol was found
* @return returns `true` if the aboove described condition holds
*/
private boolean isFieldRefFromWithinARecord(BSymbol symbol, SymbolEnv env) {
return (symbol.owner.tag & SymTag.RECORD) == SymTag.RECORD &&
env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE;
}
public BSymbol lookupSymbolInMainSpace(SymbolEnv env, Name name) {
return lookupSymbol(env, name, SymTag.MAIN);
}
public BSymbol lookupSymbolInAnnotationSpace(SymbolEnv env, Name name) {
return lookupSymbol(env, name, SymTag.ANNOTATION);
}
public BSymbol lookupSymbolInPrefixSpace(SymbolEnv env, Name name) {
return lookupSymbol(env, name, SymTag.IMPORT);
}
public BSymbol lookupSymbolInConstructorSpace(SymbolEnv env, Name name) {
return lookupSymbol(env, name, SymTag.CONSTRUCTOR);
}
public BSymbol lookupLangLibMethod(BType type, Name name) {
if (symTable.langAnnotationModuleSymbol == null) {
return symTable.notFoundSymbol;
}
BSymbol bSymbol;
switch (type.tag) {
case TypeTags.ARRAY:
case TypeTags.TUPLE:
bSymbol = lookupMethodInModule(symTable.langArrayModuleSymbol, name, env);
break;
case TypeTags.DECIMAL:
bSymbol = lookupMethodInModule(symTable.langDecimalModuleSymbol, name, env);
break;
case TypeTags.ERROR:
bSymbol = lookupMethodInModule(symTable.langErrorModuleSymbol, name, env);
break;
case TypeTags.FLOAT:
bSymbol = lookupMethodInModule(symTable.langFloatModuleSymbol, name, env);
break;
case TypeTags.FUTURE:
bSymbol = lookupMethodInModule(symTable.langFutureModuleSymbol, name, env);
break;
case TypeTags.INT:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
bSymbol = lookupMethodInModule(symTable.langIntModuleSymbol, name, env);
break;
case TypeTags.MAP:
case TypeTags.RECORD:
bSymbol = lookupMethodInModule(symTable.langMapModuleSymbol, name, env);
break;
case TypeTags.OBJECT:
bSymbol = lookupMethodInModule(symTable.langObjectModuleSymbol, name, env);
break;
case TypeTags.STREAM:
bSymbol = lookupMethodInModule(symTable.langStreamModuleSymbol, name, env);
break;
case TypeTags.TABLE:
bSymbol = lookupMethodInModule(symTable.langTableModuleSymbol, name, env);
break;
case TypeTags.STRING:
case TypeTags.CHAR_STRING:
bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
break;
case TypeTags.TYPEDESC:
bSymbol = lookupMethodInModule(symTable.langTypedescModuleSymbol, name, env);
break;
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
case TypeTags.XML_COMMENT:
case TypeTags.XML_PI:
bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
break;
case TypeTags.XML_TEXT:
bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
if (bSymbol == symTable.notFoundSymbol) {
bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
}
break;
case TypeTags.BOOLEAN:
bSymbol = lookupMethodInModule(symTable.langBooleanModuleSymbol, name, env);
break;
case TypeTags.UNION:
Iterator<BType> itr = ((BUnionType) type).getMemberTypes().iterator();
if (!itr.hasNext()) {
throw new IllegalArgumentException(
format("Union type '%s' does not have member types", type.toString()));
}
BType member = itr.next();
if (types.isSubTypeOfBaseType(type, member.tag)) {
bSymbol = lookupLangLibMethod(member, name);
} else {
bSymbol = symTable.notFoundSymbol;
}
break;
default:
bSymbol = symTable.notFoundSymbol;
}
if (bSymbol == symTable.notFoundSymbol && type.tag != TypeTags.OBJECT) {
bSymbol = lookupMethodInModule(symTable.langValueModuleSymbol, name, env);
}
if (bSymbol == symTable.notFoundSymbol) {
bSymbol = lookupMethodInModule(symTable.langInternalModuleSymbol, name, env);
}
return bSymbol;
}
/**
 * Recursively analyse the symbol env chain to find the closure variable symbol that is
 * being resolved. The current scope's entry chain is searched first; only when it is
 * exhausted does the search move outward to the enclosing environment.
 *
 * @param env symbol env to analyse and find the closure variable.
 * @param name name of the symbol to lookup
 * @param expSymTag expected symbol tag
 * @return the matching symbol, or {@code symTable.notFoundSymbol} when nothing is visible
 */
public BSymbol lookupClosureVarSymbol(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // A variable-name symbol belonging to the root package is returned unconditionally.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
                (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        // Otherwise the tag must match, and field references from within a record are excluded.
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    // No enclosing environment (or a detached one) terminates the search.
    if (env.enclEnv == null || env.enclEnv.node == null) {
        return symTable.notFoundSymbol;
    }
    return lookupClosureVarSymbol(env.enclEnv, name, expSymTag);
}
/**
 * Resolves a symbol from the main symbol space, honouring an optional module prefix.
 *
 * @param pos      location used for diagnostics and prefix resolution
 * @param env      symbol environment to search
 * @param pkgAlias module prefix; {@code Names.EMPTY} means the current module
 * @param name     symbol name to resolve
 * @return the resolved symbol, or the not-found sentinel
 */
public BSymbol lookupMainSpaceSymbolInPackage(Location pos,
                                              SymbolEnv env,
                                              Name pkgAlias,
                                              Name name) {
    if (pkgAlias == Names.EMPTY) {
        // Unqualified reference: search the current environment chain directly.
        return lookupSymbolInMainSpace(env, name);
    }
    // Resolve the module prefix relative to the compilation unit at this position.
    Name compUnit = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSym = resolvePrefixSymbol(env, pkgAlias, compUnit);
    if (moduleSym == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSym;
    }
    return lookupMemberSymbol(pos, moduleSym.scope, env, name, SymTag.MAIN);
}
/**
 * Resolves a symbol from the prefix (import) symbol space, honouring an optional module prefix.
 *
 * @param pos      location used for diagnostics and prefix resolution
 * @param env      symbol environment to search
 * @param pkgAlias module prefix; {@code Names.EMPTY} means the current module
 * @param name     symbol name to resolve
 * @return the resolved symbol, or the not-found sentinel
 */
public BSymbol lookupPrefixSpaceSymbolInPackage(Location pos,
                                                SymbolEnv env,
                                                Name pkgAlias,
                                                Name name) {
    if (pkgAlias == Names.EMPTY) {
        // No module qualifier: resolve against the current prefix space.
        return lookupSymbolInPrefixSpace(env, name);
    }
    Name compUnit = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSym = resolvePrefixSymbol(env, pkgAlias, compUnit);
    if (moduleSym == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSym;
    }
    return lookupMemberSymbol(pos, moduleSym.scope, env, name, SymTag.IMPORT);
}
/**
 * Resolves a symbol from the annotation symbol space, honouring an optional module prefix.
 *
 * @param pos      location used for diagnostics and prefix resolution
 * @param env      symbol environment to search
 * @param pkgAlias module prefix; {@code Names.EMPTY} means the current module
 * @param name     annotation name to resolve
 * @return the resolved symbol, or the not-found sentinel
 */
public BSymbol lookupAnnotationSpaceSymbolInPackage(Location pos,
                                                    SymbolEnv env,
                                                    Name pkgAlias,
                                                    Name name) {
    if (pkgAlias == Names.EMPTY) {
        // No module qualifier: resolve against the current annotation space.
        return lookupSymbolInAnnotationSpace(env, name);
    }
    Name compUnit = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSym = resolvePrefixSymbol(env, pkgAlias, compUnit);
    if (moduleSym == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSym;
    }
    return lookupMemberSymbol(pos, moduleSym.scope, env, name, SymTag.ANNOTATION);
}
/**
 * Resolves a symbol from the constructor symbol space, honouring an optional module prefix.
 *
 * @param pos      location used for diagnostics and prefix resolution
 * @param env      symbol environment to search
 * @param pkgAlias module prefix; {@code Names.EMPTY} means the current module
 * @param name     constructor name to resolve
 * @return the resolved symbol, or the not-found sentinel
 */
public BSymbol lookupConstructorSpaceSymbolInPackage(Location pos,
                                                     SymbolEnv env,
                                                     Name pkgAlias,
                                                     Name name) {
    if (pkgAlias == Names.EMPTY) {
        // No module qualifier: resolve against the current constructor space.
        return lookupSymbolInConstructorSpace(env, name);
    }
    Name compUnit = names.fromString(pos.lineRange().filePath());
    BSymbol moduleSym = resolvePrefixSymbol(env, pkgAlias, compUnit);
    if (moduleSym == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSym;
    }
    return lookupMemberSymbol(pos, moduleSym.scope, env, name, SymTag.CONSTRUCTOR);
}
/**
 * Looks up a function symbol by name inside the given module's scope.
 *
 * @param moduleSymbol module whose scope is searched
 * @param name         function name
 * @param env          environment used for the visibility check
 * @return the function symbol if it exists and is accessible; otherwise the not-found sentinel
 */
public BSymbol lookupMethodInModule(BPackageSymbol moduleSymbol, Name name, SymbolEnv env) {
    for (ScopeEntry entry = moduleSymbol.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        BSymbol candidate = entry.symbol;
        if ((candidate.tag & SymTag.FUNCTION) != SymTag.FUNCTION) {
            continue;
        }
        // The first function entry decides the outcome: either it is accessible or lookup fails.
        return isMemberAccessAllowed(env, candidate) ? candidate : symTable.notFoundSymbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Return the symbol with the given name.
 * This method only looks at the symbols defined in the given scope (no enclosing
 * environments are searched). An inaccessible match produces a diagnostic.
 *
 * @param pos diagnostic position
 * @param scope current scope
 * @param env symbol environment
 * @param name symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupMemberSymbol(Location pos,
                                  Scope scope,
                                  SymbolEnv env,
                                  Name name,
                                  int expSymTag) {
    ScopeEntry entry = scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip entries whose tag does not match the expected kind.
        if ((entry.symbol.tag & expSymTag) != expSymTag) {
            entry = entry.next;
            continue;
        }
        if (isMemberAccessAllowed(env, entry.symbol)) {
            return entry.symbol;
        } else {
            // Matching but inaccessible symbol: report and stop searching.
            dlog.error(pos, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, entry.symbol.name);
            return symTable.notFoundSymbol;
        }
    }
    return symTable.notFoundSymbol;
}
/**
 * Resolve and return the XML namespaces visible to the given environment, as a map.
 * Inner declarations shadow outer ones for the same prefix.
 *
 * @param env Environment to get the visible namespaces
 * @return Map of namespace symbols visible to the given environment, keyed by prefix
 */
public Map<Name, BXMLNSSymbol> resolveAllNamespaces(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = new LinkedHashMap<>();
    addNamespacesInScope(visibleNamespaces, env);
    return visibleNamespaces;
}
/**
 * Initialises {@code symTable.errorType} and {@code symTable.detailType} from the
 * built-in 'error' definition in the root package.
 * NOTE(review): the method name has a typo ("boostrap" for "bootstrap"); it is kept
 * unchanged because external callers depend on it.
 *
 * @throws IllegalStateException if the built-in error type cannot be found
 */
public void boostrapErrorType() {
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.ERROR);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip any non-type entries that share the ERROR name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        symTable.errorType = (BErrorType) entry.symbol.type;
        symTable.detailType = (BMapType) symTable.errorType.detailType;
        return;
    }
    throw new IllegalStateException("built-in error not found ?");
}
/** Delegates built-in operator definition to the symbol table. */
public void defineOperators() {
    symTable.defineOperators();
}
/**
 * Bootstraps the built-in 'anydata' type from the lang.annotations module: wraps the
 * union found under the ANYDATA name in a {@code BAnydataType}, rewires the symbol to
 * it, and caches the derived {@code anydata|readonly} union in the symbol table.
 *
 * @throws IllegalStateException if the built-in 'anydata' definition cannot be found
 */
public void bootstrapAnydataType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.ANYDATA);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip non-type entries that share the same name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.anydataType = new BAnydataType(type);
        symTable.anydataOrReadonly = BUnionType.create(null, symTable.anydataType, symTable.readonlyType);
        // Point the original symbol at the wrapper type and mark it as built-in.
        entry.symbol.type = symTable.anydataType;
        entry.symbol.origin = BUILTIN;
        symTable.anydataType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.ANYDATA,
                PackageID.ANNOTATIONS, symTable.anydataType, symTable.rootPkgSymbol, symTable.builtinPos, BUILTIN);
        return;
    }
    throw new IllegalStateException("built-in 'anydata' type not found");
}
/**
 * Bootstraps the built-in 'json' type from the lang.annotations module: wraps the union
 * found under the JSON name in a {@code BJSONType}, gives it a fresh type symbol, and
 * rewires the original symbol to the wrapper.
 *
 * @throws IllegalStateException if the built-in 'json' definition cannot be found
 */
public void bootstrapJsonType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.JSON);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip non-type entries that share the same name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.jsonType = new BJSONType(type);
        symTable.jsonType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.JSON, PackageID.ANNOTATIONS,
                symTable.jsonType, symTable.langAnnotationModuleSymbol, symTable.builtinPos, BUILTIN);
        entry.symbol.type = symTable.jsonType;
        entry.symbol.origin = BUILTIN;
        return;
    }
    throw new IllegalStateException("built-in 'json' type not found");
}
/**
 * Bootstraps the 'lang.value:Cloneable' type and, from it, a set of derived symbol-table
 * types (error detail, error, error?, any|error, map/array of any|error, typedesc and
 * future constraints, pure type). When the lang.value module is not yet loaded, only the
 * internal CLONEABLE placeholder in the root package is rewired.
 *
 * @throws IllegalStateException if lang.value is loaded but Cloneable cannot be found
 */
public void bootstrapCloneableType() {
    if (symTable.langValueModuleSymbol != null) {
        ScopeEntry entry = symTable.langValueModuleSymbol.scope.lookup(Names.CLONEABLE);
        while (entry != NOT_FOUND_ENTRY) {
            // Skip non-type entries that share the CLONEABLE name.
            if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
                entry = entry.next;
                continue;
            }
            symTable.cloneableType = (BUnionType) entry.symbol.type;
            symTable.cloneableType.tsymbol =
                    new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.CLONEABLE,
                            PackageID.VALUE, symTable.cloneableType, symTable.langValueModuleSymbol,
                            symTable.builtinPos, BUILTIN);
            // Error detail is a map of Cloneable; the error type is rebuilt on top of it.
            symTable.detailType = new BMapType(TypeTags.MAP, symTable.cloneableType, null);
            symTable.errorType = new BErrorType(null, symTable.detailType);
            symTable.errorType.tsymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,
                    symTable.rootPkgSymbol.pkgID, symTable.errorType, symTable.rootPkgSymbol, symTable.builtinPos
                    , BUILTIN);
            // Derived convenience types used throughout type checking.
            symTable.errorOrNilType = BUnionType.create(null, symTable.errorType, symTable.nilType);
            symTable.anyOrErrorType = BUnionType.create(null, symTable.anyType, symTable.errorType);
            symTable.mapAllType = new BMapType(TypeTags.MAP, symTable.anyOrErrorType, null);
            symTable.arrayAllType = new BArrayType(symTable.anyOrErrorType);
            symTable.typeDesc.constraint = symTable.anyOrErrorType;
            symTable.futureType.constraint = symTable.anyOrErrorType;
            symTable.pureType = BUnionType.create(null, symTable.anydataType, symTable.errorType);
            return;
        }
        throw new IllegalStateException("built-in 'lang.value:Cloneable' type not found");
    }
    // lang.value not loaded yet: rewire the internal placeholder only.
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.CLONEABLE_INTERNAL);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        entry.symbol.type = symTable.cloneableType;
        break;
    }
}
/**
 * Bootstraps the built-in integer-range object type from the return type of the
 * internal {@code createIntRange} function, then registers the range operations.
 *
 * @throws IllegalStateException if the internal createIntRange function is missing
 */
public void bootstrapIntRangeType() {
    for (ScopeEntry entry = symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE) {
            BInvokableType createFnType = (BInvokableType) entry.symbol.type;
            symTable.intRangeType = (BObjectType) createFnType.retType;
            symTable.defineIntRangeOperations();
            return;
        }
    }
    throw new IllegalStateException("built-in Integer Range type not found ?");
}
/**
 * Bootstraps the distinct 'lang.object:Iterable' object type into the symbol table.
 *
 * @throws IllegalStateException if the Iterable type definition cannot be found
 */
public void bootstrapIterableType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.OBJECT_ITERABLE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.iterableType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("built-in distinct Iterable type not found ?");
}
/**
 * Loads the 'lang.object:RawTemplate' object type into the symbol table.
 *
 * @throws IllegalStateException if the RawTemplate type definition cannot be found
 */
public void loadRawTemplateType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.RAW_TEMPLATE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        if ((entry.symbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symTable.rawTemplateType = (BObjectType) entry.symbol.type;
            return;
        }
    }
    throw new IllegalStateException("'lang.object:RawTemplate' type not found");
}
/** Resolves a built-in value type node (int, string, boolean, ...) via its type kind. */
public void visit(BLangValueType valueTypeNode) {
    visitBuiltInTypeNode(valueTypeNode, valueTypeNode.typeKind, this.env);
}
/** Resolves a built-in reference type node (map, future, typedesc, ...) via its type kind. */
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    visitBuiltInTypeNode(builtInRefType, builtInRefType.typeKind, this.env);
}
/**
 * Resolves a union type node by resolving each member type node and assembling a
 * {@code BUnionType}. Any unresolved member short-circuits the whole union to noType.
 */
public void visit(BLangUnionTypeNode unionTypeNode) {
    LinkedHashSet<BType> resolvedMembers = new LinkedHashSet<>();
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            // One member failed to resolve; the union cannot be formed.
            resultType = symTable.noType;
            return;
        }
        resolvedMembers.add(memberType);
    }
    BTypeSymbol unionSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, unionTypeNode.pos, SOURCE);
    BUnionType resolvedUnion = BUnionType.create(unionSymbol, resolvedMembers);
    unionSymbol.type = resolvedUnion;
    // Propagate the PARAMETERIZED flag when any member is parameterized.
    markParameterizedType(resolvedUnion, resolvedMembers);
    resultType = resolvedUnion;
}
/** Resolves an intersection type node; the actual computation lives in computeIntersectionType. */
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
    resultType = computeIntersectionType(intersectionTypeNode);
}
/**
 * Resolves an object type node into a fresh {@code BObjectType} with its own symbol.
 * Anonymous object types are implicitly public; READONLY/ISOLATED/SERVICE declaration
 * flags are mirrored onto the type's flag word.
 */
public void visit(BLangObjectTypeNode objectTypeNode) {
    EnumSet<Flag> objectFlags = EnumSet.copyOf(objectTypeNode.flagSet);
    if (objectTypeNode.isAnonymous) {
        objectFlags.add(Flag.PUBLIC);
    }
    // Mirror the relevant declaration flags onto the type's flag word.
    int typeFlags = 0;
    if (objectFlags.contains(Flag.READONLY)) {
        typeFlags |= Flags.READONLY;
    }
    if (objectFlags.contains(Flag.ISOLATED)) {
        typeFlags |= Flags.ISOLATED;
    }
    if (objectFlags.contains(Flag.SERVICE)) {
        typeFlags |= Flags.SERVICE;
    }
    BTypeSymbol objectSymbol = Symbols.createObjectSymbol(Flags.asMask(objectFlags), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner, objectTypeNode.pos, SOURCE);
    BObjectType objectType = new BObjectType(objectSymbol, typeFlags);
    // Wire type and symbol to each other and to the AST node.
    objectSymbol.type = objectType;
    objectTypeNode.symbol = objectSymbol;
    resultType = objectType;
}
/**
 * Resolves a record type node. A node that already carries a symbol simply reuses its
 * type; otherwise a fresh record symbol/type pair is created. For non-package-level
 * (local) records an anonymous name is generated and the symbol and node are defined
 * into the current environment immediately.
 */
public void visit(BLangRecordTypeNode recordTypeNode) {
    if (recordTypeNode.symbol == null) {
        EnumSet<Flag> flags = recordTypeNode.isAnonymous ? EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS)
                : EnumSet.noneOf(Flag.class);
        BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
                env.enclPkg.symbol.pkgID, null,
                env.scope.owner, recordTypeNode.pos,
                recordTypeNode.isAnonymous ? VIRTUAL : SOURCE);
        BRecordType recordType = new BRecordType(recordSymbol);
        recordSymbol.type = recordType;
        recordTypeNode.symbol = recordSymbol;
        // Local (non-package-level) record: name it anonymously and define it eagerly
        // so subsequent references within this scope resolve.
        if (env.node.getKind() != NodeKind.PACKAGE) {
            recordSymbol.name = names.fromString(
                    anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
            symbolEnter.defineSymbol(recordTypeNode.pos, recordTypeNode.symbol, env);
            symbolEnter.defineNode(recordTypeNode, env);
        }
        resultType = recordType;
    } else {
        // Already resolved earlier; reuse the attached symbol's type.
        resultType = recordTypeNode.symbol.type;
    }
}
/**
 * Resolves a stream type node into a {@code BStreamType} carrying the constraint and
 * error (completion) components. The error component defaults to nil when no explicit
 * error type is declared, so it is always non-null past the ternary below.
 */
public void visit(BLangStreamType streamTypeNode) {
    BType type = resolveTypeNode(streamTypeNode.type, env);
    BType constraintType = resolveTypeNode(streamTypeNode.constraint, env);
    // No explicit error component means the stream completes with nil.
    BType error = streamTypeNode.error != null ? resolveTypeNode(streamTypeNode.error, env) : symTable.nilType;
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BType streamType = new BStreamType(TypeTags.STREAM, constraintType, error, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    streamType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
                                                  typeSymbol.pkgID, streamType, typeSymbol.owner,
                                                  streamTypeNode.pos, SOURCE);
    markParameterizedType(streamType, constraintType);
    // Fix: the previous 'if (error != null)' guard was dead code — 'error' can never be
    // null here because the ternary above always yields a type.
    markParameterizedType(streamType, error);
    resultType = streamType;
}
/**
 * Resolves a table type node into a {@code BTableType}, including its row constraint
 * and either a key type constraint ({@code table<T> key<K>}) or an explicit key
 * specifier field list ({@code table<T> key(a, b)}).
 */
public void visit(BLangTableTypeNode tableTypeNode) {
    BType type = resolveTypeNode(tableTypeNode.type, env);
    BType constraintType = resolveTypeNode(tableTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    tableType.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.noneOf(Flag.class)),
                                                 typeSymbol.name, env.enclPkg.symbol.pkgID, tableType,
                                                 env.scope.owner, tableTypeNode.pos, SOURCE);
    // Copy the base table type symbol's flags onto the fresh symbol.
    tableType.tsymbol.flags = typeSymbol.flags;
    tableType.constraintPos = tableTypeNode.constraint.pos;
    tableType.isTypeInlineDefined = tableTypeNode.isTypeInlineDefined;
    if (tableTypeNode.tableKeyTypeConstraint != null) {
        // 'key<K>' form: the key is constrained by a type.
        tableType.keyTypeConstraint = resolveTypeNode(tableTypeNode.tableKeyTypeConstraint.keyType, env);
        tableType.keyPos = tableTypeNode.tableKeyTypeConstraint.pos;
    } else if (tableTypeNode.tableKeySpecifier != null) {
        // 'key(a, b)' form: the key is a list of field names.
        BLangTableKeySpecifier tableKeySpecifier = tableTypeNode.tableKeySpecifier;
        List<String> fieldNameList = new ArrayList<>();
        for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
            fieldNameList.add(((BLangIdentifier) identifier).value);
        }
        tableType.fieldNameList = fieldNameList;
        tableType.keyPos = tableKeySpecifier.pos;
    }
    markParameterizedType(tableType, constraintType);
    tableTypeNode.tableType = tableType;
    resultType = tableType;
}
/**
 * Resolves a finite type node by collecting its value-space literals into a fresh
 * {@code BFiniteType}. Each literal's type is normalised to the canonical
 * symbol-table instance for its tag before being added.
 */
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    BTypeSymbol finiteSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE,
            Flags.asMask(EnumSet.noneOf(Flag.class)), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner,
            finiteTypeNode.pos, SOURCE);
    BFiniteType finiteType = new BFiniteType(finiteSymbol);
    for (BLangExpression valueExpr : finiteTypeNode.valueSpace) {
        BLangLiteral literal = (BLangLiteral) valueExpr;
        // Normalise to the shared symbol-table type instance for this tag.
        literal.type = symTable.getTypeFromTag(literal.type.tag);
        finiteType.addValue(literal);
    }
    finiteSymbol.type = finiteType;
    resultType = finiteType;
}
/**
 * Resolves a tuple type node by resolving every member type and the optional rest
 * type. Any unresolved member short-circuits the tuple to noType.
 */
public void visit(BLangTupleTypeNode tupleTypeNode) {
    List<BType> resolvedMembers = new ArrayList<>();
    for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            // A member failed to resolve; the tuple cannot be formed.
            resultType = symTable.noType;
            return;
        }
        resolvedMembers.add(memberType);
    }
    BTypeSymbol tupleSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
            Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, tupleTypeNode.pos, SOURCE);
    BTupleType tupleType = new BTupleType(tupleSymbol, resolvedMembers);
    tupleSymbol.type = tupleType;
    // The rest component (e.g. [int, string...]) is resolved separately when present.
    if (tupleTypeNode.restParamType != null) {
        tupleType.restType = resolveTypeNode(tupleTypeNode.restParamType, env);
        markParameterizedType(tupleType, tupleType.restType);
    }
    markParameterizedType(tupleType, resolvedMembers);
    resultType = tupleType;
}
/**
 * Resolves an error type node. A plain {@code error} (default detail, not distinct,
 * outside the annotations module) reuses the shared symbol-table error type; anything
 * else gets a fresh {@code BErrorType} with its own symbol.
 */
public void visit(BLangErrorType errorTypeNode) {
    BType detailType = Optional.ofNullable(errorTypeNode.detailType)
            .map(bLangType -> resolveTypeNode(bLangType, env)).orElse(symTable.detailType);
    boolean distinctErrorDef = errorTypeNode.flagSet.contains(Flag.DISTINCT);
    // Fast path: the canonical 'error' type can be shared.
    if (detailType == symTable.detailType && !distinctErrorDef &&
            !this.env.enclPkg.packageID.equals(PackageID.ANNOTATIONS)) {
        resultType = symTable.errorType;
        return;
    }
    BErrorTypeSymbol errorTypeSymbol = Symbols
            .createErrorSymbol(Flags.asMask(errorTypeNode.flagSet), Names.EMPTY, env.enclPkg.symbol.pkgID,
                    null, env.scope.owner, errorTypeNode.pos, SOURCE);
    BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
    errorType.flags |= errorTypeSymbol.flags;
    errorTypeSymbol.type = errorType;
    markParameterizedType(errorType, detailType);
    // A freshly resolved error type starts with an empty distinct-type id set.
    errorType.typeIdSet = BTypeIdSet.emptySet();
    resultType = errorType;
}
/**
 * Resolves a constrained type node ({@code future<T>}, {@code map<T>},
 * {@code typedesc<T>}, {@code xml<T>}) by building the corresponding constrained
 * BType around the resolved constraint. XML constraints are additionally validated
 * to be XML subtypes.
 */
public void visit(BLangConstrainedType constrainedTypeNode) {
    BType type = resolveTypeNode(constrainedTypeNode.type, env);
    BType constraintType = resolveTypeNode(constrainedTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }
    BType constrainedType = null;
    if (type.tag == TypeTags.FUTURE) {
        constrainedType = new BFutureType(TypeTags.FUTURE, constraintType, null);
    } else if (type.tag == TypeTags.MAP) {
        constrainedType = new BMapType(TypeTags.MAP, constraintType, null);
    } else if (type.tag == TypeTags.TYPEDESC) {
        constrainedType = new BTypedescType(constraintType, null);
    } else if (type.tag == TypeTags.XML) {
        // For a parameterized constraint the underlying typedesc constraint is validated.
        if (constraintType.tag == TypeTags.PARAMETERIZED_TYPE) {
            BType typedescType = ((BParameterizedType) constraintType).paramSymbol.type;
            BType typedescConstraint = ((BTypedescType) typedescType).constraint;
            validateXMLConstraintType(typedescConstraint, constrainedTypeNode.pos);
        } else {
            validateXMLConstraintType(constraintType, constrainedTypeNode.pos);
        }
        constrainedType = new BXMLType(constraintType, null);
    } else {
        // NOTE(review): bailing out here leaves 'resultType' at whatever it held before —
        // presumably the base type resolved above; confirm callers handle this path.
        return;
    }
    BTypeSymbol typeSymbol = type.tsymbol;
    constrainedType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
                                                       typeSymbol.pkgID, constrainedType, typeSymbol.owner,
                                                       constrainedTypeNode.pos, SOURCE);
    markParameterizedType(constrainedType, constraintType);
    resultType = constrainedType;
}
/**
 * Validates that an {@code xml<...>} constraint is an XML subtype (or 'never').
 * Unions are checked member-wise; any other non-XML type produces a diagnostic.
 *
 * @param constraintType resolved constraint type
 * @param pos            position for diagnostics
 */
private void validateXMLConstraintType(BType constraintType, Location pos) {
    int constraintTag = constraintType.tag;
    if (constraintTag == TypeTags.UNION) {
        checkUnionTypeForXMLSubTypes((BUnionType) constraintType, pos);
    } else if (!TypeTags.isXMLTypeTag(constraintTag) && constraintTag != TypeTags.NEVER) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType, constraintType);
    }
}
/**
 * Reports an error for each member of an {@code xml<...>} constraint union that is not
 * an XML subtype. Nested unions are validated recursively.
 *
 * @param constraintUnionType union type used as an XML constraint
 * @param pos                 position for diagnostics
 */
private void checkUnionTypeForXMLSubTypes(BUnionType constraintUnionType, Location pos) {
    for (BType memberType : constraintUnionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.UNION) {
            checkUnionTypeForXMLSubTypes((BUnionType) memberType, pos);
            // Fix: previously a nested union also fell through to the tag check below,
            // which always failed (UNION is not an XML tag) and logged a spurious error
            // even when every nested member was a valid XML subtype.
            continue;
        }
        if (!TypeTags.isXMLTypeTag(memberType.tag)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType,
                    constraintUnionType);
        }
    }
}
/**
 * Resolves a user-defined type reference. Resolution tries, in order: the annotation
 * space (when the owner is an annotation), the main symbol space, a dependently-typed
 * function's typedesc parameter (producing a parameterized type), and finally the root
 * scope. Unresolvable references yield semanticError, with a diagnostic unless the
 * node is a missing/unknown placeholder.
 */
public void visit(BLangUserDefinedType userDefinedTypeNode) {
    Name pkgAlias = names.fromIdNode(userDefinedTypeNode.pkgAlias);
    Name typeName = names.fromIdNode(userDefinedTypeNode.typeName);
    BSymbol symbol = symTable.notFoundSymbol;
    // Annotation owners resolve against the annotation space first.
    if (env.scope.owner.tag == SymTag.ANNOTATION) {
        symbol = lookupAnnotationSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
    }
    if (symbol == symTable.notFoundSymbol) {
        BSymbol tempSymbol = lookupMainSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
        if ((tempSymbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symbol = tempSymbol;
        } else if (Symbols.isTagOn(tempSymbol, SymTag.VARIABLE) && env.node.getKind() == NodeKind.FUNCTION) {
            // The name resolved to a variable inside a function: this may be a
            // dependently-typed (typedesc-parameterized) return type.
            BLangFunction func = (BLangFunction) env.node;
            boolean errored = false;
            // Only external functions may be dependently typed.
            if (func.returnTypeNode == null ||
                    (func.hasBody() && func.body.getKind() != NodeKind.EXTERN_FUNCTION_BODY)) {
                dlog.error(userDefinedTypeNode.pos,
                        DiagnosticErrorCode.INVALID_NON_EXTERNAL_DEPENDENTLY_TYPED_FUNCTION);
                errored = true;
            }
            // The referenced parameter must be a typedesc.
            if (tempSymbol.type != null && tempSymbol.type.tag != TypeTags.TYPEDESC) {
                dlog.error(userDefinedTypeNode.pos, DiagnosticErrorCode.INVALID_PARAM_TYPE_FOR_RETURN_TYPE,
                        tempSymbol.type);
                errored = true;
            }
            if (errored) {
                this.resultType = symTable.semanticError;
                return;
            }
            ParameterizedTypeInfo parameterizedTypeInfo =
                    getTypedescParamValueType(func.requiredParams, tempSymbol);
            BType paramValType = parameterizedTypeInfo == null ? null : parameterizedTypeInfo.paramValueType;
            if (paramValType == symTable.semanticError) {
                this.resultType = symTable.semanticError;
                return;
            }
            if (paramValType != null) {
                // Build a parameterized type bound to the typedesc parameter.
                BTypeSymbol tSymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | tempSymbol.flags,
                        tempSymbol.name, tempSymbol.pkgID, null, func.symbol,
                        tempSymbol.pos, VIRTUAL);
                tSymbol.type = new BParameterizedType(paramValType, (BVarSymbol) tempSymbol,
                        tSymbol, tempSymbol.name, parameterizedTypeInfo.index);
                tSymbol.type.flags |= Flags.PARAMETERIZED;
                this.resultType = tSymbol.type;
                userDefinedTypeNode.symbol = tSymbol;
                return;
            }
        }
    }
    // Last resort: root-scope variable-name lookup.
    if (symbol == symTable.notFoundSymbol) {
        symbol = lookupMemberSymbol(userDefinedTypeNode.pos, symTable.rootScope, this.env, typeName,
                SymTag.VARIABLE_NAME);
    }
    if (this.env.logErrors && symbol == symTable.notFoundSymbol) {
        // Suppress the diagnostic for placeholder/missing nodes to avoid noise.
        if (!missingNodesHelper.isMissingNode(pkgAlias) && !missingNodesHelper.isMissingNode(typeName) &&
                !symbolEnter.isUnknownTypeRef(userDefinedTypeNode)) {
            dlog.error(userDefinedTypeNode.pos, diagCode, typeName);
        }
        resultType = symTable.semanticError;
        return;
    }
    userDefinedTypeNode.symbol = symbol;
    resultType = symbol.type;
}
/**
 * Finds the typedesc parameter matching the given variable symbol among a function's
 * required parameters and derives the value type it parameterizes the return type with.
 * Returns null when no parameter matches; returns a semanticError-typed info when the
 * parameter's default value is not a usable typedesc expression.
 *
 * @param params required parameters of the enclosing function
 * @param varSym the typedesc parameter symbol referenced from the return type
 * @return the parameterized-type info, or null when no parameter matches by name
 */
private ParameterizedTypeInfo getTypedescParamValueType(List<BLangSimpleVariable> params, BSymbol varSym) {
    for (int i = 0; i < params.size(); i++) {
        BLangSimpleVariable param = params.get(i);
        if (param.name.value.equals(varSym.name.value)) {
            // No default or an inferred typedesc default: use the typedesc's constraint.
            if (param.expr == null || param.expr.getKind() == NodeKind.INFER_TYPEDESC_EXPR) {
                return new ParameterizedTypeInfo(((BTypedescType) varSym.type).constraint, i);
            }
            NodeKind defaultValueExprKind = param.expr.getKind();
            // Explicit typedesc default (e.g. 'td = int'): resolve the type node.
            if (defaultValueExprKind == NodeKind.TYPEDESC_EXPRESSION) {
                return new ParameterizedTypeInfo(
                        resolveTypeNode(((BLangTypedescExpr) param.expr).typeNode, this.env), i);
            }
            // Default referring to another symbol: resolve it from the main space.
            if (defaultValueExprKind == NodeKind.SIMPLE_VARIABLE_REF) {
                Name varName = names.fromIdNode(((BLangSimpleVarRef) param.expr).variableName);
                BSymbol typeRefSym = lookupSymbolInMainSpace(this.env, varName);
                if (typeRefSym != symTable.notFoundSymbol) {
                    return new ParameterizedTypeInfo(typeRefSym.type, i);
                }
                return new ParameterizedTypeInfo(symTable.semanticError);
            }
            // Any other default expression form is invalid for a typedesc parameter.
            dlog.error(param.pos, DiagnosticErrorCode.INVALID_TYPEDESC_PARAM);
            return new ParameterizedTypeInfo(symTable.semanticError);
        }
    }
    return null;
}
/**
 * Resolves a function type node into a {@code BInvokableType}, validating any
 * infer-typedesc parameters against the return type.
 */
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    List<BLangVariable> params = functionTypeNode.getParams();
    Location pos = functionTypeNode.pos;
    BLangType returnTypeNode = functionTypeNode.returnTypeNode;
    BType invokableType = createInvokableType(params, functionTypeNode.restParam, returnTypeNode,
            Flags.asMask(functionTypeNode.flagSet), env, pos);
    // Infer-typedesc params are only valid in certain dependently-typed shapes.
    resultType = validateInferTypedescParams(pos, params, returnTypeNode == null ? null : returnTypeNode.type) ?
            invokableType : symTable.semanticError;
}
/**
 * Builds a {@code BInvokableType} (and its symbol) from parameter variables, an
 * optional rest parameter, and a return type node. An 'any function' type
 * ({@code function}) carries null parameter/return information. Duplicate parameter
 * names and required-after-defaultable parameters are reported as errors. Returns
 * noType as soon as any component type fails to resolve.
 *
 * @param paramVars    required/defaultable parameter variables
 * @param restVariable rest parameter variable, or null
 * @param retTypeVar   return type node
 * @param flags        invokable flags mask
 * @param env          resolution environment
 * @param location     position for the created symbol
 * @return the invokable type, or symTable.noType on resolution failure
 */
public BType createInvokableType(List<? extends BLangVariable> paramVars,
                                 BLangVariable restVariable,
                                 BLangType retTypeVar,
                                 long flags,
                                 SymbolEnv env,
                                 Location location) {
    List<BType> paramTypes = new ArrayList<>();
    List<BVarSymbol> params = new ArrayList<>();
    boolean foundDefaultableParam = false;
    List<String> paramNames = new ArrayList<>();
    // 'function' (any function type) has no signature details at all.
    if (Symbols.isFlagOn(flags, Flags.ANY_FUNCTION)) {
        BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
        bInvokableType.flags = flags;
        BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
                                                                         env.enclPkg.symbol.pkgID, bInvokableType,
                                                                         env.scope.owner, location, SOURCE);
        tsymbol.params = null;
        tsymbol.restParam = null;
        tsymbol.returnType = null;
        bInvokableType.tsymbol = tsymbol;
        return bInvokableType;
    }
    for (BLangVariable paramNode : paramVars) {
        BLangSimpleVariable param = (BLangSimpleVariable) paramNode;
        Name paramName = names.fromIdNode(param.name);
        // Named parameters must be unique within the signature.
        if (paramName != Names.EMPTY) {
            if (paramNames.contains(paramName.value)) {
                dlog.error(param.name.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, paramName.value);
            } else {
                paramNames.add(paramName.value);
            }
        }
        BType type = resolveTypeNode(param.getTypeNode(), env);
        if (type == symTable.noType) {
            return symTable.noType;
        }
        paramNode.type = type;
        paramTypes.add(type);
        long paramFlags = Flags.asMask(paramNode.flagSet);
        BVarSymbol symbol = new BVarSymbol(paramFlags, paramName, env.enclPkg.symbol.pkgID, type, env.scope.owner,
                                           param.pos, SOURCE);
        param.symbol = symbol;
        // Once a defaultable parameter appears, all following parameters must be defaultable too.
        if (param.expr != null) {
            foundDefaultableParam = true;
            symbol.isDefaultable = true;
            symbol.flags |= Flags.OPTIONAL;
        } else if (foundDefaultableParam) {
            dlog.error(param.pos, DiagnosticErrorCode.REQUIRED_PARAM_DEFINED_AFTER_DEFAULTABLE_PARAM);
        }
        params.add(symbol);
    }
    BType retType = resolveTypeNode(retTypeVar, env);
    if (retType == symTable.noType) {
        return symTable.noType;
    }
    BVarSymbol restParam = null;
    BType restType = null;
    if (restVariable != null) {
        restType = resolveTypeNode(restVariable.typeNode, env);
        if (restType == symTable.noType) {
            return symTable.noType;
        }
        restVariable.type = restType;
        restParam = new BVarSymbol(Flags.asMask(restVariable.flagSet),
                                   names.fromIdNode(((BLangSimpleVariable) restVariable).name),
                                   env.enclPkg.symbol.pkgID, restType, env.scope.owner, restVariable.pos, SOURCE);
    }
    BInvokableType bInvokableType = new BInvokableType(paramTypes, restType, retType, null);
    bInvokableType.flags = flags;
    BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
                                                                     env.enclPkg.symbol.pkgID, bInvokableType,
                                                                     env.scope.owner, location, SOURCE);
    tsymbol.params = params;
    tsymbol.restParam = restParam;
    tsymbol.returnType = retType;
    bInvokableType.tsymbol = tsymbol;
    // The invokable is parameterized if any of its parts (params, rest, return) is.
    List<BType> allConstituentTypes = new ArrayList<>(paramTypes);
    allConstituentTypes.add(restType);
    allConstituentTypes.add(retType);
    markParameterizedType(bInvokableType, allConstituentTypes);
    return bInvokableType;
}
/**
 * Lookup all the visible in-scope symbols for a given environment scope.
 * Entries from the current scope come first; entries from enclosing environments are
 * appended unless already present or shadowed module-level variables.
 *
 * @param env Symbol environment
 * @return all the visible symbols, keyed by name
 */
public Map<Name, List<ScopeEntry>> getAllVisibleInScopeSymbols(SymbolEnv env) {
    Map<Name, List<ScopeEntry>> visibleEntries = new HashMap<>();
    for (Map.Entry<Name, ScopeEntry> scopeEntry : env.scope.entries.entrySet()) {
        List<ScopeEntry> entryList = new ArrayList<>();
        entryList.add(scopeEntry.getValue());
        visibleEntries.put(scopeEntry.getKey(), entryList);
    }
    if (env.enclEnv == null) {
        return visibleEntries;
    }
    // Merge in symbols visible from the enclosing environment.
    for (Map.Entry<Name, List<ScopeEntry>> outer : getAllVisibleInScopeSymbols(env.enclEnv).entrySet()) {
        Name name = outer.getKey();
        List<ScopeEntry> enclosingEntries = outer.getValue();
        List<ScopeEntry> currentEntries = visibleEntries.get(name);
        if (currentEntries == null) {
            visibleEntries.put(name, enclosingEntries);
            continue;
        }
        for (ScopeEntry enclosingEntry : enclosingEntries) {
            // Module-level variables shadowed by a local of the same name stay hidden.
            if (!currentEntries.contains(enclosingEntry) && !isModuleLevelVar(enclosingEntry.symbol)) {
                currentEntries.add(enclosingEntry);
            }
        }
    }
    return visibleEntries;
}
/**
 * Creates the equality operator symbol for the given operand types, when a valid
 * equality intersection exists. For mixed value/reference operand pairs both operands
 * are implicitly cast to 'any' (mutating the binary expression's operand nodes) and
 * reference equality degrades to value equality on the boxed values.
 *
 * @param opKind     EQUAL/NOT_EQUAL/REF_EQUAL/REF_NOT_EQUAL
 * @param lhsType    left operand type
 * @param rhsType    right operand type
 * @param binaryExpr expression whose operands may receive implicit casts
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType,
                                            BLangBinaryExpr binaryExpr) {
    boolean validEqualityIntersectionExists;
    switch (opKind) {
        case EQUAL:
        case NOT_EQUAL:
            validEqualityIntersectionExists = types.validEqualityIntersectionExists(lhsType, rhsType);
            break;
        case REF_EQUAL:
        case REF_NOT_EQUAL:
            // Reference equality requires assignability in at least one direction.
            validEqualityIntersectionExists =
                    types.isAssignable(lhsType, rhsType) || types.isAssignable(rhsType, lhsType);
            break;
        default:
            return symTable.notFoundSymbol;
    }
    if (validEqualityIntersectionExists) {
        // Both value types or both reference types: use the operand types directly.
        if ((!types.isValueType(lhsType) && !types.isValueType(rhsType)) ||
                (types.isValueType(lhsType) && types.isValueType(rhsType))) {
            return createEqualityOperator(opKind, lhsType, rhsType);
        } else {
            // Mixed case: box both sides to 'any' via implicit casts on the expression.
            types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
            types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);
            switch (opKind) {
                case REF_EQUAL:
                    // Boxed values: reference equality degrades to value equality.
                    return createEqualityOperator(OperatorKind.EQUAL, symTable.anyType,
                            symTable.anyType);
                case REF_NOT_EQUAL:
                    return createEqualityOperator(OperatorKind.NOT_EQUAL, symTable.anyType,
                            symTable.anyType);
                default:
                    return createEqualityOperator(opKind, symTable.anyType, symTable.anyType);
            }
        }
    }
    return symTable.notFoundSymbol;
}
/**
 * Creates the binary operator symbol for a bitwise shift, when both operands have
 * valid integer types. Left shift always yields int; right shifts preserve an
 * unsigned/byte LHS subtype and yield int otherwise.
 *
 * @param opKind  shift operator kind
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getBitwiseShiftOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    boolean isShiftOp = opKind == OperatorKind.BITWISE_LEFT_SHIFT
            || opKind == OperatorKind.BITWISE_RIGHT_SHIFT
            || opKind == OperatorKind.BITWISE_UNSIGNED_RIGHT_SHIFT;
    if (!isShiftOp) {
        return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }
    if (opKind == OperatorKind.BITWISE_LEFT_SHIFT) {
        return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }
    // Right shifts: unsigned/byte LHS subtypes are closed under the operation.
    switch (lhsType.tag) {
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
            return createBinaryOperator(opKind, lhsType, rhsType, lhsType);
        default:
            return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }
}
/**
 * Creates the binary operator symbol for an arithmetic operation when both operand
 * types admit it: ADD also accepts string/xml operand pairs; SUB/DIV/MUL/MOD require
 * numeric operands on both sides.
 *
 * @param opKind  arithmetic operator kind
 * @param lhsType left operand type
 * @param rhsType right operand type
 * @return the operator symbol, or symTable.notFoundSymbol
 */
public BSymbol getArithmeticOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    boolean validNumericOrStringTypeExists;
    switch (opKind) {
        case ADD:
            // '+' is overloaded for string/xml concatenation as well as numeric addition.
            validNumericOrStringTypeExists = (types.validNumericTypeExists(lhsType) &&
                    types.validNumericTypeExists(rhsType)) || (types.validStringOrXmlTypeExists(lhsType) &&
                    types.validStringOrXmlTypeExists(rhsType));
            break;
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            validNumericOrStringTypeExists = types.validNumericTypeExists(lhsType) &&
                    types.validNumericTypeExists(rhsType);
            break;
        default:
            return symTable.notFoundSymbol;
    }
    if (validNumericOrStringTypeExists) {
        BType compatibleType1 = types.findCompatibleType(lhsType);
        BType compatibleType2 = types.findCompatibleType(rhsType);
        // Two distinct basic numeric types cannot be mixed.
        if (types.isBasicNumericType(compatibleType1) && compatibleType1 != compatibleType2) {
            return symTable.notFoundSymbol;
        }
        // Pick the result type with the larger tag — presumably the numeric type tags
        // are ordered so the higher tag is the wider/containing type; confirm against
        // the TypeTags ordering before relying on this elsewhere.
        if (compatibleType1.tag < compatibleType2.tag) {
            return createBinaryOperator(opKind, lhsType, rhsType, compatibleType2);
        }
        return createBinaryOperator(opKind, lhsType, rhsType, compatibleType1);
    }
    return symTable.notFoundSymbol;
}
/**
 * Define binary comparison operator for valid ordered types.
 *
 * @param opKind Binary operator kind
 * @param lhsType Type of the left hand side value
 * @param rhsType Type of the right hand side value
 * @return <, <=, >, or >= symbol; symTable.notFoundSymbol otherwise
 */
public BSymbol getBinaryComparisonOpForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    switch (opKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            break;
        default:
            return symTable.notFoundSymbol;
    }
    // Both operands must be ordered types of the same ordered kind.
    boolean comparable = types.isOrderedType(lhsType, false)
            && types.isOrderedType(rhsType, false)
            && types.isSameOrderedType(lhsType, rhsType);
    if (!comparable) {
        return symTable.notFoundSymbol;
    }
    // Each comparison kind maps to itself, so no per-kind dispatch is needed.
    return createBinaryComparisonOperator(opKind, lhsType, rhsType);
}
/** Returns true when the given operator kind is one of the three bitwise shift operators. */
public boolean isBinaryShiftOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/** Returns true when the given operator kind is an arithmetic operator (+, -, /, *, %). */
public boolean isArithmeticOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case ADD:
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            return true;
        default:
            return false;
    }
}
/** Returns true when the given operator kind is an ordering comparison (<, <=, >, >=). */
public boolean isBinaryComparisonOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            return true;
        default:
            return false;
    }
}
/**
 * Propagate the PARAMETERIZED flag from a constituent type to the enclosing type.
 *
 * @param type enclosing type to mark
 * @param constituentType member type that may carry the flag
 * @return true if the flag was propagated
 */
public boolean markParameterizedType(BType type, BType constituentType) {
    if (!Symbols.isFlagOn(constituentType.flags, Flags.PARAMETERIZED)) {
        return false;
    }
    type.tsymbol.flags |= Flags.PARAMETERIZED;
    type.flags |= Flags.PARAMETERIZED;
    return true;
}
/**
 * Mark the enclosing type as parameterized when any constituent type is.
 * No-op when the enclosing type is already marked.
 */
public void markParameterizedType(BType enclosingType, Collection<BType> constituentTypes) {
    if (Symbols.isFlagOn(enclosingType.flags, Flags.PARAMETERIZED)) {
        return; // already marked
    }
    for (BType memberType : constituentTypes) {
        // Null members are tolerated; the first parameterized member wins.
        if (memberType != null && markParameterizedType(enclosingType, memberType)) {
            return;
        }
    }
}
// Walk the scope-entry chain and return the first operator symbol whose
// parameter type tags match the given argument types pairwise.
private BSymbol resolveOperator(ScopeEntry entry, List<BType> types) {
    for (ScopeEntry current = entry; current != NOT_FOUND_ENTRY; current = current.next) {
        BInvokableType opType = (BInvokableType) current.symbol.type;
        if (operandTagsMatch(opType.paramTypes, types)) {
            return current.symbol;
        }
    }
    return symTable.notFoundSymbol;
}

// True when both lists have the same length and every pair of type tags agrees.
private boolean operandTagsMatch(List<BType> paramTypes, List<BType> argTypes) {
    if (paramTypes.size() != argTypes.size()) {
        return false;
    }
    for (int i = 0; i < argTypes.size(); i++) {
        if (argTypes.get(i).tag != paramTypes.get(i).tag) {
            return false;
        }
    }
    return true;
}
// Resolve a built-in type node against the root scope and record the outcome
// both on the node and in the resolver's result slot.
private void visitBuiltInTypeNode(BLangType typeNode, TypeKind typeKind, SymbolEnv env) {
    Name builtInTypeName = names.fromTypeKind(typeKind);
    BSymbol builtInTypeSymbol = lookupMemberSymbol(typeNode.pos, symTable.rootScope, env,
            builtInTypeName, SymTag.TYPE);
    if (builtInTypeSymbol == symTable.notFoundSymbol) {
        dlog.error(typeNode.pos, diagCode, builtInTypeName);
    }
    resultType = typeNode.type = builtInTypeSymbol.type;
}
// Collect every XMLNS symbol visible from the given environment, walking
// outward through enclosing environments; inner declarations win on clashes.
private void addNamespacesInScope(Map<Name, BXMLNSSymbol> namespaces, SymbolEnv env) {
    if (env == null) {
        return;
    }
    for (Map.Entry<Name, ScopeEntry> scopeEntry : env.scope.entries.entrySet()) {
        BSymbol candidate = scopeEntry.getValue().symbol;
        if (candidate.kind == SymbolKind.XMLNS) {
            // putIfAbsent keeps the innermost (first-seen) declaration.
            namespaces.putIfAbsent(scopeEntry.getKey(), (BXMLNSSymbol) candidate);
        }
    }
    addNamespacesInScope(namespaces, env.enclEnv);
}
// Visibility rules: public members are always accessible; module-private
// members within the declaring package; private members only from the owning
// type (or a method whose receiver is the owning type).
private boolean isMemberAccessAllowed(SymbolEnv env, BSymbol symbol) {
    if (Symbols.isPublic(symbol)) {
        return true;
    }
    if (!Symbols.isPrivate(symbol)) {
        // Module-level visibility: same package ID (reference comparison,
        // matching the original code).
        return env.enclPkg.symbol.pkgID == symbol.pkgID;
    }
    return env.enclType != null
            ? env.enclType.type.tsymbol == symbol.owner
            : isMemberAllowed(env, symbol);
}
// A private member is reachable when some enclosing invokable has a receiver
// whose type owns the member; otherwise keep climbing the environment chain.
private boolean isMemberAllowed(SymbolEnv env, BSymbol symbol) {
    if (env == null) {
        return false;
    }
    if (env.enclInvokable != null
            && env.enclInvokable.symbol.receiverSymbol != null
            && env.enclInvokable.symbol.receiverSymbol.type.tsymbol == symbol.owner) {
        return true;
    }
    return isMemberAllowed(env.enclEnv, symbol);
}
/**
 * Compute the effective type for an intersection type node ({@code T1 & T2 & ...}).
 * Resolves each constituent type, folds them pairwise into a running potential
 * intersection, then handles the two special shapes: error intersections and
 * {@code readonly} intersections of selectively-immutable types.
 *
 * @param intersectionTypeNode the intersection type node being resolved
 * @return the effective type, or {@code semanticError}/{@code noType} on failure
 */
private BType computeIntersectionType(BLangIntersectionTypeNode intersectionTypeNode) {
    List<BLangType> constituentTypeNodes = intersectionTypeNode.constituentTypeNodes;
    // Maps each resolved BType back to its AST node so flags can be recovered later.
    Map<BType, BLangType> typeBLangTypeMap = new HashMap<>();
    boolean validIntersection = true;
    boolean isErrorIntersection = false;
    boolean isAlreadyExistingType = false;
    // The grammar guarantees at least two constituents; resolve the first two up front.
    BLangType bLangTypeOne = constituentTypeNodes.get(0);
    BType typeOne = resolveTypeNode(bLangTypeOne, env);
    if (typeOne == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeOne, bLangTypeOne);
    BLangType bLangTypeTwo = constituentTypeNodes.get(1);
    BType typeTwo = resolveTypeNode(bLangTypeTwo, env);
    if (typeTwo == symTable.noType) {
        return symTable.noType;
    }
    typeBLangTypeMap.put(typeTwo, bLangTypeTwo);
    boolean hasReadOnlyType = typeOne == symTable.readonlyType || typeTwo == symTable.readonlyType;
    if (typeOne.tag == TypeTags.ERROR || typeTwo.tag == TypeTags.ERROR) {
        isErrorIntersection = true;
    }
    BType potentialIntersectionType = getPotentialIntersection(
            Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
            typeOne, typeTwo, this.env);
    // Identity match means the intersection did not produce a new type.
    if (typeOne == potentialIntersectionType || typeTwo == potentialIntersectionType) {
        isAlreadyExistingType = true;
    }
    LinkedHashSet<BType> constituentBTypes = new LinkedHashSet<>() {
        {
            add(typeOne);
            add(typeTwo);
        }
    };
    if (potentialIntersectionType == symTable.semanticError) {
        validIntersection = false;
    } else {
        // Fold each remaining constituent into the running intersection.
        for (int i = 2; i < constituentTypeNodes.size(); i++) {
            BLangType bLangType = constituentTypeNodes.get(i);
            BType type = resolveTypeNode(bLangType, env);
            if (type.tag == TypeTags.ERROR) {
                isErrorIntersection = true;
            }
            typeBLangTypeMap.put(type, bLangType);
            if (!hasReadOnlyType) {
                hasReadOnlyType = type == symTable.readonlyType;
            }
            if (type == symTable.noType) {
                return symTable.noType;
            }
            // NOTE(review): the context reuses the positions of the first two
            // constituents rather than bLangType.pos — confirm this is intended.
            BType tempIntersectionType = getPotentialIntersection(
                    Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
                    potentialIntersectionType, type, this.env);
            if (tempIntersectionType == symTable.semanticError) {
                validIntersection = false;
                break;
            }
            if (type == tempIntersectionType) {
                potentialIntersectionType = type;
                isAlreadyExistingType = true;
            } else if (potentialIntersectionType != tempIntersectionType) {
                potentialIntersectionType = tempIntersectionType;
                isAlreadyExistingType = false;
            }
            constituentBTypes.add(type);
        }
    }
    if (!validIntersection) {
        dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_INTERSECTION_TYPE, intersectionTypeNode);
        return symTable.semanticError;
    }
    if (isErrorIntersection) {
        // Error intersections get a dedicated error type; reuse an existing
        // detail-type symbol when one is already defined in this scope.
        BType detailType = ((BErrorType) potentialIntersectionType).detailType;
        if (isAlreadyExistingType) {
            potentialIntersectionType = types.createErrorType(detailType, potentialIntersectionType.flags, env);
        }
        boolean existingErrorDetailType = false;
        if (detailType.tsymbol != null) {
            BSymbol detailTypeSymbol = lookupSymbolInMainSpace(env, detailType.tsymbol.name);
            if (detailTypeSymbol != symTable.notFoundSymbol) {
                existingErrorDetailType = true;
            }
        }
        return defineIntersectionType((BErrorType) potentialIntersectionType, intersectionTypeNode.pos,
                constituentBTypes, existingErrorDetailType, env);
    }
    // Non-error intersections are only meaningful with `readonly` as a member.
    if (!hasReadOnlyType) {
        dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_NON_READONLY_INTERSECTION_TYPE,
                intersectionTypeNode);
        return symTable.semanticError;
    }
    if (types.isInherentlyImmutableType(potentialIntersectionType)) {
        return potentialIntersectionType;
    }
    if (!types.isSelectivelyImmutableType(potentialIntersectionType, false)) {
        if (types.isSelectivelyImmutableType(potentialIntersectionType)) {
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_OBJECT_INTERSECTION_TYPE);
        } else {
            dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_INTERSECTION_TYPE,
                    potentialIntersectionType);
        }
        return symTable.semanticError;
    }
    // Recover the AST flags of the effective type (when it maps back to a
    // constituent node) for the immutable-type construction.
    BLangType typeNode = typeBLangTypeMap.get(potentialIntersectionType);
    Set<Flag> flagSet;
    if (typeNode == null) {
        flagSet = new HashSet<>();
    } else if (typeNode.getKind() == NodeKind.OBJECT_TYPE) {
        flagSet = ((BLangObjectTypeNode) typeNode).flagSet;
    } else if (typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
        flagSet = ((BLangUserDefinedType) typeNode).flagSet;
    } else {
        flagSet = new HashSet<>();
    }
    return ImmutableTypeCloner.getImmutableIntersectionType(intersectionTypeNode.pos, types,
            (SelectivelyImmutableReferenceType)
                    potentialIntersectionType,
            env, symTable, anonymousModelHelper, names, flagSet);
}
// Build the intersection type for an error intersection. When the detail type
// is a freshly-created record, it is first defined in the package environment.
private BIntersectionType defineIntersectionType(BErrorType intersectionErrorType,
                                                 Location pos,
                                                 LinkedHashSet<BType> constituentBTypes,
                                                 boolean isAlreadyDefinedDetailType, SymbolEnv env) {
    BSymbol typeOwner = intersectionErrorType.tsymbol.owner;
    PackageID packageID = intersectionErrorType.tsymbol.pkgID;
    SymbolEnv pkgEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
    boolean needsDetailRecordDefinition =
            !isAlreadyDefinedDetailType && intersectionErrorType.detailType.tag == TypeTags.RECORD;
    if (needsDetailRecordDefinition) {
        defineErrorDetailRecord((BRecordType) intersectionErrorType.detailType, pos, pkgEnv);
    }
    return defineErrorIntersectionType(intersectionErrorType, constituentBTypes, packageID, typeOwner);
}
// Register the detail record's fields in its own scope, build a record type
// node with an init function, and add a type definition to the package env.
private BLangTypeDefinition defineErrorDetailRecord(BRecordType detailRecord, Location pos, SymbolEnv env) {
    BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) detailRecord.tsymbol;
    for (BField recordField : detailRecord.fields.values()) {
        recordSymbol.scope.define(recordField.symbol.name, recordField.symbol);
    }
    BLangRecordTypeNode recordTypeNode =
            TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), detailRecord, pos);
    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
    BLangTypeDefinition typeDefinition =
            TypeDefBuilderHelper.addTypeDefinition(detailRecord, recordSymbol, recordTypeNode, env);
    typeDefinition.pos = pos;
    return typeDefinition;
}
// Wrap the effective error type in a BIntersectionType backed by a fresh
// public intersection type symbol.
private BIntersectionType defineErrorIntersectionType(IntersectableReferenceType effectiveType,
                                                      LinkedHashSet<BType> constituentBTypes, PackageID pkgId,
                                                      BSymbol owner) {
    BTypeSymbol typeSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE,
            Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, pkgId, null, owner,
            symTable.builtinPos, VIRTUAL);
    BIntersectionType result = new BIntersectionType(typeSymbol, constituentBTypes, effectiveType);
    typeSymbol.type = result;
    return result;
}
// `readonly & T` reduces directly to T; any other pair is delegated to the
// generic type-intersection computation.
private BType getPotentialIntersection(Types.IntersectionContext intersectionContext,
                                       BType lhsType, BType rhsType, SymbolEnv env) {
    if (lhsType == symTable.readonlyType) {
        return rhsType;
    }
    return rhsType == symTable.readonlyType
            ? lhsType
            : types.getTypeIntersection(intersectionContext, lhsType, rhsType, env);
}
/**
 * Validate that at most one parameter has an inferred-typedesc default and,
 * when one exists, that the return type actually refers to that parameter.
 *
 * @param pos position used for the multiple-infer-params diagnostic
 * @param parameters function parameters to scan
 * @param retType declared return type (may be null)
 * @return true when the parameters are valid
 */
boolean validateInferTypedescParams(Location pos, List<? extends BLangVariable> parameters, BType retType) {
    int inferParamCount = 0;
    BVarSymbol inferParamSymbol = null;
    Location inferExprPos = null;
    for (BLangVariable param : parameters) {
        BType paramType = param.type;
        BLangExpression defaultExpr = param.expr;
        boolean hasInferredDefault = paramType != null && paramType.tag == TypeTags.TYPEDESC
                && defaultExpr != null && defaultExpr.getKind() == NodeKind.INFER_TYPEDESC_EXPR;
        if (hasInferredDefault) {
            inferParamSymbol = param.symbol;
            inferExprPos = defaultExpr.pos;
            inferParamCount++;
        }
    }
    if (inferParamCount > 1) {
        dlog.error(pos, DiagnosticErrorCode.MULTIPLE_INFER_TYPEDESC_PARAMS);
        return false;
    }
    if (inferParamSymbol == null) {
        // Nothing to validate.
        return true;
    }
    // The inferred typedesc is only usable if the return type references it.
    if (retType != null && unifier.refersInferableParamName(inferParamSymbol.name.value, retType)) {
        return true;
    }
    dlog.error(inferExprPos,
            DiagnosticErrorCode.CANNOT_USE_INFERRED_TYPEDESC_DEFAULT_WITH_UNREFERENCED_PARAM);
    return false;
}
// A variable symbol owned directly by a package is a module-level variable.
private boolean isModuleLevelVar(BSymbol symbol) {
    boolean isVariable = symbol.getKind() == SymbolKind.VARIABLE;
    return isVariable && symbol.owner.getKind() == SymbolKind.PACKAGE;
}
/**
 * Holder for information about a parameterized type: the resolved value type
 * of the parameter and, optionally, the index of the parameter it refers to.
 */
private static class ParameterizedTypeInfo {
    // Resolved value type of the parameterized type.
    BType paramValueType;
    // Index of the referred parameter; -1 when not applicable.
    int index = -1;
    private ParameterizedTypeInfo(BType paramValueType) {
        this.paramValueType = paramValueType;
    }
    private ParameterizedTypeInfo(BType paramValueType, int index) {
        this.paramValueType = paramValueType;
        this.index = index;
    }
}
} | class SymbolResolver extends BLangNodeVisitor {
private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 10;
private static final CompilerContext.Key<SymbolResolver> SYMBOL_RESOLVER_KEY =
new CompilerContext.Key<>();
private SymbolTable symTable;
private Names names;
private BLangDiagnosticLog dlog;
private Types types;
private SymbolEnv env;
private BType resultType;
private DiagnosticCode diagCode;
private SymbolEnter symbolEnter;
private BLangAnonymousModelHelper anonymousModelHelper;
private BLangMissingNodesHelper missingNodesHelper;
private Unifier unifier;
// Per-compilation singleton: reuse the resolver cached in the compiler context
// or create one (the constructor registers itself in the context).
public static SymbolResolver getInstance(CompilerContext context) {
    SymbolResolver resolver = context.get(SYMBOL_RESOLVER_KEY);
    return resolver != null ? resolver : new SymbolResolver(context);
}
// Constructs the resolver, registering it in the compiler context so that
// getInstance() returns this same instance, and wires up collaborators.
public SymbolResolver(CompilerContext context) {
    context.put(SYMBOL_RESOLVER_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.names = Names.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.types = Types.getInstance(context);
    this.symbolEnter = SymbolEnter.getInstance(context);
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
    this.missingNodesHelper = BLangMissingNodesHelper.getInstance(context);
    this.unifier = new Unifier();
}
/**
 * Check whether the given symbol can be defined without clashing with an
 * already-defined symbol in the matching symbol space.
 *
 * @param pos position used for diagnostics
 * @param env environment to look the name up in
 * @param symbol symbol being defined
 * @return true when the symbol is unique (definable)
 */
public boolean checkForUniqueSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
    BSymbol foundSym = symTable.notFoundSymbol;
    int expSymTag = symbol.tag;
    // Pick the lookup space that matches the symbol's tag.
    if ((expSymTag & SymTag.IMPORT) == SymTag.IMPORT) {
        foundSym = lookupSymbolInPrefixSpace(env, symbol.name);
    } else if ((expSymTag & SymTag.ANNOTATION) == SymTag.ANNOTATION) {
        foundSym = lookupSymbolInAnnotationSpace(env, symbol.name);
    } else if ((expSymTag & SymTag.CONSTRUCTOR) == SymTag.CONSTRUCTOR) {
        foundSym = lookupSymbolInConstructorSpace(env, symbol.name);
    } else if ((expSymTag & SymTag.MAIN) == SymTag.MAIN) {
        foundSym = lookupSymbolForDecl(env, symbol.name, SymTag.MAIN);
    }
    if (foundSym == symTable.notFoundSymbol && symbol.tag == SymTag.FUNCTION) {
        // For a dotted function name, also check the part after the first dot.
        // NOTE(review): indexOf can never equal the string length, so the
        // second half of the condition is always true — kept for compatibility.
        int dotPosition = symbol.name.value.indexOf('.');
        if (dotPosition > 0 && dotPosition != symbol.name.value.length()) {
            String funcName = symbol.name.value.substring(dotPosition + 1);
            foundSym = lookupSymbolForDecl(env, names.fromString(funcName), SymTag.MAIN);
        }
    }
    if (foundSym == symTable.notFoundSymbol) {
        return true;
    }
    if (!isDistinctSymbol(pos, symbol, foundSym)) {
        return false;
    }
    if (isRedeclaredSymbol(symbol, foundSym)) {
        Name name = symbol.name;
        // Exactly one of the two being remote is a distinct error case (XOR).
        if (Symbols.isRemote(symbol) != Symbols.isRemote(foundSym)) {
            dlog.error(pos, DiagnosticErrorCode.UNSUPPORTED_REMOTE_METHOD_NAME_IN_SCOPE, name);
            return false;
        }
        if (symbol.kind != SymbolKind.CONSTANT) {
            dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, name);
        }
        return false;
    }
    // A clash with a service symbol fails without a diagnostic here.
    return (foundSym.tag & SymTag.SERVICE) != SymTag.SERVICE;
}
// Redeclared when the two symbols share an owner/module, or when a testable
// (test) package symbol shadows one from the package under test.
private boolean isRedeclaredSymbol(BSymbol symbol, BSymbol foundSym) {
    if (hasSameOwner(symbol, foundSym)) {
        return true;
    }
    return isSymbolRedeclaredInTestPackage(symbol, foundSym);
}
// Uniqueness check against the main symbol space only. An error constructor
// clashing with its error type is tolerated (returns false without error).
public boolean checkForUniqueSymbol(SymbolEnv env, BSymbol symbol) {
    BSymbol foundSym = lookupSymbolInMainSpace(env, symbol.name);
    if (foundSym == symTable.notFoundSymbol) {
        return true;
    }
    boolean errorCtorClash = symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR;
    return !errorCtorClash && !hasSameOwner(symbol, foundSym);
}
/**
 * Check whether the symbol being defined is unique within the current
 * environment scope only (enclosing scopes are not consulted).
 *
 * @param pos symbol pos for diagnostic purpose.
 * @param env symbol environment to lookup.
 * @param symbol the symbol that is being defined.
 * @param expSymTag expected tag of the symbol.
 * @return true if the symbol is unique, false otherwise.
 */
public boolean checkForUniqueSymbolInCurrentScope(Location pos, SymbolEnv env, BSymbol symbol,
                                                  int expSymTag) {
    BSymbol foundSym = lookupSymbolInGivenScope(env, symbol.name, expSymTag);
    return foundSym == symTable.notFoundSymbol || isDistinctSymbol(pos, symbol, foundSym);
}
/**
 * Compare a symbol being defined against one found in the scope, logging a
 * diagnostic when a built-in root-package symbol would be redeclared.
 *
 * @param pos symbol pos for diagnostic purpose.
 * @param symbol symbol that is being defined.
 * @param foundSym symbol that is found from the scope.
 * @return true if the symbol is distinct (definable), false otherwise.
 */
private boolean isDistinctSymbol(Location pos, BSymbol symbol, BSymbol foundSym) {
    boolean errorCtorClash = symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR;
    if (errorCtorClash) {
        return false;
    }
    if (isSymbolDefinedInRootPkgLvl(foundSym)) {
        dlog.error(pos, DiagnosticErrorCode.REDECLARED_BUILTIN_SYMBOL, symbol.name);
        return false;
    }
    return true;
}
/**
 * Silent variant of the distinctness check: no diagnostics are reported, and
 * symbols with the same owner are additionally considered non-distinct.
 *
 * @param symbol symbol that is being defined.
 * @param foundSym symbol that is found from the scope.
 * @return true if the symbol is distinct, false otherwise.
 */
private boolean isDistinctSymbol(BSymbol symbol, BSymbol foundSym) {
    if (symbol.tag == SymTag.CONSTRUCTOR && foundSym.tag == SymTag.ERROR) {
        return false;
    }
    return !isSymbolDefinedInRootPkgLvl(foundSym) && !hasSameOwner(symbol, foundSym);
}
// Two symbols "share an owner" when: they have the same owner symbol; both are
// package-level in the same package; or the new symbol is lambda/let-scoped
// while the found one is owned by an invokable.
private boolean hasSameOwner(BSymbol symbol, BSymbol foundSym) {
    if (foundSym.owner == symbol.owner) {
        return true;
    }
    boolean bothPackageLevel = (foundSym.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE
            && (symbol.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE;
    if (bothPackageLevel && foundSym.pkgID.equals(symbol.pkgID)) {
        return true;
    }
    boolean foundInInvokable = (foundSym.owner.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE;
    if (foundInInvokable && Symbols.isFlagOn(symbol.owner.flags, Flags.LAMBDA)) {
        return true;
    }
    return foundInInvokable && (symbol.owner.tag & SymTag.LET) == SymTag.LET;
}
/**
 * True when a symbol defined in a testable (test) package redeclares a symbol
 * whose owner is not testable.
 *
 * @param symbol symbol being defined
 * @param foundSym previously defined symbol found in scope
 * @return true when this is a test-package redeclaration
 */
private boolean isSymbolRedeclaredInTestPackage(BSymbol symbol, BSymbol foundSym) {
    // Idiom fix: return the condition directly instead of
    // if (...) { return true; } return false;
    return Symbols.isFlagOn(symbol.owner.flags, Flags.TESTABLE)
            && !Symbols.isFlagOn(foundSym.owner.flags, Flags.TESTABLE);
}
// A built-in symbol: a variable-name-tagged symbol belonging to the root package.
private boolean isSymbolDefinedInRootPkgLvl(BSymbol foundSym) {
    boolean inRootPackage = symTable.rootPkgSymbol.pkgID.equals(foundSym.pkgID);
    return inRootPackage && (foundSym.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME;
}
/**
 * Lookup the symbol using the given name in the given environment scope only.
 *
 * @param env environment to lookup the symbol.
 * @param name name of the symbol to lookup.
 * @param expSymTag expected tag of the symbol.
 * @return if a symbol is found return it.
 */
public BSymbol lookupSymbolInGivenScope(SymbolEnv env, Name name, int expSymTag) {
    for (ScopeEntry entry = env.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        // Root-package variable-name symbols (built-ins) match unconditionally.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID)
                && (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
    }
    return symTable.notFoundSymbol;
}
// True when no member with the same name/tag already exists in the scope;
// otherwise a redeclared-symbol error is logged.
public boolean checkForUniqueMemberSymbol(Location pos, SymbolEnv env, BSymbol symbol) {
    BSymbol existing = lookupMemberSymbol(pos, env.scope, env, symbol.name, symbol.tag);
    if (existing == symTable.notFoundSymbol) {
        return true;
    }
    dlog.error(pos, DiagnosticErrorCode.REDECLARED_SYMBOL, symbol.name);
    return false;
}
// Resolve a built-in binary operator for the given operand types.
public BSymbol resolveBinaryOperator(OperatorKind opKind,
                                     BType lhsType,
                                     BType rhsType) {
    Name operatorName = names.fromString(opKind.value());
    return resolveOperator(operatorName, Lists.of(lhsType, rhsType));
}
// Synthesize a (lhs, rhs) -> boolean operator symbol for equality checks.
BSymbol createEqualityOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
    BInvokableType operatorType =
            new BInvokableType(Lists.of(lhsType, rhsType), symTable.booleanType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, operatorType, null,
            symTable.builtinPos, VIRTUAL);
}
// Resolve a built-in unary operator. `pos` is currently unused but retained
// for API compatibility with callers.
public BSymbol resolveUnaryOperator(Location pos,
                                    OperatorKind opKind,
                                    BType type) {
    Name operatorName = names.fromString(opKind.value());
    return resolveOperator(operatorName, Lists.of(type));
}
// Look the operator up in the root scope, then match by parameter type tags.
public BSymbol resolveOperator(Name name, List<BType> types) {
    return resolveOperator(symTable.rootScope.lookup(name), types);
}
// Synthesize a comparison operator symbol whose return type is boolean.
BSymbol createBinaryComparisonOperator(OperatorKind opKind, BType lhsType, BType rhsType) {
    BInvokableType operatorType =
            new BInvokableType(Lists.of(lhsType, rhsType), symTable.booleanType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, operatorType, null,
            symTable.builtinPos, VIRTUAL);
}
// Synthesize a binary operator symbol with an explicit return type.
BSymbol createBinaryOperator(OperatorKind opKind, BType lhsType, BType rhsType, BType retType) {
    BInvokableType operatorType = new BInvokableType(Lists.of(lhsType, rhsType), retType, null);
    return new BOperatorSymbol(names.fromString(opKind.value()), null, operatorType, null,
            symTable.builtinPos, VIRTUAL);
}
// Resolve a package by alias; the empty alias denotes the current package.
// Logs an undefined-module error when the alias is unknown.
public BSymbol resolvePkgSymbol(Location pos, SymbolEnv env, Name pkgAlias) {
    if (pkgAlias == Names.EMPTY) {
        return env.enclPkg.symbol;
    }
    BSymbol resolved = lookupSymbolInPrefixSpace(env, pkgAlias);
    if (resolved == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
    }
    return resolved;
}
// Resolve a prefix (xmlns prefix or import alias) visible from `env` for the
// given compilation unit, walking enclosing environments outward. Marks a
// matched import as used.
public BSymbol resolvePrefixSymbol(SymbolEnv env, Name pkgAlias, Name compUnit) {
    if (pkgAlias == Names.EMPTY) {
        return env.enclPkg.symbol;
    }
    for (ScopeEntry entry = env.scope.lookup(pkgAlias); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        BSymbol candidate = entry.symbol;
        if ((candidate.tag & SymTag.XMLNS) == SymTag.XMLNS) {
            return candidate;
        }
        if ((candidate.tag & SymTag.IMPORT) == SymTag.IMPORT
                && ((BPackageSymbol) candidate).compUnit.equals(compUnit)) {
            ((BPackageSymbol) candidate).isUsed = true;
            return candidate;
        }
    }
    return env.enclEnv != null
            ? resolvePrefixSymbol(env.enclEnv, pkgAlias, compUnit)
            : symTable.notFoundSymbol;
}
// Annotation resolution delegates straight to the annotation symbol space.
public BSymbol resolveAnnotation(Location pos, SymbolEnv env, Name pkgAlias, Name annotationName) {
    return lookupAnnotationSpaceSymbolInPackage(pos, env, pkgAlias, annotationName);
}
// Look up a field of a struct/record type by name within the type's scope.
public BSymbol resolveStructField(Location location, SymbolEnv env, Name fieldName,
                                  BTypeSymbol structSymbol) {
    return lookupMemberSymbol(location, structSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
// Look up a field of an object type by name within the type's scope.
public BSymbol resolveObjectField(Location location, SymbolEnv env, Name fieldName,
                                  BTypeSymbol objectSymbol) {
    return lookupMemberSymbol(location, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
// Look up a method of an object type by name within the type's scope.
public BSymbol resolveObjectMethod(Location pos, SymbolEnv env, Name fieldName,
                                   BObjectTypeSymbol objectSymbol) {
    return lookupMemberSymbol(pos, objectSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
// Look up an invocable field of an object type by name within the type's scope.
public BSymbol resolveInvocableObjectField(Location pos, SymbolEnv env, Name fieldName,
                                           BObjectTypeSymbol objectTypeSymbol) {
    return lookupMemberSymbol(pos, objectTypeSymbol.scope, env, fieldName, SymTag.VARIABLE);
}
// Resolve a type node using the default unknown-type diagnostic code.
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env) {
    return resolveTypeNode(typeNode, env, DiagnosticErrorCode.UNKNOWN_TYPE);
}
// Resolve a type node by visiting it, with the given diagnostic code used for
// unresolvable types. The resolver's env/diagCode are saved and restored
// because resolution re-enters this visitor.
public BType resolveTypeNode(BLangType typeNode, SymbolEnv env, DiagnosticCode diagCode) {
    SymbolEnv prevEnv = this.env;
    DiagnosticCode preDiagCode = this.diagCode;
    this.env = env;
    this.diagCode = diagCode;
    typeNode.accept(this);
    this.env = prevEnv;
    this.diagCode = preDiagCode;
    if (this.resultType != symTable.noType) {
        // Apply `?` nullability: extend an existing union with nil, or wrap the
        // type in a new union. json/any are skipped — presumably because they
        // already include nil; confirm before relying on this.
        if (typeNode.nullable && this.resultType.tag == TypeTags.UNION) {
            BUnionType unionType = (BUnionType) this.resultType;
            unionType.add(symTable.nilType);
        } else if (typeNode.nullable && resultType.tag != TypeTags.JSON && resultType.tag != TypeTags.ANY) {
            this.resultType = BUnionType.create(null, resultType, symTable.nilType);
        }
    }
    typeNode.setBType(resultType);
    return resultType;
}
/**
 * Return the symbol associated with the given name in the current package. This method first searches the symbol in
 * the current scope and proceeds the enclosing scope, if it is not there in the current scope. This process
 * continues until the symbol is found or the root scope is reached. This method is mainly meant for checking
 * whether a given symbol is already defined in the scope hierarchy.
 *
 * @param env current symbol environment
 * @param name symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol
 */
private BSymbol lookupSymbolForDecl(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & expSymTag) == expSymTag) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    if (env.enclEnv != null) {
        // NOTE(review): enclosing scopes are searched via lookupSymbol, which
        // additionally skips record-field references — confirm this asymmetry
        // with the current-scope check above is intended.
        return lookupSymbol(env.enclEnv, name, expSymTag);
    }
    return symTable.notFoundSymbol;
}
/**
 * Return the symbol associated with the given name in the current package,
 * searching the current scope first and then each enclosing scope until the
 * symbol is found or the root scope is exhausted. Meant for resolving symbol
 * references; record fields are ignored when looking up from within a record
 * type definition, so default value expressions cannot refer to other fields.
 *
 * @param env current symbol environment
 * @param name symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol
 */
private BSymbol lookupSymbol(SymbolEnv env, Name name, int expSymTag) {
    // Iterative walk over the environment chain (equivalent to the recursive form).
    for (SymbolEnv current = env; current != null; current = current.enclEnv) {
        for (ScopeEntry entry = current.scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
            if ((entry.symbol.tag & expSymTag) == expSymTag
                    && !isFieldRefFromWithinARecord(entry.symbol, current)) {
                return entry.symbol;
            }
        }
    }
    return symTable.notFoundSymbol;
}
/**
 * Checks whether the specified symbol is a symbol of a record field and whether that field is referred to from
 * within a record type definition (not necessarily the owner of the field).
 *
 * @param symbol symbol to be tested
 * @param env the environment in which the symbol was found
 * @return returns `true` if the above described condition holds
 */
private boolean isFieldRefFromWithinARecord(BSymbol symbol, SymbolEnv env) {
    return (symbol.owner.tag & SymTag.RECORD) == SymTag.RECORD &&
            env.enclType != null && env.enclType.getKind() == NodeKind.RECORD_TYPE;
}
// Lookup in the main (value/type) symbol space.
public BSymbol lookupSymbolInMainSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.MAIN);
}
// Lookup in the annotation symbol space.
public BSymbol lookupSymbolInAnnotationSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.ANNOTATION);
}
// Lookup in the prefix (import) symbol space.
public BSymbol lookupSymbolInPrefixSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.IMPORT);
}
// Lookup in the constructor symbol space.
public BSymbol lookupSymbolInConstructorSpace(SymbolEnv env, Name name) {
    return lookupSymbol(env, name, SymTag.CONSTRUCTOR);
}
/**
 * Find the lang-library method with the given name applicable to a value of
 * the given type. Dispatches on the type tag to the matching lang module,
 * then falls back to lang.value (except for objects) and finally to the
 * internal lang module.
 *
 * @param type static type of the receiver value
 * @param name method name to find
 * @return the matching method symbol, or notFoundSymbol
 */
public BSymbol lookupLangLibMethod(BType type, Name name) {
    if (symTable.langAnnotationModuleSymbol == null) {
        // Lang modules not available yet — presumably while compiling the lang
        // libs themselves; confirm before relying on this.
        return symTable.notFoundSymbol;
    }
    BSymbol bSymbol;
    switch (type.tag) {
        case TypeTags.ARRAY:
        case TypeTags.TUPLE:
            bSymbol = lookupMethodInModule(symTable.langArrayModuleSymbol, name, env);
            break;
        case TypeTags.DECIMAL:
            bSymbol = lookupMethodInModule(symTable.langDecimalModuleSymbol, name, env);
            break;
        case TypeTags.ERROR:
            bSymbol = lookupMethodInModule(symTable.langErrorModuleSymbol, name, env);
            break;
        case TypeTags.FLOAT:
            bSymbol = lookupMethodInModule(symTable.langFloatModuleSymbol, name, env);
            break;
        case TypeTags.FUTURE:
            bSymbol = lookupMethodInModule(symTable.langFutureModuleSymbol, name, env);
            break;
        case TypeTags.INT:
        case TypeTags.SIGNED32_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED8_INT:
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
            bSymbol = lookupMethodInModule(symTable.langIntModuleSymbol, name, env);
            break;
        case TypeTags.MAP:
        case TypeTags.RECORD:
            bSymbol = lookupMethodInModule(symTable.langMapModuleSymbol, name, env);
            break;
        case TypeTags.OBJECT:
            bSymbol = lookupMethodInModule(symTable.langObjectModuleSymbol, name, env);
            break;
        case TypeTags.STREAM:
            bSymbol = lookupMethodInModule(symTable.langStreamModuleSymbol, name, env);
            break;
        case TypeTags.TABLE:
            bSymbol = lookupMethodInModule(symTable.langTableModuleSymbol, name, env);
            break;
        case TypeTags.STRING:
        case TypeTags.CHAR_STRING:
            bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            break;
        case TypeTags.TYPEDESC:
            bSymbol = lookupMethodInModule(symTable.langTypedescModuleSymbol, name, env);
            break;
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_PI:
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            break;
        case TypeTags.XML_TEXT:
            // xml:Text tries lang.xml first, then lang.string.
            bSymbol = lookupMethodInModule(symTable.langXmlModuleSymbol, name, env);
            if (bSymbol == symTable.notFoundSymbol) {
                bSymbol = lookupMethodInModule(symTable.langStringModuleSymbol, name, env);
            }
            break;
        case TypeTags.BOOLEAN:
            bSymbol = lookupMethodInModule(symTable.langBooleanModuleSymbol, name, env);
            break;
        case TypeTags.UNION:
            // Only dispatch when the whole union is a subtype of the first
            // member's basic type; otherwise no lang-lib module applies.
            Iterator<BType> itr = ((BUnionType) type).getMemberTypes().iterator();
            if (!itr.hasNext()) {
                throw new IllegalArgumentException(
                        format("Union type '%s' does not have member types", type.toString()));
            }
            BType member = itr.next();
            if (types.isSubTypeOfBaseType(type, member.tag)) {
                bSymbol = lookupLangLibMethod(member, name);
            } else {
                bSymbol = symTable.notFoundSymbol;
            }
            break;
        case TypeTags.FINITE:
            // Finite types delegate to the basic type they are assignable to.
            if (types.isAssignable(type, symTable.intType)) {
                return lookupLangLibMethod(symTable.intType, name);
            }
            if (types.isAssignable(type, symTable.stringType)) {
                return lookupLangLibMethod(symTable.stringType, name);
            }
            if (types.isAssignable(type, symTable.decimalType)) {
                return lookupLangLibMethod(symTable.decimalType, name);
            }
            if (types.isAssignable(type, symTable.floatType)) {
                return lookupLangLibMethod(symTable.floatType, name);
            }
            if (types.isAssignable(type, symTable.booleanType)) {
                return lookupLangLibMethod(symTable.booleanType, name);
            }
            bSymbol = symTable.notFoundSymbol;
            break;
        default:
            bSymbol = symTable.notFoundSymbol;
    }
    // Fallbacks: lang.value for all non-object types, then lang.__internal.
    if (bSymbol == symTable.notFoundSymbol && type.tag != TypeTags.OBJECT) {
        bSymbol = lookupMethodInModule(symTable.langValueModuleSymbol, name, env);
    }
    if (bSymbol == symTable.notFoundSymbol) {
        bSymbol = lookupMethodInModule(symTable.langInternalModuleSymbol, name, env);
    }
    return bSymbol;
}
/**
 * Recursively analyse the symbol env to find the closure variable symbol that is being resolved.
 *
 * @param env symbol env to analyse and find the closure variable.
 * @param name name of the symbol to lookup
 * @param expSymTag symbol tag
 * @return closure symbol wrapper along with the resolved count
 */
public BSymbol lookupClosureVarSymbol(SymbolEnv env, Name name, int expSymTag) {
    ScopeEntry entry = env.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Root-package variable-name symbols (built-ins) match unconditionally.
        if (symTable.rootPkgSymbol.pkgID.equals(entry.symbol.pkgID) &&
                (entry.symbol.tag & SymTag.VARIABLE_NAME) == SymTag.VARIABLE_NAME) {
            return entry.symbol;
        }
        if ((entry.symbol.tag & expSymTag) == expSymTag && !isFieldRefFromWithinARecord(entry.symbol, env)) {
            return entry.symbol;
        }
        entry = entry.next;
    }
    // Stop at environments with no node — the search does not cross them.
    if (env.enclEnv == null || env.enclEnv.node == null) {
        return symTable.notFoundSymbol;
    }
    return lookupClosureVarSymbol(env.enclEnv, name, expSymTag);
}
// Resolve `pkgAlias:name` in the main symbol space; the empty alias falls back
// to the current scope chain.
public BSymbol lookupMainSpaceSymbolInPackage(Location pos,
                                              SymbolEnv env,
                                              Name pkgAlias,
                                              Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInMainSpace(env, name);
    }
    BSymbol moduleSymbol =
            resolvePrefixSymbol(env, pkgAlias, names.fromString(pos.lineRange().filePath()));
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.MAIN);
}
// Resolve `pkgAlias:name` in the prefix (import) symbol space; the empty alias
// falls back to the current scope chain.
public BSymbol lookupPrefixSpaceSymbolInPackage(Location pos,
                                                SymbolEnv env,
                                                Name pkgAlias,
                                                Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInPrefixSpace(env, name);
    }
    BSymbol moduleSymbol =
            resolvePrefixSymbol(env, pkgAlias, names.fromString(pos.lineRange().filePath()));
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.IMPORT);
}
// Resolve `pkgAlias:name` in the annotation symbol space; the empty alias
// falls back to the current scope chain.
public BSymbol lookupAnnotationSpaceSymbolInPackage(Location pos,
                                                    SymbolEnv env,
                                                    Name pkgAlias,
                                                    Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInAnnotationSpace(env, name);
    }
    BSymbol moduleSymbol =
            resolvePrefixSymbol(env, pkgAlias, names.fromString(pos.lineRange().filePath()));
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.ANNOTATION);
}
// Resolve `pkgAlias:name` in the constructor symbol space; the empty alias
// falls back to the current scope chain.
public BSymbol lookupConstructorSpaceSymbolInPackage(Location pos,
                                                     SymbolEnv env,
                                                     Name pkgAlias,
                                                     Name name) {
    if (pkgAlias == Names.EMPTY) {
        return lookupSymbolInConstructorSpace(env, name);
    }
    BSymbol moduleSymbol =
            resolvePrefixSymbol(env, pkgAlias, names.fromString(pos.lineRange().filePath()));
    if (moduleSymbol == symTable.notFoundSymbol) {
        dlog.error(pos, DiagnosticErrorCode.UNDEFINED_MODULE, pkgAlias.value);
        return moduleSymbol;
    }
    return lookupMemberSymbol(pos, moduleSymbol.scope, env, name, SymTag.CONSTRUCTOR);
}
/**
 * Look up a function symbol with the given name defined directly in the given module's scope.
 * <p>
 * Note the early-stop semantics: the search stops at the first function-tagged entry for the
 * name. If that entry is not accessible from {@code env}, {@code notFoundSymbol} is returned
 * without considering any later entries in the chain (rather than continuing past it).
 *
 * @param moduleSymbol module whose scope is searched
 * @param name         function name
 * @param env          environment used for the accessibility check
 * @return the accessible function symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupMethodInModule(BPackageSymbol moduleSymbol, Name name, SymbolEnv env) {
    ScopeEntry entry = moduleSymbol.scope.lookup(name);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip entries that are not functions (the name may be overloaded across symbol kinds).
        if ((entry.symbol.tag & SymTag.FUNCTION) != SymTag.FUNCTION) {
            entry = entry.next;
            continue;
        }
        if (isMemberAccessAllowed(env, entry.symbol)) {
            return entry.symbol;
        }
        // First matching function is inaccessible: give up (no fallback to later entries).
        return symTable.notFoundSymbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Return the symbol with the given name defined directly in the given scope.
 * Only this scope is searched — there is no fallback to enclosing scopes. When a
 * matching symbol exists but is not accessible from {@code env}, an error is logged
 * and {@code notFoundSymbol} is returned.
 *
 * @param pos       diagnostic position
 * @param scope     scope to search
 * @param env       symbol environment used for the accessibility check
 * @param name      symbol name
 * @param expSymTag expected symbol type/tag
 * @return resolved symbol, or {@code symTable.notFoundSymbol}
 */
public BSymbol lookupMemberSymbol(Location pos,
                                  Scope scope,
                                  SymbolEnv env,
                                  Name name,
                                  int expSymTag) {
    for (ScopeEntry entry = scope.lookup(name); entry != NOT_FOUND_ENTRY; entry = entry.next) {
        // Skip entries whose tag does not match the expected symbol kind.
        if ((entry.symbol.tag & expSymTag) != expSymTag) {
            continue;
        }
        if (!isMemberAccessAllowed(env, entry.symbol)) {
            // Found, but not visible from here: report and stop.
            dlog.error(pos, DiagnosticErrorCode.ATTEMPT_REFER_NON_ACCESSIBLE_SYMBOL, entry.symbol.name);
            return symTable.notFoundSymbol;
        }
        return entry.symbol;
    }
    return symTable.notFoundSymbol;
}
/**
 * Collect all XML namespaces visible from the given environment, nearest scope first.
 *
 * @param env environment whose scope chain is inspected
 * @return map of namespace name to its symbol
 */
public Map<Name, BXMLNSSymbol> resolveAllNamespaces(SymbolEnv env) {
    Map<Name, BXMLNSSymbol> visibleNamespaces = new LinkedHashMap<>();
    addNamespacesInScope(visibleNamespaces, env);
    return visibleNamespaces;
}
// Cache the built-in 'error' type (and its detail map type) on the symbol table by
// locating the type definition in the root package scope.
// NOTE(review): the method name is missing a 't' ("boostrap" vs "bootstrap"); kept
// as-is since renaming would break external callers.
public void boostrapErrorType() {
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.ERROR);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip non-type entries sharing the 'error' name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        symTable.errorType = (BErrorType) entry.symbol.type;
        symTable.detailType = (BMapType) symTable.errorType.detailType;
        return;
    }
    // Reaching here means the bootstrap order is broken — fail loudly.
    throw new IllegalStateException("built-in error not found ?");
}
// Populate the symbol table with the built-in operator symbols (delegated entirely
// to the symbol table itself).
public void defineOperators() {
    symTable.defineOperators();
}
// Locate the 'anydata' union defined in lang.annotations and replace it with a dedicated
// BAnydataType, rewiring the existing symbol to the new type and caching derived unions.
// Order matters here: anydataType must be set before anydataOrReadonly is built from it.
public void bootstrapAnydataType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.ANYDATA);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip non-type entries sharing the name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.anydataType = new BAnydataType(type);
        symTable.anydataOrReadonly = BUnionType.create(null, symTable.anydataType, symTable.readonlyType);
        // Point the original symbol at the replacement type and mark it built-in.
        entry.symbol.type = symTable.anydataType;
        entry.symbol.origin = BUILTIN;
        symTable.anydataType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.ANYDATA,
                PackageID.ANNOTATIONS, symTable.anydataType, symTable.rootPkgSymbol, symTable.builtinPos, BUILTIN);
        return;
    }
    throw new IllegalStateException("built-in 'anydata' type not found");
}
// Locate the 'json' union defined in lang.annotations and replace it with a dedicated
// BJSONType, rewiring the existing symbol to the new type.
public void bootstrapJsonType() {
    ScopeEntry entry = symTable.langAnnotationModuleSymbol.scope.lookup(Names.JSON);
    while (entry != NOT_FOUND_ENTRY) {
        // Skip non-type entries sharing the name.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        BUnionType type = (BUnionType) entry.symbol.type;
        symTable.jsonType = new BJSONType(type);
        symTable.jsonType.tsymbol = new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.JSON, PackageID.ANNOTATIONS,
                symTable.jsonType, symTable.langAnnotationModuleSymbol, symTable.builtinPos, BUILTIN);
        // Point the original symbol at the replacement type and mark it built-in.
        entry.symbol.type = symTable.jsonType;
        entry.symbol.origin = BUILTIN;
        return;
    }
    throw new IllegalStateException("built-in 'json' type not found");
}
// Rebuild the Cloneable-derived built-in types once lang.value is available.
// When lang.value has been loaded, the real 'Cloneable' definition is used to
// reconstruct the error detail map, the error type, and several derived unions
// on the symbol table (order-sensitive: later types are built from earlier ones).
// When lang.value is absent (i.e. while bootstrapping lang.value itself), only the
// internal Cloneable placeholder in the root scope is rewired.
public void bootstrapCloneableType() {
    if (symTable.langValueModuleSymbol != null) {
        ScopeEntry entry = symTable.langValueModuleSymbol.scope.lookup(Names.CLONEABLE);
        while (entry != NOT_FOUND_ENTRY) {
            // Skip non-type entries sharing the name.
            if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
                entry = entry.next;
                continue;
            }
            symTable.cloneableType = (BUnionType) entry.symbol.type;
            symTable.cloneableType.tsymbol =
                    new BTypeSymbol(SymTag.TYPE, Flags.PUBLIC, Names.CLONEABLE,
                            PackageID.VALUE, symTable.cloneableType, symTable.langValueModuleSymbol,
                            symTable.builtinPos, BUILTIN);
            // Error detail is a map of Cloneable; the error type is built on top of it.
            symTable.detailType = new BMapType(TypeTags.MAP, symTable.cloneableType, null);
            symTable.errorType = new BErrorType(null, symTable.detailType);
            symTable.errorType.tsymbol = new BErrorTypeSymbol(SymTag.ERROR, Flags.PUBLIC, Names.ERROR,
                    symTable.rootPkgSymbol.pkgID, symTable.errorType, symTable.rootPkgSymbol, symTable.builtinPos
                    , BUILTIN);
            // Derived unions and constrained types that depend on the error type.
            symTable.errorOrNilType = BUnionType.create(null, symTable.errorType, symTable.nilType);
            symTable.anyOrErrorType = BUnionType.create(null, symTable.anyType, symTable.errorType);
            symTable.mapAllType = new BMapType(TypeTags.MAP, symTable.anyOrErrorType, null);
            symTable.arrayAllType = new BArrayType(symTable.anyOrErrorType);
            symTable.typeDesc.constraint = symTable.anyOrErrorType;
            symTable.futureType.constraint = symTable.anyOrErrorType;
            symTable.pureType = BUnionType.create(null, symTable.anydataType, symTable.errorType);
            return;
        }
        throw new IllegalStateException("built-in 'lang.value:Cloneable' type not found");
    }
    // lang.value not loaded yet: rewire the internal placeholder only.
    ScopeEntry entry = symTable.rootPkgSymbol.scope.lookup(Names.CLONEABLE_INTERNAL);
    while (entry != NOT_FOUND_ENTRY) {
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            entry = entry.next;
            continue;
        }
        entry.symbol.type = symTable.cloneableType;
        break;
    }
}
/**
 * Cache the built-in integer-range object type from lang.__internal's
 * createIntRange function (its return type), and define the range operations.
 */
public void bootstrapIntRangeType() {
    for (ScopeEntry entry = symTable.langInternalModuleSymbol.scope.lookup(Names.CREATE_INT_RANGE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        // Only the invokable entry for this name is of interest.
        if ((entry.symbol.tag & SymTag.INVOKABLE) != SymTag.INVOKABLE) {
            continue;
        }
        symTable.intRangeType = (BObjectType) ((BInvokableType) entry.symbol.type).retType;
        symTable.defineIntRangeOperations();
        return;
    }
    throw new IllegalStateException("built-in Integer Range type not found ?");
}
/**
 * Cache the built-in Iterable object type from the lang.object module.
 */
public void bootstrapIterableType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.OBJECT_ITERABLE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        // Only the type entry for this name is of interest.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            continue;
        }
        symTable.iterableType = (BObjectType) entry.symbol.type;
        return;
    }
    throw new IllegalStateException("built-in distinct Iterable type not found ?");
}
/**
 * Cache the built-in RawTemplate object type from the lang.object module.
 */
public void loadRawTemplateType() {
    for (ScopeEntry entry = symTable.langObjectModuleSymbol.scope.lookup(Names.RAW_TEMPLATE);
            entry != NOT_FOUND_ENTRY; entry = entry.next) {
        // Only the type entry for this name is of interest.
        if ((entry.symbol.tag & SymTag.TYPE) != SymTag.TYPE) {
            continue;
        }
        symTable.rawTemplateType = (BObjectType) entry.symbol.type;
        return;
    }
    throw new IllegalStateException("'lang.object:RawTemplate' type not found");
}
// Built-in value types (int, string, boolean, ...) resolve by their type-kind name
// against the root scope.
public void visit(BLangValueType valueTypeNode) {
    visitBuiltInTypeNode(valueTypeNode, valueTypeNode.typeKind, this.env);
}
// Built-in reference types resolve the same way as value types: by type-kind name
// against the root scope.
public void visit(BLangBuiltInRefTypeNode builtInRefType) {
    visitBuiltInTypeNode(builtInRefType, builtInRefType.typeKind, this.env);
}
/**
 * Resolve a union type node: resolve every member type, then build a
 * {@code BUnionType} with a fresh public type symbol. Resolution aborts with
 * {@code noType} as soon as any member fails to resolve.
 */
public void visit(BLangUnionTypeNode unionTypeNode) {
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>();
    for (BLangType memberTypeNode : unionTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            // A member failed to resolve; propagate noType and stop.
            resultType = symTable.noType;
            return;
        }
        memberTypes.add(memberType);
    }

    BTypeSymbol unionTypeSymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE,
            Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, unionTypeNode.pos, SOURCE);
    BUnionType unionType = BUnionType.create(unionTypeSymbol, memberTypes);
    unionTypeSymbol.type = unionType;

    // Propagate the PARAMETERIZED flag from any member type.
    markParameterizedType(unionType, memberTypes);
    resultType = unionType;
}
// Intersection types are resolved by a dedicated helper (computeIntersectionType).
public void visit(BLangIntersectionTypeNode intersectionTypeNode) {
    resultType = computeIntersectionType(intersectionTypeNode);
}
/**
 * Resolve an object type node: create an object type symbol carrying the node's
 * declaration flags (anonymous objects are forced public) and mirror the relevant
 * flags onto the type's flag mask.
 */
public void visit(BLangObjectTypeNode objectTypeNode) {
    EnumSet<Flag> flags = EnumSet.copyOf(objectTypeNode.flagSet);
    if (objectTypeNode.isAnonymous) {
        // Anonymous object types are always treated as public.
        flags.add(Flag.PUBLIC);
    }

    // Mirror READONLY / ISOLATED / SERVICE declaration flags onto the type mask.
    // (Compound assignment performs the implicit narrowing cast.)
    int typeFlags = 0;
    typeFlags |= flags.contains(Flag.READONLY) ? Flags.READONLY : 0;
    typeFlags |= flags.contains(Flag.ISOLATED) ? Flags.ISOLATED : 0;
    typeFlags |= flags.contains(Flag.SERVICE) ? Flags.SERVICE : 0;

    BTypeSymbol objectSymbol = Symbols.createObjectSymbol(Flags.asMask(flags), Names.EMPTY,
            env.enclPkg.symbol.pkgID, null, env.scope.owner, objectTypeNode.pos, SOURCE);

    BObjectType objectType = new BObjectType(objectSymbol, typeFlags);
    objectSymbol.type = objectType;
    objectTypeNode.symbol = objectSymbol;

    resultType = objectType;
}
// Resolve a record type node. On first resolution, a record symbol and type are
// created; records defined inline (not at module level) additionally get a generated
// anonymous name and are defined in the enclosing scope (symbol first, then node —
// the order matters for the symbol-enter pass). Subsequent visits reuse the cached
// symbol's type.
public void visit(BLangRecordTypeNode recordTypeNode) {
    if (recordTypeNode.symbol == null) {
        // Anonymous records are public; named ones carry no implicit flags here.
        EnumSet<Flag> flags = recordTypeNode.isAnonymous ? EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS)
                : EnumSet.noneOf(Flag.class);
        BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(flags), Names.EMPTY,
                env.enclPkg.symbol.pkgID, null,
                env.scope.owner, recordTypeNode.pos,
                recordTypeNode.isAnonymous ? VIRTUAL : SOURCE);
        BRecordType recordType = new BRecordType(recordSymbol);
        recordSymbol.type = recordType;
        recordTypeNode.symbol = recordSymbol;
        // Inline (non-module-level) records: generate an anonymous type name and
        // define both the symbol and the node in the enclosing scope.
        if (env.node.getKind() != NodeKind.PACKAGE) {
            recordSymbol.name = names.fromString(
                    anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
            symbolEnter.defineSymbol(recordTypeNode.pos, recordTypeNode.symbol, env);
            symbolEnter.defineNode(recordTypeNode, env);
        }
        resultType = recordType;
    } else {
        // Already resolved earlier; reuse the cached symbol's type.
        resultType = recordTypeNode.symbol.type;
    }
}
// Resolve a stream type node: stream<constraint, completion>. The completion type
// defaults to nil when no error type is declared.
public void visit(BLangStreamType streamTypeNode) {
    BType type = resolveTypeNode(streamTypeNode.type, env);
    BType constraintType = resolveTypeNode(streamTypeNode.constraint, env);
    // Completion type defaults to nil when absent.
    BType error = streamTypeNode.error != null ? resolveTypeNode(streamTypeNode.error, env) : symTable.nilType;
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BType streamType = new BStreamType(TypeTags.STREAM, constraintType, error, null);
    // Clone the base stream type's symbol identity for the constrained type.
    BTypeSymbol typeSymbol = type.tsymbol;
    streamType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.originalName, typeSymbol.pkgID, streamType,
            typeSymbol.owner, streamTypeNode.pos, SOURCE);

    markParameterizedType(streamType, constraintType);
    // NOTE(review): 'error' appears to never be null here (the ternary above yields
    // nilType at minimum), so this guard looks redundant — confirm before removing.
    if (error != null) {
        markParameterizedType(streamType, error);
    }
    resultType = streamType;
}
// Resolve a table type node: table<constraint> with an optional key, given either
// as a key type constraint ('key<K>') or as a key specifier listing field names
// ('key(a, b)'). Map-constrained tables may not declare keys outside lang-lib modules.
public void visit(BLangTableTypeNode tableTypeNode) {
    BType type = resolveTypeNode(tableTypeNode.type, env);
    BType constraintType = resolveTypeNode(tableTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BTableType tableType = new BTableType(TypeTags.TABLE, constraintType, null);
    BTypeSymbol typeSymbol = type.tsymbol;
    tableType.tsymbol = Symbols.createTypeSymbol(SymTag.TYPE, Flags.asMask(EnumSet.noneOf(Flag.class)),
            typeSymbol.name, typeSymbol.originalName, env.enclPkg.symbol.pkgID, tableType,
            env.scope.owner, tableTypeNode.pos, SOURCE);
    tableType.tsymbol.flags = typeSymbol.flags;
    tableType.constraintPos = tableTypeNode.constraint.pos;
    tableType.isTypeInlineDefined = tableTypeNode.isTypeInlineDefined;

    if (tableTypeNode.tableKeyTypeConstraint != null) {
        // 'key<K>' form: resolve the key constraint type.
        tableType.keyTypeConstraint = resolveTypeNode(tableTypeNode.tableKeyTypeConstraint.keyType, env);
        tableType.keyPos = tableTypeNode.tableKeyTypeConstraint.pos;
    } else if (tableTypeNode.tableKeySpecifier != null) {
        // 'key(a, b)' form: collect the key field names.
        BLangTableKeySpecifier tableKeySpecifier = tableTypeNode.tableKeySpecifier;
        List<String> fieldNameList = new ArrayList<>();
        for (IdentifierNode identifier : tableKeySpecifier.fieldNameIdentifierList) {
            fieldNameList.add(((BLangIdentifier) identifier).value);
        }
        tableType.fieldNameList = fieldNameList;
        tableType.keyPos = tableKeySpecifier.pos;
    }

    // Map-constrained tables cannot declare keys, except inside lang-lib modules.
    if (constraintType.tag == TypeTags.MAP &&
            (tableType.fieldNameList != null || tableType.keyTypeConstraint != null) &&
            !tableType.tsymbol.owner.getFlags().contains(Flag.LANG_LIB)) {
        dlog.error(tableType.keyPos,
                DiagnosticErrorCode.KEY_CONSTRAINT_NOT_SUPPORTED_FOR_TABLE_WITH_MAP_CONSTRAINT);
        resultType = symTable.semanticError;
        return;
    }

    markParameterizedType(tableType, constraintType);
    tableTypeNode.tableType = tableType;
    resultType = tableType;
}
/**
 * Resolve a finite (singleton/value-space) type node: create a finite type symbol
 * and add each literal of the value space, normalizing each literal's type to the
 * canonical type for its tag first.
 */
public void visit(BLangFiniteTypeNode finiteTypeNode) {
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE,
            Flags.asMask(EnumSet.noneOf(Flag.class)), Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, finiteTypeNode.pos, SOURCE);
    BFiniteType finiteType = new BFiniteType(finiteTypeSymbol);
    for (BLangExpression valueExpr : finiteTypeNode.valueSpace) {
        // Normalize the literal's type to the canonical type for its tag.
        BLangLiteral literal = (BLangLiteral) valueExpr;
        literal.setBType(symTable.getTypeFromTag(literal.getBType().tag));
        finiteType.addValue(literal);
    }
    finiteTypeSymbol.type = finiteType;

    resultType = finiteType;
}
/**
 * Resolve a tuple type node: resolve every member type (and the rest type, if any)
 * and build a {@code BTupleType}. Resolution aborts with {@code noType} as soon as
 * any constituent fails to resolve.
 */
public void visit(BLangTupleTypeNode tupleTypeNode) {
    List<BType> memberTypes = new ArrayList<>();
    for (BLangType memberTypeNode : tupleTypeNode.memberTypeNodes) {
        BType memberType = resolveTypeNode(memberTypeNode, env);
        if (memberType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        memberTypes.add(memberType);
    }

    BTypeSymbol tupleTypeSymbol = Symbols.createTypeSymbol(SymTag.TUPLE_TYPE,
            Flags.asMask(EnumSet.of(Flag.PUBLIC)), Names.EMPTY, env.enclPkg.symbol.pkgID, null,
            env.scope.owner, tupleTypeNode.pos, SOURCE);
    BTupleType tupleType = new BTupleType(tupleTypeSymbol, memberTypes);
    tupleTypeSymbol.type = tupleType;

    // Optional rest type: [T1, T2, R...]
    if (tupleTypeNode.restParamType != null) {
        BType tupleRestType = resolveTypeNode(tupleTypeNode.restParamType, env);
        if (tupleRestType == symTable.noType) {
            resultType = symTable.noType;
            return;
        }
        tupleType.restType = tupleRestType;
        markParameterizedType(tupleType, tupleType.restType);
    }

    // Propagate the PARAMETERIZED flag from any member type.
    markParameterizedType(tupleType, memberTypes);
    resultType = tupleType;
}
// Resolve an error type node. A plain 'error' with the default detail type (and not
// distinct, and outside lang.annotations) maps straight to the shared built-in error
// type; otherwise a new error type and symbol are created. Inline error types get a
// generated anonymous name and a scope entry.
public void visit(BLangErrorType errorTypeNode) {
    // Resolve the detail type; default to the built-in detail map when unspecified.
    BType detailType = Optional.ofNullable(errorTypeNode.detailType)
            .map(bLangType -> resolveTypeNode(bLangType, env)).orElse(symTable.detailType);

    if (errorTypeNode.isAnonymous) {
        errorTypeNode.flagSet.add(Flag.PUBLIC);
        errorTypeNode.flagSet.add(Flag.ANONYMOUS);
    }

    boolean distinctErrorDef = errorTypeNode.flagSet.contains(Flag.DISTINCT);
    // Fast path: plain 'error' reuses the shared built-in error type.
    if (detailType == symTable.detailType && !distinctErrorDef &&
            !this.env.enclPkg.packageID.equals(PackageID.ANNOTATIONS)) {
        resultType = symTable.errorType;
        return;
    }

    BErrorTypeSymbol errorTypeSymbol = Symbols
            .createErrorSymbol(Flags.asMask(errorTypeNode.flagSet), Names.EMPTY, env.enclPkg.symbol.pkgID,
                    null, env.scope.owner, errorTypeNode.pos, SOURCE);

    // Inline (non-module-level) error types get an anonymous name and a scope entry.
    if (env.node.getKind() != NodeKind.PACKAGE) {
        errorTypeSymbol.name = names.fromString(
                anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
        symbolEnter.defineSymbol(errorTypeNode.pos, errorTypeSymbol, env);
    }

    BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
    errorType.flags |= errorTypeSymbol.flags;
    errorTypeSymbol.type = errorType;
    markParameterizedType(errorType, detailType);

    errorType.typeIdSet = BTypeIdSet.emptySet();
    resultType = errorType;
}
/**
 * Resolve a constrained type node ({@code future<T>}, {@code map<T>},
 * {@code typedesc<T>}, {@code xml<T>}). The base type decides which concrete
 * constrained type is built; XML constraints are additionally validated.
 * For any other base type the result of resolving the base is left untouched.
 */
public void visit(BLangConstrainedType constrainedTypeNode) {
    BType type = resolveTypeNode(constrainedTypeNode.type, env);
    BType constraintType = resolveTypeNode(constrainedTypeNode.constraint, env);
    if (constraintType == symTable.noType) {
        resultType = symTable.noType;
        return;
    }

    BType constrainedType;
    switch (type.tag) {
        case TypeTags.FUTURE:
            constrainedType = new BFutureType(TypeTags.FUTURE, constraintType, null);
            break;
        case TypeTags.MAP:
            constrainedType = new BMapType(TypeTags.MAP, constraintType, null);
            break;
        case TypeTags.TYPEDESC:
            constrainedType = new BTypedescType(constraintType, null);
            break;
        case TypeTags.XML:
            if (constraintType.tag == TypeTags.PARAMETERIZED_TYPE) {
                // Parameterized constraint: validate the underlying typedesc constraint.
                BType typedescType = ((BParameterizedType) constraintType).paramSymbol.type;
                BType typedescConstraint = ((BTypedescType) typedescType).constraint;
                validateXMLConstraintType(typedescConstraint, constrainedTypeNode.pos);
            } else {
                validateXMLConstraintType(constraintType, constrainedTypeNode.pos);
            }
            constrainedType = new BXMLType(constraintType, null);
            break;
        default:
            // Not a constrainable base type: keep whatever resultType resolution set.
            return;
    }

    // Clone the base type's symbol identity for the constrained type.
    BTypeSymbol typeSymbol = type.tsymbol;
    constrainedType.tsymbol = Symbols.createTypeSymbol(typeSymbol.tag, typeSymbol.flags, typeSymbol.name,
            typeSymbol.originalName, typeSymbol.pkgID, constrainedType, typeSymbol.owner,
            constrainedTypeNode.pos, SOURCE);
    markParameterizedType(constrainedType, constraintType);
    resultType = constrainedType;
}
/**
 * Validate that an xml<T> constraint is an XML subtype (or 'never'); unions are
 * validated member-wise. Logs INCOMPATIBLE_TYPE_CONSTRAINT on violation.
 */
private void validateXMLConstraintType(BType constraintType, Location pos) {
    int tag = constraintType.tag;
    if (tag == TypeTags.UNION) {
        checkUnionTypeForXMLSubTypes((BUnionType) constraintType, pos);
        return;
    }
    if (!TypeTags.isXMLTypeTag(tag) && tag != TypeTags.NEVER) {
        dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType, constraintType);
    }
}
/**
 * Check every member of a union used as an xml<T> constraint, logging
 * INCOMPATIBLE_TYPE_CONSTRAINT for members that are not XML subtypes.
 * Nested unions are validated recursively.
 */
private void checkUnionTypeForXMLSubTypes(BUnionType constraintUnionType, Location pos) {
    for (BType memberType : constraintUnionType.getMemberTypes()) {
        if (memberType.tag == TypeTags.UNION) {
            checkUnionTypeForXMLSubTypes((BUnionType) memberType, pos);
            // Move to the next member: the UNION tag itself is not an XML type tag,
            // so falling through to the check below would log a spurious error for
            // every nested union member on top of the recursive validation.
            continue;
        }
        if (!TypeTags.isXMLTypeTag(memberType.tag)) {
            dlog.error(pos, DiagnosticErrorCode.INCOMPATIBLE_TYPE_CONSTRAINT, symTable.xmlType,
                    constraintUnionType);
        }
    }
}
// Resolve a user-defined type reference. Search order: annotation space (when the
// current scope is owned by an annotation), then the main space, then the root scope
// for built-in names. A main-space hit that is a variable inside a function may be a
// dependently-typed return type referencing a typedesc parameter; such references are
// only valid on external functions and produce a BParameterizedType.
public void visit(BLangUserDefinedType userDefinedTypeNode) {
    // 1) Resolve the package alias and type name
    // 2) Lookup the typename in the package scope
    Name pkgAlias = names.fromIdNode(userDefinedTypeNode.pkgAlias);
    Name typeName = names.fromIdNode(userDefinedTypeNode.typeName);
    BSymbol symbol = symTable.notFoundSymbol;

    // Annotation-owned scopes resolve against the annotation symbol space first.
    if (env.scope.owner.tag == SymTag.ANNOTATION) {
        symbol = lookupAnnotationSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
    }

    // Fall back to the main symbol space.
    if (symbol == symTable.notFoundSymbol) {
        BSymbol tempSymbol = lookupMainSpaceSymbolInPackage(userDefinedTypeNode.pos, env, pkgAlias, typeName);
        if ((tempSymbol.tag & SymTag.TYPE) == SymTag.TYPE) {
            symbol = tempSymbol;
        } else if (Symbols.isTagOn(tempSymbol, SymTag.VARIABLE) && env.node.getKind() == NodeKind.FUNCTION) {
            // The name resolved to a variable inside a function: possibly a
            // dependently-typed return type referencing a typedesc parameter.
            BLangFunction func = (BLangFunction) env.node;
            boolean errored = false;

            // Dependently-typed functions must be external (no source body).
            if (func.returnTypeNode == null ||
                    (func.hasBody() && func.body.getKind() != NodeKind.EXTERN_FUNCTION_BODY)) {
                dlog.error(userDefinedTypeNode.pos,
                        DiagnosticErrorCode.INVALID_NON_EXTERNAL_DEPENDENTLY_TYPED_FUNCTION);
                errored = true;
            }

            // The referenced parameter itself must be a typedesc.
            if (tempSymbol.type != null && tempSymbol.type.tag != TypeTags.TYPEDESC) {
                dlog.error(userDefinedTypeNode.pos, DiagnosticErrorCode.INVALID_PARAM_TYPE_FOR_RETURN_TYPE,
                        tempSymbol.type);
                errored = true;
            }

            if (errored) {
                this.resultType = symTable.semanticError;
                return;
            }

            ParameterizedTypeInfo parameterizedTypeInfo =
                    getTypedescParamValueType(func.requiredParams, tempSymbol);
            BType paramValType = parameterizedTypeInfo == null ? null : parameterizedTypeInfo.paramValueType;

            if (paramValType == symTable.semanticError) {
                this.resultType = symTable.semanticError;
                return;
            }

            if (paramValType != null) {
                // Build a parameterized type bound to the referenced typedesc parameter.
                BTypeSymbol tSymbol = new BTypeSymbol(SymTag.TYPE, Flags.PARAMETERIZED | tempSymbol.flags,
                        tempSymbol.name, tempSymbol.originalName, tempSymbol.pkgID,
                        null, func.symbol, tempSymbol.pos, VIRTUAL);
                tSymbol.type = new BParameterizedType(paramValType, (BVarSymbol) tempSymbol,
                        tSymbol, tempSymbol.name, parameterizedTypeInfo.index);
                tSymbol.type.flags |= Flags.PARAMETERIZED;

                this.resultType = tSymbol.type;
                userDefinedTypeNode.symbol = tSymbol;
                return;
            }
        }
    }

    // Last resort: built-in names in the root scope.
    if (symbol == symTable.notFoundSymbol) {
        symbol = lookupMemberSymbol(userDefinedTypeNode.pos, symTable.rootScope, this.env, typeName,
                SymTag.VARIABLE_NAME);
    }

    if (this.env.logErrors && symbol == symTable.notFoundSymbol) {
        // Suppress the diagnostic for synthetic/missing nodes and known-unknown refs.
        if (!missingNodesHelper.isMissingNode(pkgAlias) && !missingNodesHelper.isMissingNode(typeName) &&
                !symbolEnter.isUnknownTypeRef(userDefinedTypeNode)) {
            dlog.error(userDefinedTypeNode.pos, diagCode, typeName);
        }
        resultType = symTable.semanticError;
        return;
    }

    userDefinedTypeNode.symbol = symbol;
    resultType = symbol.type;
}
/**
 * Find the typedesc parameter referenced by {@code varSym} among {@code params} and
 * determine the value type that parameterizes the dependently-typed return type.
 *
 * @param params function's required parameters
 * @param varSym symbol of the referenced typedesc parameter
 * @return the parameterized-type info (holding {@code semanticError} when the
 *         parameter's default is not a valid typedesc reference), or {@code null}
 *         when no parameter matches the symbol's name
 */
private ParameterizedTypeInfo getTypedescParamValueType(List<BLangSimpleVariable> params, BSymbol varSym) {
    for (int i = 0; i < params.size(); i++) {
        BLangSimpleVariable param = params.get(i);

        if (param.name.value.equals(varSym.name.value)) {
            // No default, or an inferred default: use the typedesc's constraint.
            if (param.expr == null || param.expr.getKind() == NodeKind.INFER_TYPEDESC_EXPR) {
                return new ParameterizedTypeInfo(((BTypedescType) varSym.type).constraint, i);
            }

            NodeKind defaultValueExprKind = param.expr.getKind();

            // Explicit typedesc expression default: resolve its type node.
            if (defaultValueExprKind == NodeKind.TYPEDESC_EXPRESSION) {
                return new ParameterizedTypeInfo(
                        resolveTypeNode(((BLangTypedescExpr) param.expr).typeNode, this.env), i);
            }

            // Variable-reference default: use the referenced symbol's type.
            if (defaultValueExprKind == NodeKind.SIMPLE_VARIABLE_REF) {
                Name varName = names.fromIdNode(((BLangSimpleVarRef) param.expr).variableName);
                BSymbol typeRefSym = lookupSymbolInMainSpace(this.env, varName);
                if (typeRefSym != symTable.notFoundSymbol) {
                    return new ParameterizedTypeInfo(typeRefSym.type, i);
                }
                return new ParameterizedTypeInfo(symTable.semanticError);
            }

            // Any other default expression is invalid for a typedesc parameter.
            dlog.error(param.pos, DiagnosticErrorCode.INVALID_TYPEDESC_PARAM);
            return new ParameterizedTypeInfo(symTable.semanticError);
        }
    }

    return null;
}
// Resolve a function type node: build the invokable type from the parameters, rest
// parameter and return type, then reject the result (semanticError) when
// infer-typedesc parameter defaults are used in an invalid position.
@Override
public void visit(BLangFunctionTypeNode functionTypeNode) {
    List<BLangVariable> params = functionTypeNode.getParams();
    Location pos = functionTypeNode.pos;
    BLangType returnTypeNode = functionTypeNode.returnTypeNode;
    BType invokableType = createInvokableType(params, functionTypeNode.restParam, returnTypeNode,
            Flags.asMask(functionTypeNode.flagSet), env, pos);
    resultType = validateInferTypedescParams(pos, params, returnTypeNode == null ?
            null : returnTypeNode.getBType()) ? invokableType : symTable.semanticError;
}
/**
 * Create a {@code BInvokableType} (function type) from the given parameter,
 * rest-parameter and return-type nodes, along with its invokable type symbol.
 * Returns {@code symTable.noType} as soon as any constituent type fails to resolve.
 * Also reports duplicate parameter names and required-after-defaultable parameters.
 *
 * @param paramVars    parameter variable nodes
 * @param restVariable rest parameter node, or null
 * @param retTypeVar   return type node
 * @param flags        function flags mask
 * @param env          resolution environment
 * @param location     position for the created type symbol
 * @return the invokable type, or {@code symTable.noType} on resolution failure
 */
public BType createInvokableType(List<? extends BLangVariable> paramVars,
                                 BLangVariable restVariable,
                                 BLangType retTypeVar,
                                 long flags,
                                 SymbolEnv env,
                                 Location location) {
    List<BType> paramTypes = new ArrayList<>();
    List<BVarSymbol> params = new ArrayList<>();
    boolean foundDefaultableParam = false;
    List<String> paramNames = new ArrayList<>();

    // 'function' (ANY_FUNCTION) types carry no parameter/return information at all.
    if (Symbols.isFlagOn(flags, Flags.ANY_FUNCTION)) {
        BInvokableType bInvokableType = new BInvokableType(null, null, null, null);
        bInvokableType.flags = flags;
        BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
                env.enclPkg.symbol.pkgID, bInvokableType,
                env.scope.owner, location, SOURCE);
        tsymbol.params = null;
        tsymbol.restParam = null;
        tsymbol.returnType = null;
        bInvokableType.tsymbol = tsymbol;
        return bInvokableType;
    }

    for (BLangVariable paramNode : paramVars) {
        BLangSimpleVariable param = (BLangSimpleVariable) paramNode;
        Name paramName = names.fromIdNode(param.name);
        Name paramOrigName = names.originalNameFromIdNode(param.name);
        // Report duplicate parameter names (unnamed parameters are exempt).
        if (paramName != Names.EMPTY) {
            if (paramNames.contains(paramName.value)) {
                dlog.error(param.name.pos, DiagnosticErrorCode.REDECLARED_SYMBOL, paramName.value);
            } else {
                paramNames.add(paramName.value);
            }
        }
        BType type = resolveTypeNode(param.getTypeNode(), env);
        // Abort as soon as any constituent type fails to resolve.
        if (type == symTable.noType) {
            return symTable.noType;
        }
        paramNode.setBType(type);
        paramTypes.add(type);

        long paramFlags = Flags.asMask(paramNode.flagSet);
        BVarSymbol symbol = new BVarSymbol(paramFlags, paramName, paramOrigName, env.enclPkg.symbol.pkgID,
                type, env.scope.owner, param.pos, SOURCE);
        param.symbol = symbol;

        if (param.expr != null) {
            // A default value makes the parameter defaultable/optional.
            foundDefaultableParam = true;
            symbol.isDefaultable = true;
            symbol.flags |= Flags.OPTIONAL;
        } else if (foundDefaultableParam) {
            // Required parameters may not follow defaultable ones.
            dlog.error(param.pos, DiagnosticErrorCode.REQUIRED_PARAM_DEFINED_AFTER_DEFAULTABLE_PARAM);
        }

        params.add(symbol);
    }

    BType retType = resolveTypeNode(retTypeVar, env);
    if (retType == symTable.noType) {
        return symTable.noType;
    }

    BVarSymbol restParam = null;
    BType restType = null;

    if (restVariable != null) {
        restType = resolveTypeNode(restVariable.typeNode, env);
        if (restType == symTable.noType) {
            return symTable.noType;
        }
        BLangIdentifier id = ((BLangSimpleVariable) restVariable).name;
        restVariable.setBType(restType);
        restParam = new BVarSymbol(Flags.asMask(restVariable.flagSet),
                names.fromIdNode(id), names.originalNameFromIdNode(id),
                env.enclPkg.symbol.pkgID, restType, env.scope.owner, restVariable.pos, SOURCE);
    }

    BInvokableType bInvokableType = new BInvokableType(paramTypes, restType, retType, null);
    bInvokableType.flags = flags;
    BInvokableTypeSymbol tsymbol = Symbols.createInvokableTypeSymbol(SymTag.FUNCTION_TYPE, flags,
            env.enclPkg.symbol.pkgID, bInvokableType,
            env.scope.owner, location, SOURCE);
    tsymbol.params = params;
    tsymbol.restParam = restParam;
    tsymbol.returnType = retType;
    bInvokableType.tsymbol = tsymbol;

    // Propagate the PARAMETERIZED flag from any constituent type.
    List<BType> allConstituentTypes = new ArrayList<>(paramTypes);
    allConstituentTypes.add(restType);
    allConstituentTypes.add(retType);
    markParameterizedType(bInvokableType, allConstituentTypes);

    return bInvokableType;
}
/**
 * Lookup all the visible in-scope symbols for a given environment scope, merging
 * the current scope's entries with those of every enclosing scope. Inner-scope
 * entries take precedence; outer-scope module-level variables are not appended
 * to an already-present name's entry list.
 *
 * @param env Symbol environment
 * @return all the visible symbols, keyed by name
 */
public Map<Name, List<ScopeEntry>> getAllVisibleInScopeSymbols(SymbolEnv env) {
    Map<Name, List<ScopeEntry>> visibleEntries = new HashMap<>();
    // Seed with the current scope's own entries.
    for (Map.Entry<Name, ScopeEntry> scoped : env.scope.entries.entrySet()) {
        ArrayList<ScopeEntry> entryList = new ArrayList<>();
        entryList.add(scoped.getValue());
        visibleEntries.put(scoped.getKey(), entryList);
    }
    if (env.enclEnv == null) {
        return visibleEntries;
    }
    // Merge in the enclosing environment's visible symbols.
    for (Map.Entry<Name, List<ScopeEntry>> outer : getAllVisibleInScopeSymbols(env.enclEnv).entrySet()) {
        List<ScopeEntry> existing = visibleEntries.get(outer.getKey());
        if (existing == null) {
            visibleEntries.put(outer.getKey(), outer.getValue());
            continue;
        }
        for (ScopeEntry scopeEntry : outer.getValue()) {
            // Keep inner-scope precedence; skip duplicates and module-level vars.
            if (!existing.contains(scopeEntry) && !isModuleLevelVar(scopeEntry.symbol)) {
                existing.add(scopeEntry);
            }
        }
    }
    return visibleEntries;
}
// Build the equality operator symbol for the given operand types, or notFoundSymbol
// when the types cannot be compared. Deep equality (== / !=) requires a valid
// equality intersection; reference equality (=== / !==) requires a non-error type
// intersection. Mixed value/reference operands are lifted to 'any' via implicit
// casts, with === / !== degrading to == / != on the lifted operands.
public BSymbol getBinaryEqualityForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType,
                                            BLangBinaryExpr binaryExpr, SymbolEnv env) {
    boolean validEqualityIntersectionExists;
    switch (opKind) {
        case EQUAL:
        case NOT_EQUAL:
            validEqualityIntersectionExists = types.validEqualityIntersectionExists(lhsType, rhsType);
            break;
        case REF_EQUAL:
        case REF_NOT_EQUAL:
            validEqualityIntersectionExists =
                    types.getTypeIntersection(Types.IntersectionContext.compilerInternalIntersectionTestContext(),
                            lhsType, rhsType, env) != symTable.semanticError;
            break;
        default:
            // Not an equality operator.
            return symTable.notFoundSymbol;
    }

    if (validEqualityIntersectionExists) {
        if ((!types.isValueType(lhsType) && !types.isValueType(rhsType)) ||
                (types.isValueType(lhsType) && types.isValueType(rhsType))) {
            // Both sides are reference types, or both are value types: compare directly.
            return createEqualityOperator(opKind, lhsType, rhsType);
        } else {
            // Mixed value/reference operands: lift both sides to 'any'.
            types.setImplicitCastExpr(binaryExpr.rhsExpr, rhsType, symTable.anyType);
            types.setImplicitCastExpr(binaryExpr.lhsExpr, lhsType, symTable.anyType);

            switch (opKind) {
                case REF_EQUAL:
                    // Reference equality on 'any' degrades to value equality.
                    return createEqualityOperator(OperatorKind.EQUAL, symTable.anyType,
                            symTable.anyType);
                case REF_NOT_EQUAL:
                    return createEqualityOperator(OperatorKind.NOT_EQUAL, symTable.anyType,
                            symTable.anyType);
                default:
                    return createEqualityOperator(opKind, symTable.anyType, symTable.anyType);
            }
        }
    }
    return symTable.notFoundSymbol;
}
/**
 * Build the shift operator symbol (<<, >>, >>>) for the given operand types, or
 * notFoundSymbol when either operand lacks a valid integer type. Left shifts always
 * produce a plain int; right shifts on fixed-width unsigned subtypes preserve the
 * left operand's subtype.
 */
public BSymbol getBitwiseShiftOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (!isBinaryShiftOperator(opKind)) {
        return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }

    if (opKind == OperatorKind.BITWISE_LEFT_SHIFT) {
        // << always yields int.
        return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }

    // >> and >>> preserve fixed-width unsigned subtypes of the left operand.
    switch (lhsType.tag) {
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.BYTE:
            return createBinaryOperator(opKind, lhsType, rhsType, lhsType);
        default:
            return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
    }
}
// Build the arithmetic operator symbol (+, -, *, /, %) for the given operand types,
// or notFoundSymbol when the operands are not compatible. '+' additionally supports
// string/xml concatenation.
public BSymbol getArithmeticOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    boolean validNumericOrStringTypeExists;
    switch (opKind) {
        case ADD:
            // '+' is both numeric addition and string/xml concatenation.
            validNumericOrStringTypeExists = (types.validNumericTypeExists(lhsType) &&
                    types.validNumericTypeExists(rhsType)) || (types.validStringOrXmlTypeExists(lhsType) &&
                    types.validStringOrXmlTypeExists(rhsType));
            break;
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            validNumericOrStringTypeExists = types.validNumericTypeExists(lhsType) &&
                    types.validNumericTypeExists(rhsType);
            break;
        default:
            // Not an arithmetic operator.
            return symTable.notFoundSymbol;
    }

    if (validNumericOrStringTypeExists) {
        BType compatibleType1 = types.findCompatibleType(lhsType);
        BType compatibleType2 = types.findCompatibleType(rhsType);
        // Differing basic numeric kinds (e.g. int vs float) have no common operator.
        if (types.isBasicNumericType(compatibleType1) && compatibleType1 != compatibleType2) {
            return symTable.notFoundSymbol;
        }
        // Pick the side with the larger type tag as the result type.
        // NOTE(review): this relies on TypeTags ordering between compatible
        // numeric kinds — confirm that assumption holds before changing.
        if (compatibleType1.tag < compatibleType2.tag) {
            return createBinaryOperator(opKind, lhsType, rhsType, compatibleType2);
        }
        return createBinaryOperator(opKind, lhsType, rhsType, compatibleType1);
    }
    return symTable.notFoundSymbol;
}
/**
 * Build the bitwise operator symbol (&, |, ^) for the given operand types, or
 * notFoundSymbol when either operand lacks a valid integer type. '&' narrows to a
 * fixed-width unsigned subtype when either operand has one (left operand wins);
 * '|' and '^' always yield int.
 */
public BSymbol getBinaryBitwiseOpsForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (opKind != OperatorKind.BITWISE_AND && opKind != OperatorKind.BITWISE_OR
            && opKind != OperatorKind.BITWISE_XOR) {
        return symTable.notFoundSymbol;
    }
    if (!types.validIntegerTypeExists(lhsType) || !types.validIntegerTypeExists(rhsType)) {
        return symTable.notFoundSymbol;
    }

    if (opKind == OperatorKind.BITWISE_AND) {
        // '&' narrows to a fixed-width unsigned subtype when one is present;
        // the left operand is preferred.
        switch (lhsType.tag) {
            case TypeTags.UNSIGNED8_INT:
            case TypeTags.BYTE:
            case TypeTags.UNSIGNED16_INT:
            case TypeTags.UNSIGNED32_INT:
                return createBinaryOperator(opKind, lhsType, rhsType, lhsType);
        }
        switch (rhsType.tag) {
            case TypeTags.UNSIGNED8_INT:
            case TypeTags.BYTE:
            case TypeTags.UNSIGNED16_INT:
            case TypeTags.UNSIGNED32_INT:
                return createBinaryOperator(opKind, lhsType, rhsType, rhsType);
        }
    }

    // '|', '^', and non-narrowing '&' all yield int.
    return createBinaryOperator(opKind, lhsType, rhsType, symTable.intType);
}
/**
 * Define binary comparison operator for valid ordered types.
 *
 * @param opKind Binary operator kind
 * @param lhsType Type of the left hand side value
 * @param rhsType Type of the right hand side value
 * @return <, <=, >, or >= symbol; {@code notFoundSymbol} when either operand is
 *         unordered or the operands are not of the same ordered type
 */
public BSymbol getBinaryComparisonOpForTypeSets(OperatorKind opKind, BType lhsType, BType rhsType) {
    if (!isBinaryComparisonOperator(opKind)) {
        return symTable.notFoundSymbol;
    }
    // Both operands must be ordered and of the same ordered type.
    boolean validOrderedTypesExist = types.isOrderedType(lhsType, false)
            && types.isOrderedType(rhsType, false) && types.isSameOrderedType(lhsType, rhsType);
    if (!validOrderedTypesExist) {
        return symTable.notFoundSymbol;
    }
    // Each comparison kind maps to itself, so forward opKind directly.
    return createBinaryComparisonOperator(opKind, lhsType, rhsType);
}
/**
 * Whether the given operator is a bitwise shift operator (<<, >>, >>>).
 */
public boolean isBinaryShiftOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Whether the given operator is an arithmetic operator (+, -, /, *, %).
 */
public boolean isArithmeticOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case ADD:
        case SUB:
        case DIV:
        case MUL:
        case MOD:
            return true;
        default:
            return false;
    }
}
/**
 * Whether the given operator is an ordering comparison operator (<, <=, >, >=).
 */
public boolean isBinaryComparisonOperator(OperatorKind binaryOpKind) {
    switch (binaryOpKind) {
        case LESS_THAN:
        case LESS_EQUAL:
        case GREATER_THAN:
        case GREATER_EQUAL:
            return true;
        default:
            return false;
    }
}
/**
 * Propagate the PARAMETERIZED flag from a constituent type onto the enclosing type
 * (and its symbol).
 *
 * @return true when the constituent was parameterized and the flag was propagated
 */
public boolean markParameterizedType(BType type, BType constituentType) {
    if (!Symbols.isFlagOn(constituentType.flags, Flags.PARAMETERIZED)) {
        return false;
    }
    type.tsymbol.flags |= Flags.PARAMETERIZED;
    type.flags |= Flags.PARAMETERIZED;
    return true;
}
/**
 * Propagate the PARAMETERIZED flag onto the enclosing type when any constituent
 * type carries it. Stops at the first parameterized constituent; null entries are
 * skipped. No-op when the enclosing type is already marked.
 */
public void markParameterizedType(BType enclosingType, Collection<BType> constituentTypes) {
    if (Symbols.isFlagOn(enclosingType.flags, Flags.PARAMETERIZED)) {
        return;
    }
    for (BType constituent : constituentTypes) {
        if (constituent != null && markParameterizedType(enclosingType, constituent)) {
            return;
        }
    }
}
/**
 * Walks a scope-entry chain looking for an operator symbol whose parameter type tags
 * match the given argument types exactly (same arity, same tag at each position).
 *
 * @param entry the first scope entry in the chain (may be {@code NOT_FOUND_ENTRY})
 * @param types the argument types to match against each candidate's parameter types
 * @return the first matching operator symbol, or {@code symTable.notFoundSymbol} if none matches
 */
private BSymbol resolveOperator(ScopeEntry entry, List<BType> types) {
    BSymbol foundSymbol = symTable.notFoundSymbol;
    while (entry != NOT_FOUND_ENTRY) {
        BInvokableType opType = (BInvokableType) entry.symbol.type;
        if (types.size() == opType.paramTypes.size()) {
            boolean match = true;
            for (int i = 0; i < types.size(); i++) {
                if (types.get(i).tag != opType.paramTypes.get(i).tag) {
                    match = false;
                    // First mismatch disqualifies this candidate; no need to compare the rest.
                    break;
                }
            }
            if (match) {
                foundSymbol = entry.symbol;
                break;
            }
        }
        entry = entry.next;
    }
    return foundSymbol;
}
/**
 * Resolves a built-in type node by looking its type name up in the root scope and
 * stores the resolved type on both the node and {@code resultType}.
 *
 * @param typeNode the AST node being resolved
 * @param typeKind the built-in type kind to look up
 * @param env      the current symbol environment
 */
private void visitBuiltInTypeNode(BLangType typeNode, TypeKind typeKind, SymbolEnv env) {
Name typeName = names.fromTypeKind(typeKind);
BSymbol typeSymbol = lookupMemberSymbol(typeNode.pos, symTable.rootScope,
env, typeName, SymTag.TYPE);
if (typeSymbol == symTable.notFoundSymbol) {
// Reports with the diagnostic code currently set on this visitor (diagCode field).
dlog.error(typeNode.pos, diagCode, typeName);
}
// NOTE(review): on lookup failure this still assigns notFoundSymbol's type below —
// presumably that is a sentinel type; confirm downstream handling.
typeNode.setBType(typeSymbol.type);
resultType = typeSymbol.type;
}
/**
 * Collects all XML namespace symbols visible from the given environment into the map,
 * walking outwards through enclosing environments. Because inner scopes are visited
 * first, an inner namespace binding shadows an outer one with the same name.
 *
 * @param namespaces accumulator map of namespace name to symbol (mutated in place)
 * @param env        the environment to start from; recursion terminates at {@code null}
 */
private void addNamespacesInScope(Map<Name, BXMLNSSymbol> namespaces, SymbolEnv env) {
    if (env == null) {
        return;
    }
    env.scope.entries.forEach((name, scopeEntry) -> {
        if (scopeEntry.symbol.kind == SymbolKind.XMLNS) {
            // putIfAbsent keeps the innermost (already recorded) binding for a given name.
            namespaces.putIfAbsent(name, (BXMLNSSymbol) scopeEntry.symbol);
        }
    });
    addNamespacesInScope(namespaces, env.enclEnv);
}
/**
 * Decides whether a member symbol is accessible from the given environment:
 * public members are always accessible; non-private members are accessible from
 * the same package; private members only from within the owning type.
 *
 * @param env    the environment from which access is attempted
 * @param symbol the member symbol being accessed
 * @return {@code true} if access is allowed
 */
private boolean isMemberAccessAllowed(SymbolEnv env, BSymbol symbol) {
if (Symbols.isPublic(symbol)) {
return true;
}
if (!Symbols.isPrivate(symbol)) {
// Package-level visibility.
// NOTE(review): pkgID is compared with `==` — presumably PackageID instances
// are interned per module; confirm, otherwise this should use equals().
return env.enclPkg.symbol.pkgID == symbol.pkgID;
}
// Private member: allowed only when accessed from inside the owning type.
if (env.enclType != null) {
return env.enclType.getBType().tsymbol == symbol.owner;
}
return isMemberAllowed(env, symbol);
}
/**
 * Walks the chain of enclosing environments and returns {@code true} if any enclosing
 * invokable is an attached function whose receiver's type owns the given symbol
 * (i.e. we are inside a method of the owning type).
 *
 * @param env    the environment to start from; recursion terminates at {@code null}
 * @param symbol the member symbol being accessed
 * @return {@code true} if some enclosing invokable's receiver type owns the symbol
 */
private boolean isMemberAllowed(SymbolEnv env, BSymbol symbol) {
// Short-circuits: current env's receiver owns the symbol, OR any outer env does.
return env != null && (env.enclInvokable != null
&& env.enclInvokable.symbol.receiverSymbol != null
&& env.enclInvokable.symbol.receiverSymbol.type.tsymbol == symbol.owner
|| isMemberAllowed(env.enclEnv, symbol));
}
/**
 * Resolves an intersection type node ({@code T1 & T2 & ...}) to a single effective type.
 * Only two flavours of intersections are supported: those containing {@code readonly}
 * and those containing an error type; anything else is an UNSUPPORTED_TYPE_INTERSECTION.
 *
 * Returns {@code symTable.noType} if any constituent is still unresolved,
 * {@code symTable.semanticError} on an invalid intersection, an existing type when the
 * intersection collapses onto one of its constituents, a freshly defined error
 * intersection type, or an immutable clone of the effective type for readonly
 * intersections.
 *
 * @param intersectionTypeNode the AST node holding the constituent type nodes (at least two)
 * @return the resolved effective type (see above for sentinel values)
 */
private BType computeIntersectionType(BLangIntersectionTypeNode intersectionTypeNode) {
List<BLangType> constituentTypeNodes = intersectionTypeNode.constituentTypeNodes;
// Remembers which AST node produced each resolved type, to recover modifier flags later.
Map<BType, BLangType> typeBLangTypeMap = new HashMap<>();
boolean validIntersection = true;
boolean isErrorIntersection = false;
// True when the folded intersection is (pointer-)identical to an already defined type.
boolean isAlreadyExistingType = false;
// Resolve the first two constituents eagerly; the rest are folded in the loop below.
BLangType bLangTypeOne = constituentTypeNodes.get(0);
BType typeOne = resolveTypeNode(bLangTypeOne, env);
if (typeOne == symTable.noType) {
return symTable.noType;
}
typeBLangTypeMap.put(typeOne, bLangTypeOne);
BLangType bLangTypeTwo = constituentTypeNodes.get(1);
BType typeTwo = resolveTypeNode(bLangTypeTwo, env);
if (typeTwo == symTable.noType) {
return symTable.noType;
}
typeBLangTypeMap.put(typeTwo, bLangTypeTwo);
boolean hasReadOnlyType = typeOne == symTable.readonlyType || typeTwo == symTable.readonlyType;
if (typeOne.tag == TypeTags.ERROR || typeTwo.tag == TypeTags.ERROR) {
isErrorIntersection = true;
}
// Only `readonly` intersections and error intersections are supported.
if (!(hasReadOnlyType || isErrorIntersection)) {
dlog.error(intersectionTypeNode.pos,
DiagnosticErrorCode.UNSUPPORTED_TYPE_INTERSECTION, intersectionTypeNode);
return symTable.semanticError;
}
BType potentialIntersectionType = getPotentialIntersection(
Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
typeOne, typeTwo, this.env);
if (typeOne == potentialIntersectionType || typeTwo == potentialIntersectionType) {
isAlreadyExistingType = true;
}
LinkedHashSet<BType> constituentBTypes = new LinkedHashSet<>();
constituentBTypes.add(typeOne);
constituentBTypes.add(typeTwo);
if (potentialIntersectionType == symTable.semanticError) {
validIntersection = false;
} else {
// Fold the remaining constituents (index 2 onwards) into the running intersection.
for (int i = 2; i < constituentTypeNodes.size(); i++) {
BLangType bLangType = constituentTypeNodes.get(i);
BType type = resolveTypeNode(bLangType, env);
if (type.tag == TypeTags.ERROR) {
isErrorIntersection = true;
}
typeBLangTypeMap.put(type, bLangType);
if (!hasReadOnlyType) {
hasReadOnlyType = type == symTable.readonlyType;
}
if (type == symTable.noType) {
return symTable.noType;
}
BType tempIntersectionType = getPotentialIntersection(
Types.IntersectionContext.from(dlog, bLangTypeOne.pos, bLangTypeTwo.pos),
potentialIntersectionType, type, this.env);
if (tempIntersectionType == symTable.semanticError) {
validIntersection = false;
break;
}
// Track whether the running result is still an already-defined type.
if (type == tempIntersectionType) {
potentialIntersectionType = type;
isAlreadyExistingType = true;
} else if (potentialIntersectionType != tempIntersectionType) {
potentialIntersectionType = tempIntersectionType;
isAlreadyExistingType = false;
}
constituentBTypes.add(type);
}
}
if (!validIntersection) {
dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_INTERSECTION_TYPE, intersectionTypeNode);
return symTable.semanticError;
}
// A novel error intersection needs its own type (and possibly detail record) defined.
if (isErrorIntersection && !isAlreadyExistingType) {
BType detailType = ((BErrorType) potentialIntersectionType).detailType;
boolean existingErrorDetailType = false;
if (detailType.tsymbol != null) {
BSymbol detailTypeSymbol = lookupSymbolInMainSpace(env, detailType.tsymbol.name);
if (detailTypeSymbol != symTable.notFoundSymbol) {
existingErrorDetailType = true;
}
}
return defineIntersectionType((BErrorType) potentialIntersectionType, intersectionTypeNode.pos,
constituentBTypes, existingErrorDetailType, env);
}
// Readonly intersection: already-immutable types (except readonly objects) pass through.
if (types.isInherentlyImmutableType(potentialIntersectionType) ||
(Symbols.isFlagOn(potentialIntersectionType.flags, Flags.READONLY) &&
!types.isSubTypeOfBaseType(potentialIntersectionType, TypeTags.OBJECT))) {
return potentialIntersectionType;
}
if (!types.isSelectivelyImmutableType(potentialIntersectionType, false)) {
// Distinguish an invalid readonly-object intersection from a generally invalid one.
if (types.isSelectivelyImmutableType(potentialIntersectionType)) {
dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_OBJECT_INTERSECTION_TYPE);
} else {
dlog.error(intersectionTypeNode.pos, DiagnosticErrorCode.INVALID_READONLY_INTERSECTION_TYPE,
potentialIntersectionType);
}
return symTable.semanticError;
}
// Recover modifier flags from the originating AST node, when it is of a kind that has them.
BLangType typeNode = typeBLangTypeMap.get(potentialIntersectionType);
Set<Flag> flagSet;
if (typeNode == null) {
flagSet = new HashSet<>();
} else if (typeNode.getKind() == NodeKind.OBJECT_TYPE) {
flagSet = ((BLangObjectTypeNode) typeNode).flagSet;
} else if (typeNode.getKind() == NodeKind.USER_DEFINED_TYPE) {
flagSet = ((BLangUserDefinedType) typeNode).flagSet;
} else {
flagSet = new HashSet<>();
}
return ImmutableTypeCloner.getImmutableIntersectionType(intersectionTypeNode.pos, types,
(SelectivelyImmutableReferenceType)
potentialIntersectionType,
env, symTable, anonymousModelHelper, names, flagSet);
}
/**
 * Defines a new error intersection type for the given effective error type.
 * If the error's detail type is a record that has not been defined yet, the detail
 * record is defined first in the package environment.
 *
 * @param intersectionErrorType      the effective error type of the intersection
 * @param pos                        source location for any newly created definitions
 * @param constituentBTypes          the resolved constituent types of the intersection
 * @param isAlreadyDefinedDetailType whether the detail record already has a definition
 * @param env                        the current symbol environment
 * @return the newly created intersection type
 */
private BIntersectionType defineIntersectionType(BErrorType intersectionErrorType,
Location pos,
LinkedHashSet<BType> constituentBTypes,
boolean isAlreadyDefinedDetailType, SymbolEnv env) {
    // Capture ownership info before any new definitions are created.
    BSymbol typeOwner = intersectionErrorType.tsymbol.owner;
    PackageID packageId = intersectionErrorType.tsymbol.pkgID;
    SymbolEnv packageEnv = symTable.pkgEnvMap.get(env.enclPkg.symbol);
    boolean needsDetailRecord = !isAlreadyDefinedDetailType
            && intersectionErrorType.detailType.tag == TypeTags.RECORD;
    if (needsDetailRecord) {
        defineErrorDetailRecord((BRecordType) intersectionErrorType.detailType, pos, packageEnv);
    }
    return defineErrorIntersectionType(intersectionErrorType, constituentBTypes, packageId, typeOwner);
}
/**
 * Creates a type definition for an error's detail record: exposes each field through
 * the record symbol's scope, builds the corresponding AST node with an init function,
 * and registers the type definition in the given environment.
 *
 * @param detailRecord the detail record type to define
 * @param pos          source location to attach to the generated definition
 * @param env          the environment in which to define the record
 * @return the generated type definition
 */
private BLangTypeDefinition defineErrorDetailRecord(BRecordType detailRecord, Location pos, SymbolEnv env) {
    BRecordTypeSymbol recordSymbol = (BRecordTypeSymbol) detailRecord.tsymbol;
    // Make every detail field resolvable via the record symbol's scope.
    detailRecord.fields.values()
            .forEach(field -> recordSymbol.scope.define(field.symbol.name, field.symbol));
    BLangRecordTypeNode recordTypeNode =
            TypeDefBuilderHelper.createRecordTypeNode(new ArrayList<>(), detailRecord, pos);
    TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
    BLangTypeDefinition typeDefinition =
            TypeDefBuilderHelper.addTypeDefinition(detailRecord, recordSymbol, recordTypeNode, env);
    typeDefinition.pos = pos;
    return typeDefinition;
}
/**
 * Creates a {@link BIntersectionType} wrapping the given effective type, with a fresh
 * public intersection type symbol, and links the symbol back to the type.
 *
 * @param effectiveType     the effective (resolved) type of the intersection
 * @param constituentBTypes the constituent types making up the intersection
 * @param pkgId             the package the new type symbol belongs to
 * @param owner             the owner of the new type symbol
 * @return the newly created intersection type
 */
private BIntersectionType defineErrorIntersectionType(IntersectableReferenceType effectiveType,
LinkedHashSet<BType> constituentBTypes, PackageID pkgId,
BSymbol owner) {
// Symbol is created with a null type first; it is wired up after the type exists.
BTypeSymbol intersectionTypeSymbol = Symbols.createTypeSymbol(SymTag.INTERSECTION_TYPE,
Flags.asMask(EnumSet.of(Flag.PUBLIC)),
Names.EMPTY, pkgId, null, owner,
symTable.builtinPos, VIRTUAL);
BIntersectionType intersectionType = new BIntersectionType(intersectionTypeSymbol, constituentBTypes,
effectiveType);
intersectionTypeSymbol.type = intersectionType;
return intersectionType;
}
/**
 * Computes the intersection of two types. {@code readonly} acts as an identity element
 * here: intersecting with it simply yields the other operand; otherwise the general
 * type-intersection computation is delegated to {@code types}.
 *
 * @param intersectionContext diagnostic context for the intersection computation
 * @param lhsType             left operand type
 * @param rhsType             right operand type
 * @param env                 the current symbol environment
 * @return the intersection type (may be a semantic-error sentinel from the delegate)
 */
private BType getPotentialIntersection(Types.IntersectionContext intersectionContext,
BType lhsType, BType rhsType, SymbolEnv env) {
    BType readonly = symTable.readonlyType;
    if (lhsType == readonly) {
        return rhsType;
    }
    return rhsType == readonly
            ? lhsType
            : types.getTypeIntersection(intersectionContext, lhsType, rhsType, env);
}
/**
 * Validates parameters whose default value is an inferred typedesc expression:
 * at most one such parameter is allowed, and when present the return type must
 * reference that parameter's name (otherwise the typedesc cannot be inferred).
 * Emits a diagnostic and returns {@code false} on violation.
 *
 * @param pos        location used for the multiple-infer-params diagnostic
 * @param parameters the function's parameters
 * @param retType    the function's return type; may be {@code null}
 * @return {@code true} when the parameters are valid
 */
boolean validateInferTypedescParams(Location pos, List<? extends BLangVariable> parameters, BType retType) {
    int inferTypedescParamCount = 0;
    BVarSymbol paramWithInferredTypedescDefault = null;
    Location inferDefaultLocation = null;
    for (BLangVariable parameter : parameters) {
        BType type = parameter.getBType();
        BLangExpression expr = parameter.expr;
        if (type != null && type.tag == TypeTags.TYPEDESC && expr != null &&
                expr.getKind() == NodeKind.INFER_TYPEDESC_EXPR) {
            paramWithInferredTypedescDefault = parameter.symbol;
            inferDefaultLocation = expr.pos;
            inferTypedescParamCount++;
        }
    }
    if (inferTypedescParamCount > 1) {
        dlog.error(pos, DiagnosticErrorCode.MULTIPLE_INFER_TYPEDESC_PARAMS);
        return false;
    }
    if (paramWithInferredTypedescDefault == null) {
        // Nothing to validate.
        return true;
    }
    // Valid only when the return type actually references the inferable parameter's name.
    if (retType != null
            && unifier.refersInferableParamName(paramWithInferredTypedescDefault.name.value, retType)) {
        return true;
    }
    // Covers both "no return type" and "return type does not reference the param"
    // (the original emitted the same diagnostic in both branches).
    dlog.error(inferDefaultLocation,
            DiagnosticErrorCode.CANNOT_USE_INFERRED_TYPEDESC_DEFAULT_WITH_UNREFERENCED_PARAM);
    return false;
}
/**
 * Checks whether the given symbol is a module-level variable, i.e. a variable
 * whose owner is the package itself.
 *
 * @param symbol the symbol to test
 * @return {@code true} for module-level variables
 */
private boolean isModuleLevelVar(BSymbol symbol) {
    if (symbol.getKind() != SymbolKind.VARIABLE) {
        return false;
    }
    return symbol.owner.getKind() == SymbolKind.PACKAGE;
}
/**
 * Collects the configurable variable symbols of the given package and of all its
 * directly imported packages.
 *
 * @param packageSymbol the root package to scan
 * @return the set of configurable variable symbols found
 */
public Set<BVarSymbol> getConfigVarSymbolsIncludingImportedModules(BPackageSymbol packageSymbol) {
    Set<BVarSymbol> configVars = new HashSet<>();
    populateConfigurableVars(packageSymbol, configVars);
    // Iterating an empty import list is a no-op, so no emptiness guard is needed.
    for (BPackageSymbol importSymbol : packageSymbol.imports) {
        populateConfigurableVars(importSymbol, configVars);
    }
    return configVars;
}
/**
 * Adds every configurable variable symbol found in the package's scope to the
 * given accumulator set.
 *
 * @param pkgSymbol  the package whose scope is scanned
 * @param configVars accumulator for configurable variable symbols (mutated in place)
 */
private void populateConfigurableVars(BPackageSymbol pkgSymbol, Set<BVarSymbol> configVars) {
    pkgSymbol.scope.entries.values().forEach(entry -> {
        BSymbol symbol = entry.symbol;
        if (symbol == null || symbol.tag != SymTag.VARIABLE) {
            return;
        }
        if (Symbols.isFlagOn(symbol.flags, Flags.CONFIGURABLE)) {
            configVars.add((BVarSymbol) symbol);
        }
    });
}
/**
 * Lightweight holder pairing a parameterized type's value type with the index of
 * the parameter it was derived from. An index of {@code -1} means "not set".
 */
private static class ParameterizedTypeInfo {
// The value type of the parameterized type.
BType paramValueType;
// Index of the originating parameter; -1 when unknown/not applicable.
int index = -1;
private ParameterizedTypeInfo(BType paramValueType) {
this.paramValueType = paramValueType;
}
private ParameterizedTypeInfo(BType paramValueType, int index) {
this.paramValueType = paramValueType;
this.index = index;
}
}
} |
Why is Vert.x mentioned here? | AdditionalBeanBuildItem createCDIEventConsumer() {
return AdditionalBeanBuildItem.builder()
.addBeanClass(KAFKA_EVENT_CONSUMER_CLASS_NAME)
.setUnremovable().build();
} | AdditionalBeanBuildItem createCDIEventConsumer() {
return AdditionalBeanBuildItem.builder()
.addBeanClass(KAFKA_EVENT_CONSUMER_CLASS_NAME)
.setUnremovable().build();
} | class KafkaSupportEnabled implements BooleanSupplier {
MicrometerConfig mConfig;
public boolean getAsBoolean() {
return KAFKA_CONSUMER_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.kafka);
}
} | class KafkaSupportEnabled implements BooleanSupplier {
MicrometerConfig mConfig;
public boolean getAsBoolean() {
return KAFKA_CONSUMER_CLASS_CLASS != null && mConfig.checkBinderEnabledWithDefault(mConfig.binder.kafka);
}
} | |
Hm, this means that we only support field injection. In fact, we can simply iterate over all injection points: ```java for (InjectionPointInfo injectionPoint : validationContext.get(Key.INJECTION_POINTS)) { // TODO } ``` I'll try to update this PR in a moment... | public void validate(ValidationContext validationContext) {
AnnotationStore annotationStore = validationContext.get(Key.ANNOTATION_STORE);
for (BeanInfo bean : validationContext.get(Key.BEANS)) {
if (bean.isClassBean()) {
ClassInfo ci = bean.getTarget()
.orElseThrow(() -> new IllegalStateException("Target expected")).asClass();
for (MethodInfo method : ci.methods()) {
if (annotationStore.hasAnnotation(method, NAME_INCOMING)
|| annotationStore.hasAnnotation(method, NAME_OUTGOING)) {
mediatorMethods.produce(new MediatorBuildItem(bean, method));
LOGGER.debugf("Found mediator business method %s declared on %s", method, bean);
}
}
for (FieldInfo field : ci.fields()) {
if (annotationStore.hasAnnotation(field, NAME_STREAM)) {
if (field.type().name().equals(NAME_EMITTER)) {
String name = annotationStore.getAnnotation(field, NAME_STREAM).value().asString();
LOGGER.debugf("Emitter field '%s' detected, stream name: '%s'", field.name(), name);
emitterFields.produce(new EmitterBuildItem(name));
}
}
}
}
}
} | for (FieldInfo field : ci.fields()) { | public void validate(ValidationContext validationContext) {
AnnotationStore annotationStore = validationContext.get(Key.ANNOTATION_STORE);
for (BeanInfo bean : validationContext.get(Key.BEANS)) {
if (bean.isClassBean()) {
for (MethodInfo method : bean.getTarget().get().asClass().methods()) {
if (annotationStore.hasAnnotation(method, NAME_INCOMING)
|| annotationStore.hasAnnotation(method, NAME_OUTGOING)) {
mediatorMethods.produce(new MediatorBuildItem(bean, method));
LOGGER.debugf("Found mediator business method %s declared on %s", method, bean);
}
}
}
}
for (InjectionPointInfo injectionPoint : validationContext.get(Key.INJECTION_POINTS)) {
if (injectionPoint.getRequiredType().name().equals(NAME_EMITTER)) {
AnnotationInstance stream = injectionPoint.getRequiredQualifier(NAME_STREAM);
if (stream != null) {
String name = stream.value().asString();
LOGGER.debugf("Emitter injection point '%s' detected, stream name: '%s'",
injectionPoint.getTargetInfo(), name);
emitters.produce(new EmitterBuildItem(name));
}
}
}
} | class SmallRyeReactiveMessagingProcessor {
private static final Logger LOGGER = Logger.getLogger("io.quarkus.smallrye-reactive-messaging.deployment.processor");
static final DotName NAME_INCOMING = DotName.createSimple(Incoming.class.getName());
static final DotName NAME_OUTGOING = DotName.createSimple(Outgoing.class.getName());
static final DotName NAME_STREAM = DotName.createSimple(Stream.class.getName());
static final DotName NAME_EMITTER = DotName.createSimple(Emitter.class.getName());
@BuildStep
AdditionalBeanBuildItem beans() {
return new AdditionalBeanBuildItem(SmallRyeReactiveMessagingLifecycle.class);
}
@BuildStep
BeanDeploymentValidatorBuildItem beanDeploymentValidator(BuildProducer<MediatorBuildItem> mediatorMethods,
BuildProducer<EmitterBuildItem> emitterFields,
BuildProducer<FeatureBuildItem> feature) {
feature.produce(new FeatureBuildItem(FeatureBuildItem.SMALLRYE_REACTIVE_MESSAGING));
return new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
@Override
});
}
@BuildStep
public List<UnremovableBeanBuildItem> removalExclusions() {
return Arrays.asList(new UnremovableBeanBuildItem(new BeanClassAnnotationExclusion(NAME_INCOMING)),
new UnremovableBeanBuildItem(new BeanClassAnnotationExclusion(NAME_OUTGOING)));
}
@BuildStep
@Record(STATIC_INIT)
public void build(SmallRyeReactiveMessagingTemplate template, BeanContainerBuildItem beanContainer,
List<MediatorBuildItem> mediatorMethods,
List<EmitterBuildItem> emitterFields,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
/*
* IMPLEMENTATION NOTE/FUTURE IMPROVEMENTS: It would be possible to replace the reflection completely and use Jandex and
* generated
* io.smallrye.reactive.messaging.Invoker instead. However, we would have to mirror the logic from
* io.smallrye.reactive.messaging.MediatorConfiguration
* too.
*/
Map<String, String> beanClassToBeanId = new HashMap<>();
for (MediatorBuildItem mediatorMethod : mediatorMethods) {
String beanClass = mediatorMethod.getBean()
.getBeanClass()
.toString();
if (!beanClassToBeanId.containsKey(beanClass)) {
reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, beanClass));
beanClassToBeanId.put(beanClass, mediatorMethod.getBean()
.getIdentifier());
}
}
template.registerMediators(beanClassToBeanId, beanContainer.getValue(),
emitterFields.stream().map(EmitterBuildItem::getName).collect(Collectors.toList()));
}
} | class SmallRyeReactiveMessagingProcessor {
private static final Logger LOGGER = Logger.getLogger("io.quarkus.smallrye-reactive-messaging.deployment.processor");
static final DotName NAME_INCOMING = DotName.createSimple(Incoming.class.getName());
static final DotName NAME_OUTGOING = DotName.createSimple(Outgoing.class.getName());
static final DotName NAME_STREAM = DotName.createSimple(Stream.class.getName());
static final DotName NAME_EMITTER = DotName.createSimple(Emitter.class.getName());
@BuildStep
AdditionalBeanBuildItem beans() {
return new AdditionalBeanBuildItem(SmallRyeReactiveMessagingLifecycle.class);
}
@BuildStep
BeanDeploymentValidatorBuildItem beanDeploymentValidator(BuildProducer<MediatorBuildItem> mediatorMethods,
BuildProducer<EmitterBuildItem> emitters,
BuildProducer<FeatureBuildItem> feature) {
feature.produce(new FeatureBuildItem(FeatureBuildItem.SMALLRYE_REACTIVE_MESSAGING));
return new BeanDeploymentValidatorBuildItem(new BeanDeploymentValidator() {
@Override
});
}
@BuildStep
public List<UnremovableBeanBuildItem> removalExclusions() {
return Arrays.asList(new UnremovableBeanBuildItem(new BeanClassAnnotationExclusion(NAME_INCOMING)),
new UnremovableBeanBuildItem(new BeanClassAnnotationExclusion(NAME_OUTGOING)));
}
@BuildStep
@Record(STATIC_INIT)
public void build(SmallRyeReactiveMessagingTemplate template, BeanContainerBuildItem beanContainer,
List<MediatorBuildItem> mediatorMethods,
List<EmitterBuildItem> emitterFields,
BuildProducer<ReflectiveClassBuildItem> reflectiveClass) {
/*
* IMPLEMENTATION NOTE/FUTURE IMPROVEMENTS: It would be possible to replace the reflection completely and use Jandex and
* generated
* io.smallrye.reactive.messaging.Invoker instead. However, we would have to mirror the logic from
* io.smallrye.reactive.messaging.MediatorConfiguration
* too.
*/
Map<String, String> beanClassToBeanId = new HashMap<>();
for (MediatorBuildItem mediatorMethod : mediatorMethods) {
String beanClass = mediatorMethod.getBean()
.getBeanClass()
.toString();
if (!beanClassToBeanId.containsKey(beanClass)) {
reflectiveClass.produce(new ReflectiveClassBuildItem(true, false, beanClass));
beanClassToBeanId.put(beanClass, mediatorMethod.getBean()
.getIdentifier());
}
}
template.registerMediators(beanClassToBeanId, beanContainer.getValue(),
emitterFields.stream().map(EmitterBuildItem::getName).collect(Collectors.toList()));
}
} |
I've now replaced this with a `RestAssured.get("/")` call to trigger the application restart (I am not aware of any better way to do this) and instead verify the response to `GET /flyway/current-version` inside the Awaitility assertion. | public void testRepairUsingDevMode() {
String version = RestAssured.get("/flyway/current-version").then().statusCode(200).extract().asString();
assertEquals("1.0.0", version);
config.clearLogRecords();
config.modifyResourceFile("db/migration/V1.0.0__Quarkus.sql", s -> s + "\nalter table FRUIT add column color VARCHAR;");
config.modifyResourceFile("application.properties", s -> s + "\nquarkus.flyway.validate-on-migrate=true");
RestAssured.get("/flyway/current-version").thenReturn();
await().atMost(30, TimeUnit.SECONDS).untilAsserted(() -> {
assertThat(config.getLogRecords()).anySatisfy(r -> {
assertThat(r.getMessage()).contains("Failed to start application");
assertThat(r.getThrown().getMessage()).contains("Migration checksum mismatch for migration version 1.0.0");
});
});
config.clearLogRecords();
config.modifyResourceFile("application.properties", s -> s + "\nquarkus.flyway.repair-at-start=true");
RestAssured.get("/flyway/current-version").thenReturn();
await().atMost(30, TimeUnit.SECONDS).untilAsserted(() -> {
assertThat(config.getLogRecords()).anySatisfy(
r -> assertThat(r.getMessage()).contains("Successfully repaired schema history table"));
});
} | RestAssured.get("/flyway/current-version").thenReturn(); | public void testRepairUsingDevMode() {
assertThat(RestAssured.get("/flyway/current-version").then().statusCode(200).extract().asString()).isEqualTo("1.0.0");
config.clearLogRecords();
config.modifyResourceFile("db/migration/V1.0.0__Quarkus.sql", s -> s + "\nNONSENSE STATEMENT CHANGING CHECKSUM;");
config.modifyResourceFile("application.properties", s -> s + "\nquarkus.flyway.validate-on-migrate=true");
RestAssured.get("/");
await().atMost(30, TimeUnit.SECONDS).untilAsserted(() -> {
assertThat(config.getLogRecords()).anySatisfy(r -> {
assertThat(r.getMessage()).contains("Failed to start application");
assertThat(r.getThrown().getMessage()).contains("Migration checksum mismatch for migration version 1.0.0");
});
RestAssured.get("/flyway/current-version").then().statusCode(500);
});
config.clearLogRecords();
config.modifyResourceFile("application.properties", s -> s + "\nquarkus.flyway.repair-at-start=true");
RestAssured.get("/");
await().atMost(30, TimeUnit.SECONDS).untilAsserted(() -> {
assertThat(config.getLogRecords()).anySatisfy(
r -> assertThat(r.getMessage()).contains("Successfully repaired schema history table"));
assertThat(RestAssured.get("/flyway/current-version").then().statusCode(200).extract().asString())
.isEqualTo("1.0.0");
});
} | class FlywayExtensionRepairAtStartTest {
@RegisterExtension
static final QuarkusDevModeTest config = new QuarkusDevModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClass(FlywayResource.class)
.addAsResource("db/migration/V1.0.0__Quarkus.sql")
.addAsResource("migrate-at-start-config.properties", "application.properties"))
.setLogRecordPredicate(r -> true)
.setAllowFailedStart(true);
@Test
@DisplayName("Repair at start works correctly")
@Path("flyway")
public static class FlywayResource {
@Inject
Flyway flyway;
@Path("current-version")
@GET
public String currentVersion() {
return flyway.info().current().getVersion().toString();
}
}
} | class FlywayExtensionRepairAtStartTest {
@RegisterExtension
static final QuarkusDevModeTest config = new QuarkusDevModeTest()
.setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class)
.addClass(FlywayResource.class)
.addAsResource("db/migration/V1.0.0__Quarkus.sql")
.addAsResource("migrate-at-start-config.properties", "application.properties"))
.setLogRecordPredicate(r -> true)
.setAllowFailedStart(true);
@Test
@DisplayName("Repair at start works correctly")
@Path("flyway")
public static class FlywayResource {
@Inject
Flyway flyway;
@Path("current-version")
@GET
public String currentVersion() {
return flyway.info().current().getVersion().toString();
}
}
} |
It says in the javadoc for ResourceId.java that a resource id represents a file-like resource, so AzfsResourceId does not support resource id's without a container. | public boolean isContainer() {
return blob == null;
} | return blob == null; | public boolean isContainer() {
return blob == null;
} | class AzfsResourceId implements ResourceId {
static final String SCHEME = "azfs";
private static final Pattern AZFS_URI =
Pattern.compile("(?<SCHEME>[^:]+):
/** Matches a glob containing a wildcard, capturing the portion before the first wildcard. */
private static final Pattern GLOB_PREFIX = Pattern.compile("(?<PREFIX>[^\\[*?]*)[\\[*?].*");
private final String account;
private final String container;
private final String blob;
private final Long size;
private final Date lastModified;
private AzfsResourceId(
String account,
String container,
@Nullable String blob,
@Nullable Long size,
@Nullable Date lastModified) {
checkArgument(!Strings.isNullOrEmpty(container), "container");
checkArgument(!container.contains("/"), "container must not contain '/': [%s]", container);
this.account = account;
this.container = container;
if (blob == null || blob.isEmpty()) {
this.blob = null;
} else {
this.blob = blob;
}
this.size = size;
this.lastModified = lastModified;
}
static AzfsResourceId fromComponents(String account, String container, String blob) {
return new AzfsResourceId(account, container, blob, null, null);
}
static AzfsResourceId fromComponents(String account, String container) {
return new AzfsResourceId(account, container, null, null, null);
}
static AzfsResourceId fromUri(String uri) {
Matcher m = AZFS_URI.matcher(uri);
checkArgument(m.matches(), "Invalid AZFS URI: [%s]", uri);
checkArgument(m.group("SCHEME").equalsIgnoreCase(SCHEME), "Invalid AZFS URI scheme: [%s]", uri);
String account = m.group("ACCOUNT");
String container = m.group("CONTAINER");
String blob = m.group("BLOB");
return fromComponents(account, container, blob);
}
public String getAccount() {
return account;
}
public String getContainer() {
return container;
}
public String getBlob() {
return blob;
}
@Override
public String getScheme() {
return SCHEME;
}
Optional<Long> getSize() {
return Optional.fromNullable(size);
}
AzfsResourceId withSize(long size) {
return new AzfsResourceId(account, container, blob, size, lastModified);
}
Optional<Date> getLastModified() {
return Optional.fromNullable(lastModified);
}
AzfsResourceId withLastModified(Date lastModified) {
return new AzfsResourceId(account, container, blob, size, lastModified);
}
@Override
public boolean isDirectory() {
return (blob == null) || (blob.endsWith("/"));
}
boolean isWildcard() {
if (blob == null) {
return false;
}
return GLOB_PREFIX.matcher(blob).matches();
}
String getBlobNonWildcardPrefix() {
Matcher m = GLOB_PREFIX.matcher(getBlob());
checkArgument(
m.matches(), String.format("Glob expression: [%s] is not expandable.", getBlob()));
return m.group("PREFIX");
}
@Override
public ResourceId getCurrentDirectory() {
if (isDirectory()) {
return this;
}
if (blob.lastIndexOf('/') == -1) {
return fromComponents(account, container);
}
return fromComponents(account, container, blob.substring(0, blob.lastIndexOf('/') + 1));
}
@Nullable
@Override
public String getFilename() {
if (blob == null) {
return null;
}
if (!isDirectory()) {
return blob.substring(blob.lastIndexOf('/') + 1);
}
String blobWithoutTrailingSlash = blob.substring(0, blob.length() - 1);
return blobWithoutTrailingSlash.substring(blobWithoutTrailingSlash.lastIndexOf('/') + 1);
}
@Override
public String toString() {
if (blob != null) {
return String.format("%s:
}
return String.format("%s:
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AzfsResourceId)) {
return false;
}
return account.equals(((AzfsResourceId) obj).account)
&& container.equals(((AzfsResourceId) obj).container)
&& Objects.equals(blob, ((AzfsResourceId) obj).blob);
}
@Override
public int hashCode() {
return Objects.hash(account, container, blob);
}
@Override
public ResourceId resolve(String other, ResolveOptions resolveOptions) {
checkState(isDirectory(), "Expected this resource to be a directory, but was [%s]", toString());
if (resolveOptions == ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY) {
if ("..".equals(other)) {
if ("/".equals(blob)) {
return this;
}
int parentStopsAt = blob.substring(0, blob.length() - 1).lastIndexOf('/');
return fromComponents(account, container, blob.substring(0, parentStopsAt + 1));
}
if ("".equals(other)) {
return this;
}
if (!other.endsWith("/")) {
other += "/";
}
if (AZFS_URI.matcher(other).matches()) {
return fromUri(other);
}
if (blob == null) {
return fromComponents(account, container, other);
}
return fromComponents(account, container, blob + other);
}
if (resolveOptions == ResolveOptions.StandardResolveOptions.RESOLVE_FILE) {
checkArgument(
!other.endsWith("/"), "Cannot resolve a file with a directory path: [%s]", other);
checkArgument(!"..".equals(other), "Cannot resolve parent as file: [%s]", other);
if (AZFS_URI.matcher(other).matches()) {
return fromUri(other);
}
if (blob == null) {
return fromComponents(account, container, other);
}
return fromComponents(account, container, blob + other);
}
throw new UnsupportedOperationException(
String.format("Unexpected StandardResolveOptions [%s]", resolveOptions));
}
} | class AzfsResourceId implements ResourceId {
static final String SCHEME = "azfs";
private static final Pattern AZFS_URI =
Pattern.compile("(?<SCHEME>[^:]+):
/** Matches a glob containing a wildcard, capturing the portion before the first wildcard. */
private static final Pattern GLOB_PREFIX = Pattern.compile("(?<PREFIX>[^\\[*?]*)[\\[*?].*");
private final String account;
private final String container;
private final String blob;
private final Long size;
private final Date lastModified;
private AzfsResourceId(
String account,
String container,
@Nullable String blob,
@Nullable Long size,
@Nullable Date lastModified) {
checkArgument(!Strings.isNullOrEmpty(container), "container");
checkArgument(!container.contains("/"), "container must not contain '/': [%s]", container);
this.account = account;
this.container = container;
if (blob == null || blob.isEmpty()) {
this.blob = null;
} else {
this.blob = blob;
}
this.size = size;
this.lastModified = lastModified;
}
static AzfsResourceId fromComponents(String account, String container, String blob) {
return new AzfsResourceId(account, container, blob, null, null);
}
static AzfsResourceId fromComponents(String account, String container) {
return new AzfsResourceId(account, container, null, null, null);
}
static AzfsResourceId fromUri(String uri) {
Matcher m = AZFS_URI.matcher(uri);
checkArgument(m.matches(), "Invalid AZFS URI: [%s]", uri);
checkArgument(m.group("SCHEME").equalsIgnoreCase(SCHEME), "Invalid AZFS URI scheme: [%s]", uri);
String account = m.group("ACCOUNT");
String container = m.group("CONTAINER");
String blob = m.group("BLOB");
return fromComponents(account, container, blob);
}
public String getAccount() {
return account;
}
public String getContainer() {
return container;
}
public String getBlob() {
return blob;
}
@Override
public String getScheme() {
return SCHEME;
}
Long getSize() {
return size;
}
AzfsResourceId withSize(long size) {
return new AzfsResourceId(account, container, blob, size, lastModified);
}
Optional<Date> getLastModified() {
return Optional.fromNullable(lastModified);
}
AzfsResourceId withLastModified(Date lastModified) {
return new AzfsResourceId(account, container, blob, size, lastModified);
}
@Override
public boolean isDirectory() {
return (blob == null) || (blob.endsWith("/"));
}
boolean isWildcard() {
if (blob == null) {
return false;
}
return GLOB_PREFIX.matcher(blob).matches();
}
String getBlobNonWildcardPrefix() {
Matcher m = GLOB_PREFIX.matcher(getBlob());
checkArgument(
m.matches(), String.format("Glob expression: [%s] is not expandable.", getBlob()));
return m.group("PREFIX");
}
@Override
public ResourceId getCurrentDirectory() {
if (isDirectory()) {
return this;
}
if (blob.lastIndexOf('/') == -1) {
return fromComponents(account, container);
}
return fromComponents(account, container, blob.substring(0, blob.lastIndexOf('/') + 1));
}
@Nullable
@Override
public String getFilename() {
if (blob == null) {
return null;
}
if (!isDirectory()) {
return blob.substring(blob.lastIndexOf('/') + 1);
}
String blobWithoutTrailingSlash = blob.substring(0, blob.length() - 1);
return blobWithoutTrailingSlash.substring(blobWithoutTrailingSlash.lastIndexOf('/') + 1);
}
@Override
public String toString() {
if (blob != null) {
return String.format("%s:
}
return String.format("%s:
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AzfsResourceId)) {
return false;
}
return account.equals(((AzfsResourceId) obj).account)
&& container.equals(((AzfsResourceId) obj).container)
&& Objects.equals(blob, ((AzfsResourceId) obj).blob);
}
// Hashes exactly the fields equals() compares (account, container, blob); size and
// lastModified are excluded from both, keeping the equals/hashCode contract intact.
@Override
public int hashCode() {
return Objects.hash(account, container, blob);
}
// Resolves a child path against this id, which must already be a directory.
// RESOLVE_DIRECTORY and RESOLVE_FILE share the same fall-through order: full azfs
// URI wins, then container-level append, then blob-relative append. The branch
// ordering is significant ("..", "", slash-normalization happen before URI match).
@Override
public ResourceId resolve(String other, ResolveOptions resolveOptions) {
checkState(isDirectory(), "Expected this resource to be a directory, but was [%s]", toString());
if (resolveOptions == ResolveOptions.StandardResolveOptions.RESOLVE_DIRECTORY) {
// ".." walks one level up; the container root ("/") is its own parent.
if ("..".equals(other)) {
if ("/".equals(blob)) {
return this;
}
int parentStopsAt = blob.substring(0, blob.length() - 1).lastIndexOf('/');
return fromComponents(account, container, blob.substring(0, parentStopsAt + 1));
}
if ("".equals(other)) {
return this;
}
// Directories always carry a trailing slash before further resolution.
if (!other.endsWith("/")) {
other += "/";
}
// A fully-qualified azfs URI replaces this id entirely.
if (AZFS_URI.matcher(other).matches()) {
return fromUri(other);
}
if (blob == null) {
return fromComponents(account, container, other);
}
return fromComponents(account, container, blob + other);
}
if (resolveOptions == ResolveOptions.StandardResolveOptions.RESOLVE_FILE) {
checkArgument(
!other.endsWith("/"), "Cannot resolve a file with a directory path: [%s]", other);
checkArgument(!"..".equals(other), "Cannot resolve parent as file: [%s]", other);
if (AZFS_URI.matcher(other).matches()) {
return fromUri(other);
}
if (blob == null) {
return fromComponents(account, container, other);
}
return fromComponents(account, container, blob + other);
}
// Defensive: only the two standard options above are supported.
throw new UnsupportedOperationException(
String.format("Unexpected StandardResolveOptions [%s]", resolveOptions));
}
} |
Is it possible to find a more specific exception type? RuntimeException is generally viewed only as an abstract base class. Maybe IllegalStateException, or IllegalArgumentException (if we take it that defaultSubscription only works for a single subscription). | public Azure withDefaultSubscription() {
// Body of withDefaultSubscription(): ensures the profile carries a subscription id,
// defaulting to the tenant's only subscription, then builds the Azure entry point.
if (profile.subscriptionId() == null) {
// Materialize the paged subscription listing into a plain list.
List<Subscription> subscriptions = new ArrayList<>();
this.subscriptions().list().forEach(subscription -> {
subscriptions.add(subscription);
});
if (subscriptions.size() == 0) {
// NOTE(review): RuntimeException is very generic here; IllegalStateException would
// describe "no subscription exists yet" more precisely. Also, the string literal
// below is truncated in this extract (URL after 'https:' cut by the exporter).
throw logger.logExceptionAsError(
new RuntimeException("Please create a subscription before you start resource management. "
+ "To learn more, see: https:
} else if (subscriptions.size() > 1) {
// Ambiguous default: enumerate every candidate so the caller can pick explicitly.
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("More than one subscription found in your tenant. "
+ "Please specify which one below is desired for resource management.");
subscriptions.forEach(subscription -> {
stringBuilder.append("\n" + subscription.displayName() + " : " + subscription.subscriptionId());
});
throw logger.logExceptionAsError(new RuntimeException(stringBuilder.toString()));
} else {
// Exactly one subscription: adopt it as the default.
profile.withSubscriptionId(subscriptions.get(0).subscriptionId());
}
}
return new Azure(httpPipeline, profile, this);
}
// Revised body: the list/branch logic was extracted into Utils.defaultSubscription,
// which is presumably where the "zero or many subscriptions" errors now originate —
// TODO confirm against the Utils implementation.
if (profile.subscriptionId() == null) {
profile.withSubscriptionId(Utils.defaultSubscription(this.subscriptions().list()));
}
return new Azure(httpPipeline, profile, this);
}
// State for the authenticated-but-unsubscribed stage: an HTTP pipeline plus profile,
// and the two managers (ARM, Graph RBAC) every accessor below delegates to.
private final ClientLogger logger = new ClientLogger(AuthenticatedImpl.class);
private final HttpPipeline httpPipeline;
private final AzureProfile profile;
private final ResourceManager.Authenticated resourceManagerAuthenticated;
private final GraphRbacManager graphRbacManager;
private SdkContext sdkContext;
// Authenticates both backing managers against the same pipeline/profile pair.
private AuthenticatedImpl(HttpPipeline httpPipeline, AzureProfile profile) {
this.resourceManagerAuthenticated = ResourceManager.authenticate(httpPipeline, profile);
this.graphRbacManager = GraphRbacManager.authenticate(httpPipeline, profile);
this.httpPipeline = httpPipeline;
this.profile = profile;
this.sdkContext = new SdkContext();
}
// Thin delegates: this stage exposes tenant-scoped collections by forwarding to the
// appropriate backing manager; no logic of its own.
// -- profile --
@Override
public String tenantId() {
return profile.tenantId();
}
// -- ARM-backed collections --
@Override
public Subscriptions subscriptions() {
return resourceManagerAuthenticated.subscriptions();
}
@Override
public Tenants tenants() {
return resourceManagerAuthenticated.tenants();
}
// -- Graph-RBAC-backed collections --
@Override
public ActiveDirectoryUsers activeDirectoryUsers() {
return graphRbacManager.users();
}
@Override
public ActiveDirectoryGroups activeDirectoryGroups() {
return graphRbacManager.groups();
}
@Override
public ServicePrincipals servicePrincipals() {
return graphRbacManager.servicePrincipals();
}
@Override
public ActiveDirectoryApplications activeDirectoryApplications() {
return graphRbacManager.applications();
}
@Override
public RoleDefinitions roleDefinitions() {
return graphRbacManager.roleDefinitions();
}
@Override
public RoleAssignments roleAssignments() {
return graphRbacManager.roleAssignments();
}
// -- fluent configuration; withSubscription advances to the subscribed Azure stage --
@Override
public Authenticated withSdkContext(SdkContext sdkContext) {
this.sdkContext = sdkContext;
return this;
}
@Override
public SdkContext sdkContext() {
return this.sdkContext;
}
@Override
public Authenticated withTenantId(String tenantId) {
profile.withTenantId(tenantId);
return this;
}
@Override
public Azure withSubscription(String subscriptionId) {
profile.withSubscriptionId(subscriptionId);
return new Azure(httpPipeline, profile, this);
}
@Override
} | class AuthenticatedImpl implements Authenticated {
// State for the authenticated-but-unsubscribed stage: an HTTP pipeline plus profile,
// and the two managers (ARM, Graph RBAC) the accessors delegate to. (This copy of the
// class, unlike the earlier one in this extract, has no ClientLogger field.)
private final HttpPipeline httpPipeline;
private final AzureProfile profile;
private final ResourceManager.Authenticated resourceManagerAuthenticated;
private final GraphRbacManager graphRbacManager;
private SdkContext sdkContext;
// Authenticates both backing managers against the same pipeline/profile pair.
private AuthenticatedImpl(HttpPipeline httpPipeline, AzureProfile profile) {
this.resourceManagerAuthenticated = ResourceManager.authenticate(httpPipeline, profile);
this.graphRbacManager = GraphRbacManager.authenticate(httpPipeline, profile);
this.httpPipeline = httpPipeline;
this.profile = profile;
this.sdkContext = new SdkContext();
}
// Thin delegates: tenant-scoped collections forwarded to the backing managers.
// -- profile --
@Override
public String tenantId() {
return profile.tenantId();
}
// -- ARM-backed collections --
@Override
public Subscriptions subscriptions() {
return resourceManagerAuthenticated.subscriptions();
}
@Override
public Tenants tenants() {
return resourceManagerAuthenticated.tenants();
}
// -- Graph-RBAC-backed collections --
@Override
public ActiveDirectoryUsers activeDirectoryUsers() {
return graphRbacManager.users();
}
@Override
public ActiveDirectoryGroups activeDirectoryGroups() {
return graphRbacManager.groups();
}
@Override
public ServicePrincipals servicePrincipals() {
return graphRbacManager.servicePrincipals();
}
@Override
public ActiveDirectoryApplications activeDirectoryApplications() {
return graphRbacManager.applications();
}
@Override
public RoleDefinitions roleDefinitions() {
return graphRbacManager.roleDefinitions();
}
@Override
public RoleAssignments roleAssignments() {
return graphRbacManager.roleAssignments();
}
// -- fluent configuration; withSubscription advances to the subscribed Azure stage --
@Override
public Authenticated withSdkContext(SdkContext sdkContext) {
this.sdkContext = sdkContext;
return this;
}
@Override
public SdkContext sdkContext() {
return this.sdkContext;
}
@Override
public Authenticated withTenantId(String tenantId) {
profile.withTenantId(tenantId);
return this;
}
@Override
public Azure withSubscription(String subscriptionId) {
profile.withSubscriptionId(subscriptionId);
return new Azure(httpPipeline, profile, this);
}
@Override
} |
Consider adding factory function for `NodeEvent.forBucketSpace` to avoid overloading confusion | private static NodeEvent createNodeEvent(NodeInfo nodeInfo, String description, PerStateParams params) {
// Body of createNodeEvent(): picks between two NodeEvent constructor overloads that
// differ only in the extra bucket-space argument — easy to confuse at the call site
// (the review suggests named factory methods instead; see the revised body below in
// the dataset's method_body_after column).
if (params.bucketSpace.isPresent()) {
return new NodeEvent(nodeInfo, params.bucketSpace.get(), description, NodeEvent.Type.CURRENT, params.currentTime);
} else {
return new NodeEvent(nodeInfo, description, NodeEvent.Type.CURRENT, params.currentTime);
}
}
// Revised body: the constructor overloads are replaced with intention-revealing
// factories — forBucketSpace for a space-scoped event, forBaseline otherwise.
if (params.bucketSpace.isPresent()) {
return NodeEvent.forBucketSpace(nodeInfo, params.bucketSpace.get(), description, NodeEvent.Type.CURRENT, params.currentTime);
} else {
return NodeEvent.forBaseline(nodeInfo, description, NodeEvent.Type.CURRENT, params.currentTime);
}
}
// Immutable parameter bundle threaded through per-state event generation:
// the cluster, an optional bucket space (absent = baseline state), the state
// transition endpoints, and the event timestamp.
final ContentCluster cluster;
final Optional<String> bucketSpace;
final AnnotatedClusterState fromState;
final AnnotatedClusterState toState;
final long currentTime;
// Plain field-by-field constructor; no validation or defensive copies.
PerStateParams(ContentCluster cluster,
Optional<String> bucketSpace,
AnnotatedClusterState fromState,
AnnotatedClusterState toState,
long currentTime) {
this.cluster = cluster;
this.bucketSpace = bucketSpace;
this.fromState = fromState;
this.toState = toState;
this.currentTime = currentTime;
}
} | class PerStateParams {
// Immutable parameter bundle threaded through per-state event generation:
// the cluster, an optional bucket space (absent = baseline state), the state
// transition endpoints, and the event timestamp.
final ContentCluster cluster;
final Optional<String> bucketSpace;
final AnnotatedClusterState fromState;
final AnnotatedClusterState toState;
final long currentTime;
// Plain field-by-field constructor; no validation or defensive copies.
PerStateParams(ContentCluster cluster,
Optional<String> bucketSpace,
AnnotatedClusterState fromState,
AnnotatedClusterState toState,
long currentTime) {
this.cluster = cluster;
this.bucketSpace = bucketSpace;
this.fromState = fromState;
this.toState = toState;
this.currentTime = currentTime;
}
} |
Was this required for all or just one of the tests? | static void beforeAll() throws InterruptedException {
// Body of the @BeforeAll hook: a fixed 3-minute pause before any test, then a 30 s
// default timeout for StepVerifier assertions.
// NOTE(review): the unconditional 180 s sleep looks like a workaround (resource
// warm-up / rate limiting?) — confirm whether it is needed for all tests or one.
TimeUnit.SECONDS.sleep(180);
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
// Revised @BeforeAll body from the dataset's method_body_after column — identical to
// the original: the 3-minute pause was kept, with the 30 s StepVerifier default.
TimeUnit.SECONDS.sleep(180);
StepVerifier.setDefaultTimeout(Duration.ofSeconds(30));
}
private TextAnalyticsAsyncClient client;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
private HttpClient buildAsyncAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
}
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion, boolean isStaticResource) {
return getTextAnalyticsClientBuilder(
buildAsyncAssertingClient(interceptorManager.isPlaybackMode() ? interceptorManager.getPlaybackClient() : httpClient),
serviceVersion,
isStaticResource)
.buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that TextAnalyticsException is thrown for an empty document.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
* Verifies that a bad request exception is returned for input documents with same ids.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageDuplicateIdRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
* Verifies that an invalid document exception is returned for input documents with an empty ID.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageInputEmptyIdRunner(inputs ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
* Verify that with countryHint with empty string will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageEmptyCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
/**
* Verify that with countryHint with "none" will not throw exception.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageNoneCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.assertNext(response -> validateCategorizedEntities(response.stream().collect(Collectors.toList())))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchCategorizedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeCategorizedEntityStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
.assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
@Disabled("https:
public void recognizeEntitiesBatchWithResponseEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(22, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(30, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(14, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(15, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(13, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeEntities(document))
.assertNext(result -> result.forEach(categorizedEntity -> {
assertEquals(9, categorizedEntity.getLength());
assertEquals(126, categorizedEntity.getOffset());
})).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesResolutions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeEntitiesBatchResolutionRunner((inputs, options) ->
StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
.assertNext(recognizeEntitiesResults -> validateEntityResolutions(recognizeEntitiesResults))
.verifyComplete());
}
// NOTE(review): every @MethodSource literal below is truncated (unterminated string) —
// presumably "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm against the
// original source before compiling.
// Verifies PII entity recognition for a single document returns the expected entity list.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizePiiSingleDocumentRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
.verifyComplete());
}
// Verifies an empty document fails with a TextAnalyticsException carrying INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
// Verifies duplicate document IDs in a batch are rejected with an HttpResponseException.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
// Verifies empty document IDs produce an HTTP 400 whose error code is INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
// NOTE(review): @MethodSource literals below are truncated (unterminated strings) —
// presumably "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
// Verifies a per-document error in a batch surfaces as a TextAnalyticsException from getEntities().
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
})).verifyComplete());
}
// Verifies batch PII recognition (document inputs) against the expected result collection.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
// Same as above but with statistics enabled (first validator argument = true).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
.verifyComplete());
}
// Verifies string-input batch PII recognition with an explicit language hint.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
// Verifies string-input batch PII recognition with options (statistics requested).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
.assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
.verifyComplete());
}
// Verifies an oversized batch is rejected with HTTP 400 / INVALID_DOCUMENT_BATCH.
// NOTE(review): @MethodSource literal is truncated (unterminated) — likely
// "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
// --- PII entity offset/length tests -------------------------------------------------------
// Each test feeds PII_ENTITY_OFFSET_INPUT prefixed with a different Unicode construct and
// asserts the returned entity's length (always 11) and offset. Offsets differ per test,
// which is consistent with UTF-16 code-unit counting — TODO confirm the unit against the
// StringIndexType used by the client.
// NOTE(review): all @MethodSource literals here are truncated (unterminated strings) —
// presumably "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(17, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(25, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(9, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(10, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(8, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizePiiEntities(document))
.assertNext(result -> result.forEach(piiEntity -> {
assertEquals(11, piiEntity.getLength());
assertEquals(121, piiEntity.getOffset());
})).verifyComplete(), PII_ENTITY_OFFSET_INPUT
);
}
// --- PII domain/category filter tests -----------------------------------------------------
// NOTE(review): @MethodSource literals are truncated (unterminated) — presumably
// "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
// Verifies single-document PII recognition restricted to the PHI domain.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizePiiDomainFilterRunner((document, options) ->
StepVerifier.create(client.recognizePiiEntities(document, "en", options))
.assertNext(response -> validatePiiEntities(getPiiEntitiesList1ForDomainFilter(),
response.stream().collect(Collectors.toList())))
.verifyComplete());
}
// Verifies string-input batch with PHI domain filter and language hint.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizePiiLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomain.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
.verifyComplete());
}
// Verifies document-input batch with PHI domain filter via WithResponse overload.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchPiiEntitiesRunner((inputs) ->
StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomain.PROTECTED_HEALTH_INFORMATION)))
.assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
.verifyComplete());
}
// Verifies batch PII recognition restricted to specific entity categories.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForCategoriesFilter(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeStringBatchPiiEntitiesForCategoriesFilterRunner(
(inputs, options) ->
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
.assertNext(
resultCollection -> validatePiiEntitiesResultCollection(false,
getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection))
.verifyComplete());
}
// Round-trip test: collects ABA routing / SSN categories from a first call, then feeds
// them back as a categories filter and expects the same filtered result collection.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntityWithCategoriesFilterFromOtherResult(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeStringBatchPiiEntitiesForCategoriesFilterRunner(
(inputs, options) -> {
List<PiiEntityCategory> categories = new ArrayList<>();
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
.assertNext(
resultCollection -> {
resultCollection.forEach(result -> result.getEntities().forEach(piiEntity -> {
final PiiEntityCategory category = piiEntity.getCategory();
if (PiiEntityCategory.ABA_ROUTING_NUMBER == category
|| PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER == category) {
categories.add(category);
}
}));
validatePiiEntitiesResultCollection(false,
getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection);
})
.verifyComplete();
final PiiEntityCategory[] piiEntityCategories = categories.toArray(new PiiEntityCategory[categories.size()]);
options.setCategoriesFilter(piiEntityCategories);
StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
.assertNext(
resultCollection -> validatePiiEntitiesResultCollection(false,
getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection))
.verifyComplete();
});
}
// --- Linked entity recognition tests ------------------------------------------------------
// NOTE(review): @MethodSource literals are truncated (unterminated) — presumably
// "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
// Verifies linked entity recognition for a single document.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeLinkedEntitiesForSingleTextInputRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
.verifyComplete());
}
// Verifies an empty document fails with INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
// Verifies duplicate document IDs are rejected with an HttpResponseException.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
// Verifies empty document IDs produce HTTP 400 / INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
// Verifies document-input batch linked entity recognition.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false,
getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
// Same as above with statistics enabled.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
// Verifies string-input batch linked entity recognition (no hint, no options).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
// Verifies string-input batch with explicit language hint.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
// Verifies string-input batch with options (statistics requested).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
// Verifies an oversized batch fails with HTTP 400 / INVALID_DOCUMENT_BATCH.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
// --- Linked entity offset/length tests ----------------------------------------------------
// Each test uses LINKED_ENTITY_INPUTS.get(1) preceded by a different Unicode construct and
// asserts the match length (always 9) and offset; offsets vary with the construct's UTF-16
// width — TODO confirm the unit against the client's StringIndexType.
// NOTE(review): @MethodSource literals are truncated (unterminated) — presumably
// "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
// --- Key phrase extraction tests ----------------------------------------------------------
// NOTE(review): @MethodSource literals are truncated (unterminated) — presumably
// "com.azure.ai.textanalytics.TestUtils#getTestParameters"; confirm.
// Verifies key phrase extraction for a single (French) document — expects "monde".
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesForSingleTextInputRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(keyPhrasesCollection -> validateKeyPhrases(asList("monde"),
keyPhrasesCollection.stream().collect(Collectors.toList())))
.verifyComplete());
}
// Verifies an empty document fails with INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
// Verifies duplicate document IDs are rejected with an HttpResponseException.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
// Verifies empty document IDs produce HTTP 400 / INVALID_DOCUMENT.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
// Verifies document-input batch key phrase extraction.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchKeyPhrasesRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
// Same as above with statistics enabled.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
// Verifies string-input batch extraction (no hint, no options).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesStringInputRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
// Verifies string-input batch extraction with explicit language hint.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
// Verifies string-input batch extraction with options (statistics requested).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesWarningRunner(
input -> StepVerifier.create(client.extractKeyPhrases(input))
.assertNext(keyPhrasesResult -> {
keyPhrasesResult.getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
});
})
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesBatchWarningRunner(
inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
assertTrue(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE.equals(warning.getMessage()));
assertTrue(LONG_WORDS_IN_DOCUMENT.equals(warning.getWarningCode()));
})
))
.verifyComplete()
);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Exceeding the per-request document limit must surface as an HTTP 400 carrying
    // the InvalidDocumentBatch error code.
    tooManyDocumentsRunner(documents ->
        StepVerifier.create(client.extractKeyPhrasesBatch(documents, null, null))
            .verifyErrorSatisfies(throwable -> {
                final HttpResponseException exception = (HttpResponseException) throwable;
                assertEquals(400, exception.getResponse().getStatusCode());
                final TextAnalyticsError error = (TextAnalyticsError) exception.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, error.getErrorCode());
            }));
}
/**
 * Analyzing sentiment of a single string document yields the expected document sentiment.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    analyzeSentimentForSingleTextInputRunner(document -> {
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(actual -> validateDocumentSentiment(false, getExpectedDocumentSentiment(), actual))
            .verifyComplete();
    });
}
/**
 * Analyzing sentiment of a single string with a null language hint (falls back to the
 * default) yields the expected document sentiment.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    analyzeSentimentForSingleTextInputRunner(document -> {
        StepVerifier.create(client.analyzeSentiment(document, null))
            .assertNext(actual -> validateDocumentSentiment(false, getExpectedDocumentSentiment(), actual))
            .verifyComplete();
    });
}
/**
 * Analyzing sentiment of a single string with opinion mining enabled: validation runs with
 * the sentence-options flag set to true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    analyzeSentimentForTextInputWithOpinionMiningRunner((document, options) -> {
        StepVerifier.create(client.analyzeSentiment(document, "en", options))
            .assertNext(actual -> validateDocumentSentiment(true, getExpectedDocumentSentiment(), actual))
            .verifyComplete();
    });
}
/**
 * An empty document is rejected with a TextAnalyticsException whose error code is
 * InvalidDocument.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyTextRunner(document -> {
        StepVerifier.create(client.analyzeSentiment(document))
            .expectErrorMatches(throwable -> {
                if (!(throwable instanceof TextAnalyticsException)) {
                    return false;
                }
                return INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode());
            })
            .verify();
    });
}
/**
 * A batch containing duplicate document IDs is rejected with an HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    duplicateIdRunner(documents -> {
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(documents, new TextAnalyticsRequestOptions()))
            .verifyErrorSatisfies(throwable -> assertEquals(HttpResponseException.class, throwable.getClass()));
    });
}
/**
 * A document with an empty ID is rejected: HTTP 400 carrying the InvalidDocument error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyDocumentIdRunner(documents ->
        StepVerifier.create(client.analyzeSentimentBatchWithResponse(documents, null))
            .verifyErrorSatisfies(throwable -> {
                final HttpResponseException exception = (HttpResponseException) throwable;
                assertEquals(400, exception.getResponse().getStatusCode());
                final TextAnalyticsError error = (TextAnalyticsError) exception.getValue();
                assertEquals(INVALID_DOCUMENT, error.getErrorCode());
            }));
}
/**
 * Batch of String documents with a null language code (falls back to the default) and a
 * default {@code TextAnalyticsRequestOptions}: validation expects neither request
 * statistics nor sentence-level options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentStringInputRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, false,
getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * Batch of String documents with an explicit language code: validation expects neither
 * request statistics nor sentence-level options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * String batch with AnalyzeSentimentOptions where opinion mining is explicitly switched
 * off: validation expects statistics (true) but no sentence options (false).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateAnalyzeSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * String batch with AnalyzeSentimentOptions where statistics are explicitly switched off
 * but opinion mining remains on: validation expects sentence options (true) but no
 * statistics (false).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
.verifyComplete();
});
}
/**
 * String batch with AnalyzeSentimentOptions from the runner unchanged (statistics and
 * opinion mining both on): validation expects both flags true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateAnalyzeSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * TextDocumentInput batch with a null TextAnalyticsRequestOptions: validation expects
 * neither statistics nor sentence options, HTTP 200.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * TextDocumentInput batch with request options that enable statistics: validation expects
 * statistics (true) but no sentence options, HTTP 200.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * TextDocumentInput batch with a null AnalyzeSentimentOptions (runner-supplied options
 * deliberately ignored): validation expects neither statistics nor sentence options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * TextDocumentInput batch with opinion mining explicitly switched off: validation expects
 * statistics (true) but no sentence options (false).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * TextDocumentInput batch with statistics explicitly switched off but opinion mining on:
 * validation expects sentence options (true) but no statistics (false).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response ->
validateAnalyzeSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete();
});
}
/**
 * TextDocumentInput batch with runner options unchanged (statistics and opinion mining
 * both on): validation expects both flags true.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Exceeding the per-request document limit is rejected with an HTTP 400 carrying the
 * InvalidDocumentBatch error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    tooManyDocumentsRunner(documents ->
        StepVerifier.create(client.analyzeSentimentBatch(documents, null, null))
            .verifyErrorSatisfies(throwable -> {
                final HttpResponseException exception = (HttpResponseException) throwable;
                assertEquals(400, exception.getResponse().getStatusCode());
                final TextAnalyticsError error = (TextAnalyticsError) exception.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, error.getErrorCode());
            }));
}
// NOTE(review): the @Disabled reason URLs in this region appear truncated in this
// revision ("https:") -- recover the full issue links from version history.
// Asserts exact sentence/target/assessment lengths and offsets for a document containing
// an emoji; presumably offsets are measured in UTF-16 code units -- confirm against the
// service's stringIndexType default.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(opinionSentiment -> {
assertEquals(7, opinionSentiment.getLength());
assertEquals(17, opinionSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Same offset checks with an emoji carrying a skin-tone modifier; expected offsets shift
// by the extra code units the modifier contributes.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(27, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(19, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(9, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Same offset checks with a multi-code-point emoji family sequence.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(
result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(34, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(26, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(16, targetSentiment.getOffset());
});
})
)
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Emoji family sequence where each member also carries a skin-tone modifier.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(
result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(42, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(34, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(24, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offsets for a document containing precomposed (NFC) diacritic characters.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(26, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(18, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(8, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offsets for decomposed (NFD) diacritics: one code unit longer than the NFC variant.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(27, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(19, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(9, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offsets for Korean text in NFC normalization.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(17, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offsets for Korean text in NFD normalization; expected values match the NFC case here.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(17, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offsets for "Zalgo" text (heavy stacks of combining characters).
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(138, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(130, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(120, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Healthcare-entities long-running operation over raw strings with no options:
// poll to completion and validate the single-page result list without statistics.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareStringInputWithoutOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareStringInputRunner((documents, dummyOptions) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
false,
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// Healthcare LRO over raw strings with options; the display-name round-trip is only
// checked on API versions newer than V3_0/V3_1, which do not support it.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareStringInputWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareStringInputRunner((documents, options) -> {
boolean isValidApiVersionForDisplayName = serviceVersion != TextAnalyticsServiceVersion.V3_0
&& serviceVersion != TextAnalyticsServiceVersion.V3_1;
if (isValidApiVersionForDisplayName) {
options.setDisplayName("operationName");
}
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<AnalyzeHealthcareEntitiesOperationDetail> pollResponse = syncPoller.waitForCompletion();
if (isValidApiVersionForDisplayName) {
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
}
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePageWithFhir(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// "Max overload": TextDocumentInput batch plus options, again gating the display-name
// assertion on API version support.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareMaxOverload(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareLroRunner((documents, options) -> {
boolean isValidApiVersionForDisplayName = serviceVersion != TextAnalyticsServiceVersion.V3_0
&& serviceVersion != TextAnalyticsServiceVersion.V3_1;
if (isValidApiVersionForDisplayName) {
options.setDisplayName("operationName");
}
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<AnalyzeHealthcareEntitiesOperationDetail> pollResponse = syncPoller.waitForCompletion();
if (isValidApiVersionForDisplayName) {
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
}
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// Pagination: 10 documents produce a multi-page result set that must be fully drained
// and validated.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(0, 10, 0),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
}, 10);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Starting the healthcare LRO with an empty document list must fail with an
    // IllegalArgumentException carrying the runner-supplied message.
    emptyListRunner((documents, expectedMessage) ->
        StepVerifier.create(client.beginAnalyzeHealthcareEntities(documents, null))
            .expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
                && expectedMessage.equals(throwable.getMessage()))
            .verify());
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
@Disabled("https:
public void analyzeHealthcareEntitiesEmojiUnicodeCodePoint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains an emoji: every recognized entity must report length 11 and offset 20. The
 * offset value reflects the service's string-index convention — presumably UTF-16 code
 * units; verify against the service docs.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(20, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains an emoji with a skin-tone modifier: every recognized entity must report
 * length 11 and offset 22 (the modifier adds code units ahead of the entity).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiWithSkinToneModifierRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(22, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains a family emoji (a multi-code-point ZWJ sequence): every recognized entity must
 * report length 11 and offset 29.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(29, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains a family emoji with skin-tone modifiers: every recognized entity must report
 * length 11 and offset 37.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiFamilyWithSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyWithSkinToneModifierRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(37, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains NFC-normalized diacritics (precomposed characters): every recognized entity
 * must report length 11 and offset 21.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesDiacriticsNfc(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfcRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(21, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains NFD-normalized diacritics (decomposed combining marks): every recognized
 * entity must report length 11 and offset 22 — one more unit than the NFC variant.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesDiacriticsNfd(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfdRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(22, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains NFC-normalized Korean: every recognized entity must report length 11 and
 * offset 20.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfcRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(20, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * contains NFD-normalized Korean: every recognized entity must report length 11 and
 * offset 20 (same as the NFC variant for this input).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfdRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(20, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Checks entity offset/length stability for HEALTHCARE_ENTITY_OFFSET_INPUT when the text
 * is preceded by "zalgo" text (heavy combining-character noise): every recognized entity
 * must report length 11 and offset 133.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    zalgoTextRunner(document -> {
        // Start the LRO for the single document and block until it finishes.
        SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux> poller =
            client.beginAnalyzeHealthcareEntities(
                Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Every entity on every page must carry the expected span.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult ->
                documentResult.getEntities().forEach(entity -> {
                    assertEquals(11, entity.getLength());
                    assertEquals(133, entity.getOffset());
                })));
    }, HEALTHCARE_ENTITY_OFFSET_INPUT);
}
/**
 * Runs healthcare-entity recognition and inspects the assertion attached to the second
 * entity of the first document: conditionality must be HYPOTHETICAL while association
 * and certainty are absent.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesForAssertion(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeHealthcareEntitiesForAssertionRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
// Drill into page 0 -> document result 0 -> entity index 1 (the second entity).
final HealthcareEntityAssertion assertion =
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList())
.get(0).stream().collect(Collectors.toList())
.get(0).getEntities().stream().collect(Collectors.toList())
.get(1)
.getAssertion();
assertEquals(EntityConditionality.HYPOTHETICAL, assertion.getConditionality());
assertNull(assertion.getAssociation());
assertNull(assertion.getCertainty());
});
}
/**
 * Cancels an in-flight healthcare LRO and polls until the operation reports
 * USER_CANCELLED. Currently disabled.
 * NOTE(review): the while-loop has no explicit timeout — if the service never reports
 * USER_CANCELLED the loop only terminates via the surrounding test timeout; confirm
 * this is intentional before re-enabling.
 */
@Disabled("Temporary disable it for green test")
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.cancelOperation();
// Keep polling until the cancellation is reflected in the operation status.
LongRunningOperationStatus operationStatus = syncPoller.poll().getStatus();
while (!LongRunningOperationStatus.USER_CANCELLED.equals(operationStatus)) {
operationStatus = syncPoller.poll().getStatus();
}
syncPoller.waitForCompletion();
Assertions.assertEquals(LongRunningOperationStatus.USER_CANCELLED, operationStatus);
});
}
/**
 * Runs beginAnalyzeActions with plain-string documents and the runner-provided task set,
 * then validates the final paged result against the expected per-action collections.
 * The eight positional streams mirror the action kinds accepted by
 * getExpectedAnalyzeBatchActionsResult; IterableStream.of(null) marks kinds with no
 * expected results here.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsStringInputRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForActions(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW,
getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getAnalyzeSentimentResultCollectionForActions(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
/**
 * Same scenario as analyzeActionsStringInput but passes explicit
 * AnalyzeActionsOptions with statistics disabled; the expected per-action result
 * collections are identical. IterableStream.of(null) marks action kinds with no
 * expected results.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForActions(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW,
getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getAnalyzeSentimentResultCollectionForActions(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
/**
 * Submits two actions of each supported kind and verifies each kind's result stream
 * contains exactly two entries. Currently disabled (the @Disabled reason URL appears
 * truncated in this snapshot — verify the tracking issue upstream).
 */
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithMultiSameKindActions(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsWithMultiSameKindActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
// Each action kind was requested twice, so each result stream must have two entries.
actionsResults.forEach(actionsResult -> {
assertEquals(2, actionsResult.getRecognizeEntitiesResults().stream().count());
assertEquals(2, actionsResult.getRecognizePiiEntitiesResults().stream().count());
assertEquals(2, actionsResult.getRecognizeLinkedEntitiesResults().stream().count());
assertEquals(2, actionsResult.getAnalyzeSentimentResults().stream().count());
assertEquals(2, actionsResult.getExtractKeyPhrasesResults().stream().count());
});
});
}
/**
 * Verifies that a caller-supplied action name (CUSTOM_ACTION_NAME) round-trips through
 * the service and is reported back on the first result of every action kind.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithActionNames(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsWithActionNamesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
// The custom name must appear on the first result of each action kind.
actionsResults.forEach(actionsResult -> {
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizeEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizePiiEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizeLinkedEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getAnalyzeSentimentResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getExtractKeyPhrasesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
});
});
}
/**
 * Verifies language auto-detection across all standard action kinds: for each kind, the
 * first document must be detected as English and the second as Spanish. The repeated
 * stream-collect drilling extracts the per-document results of the first action of each
 * kind.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsAutoDetectedLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsAutoDetectedLanguageRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
// Entity recognition: doc 0 -> English, doc 1 -> Spanish.
List<RecognizeEntitiesResult> recognizeEntitiesResults = actionsResult.getRecognizeEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizeEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizeEntitiesResults.get(1).getDetectedLanguage());
// PII entity recognition.
List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
actionsResult.getRecognizePiiEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizePiiEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizePiiEntitiesResults.get(1).getDetectedLanguage());
// Linked-entity recognition.
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
actionsResult.getRecognizeLinkedEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizeLinkedEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizeLinkedEntitiesResults.get(1).getDetectedLanguage());
// Sentiment analysis.
List<AnalyzeSentimentResult> analyzeSentimentResults = actionsResult.getAnalyzeSentimentResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
analyzeSentimentResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
analyzeSentimentResults.get(1).getDetectedLanguage());
// Key-phrase extraction.
List<ExtractKeyPhraseResult> keyPhraseResults = actionsResult.getExtractKeyPhrasesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
keyPhraseResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
keyPhraseResults.get(1).getDetectedLanguage());
});
});
}
/**
 * Verifies language auto-detection for the custom-text action kinds (custom entity
 * recognition, single- and multi-label classification): for each kind, document 0 must
 * be detected as English and document 1 as Spanish. Currently disabled (the @Disabled
 * URL appears truncated in this snapshot).
 * NOTE(review): the third client-factory argument is {@code true} here, unlike most
 * tests in this file — presumably it selects the custom-text-enabled client; verify.
 */
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsAutoDetectedLanguageCustomTexts(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
analyzeActionsAutoDetectedLanguageCustomTextRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
// Custom entity recognition: doc 0 -> English, doc 1 -> Spanish.
List<RecognizeEntitiesResult> customEntitiesResults = actionsResult.getRecognizeCustomEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
customEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
customEntitiesResults.get(1).getDetectedLanguage());
// Single-label classification.
List<ClassifyDocumentResult> singleLabelResults = actionsResult.getSingleLabelClassifyResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
singleLabelResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
singleLabelResults.get(1).getDetectedLanguage());
// Multi-label classification.
List<ClassifyDocumentResult> multiLabelResults = actionsResult.getMultiLabelClassifyResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
multiLabelResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
multiLabelResults.get(1).getDetectedLanguage());
});
});
}
/**
 * Verifies paging of the analyze-actions LRO: 22 documents are submitted and the final
 * paged flux is compared against the expected result list built for pages of 20 items
 * (arguments 0, 20, 2 to the expected-list builder). Currently disabled (the @Disabled
 * URL appears truncated in this snapshot).
 */
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchActionsPaginationRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux>
syncPoller = client.beginAnalyzeActions(
documents, tasks, new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
getExpectedAnalyzeActionsResultListForMultiplePages(0, 20, 2),
result.toStream().collect(Collectors.toList()));
}, 22);
}
/**
 * An empty document list must be rejected up front: beginAnalyzeActions is expected to
 * terminate with an IllegalArgumentException whose message matches the one supplied by
 * the runner.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyListRunner((documents, errorMessage) -> {
        // A minimal, valid action set — the failure must come from the empty input.
        final TextAnalyticsActions actions = new TextAnalyticsActions()
            .setRecognizeEntitiesActions(new RecognizeEntitiesAction());
        StepVerifier.create(client.beginAnalyzeActions(documents, actions, null))
            .expectErrorMatches(error -> error instanceof IllegalArgumentException
                && errorMessage.equals(error.getMessage()))
            .verify();
    });
}
/**
 * Runs an actions batch containing only an entity-recognition action and validates the
 * result: the first positional stream carries the expected entity results; the remaining
 * seven IterableStream.of(null) arguments mark action kinds not requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeEntitiesRecognitionAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeEntitiesRecognitionRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
/**
 * Runs an entity-recognition action and validates the entity resolutions attached to
 * every document result of every entity-recognition action in the final paged flux.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeEntitiesRecognitionActionResolution(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    analyzeEntitiesRecognitionResolutionRunner((documents, tasks) -> {
        // Start the actions LRO and block until it finishes.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            client.beginAnalyzeActions(documents, tasks).getSyncPoller();
        poller = setPollInterval(poller);
        poller.waitForCompletion();
        // Validate resolutions on each NER action's document results.
        poller.getFinalResult().toStream().forEach(actionsResult ->
            actionsResult.getRecognizeEntitiesResults().forEach(nerActionResult ->
                validateEntityResolutions(nerActionResult.getDocumentsResults())));
    });
}
/**
 * Runs a PII-recognition action with category filters applied and validates that only
 * the filtered categories come back (expected collection from
 * getExpectedBatchPiiEntitiesForCategoriesFilter). The PII stream is the third
 * positional argument; the other seven are null streams for kinds not requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzePiiEntityRecognitionWithCategoriesFilters(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzePiiEntityRecognitionWithCategoriesFiltersRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW, getExpectedBatchPiiEntitiesForCategoriesFilter(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
/**
 * Runs a PII-recognition action with a domain filter applied and validates the result
 * against getExpectedBatchPiiEntitiesForDomainFilter. Mirrors the categories-filter
 * test above with a different expected collection.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzePiiEntityRecognitionWithDomainFilters(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzePiiEntityRecognitionWithDomainFiltersRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW, getExpectedBatchPiiEntitiesForDomainFilter(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
/**
 * Runs a linked-entity-recognition action (language pinned to "en") and validates the
 * result: the second positional stream carries the expected linked-entity collection;
 * the remaining seven null streams mark kinds not requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeLinkedEntityActions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeLinkedEntityRecognitionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(
false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
/**
 * Runs a key-phrase-extraction action (language pinned to "en") and validates the
 * result: the fifth positional stream carries the expected key-phrase collection; the
 * remaining seven null streams mark kinds not requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeKeyPhrasesExtractionAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
/**
 * Runs a sentiment-analysis action (language pinned to "en") and validates the result:
 * the sixth positional stream carries the expected sentiment collection; the remaining
 * seven null streams mark kinds not requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getExpectedBatchTextSentiment(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
/**
 * Runs a healthcare-entities action (language pinned to "en") and validates the result:
 * the fourth positional stream carries the expected healthcare collection (two document
 * results including FHIR output); the remaining seven null streams mark kinds not
 * requested.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeHealthcareEntitiesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedAnalyzeHealthcareEntitiesActionResult(false, null, TIME_NOW,
getExpectedAnalyzeHealthcareEntitiesResultCollection(2,
asList(
getRecognizeHealthcareEntitiesResultWithFhir1("0"),
getRecognizeHealthcareEntitiesResultWithFhir2())),
null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntitiesAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    recognizeCustomEntitiesActionRunner((documents, tasks) -> {
        // Kick off the action batch and block until the long-running operation finishes.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            setPollInterval(client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller());
        poller.waitForCompletion();
        // Validate the recognized entities of every document in every custom-entities action result.
        for (AnalyzeActionsResult actionsResult
                : poller.getFinalResult().toStream().collect(Collectors.toList())) {
            actionsResult.getRecognizeCustomEntitiesResults().forEach(actionResult ->
                actionResult.getDocumentsResults().forEach(documentResult ->
                    validateCategorizedEntities(
                        documentResult.getEntities().stream().collect(Collectors.toList()))));
        }
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassificationAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomSingleCategoryActionRunner((documents, tasks) -> {
        // Launch the single-label classification action batch and wait for completion.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            setPollInterval(client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller());
        poller.waitForCompletion();
        // Each classified document of each single-label action result must pass validation.
        poller.getFinalResult().toStream().forEach(actionsResult ->
            actionsResult.getSingleLabelClassifyResults().forEach(actionResult ->
                actionResult.getDocumentsResults().forEach(documentResult ->
                    validateLabelClassificationResult(documentResult))));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiCategoryClassifyAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomMultiCategoryActionRunner((documents, tasks) -> {
        // Launch the multi-label classification action batch and wait for completion.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            setPollInterval(client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller());
        poller.waitForCompletion();
        // Each classified document of each multi-label action result must pass validation.
        poller.getFinalResult().toStream().forEach(actionsResult ->
            actionsResult.getMultiLabelClassifyResults().forEach(actionResult ->
                actionResult.getDocumentsResults().forEach(documentResult ->
                    validateLabelClassificationResult(documentResult))));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntitiesStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    recognizeCustomEntitiesRunner((documents, parameters) -> {
        // Begin custom-entity recognition (parameters presumably carry project/deployment
        // names - confirm against the runner) and wait for the operation to finish.
        SyncPoller<RecognizeCustomEntitiesOperationDetail, RecognizeCustomEntitiesPagedFlux> poller =
            setPollInterval(
                client.beginRecognizeCustomEntities(documents, parameters.get(0), parameters.get(1))
                    .getSyncPoller());
        poller.waitForCompletion();
        // Validate the entities of every document in every page of the final result.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateCategorizedEntities(
                documentResult.getEntities().stream().collect(Collectors.toList()))));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntities(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    recognizeCustomEntitiesRunner((documents, parameters) -> {
        final RecognizeCustomEntitiesOptions options = new RecognizeCustomEntitiesOptions()
            .setDisplayName("operationName");
        SyncPoller<RecognizeCustomEntitiesOperationDetail, RecognizeCustomEntitiesPagedFlux> poller =
            setPollInterval(
                client.beginRecognizeCustomEntities(documents, parameters.get(0), parameters.get(1),
                    "en", options).getSyncPoller());
        // The operation metadata must echo the display name configured in the options.
        PollResponse<RecognizeCustomEntitiesOperationDetail> completion = poller.waitForCompletion();
        assertEquals(options.getDisplayName(), completion.getValue().getDisplayName());
        // Validate the entities of every document in every page of the final result.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateCategorizedEntities(
                documentResult.getEntities().stream().collect(Collectors.toList()))));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassificationStringInput(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomSingleLabelRunner((documents, parameters) -> {
        // Begin single-label classification and block until the operation completes.
        SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> poller =
            setPollInterval(
                client.beginSingleLabelClassify(documents, parameters.get(0), parameters.get(1))
                    .getSyncPoller());
        poller.waitForCompletion();
        // Every classified document in the final result must pass validation.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassification(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomSingleLabelRunner((documents, parameters) -> {
        final SingleLabelClassifyOptions options =
            new SingleLabelClassifyOptions().setDisplayName("operationName");
        SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> poller =
            setPollInterval(
                client.beginSingleLabelClassify(documents, parameters.get(0), parameters.get(1),
                    "en", options).getSyncPoller());
        // The operation metadata must echo the display name configured in the options.
        PollResponse<ClassifyDocumentOperationDetail> completion = poller.waitForCompletion();
        assertEquals(options.getDisplayName(), completion.getValue().getDisplayName());
        // Every classified document in the final result must pass validation.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiLabelClassificationStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomMultiLabelRunner((documents, parameters) -> {
        // Begin multi-label classification and block until the operation completes.
        SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> poller =
            setPollInterval(
                client.beginMultiLabelClassify(documents, parameters.get(0), parameters.get(1))
                    .getSyncPoller());
        poller.waitForCompletion();
        // Every classified document in the final result must pass validation.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
    });
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiLabelClassification(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
    classifyCustomMultiLabelRunner((documents, parameters) -> {
        final MultiLabelClassifyOptions options =
            new MultiLabelClassifyOptions().setDisplayName("operationName");
        SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> poller =
            setPollInterval(
                client.beginMultiLabelClassify(documents, parameters.get(0), parameters.get(1),
                    "en", options).getSyncPoller());
        // The operation metadata must echo the display name configured in the options.
        PollResponse<ClassifyDocumentOperationDetail> completion = poller.waitForCompletion();
        assertEquals(options.getDisplayName(), completion.getValue().getDisplayName());
        // Every classified document in the final result must pass validation.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
    });
}
/**
 * Verifies extractive summarization via analyze-actions with default parameters (the runner
 * receives null for both max sentence count and sentence order); only the extract-summary
 * slot of the expected batch result is populated.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
// NOTE(review): the @MethodSource value below appears truncated by extraction - verify it
// names the test-parameter supplier method in TestUtils.
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithDefaultParameterValues(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryActionRunner((documents, tasks) -> {
// Start the long-running operation and block until it completes.
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
// Only the extract-summary slot (7th positional argument) carries expected results;
// the argument order of this builder is significant.
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExtractSummaryActionResult(false, null,
TIME_NOW,
getExpectedExtractSummaryResultCollection(getExpectedExtractSummaryResultSortByOffset()),
null))),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}, null, null);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionSortedByOffset(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryActionRunner((documents, tasks) -> {
        // Run the extract-summary action batch to completion.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            setPollInterval(client.beginAnalyzeActions(documents, tasks, "en",
                new AnalyzeActionsOptions()).getSyncPoller());
        poller.waitForCompletion();
        // With OFFSET ordering requested, each document's sentences must be ascending by offset.
        poller.getFinalResult().toStream().forEach(actionsResult ->
            actionsResult.getExtractSummaryResults().forEach(summaryActionResult ->
                summaryActionResult.getDocumentsResults().forEach(documentResult ->
                    assertTrue(isAscendingOrderByOffSet(
                        documentResult.getSentences().stream().collect(Collectors.toList()))))));
    }, 4, SummarySentencesOrder.OFFSET);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionSortedByRankScore(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryActionRunner((documents, tasks) -> {
        // Run the extract-summary action batch to completion.
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> poller =
            setPollInterval(client.beginAnalyzeActions(documents, tasks, "en",
                new AnalyzeActionsOptions()).getSyncPoller());
        poller.waitForCompletion();
        // With RANK ordering requested, each document's sentences must be descending by rank score.
        poller.getFinalResult().toStream().forEach(actionsResult ->
            actionsResult.getExtractSummaryResults().forEach(summaryActionResult ->
                summaryActionResult.getDocumentsResults().forEach(documentResult ->
                    assertTrue(isDescendingOrderByRankScore(
                        documentResult.getSentences().stream().collect(Collectors.toList()))))));
    }, 4, SummarySentencesOrder.RANK);
}
/**
 * Verifies that when a max sentence count is requested, every summarized document comes back
 * with fewer sentences than that maximum.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithSentenceCountLessThanMaxCount(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Single source of truth for the maximum; the original duplicated the magic number 20
    // in the runner argument and the assertion.
    final int maxSentenceCount = 20;
    extractSummaryActionRunner((documents, tasks) -> {
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
            client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions())
                .getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
        // count() avoids materializing a List just to read its size.
        result.toStream().collect(Collectors.toList()).forEach(
            actionsResult -> actionsResult.getExtractSummaryResults().forEach(
                extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
                    documentResult -> assertTrue(
                        documentResult.getSentences().stream().count() < maxSentenceCount))));
    }, maxSentenceCount, null);
}
/**
 * Verifies that requesting a non-default sentence count (5) yields exactly that many
 * sentences per summarized document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithNonDefaultSentenceCount(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Shared by the runner argument and the assertion below.
    final int sentenceCount = 5;
    extractSummaryActionRunner((documents, tasks) -> {
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
            client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions())
                .getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
        final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
        // JUnit's assertEquals takes (expected, actual); the original had them swapped,
        // which produces a misleading failure message.
        actionsResults.forEach(
            actionsResult -> actionsResult.getExtractSummaryResults().forEach(
                extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
                    documentResult -> assertEquals(sentenceCount,
                        documentResult.getSentences().stream().collect(Collectors.toList()).size()))));
    }, sentenceCount, null);
}
/**
 * Verifies that out-of-range max-sentence-count values cause the operation to fail with an
 * INVALID_PARAMETER_VALUE error. 0 and 21 are presumably just outside the service's
 * accepted range - confirm against the service documentation.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionMaxSentenceCountInvalidRangeException(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    int[] invalidMaxSentenceCounts = {0, 21};
    for (int invalidCount : invalidMaxSentenceCounts) {
        extractSummaryActionRunner(
            (documents, tasks) -> {
                HttpResponseException exception = assertThrows(HttpResponseException.class, () -> {
                    SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
                        client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions())
                            .getSyncPoller();
                    syncPoller = setPollInterval(syncPoller);
                    syncPoller.waitForCompletion();
                    // The invalid parameter surfaces when fetching the final result; the
                    // original assigned this to an unused local variable.
                    syncPoller.getFinalResult();
                });
                assertEquals(
                    TextAnalyticsErrorCode.INVALID_PARAMETER_VALUE,
                    ((TextAnalyticsError) exception.getValue()).getErrorCode());
            }, invalidCount, null);
    }
}
/**
 * Verifies abstractive summarization via analyze-actions with default parameters (the runner
 * receives null); only the abstractive-summary slot of the expected batch result is populated.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
// NOTE(review): the @MethodSource value below appears truncated by extraction - verify it
// names the test-parameter supplier method in TestUtils.
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeAbstractiveSummaryActionWithDefaultParameterValues(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
abstractSummaryActionRunner((documents, tasks) -> {
// Start the long-running operation and block until it completes.
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
// Only the abstractive-summary slot (8th/last positional argument) carries expected
// results; the argument order of this builder is significant.
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getAbstractSummaryActionResult(false, null,
TIME_NOW,
new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
null
)))
)),
result.toStream().collect(Collectors.toList()));
}, null);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryDuplicateIdInput(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Documents sharing an ID must be rejected with an HttpResponseException.
    duplicateIdRunner(inputs ->
        StepVerifier.create(client.beginAbstractSummary(inputs, null))
            .verifyErrorSatisfies(error -> assertEquals(HttpResponseException.class, error.getClass())));
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // A document with an empty ID must be rejected with HTTP 400 / INVALID_DOCUMENT.
    emptyDocumentIdRunner(inputs ->
        StepVerifier.create(client.beginAbstractSummary(inputs, null))
            .verifyErrorSatisfies(throwable -> {
                final HttpResponseException responseException = (HttpResponseException) throwable;
                assertEquals(400, responseException.getResponse().getStatusCode());
                assertEquals(INVALID_DOCUMENT,
                    ((TextAnalyticsError) responseException.getValue()).getErrorCode());
            }));
}
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryTooManyDocuments(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // A batch exceeding the service's document limit must fail with HTTP 400 / INVALID_DOCUMENT_BATCH.
    tooManyDocumentsRunner(inputs ->
        StepVerifier.create(client.beginAbstractSummary(inputs, null, null))
            .verifyErrorSatisfies(throwable -> {
                final HttpResponseException responseException = (HttpResponseException) throwable;
                assertEquals(400, responseException.getResponse().getStatusCode());
                assertEquals(INVALID_DOCUMENT_BATCH,
                    ((TextAnalyticsError) responseException.getValue()).getErrorCode());
            }));
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    abstractSummaryRunner((documents, options) -> {
        // Begin abstractive summarization for plain string inputs and wait for completion.
        SyncPoller<AbstractSummaryOperationDetail, AbstractSummaryPagedFlux> poller =
            setPollInterval(client.beginAbstractSummary(documents).getSyncPoller());
        poller.waitForCompletion();
        // Each result collection in the final result must match the expected summary.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            validateAbstractiveSummaryResultCollection(false,
                new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
                resultCollection));
    }, 4);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryMaxOverload(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    abstractSummaryMaxOverloadRunner((documents, options) -> {
        // Exercise the overload that takes explicit options and wait for completion.
        SyncPoller<AbstractSummaryOperationDetail, AbstractSummaryPagedFlux> poller =
            setPollInterval(client.beginAbstractSummary(documents, options).getSyncPoller());
        poller.waitForCompletion();
        // Each result collection in the final result must match the expected summary.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            validateAbstractiveSummaryResultCollection(false,
                new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
                resultCollection));
    }, 4);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySortedByOffset(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryRunner((documents, options) -> {
        // Begin extractive summarization and block until the operation completes.
        SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> poller =
            setPollInterval(client.beginExtractSummary(documents, "en", options).getSyncPoller());
        poller.waitForCompletion();
        // With OFFSET ordering requested, each document's sentences must be ascending by offset.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> assertTrue(isAscendingOrderByOffSet(
                documentResult.getSentences().stream().collect(Collectors.toList())))));
    }, 4, SummarySentencesOrder.OFFSET);
}
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySortedByRankScore(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryRunner((documents, options) -> {
        // Begin extractive summarization and block until the operation completes.
        SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> poller =
            setPollInterval(client.beginExtractSummary(documents, "en", options).getSyncPoller());
        poller.waitForCompletion();
        // With RANK ordering requested, each document's sentences must be descending by rank score.
        poller.getFinalResult().toStream().forEach(resultCollection ->
            resultCollection.forEach(documentResult -> assertTrue(isDescendingOrderByRankScore(
                documentResult.getSentences().stream().collect(Collectors.toList())))));
    }, 4, SummarySentencesOrder.RANK);
}
/**
 * Verifies that when a max sentence count is requested, every summarized document comes back
 * with fewer sentences than that maximum.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySentenceCountLessThanMaxCount(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Single source of truth for the maximum; the original duplicated the magic number 20
    // in the runner argument and the assertion.
    final int maxSentenceCount = 20;
    extractSummaryRunner((documents, options) -> {
        SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
            client.beginExtractSummary(documents, "en", options).getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
        // count() avoids materializing a List just to read its size.
        result.toStream().collect(Collectors.toList()).forEach(
            documentResultCollection -> documentResultCollection.forEach(
                documentResult -> assertTrue(
                    documentResult.getSentences().stream().count() < maxSentenceCount)));
    }, maxSentenceCount, null);
}
/**
 * Verifies that requesting a non-default sentence count (5) yields exactly that many
 * sentences per summarized document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummaryNonDefaultSentenceCount(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Shared by the runner argument and the assertion below.
    final int sentenceCount = 5;
    extractSummaryRunner((documents, options) -> {
        SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
            client.beginExtractSummary(documents, "en", options).getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
        // JUnit's assertEquals takes (expected, actual); the original had them swapped,
        // which produces a misleading failure message.
        result.toStream().collect(Collectors.toList()).forEach(
            documentResultCollection -> documentResultCollection.forEach(
                documentResult -> assertEquals(sentenceCount,
                    documentResult.getSentences().stream().collect(Collectors.toList()).size())));
    }, sentenceCount, null);
}
/**
 * Verifies that out-of-range max-sentence-count values cause the operation to fail with an
 * INVALID_PARAMETER_VALUE error. 0 and 21 are presumably just outside the service's
 * accepted range - confirm against the service documentation.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummaryMaxSentenceCountInvalidRangeException(HttpClient httpClient,
        TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    int[] invalidMaxSentenceCounts = {0, 21};
    for (int invalidCount : invalidMaxSentenceCounts) {
        extractSummaryRunner(
            (documents, options) -> {
                HttpResponseException exception = assertThrows(HttpResponseException.class, () -> {
                    SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
                        client.beginExtractSummary(documents, "en", options)
                            .getSyncPoller();
                    syncPoller = setPollInterval(syncPoller);
                    syncPoller.waitForCompletion();
                    // The invalid parameter surfaces when fetching the final result; the
                    // original assigned this to an unused local variable.
                    syncPoller.getFinalResult();
                });
                assertEquals(
                    TextAnalyticsErrorCode.INVALID_PARAMETER_VALUE,
                    ((TextAnalyticsError) exception.getValue()).getErrorCode());
            }, invalidCount, null);
    }
}
} | class TextAnalyticsAsyncClientTest extends TextAnalyticsClientTestBase {
private TextAnalyticsAsyncClient client;
@BeforeAll
@AfterAll
static void afterAll() {
StepVerifier.resetDefaultTimeout();
}
private HttpClient buildAsyncAssertingClient(HttpClient httpClient) {
return new AssertingHttpClientBuilder(httpClient)
.skipRequest((ignored1, ignored2) -> false)
.assertAsync()
.build();
}
private TextAnalyticsAsyncClient getTextAnalyticsAsyncClient(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion, boolean isStaticResource) {
return getTextAnalyticsClientBuilder(
buildAsyncAssertingClient(interceptorManager.isPlaybackMode() ? interceptorManager.getPlaybackClient() : httpClient),
serviceVersion,
isStaticResource)
.buildAsyncClient();
}
/**
* Verify that we can get statistics on the collection result when given a batch of documents with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageShowStatisticsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(true, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each {@code DetectLanguageResult} input of a batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
.assertNext(response ->
validateDetectLanguageResultCollectionWithResponse(false, getExpectedBatchDetectedLanguages(),
200, response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with given country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguagesCountryHintRunner((inputs, countryHint) ->
StepVerifier.create(client.detectLanguageBatch(inputs, countryHint, null))
.assertNext(actualResults ->
validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), actualResults))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch with request options.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchListCountryHintWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguagesBatchListCountryHintWithOptionsRunner((inputs, options) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, options))
.assertNext(response -> validateDetectLanguageResultCollection(true, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Test to detect language for each string input of batch.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguagesBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageStringInputRunner((inputs) ->
StepVerifier.create(client.detectLanguageBatch(inputs, null, null))
.assertNext(response -> validateDetectLanguageResultCollection(false, getExpectedBatchDetectedLanguages(), response))
.verifyComplete());
}
/**
* Verifies that a single DetectedLanguage is returned for a document to detectLanguage.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectSingleTextLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectSingleTextLanguageRunner(input ->
StepVerifier.create(client.detectLanguage(input))
.assertNext(response -> validatePrimaryLanguage(getDetectedLanguageEnglish(), response))
.verifyComplete());
}
/**
* Verifies that an TextAnalyticsException is thrown for a document with invalid country hint.
*/
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageInvalidCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
detectLanguageInvalidCountryHintRunner((input, countryHint) ->
StepVerifier.create(client.detectLanguage(input, countryHint))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_COUNTRY_HINT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
 * Verifies that a TextAnalyticsException is thrown for an empty document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // An empty document is rejected with error code INVALID_DOCUMENT.
    emptyTextRunner(input ->
        StepVerifier.create(client.detectLanguage(input))
            .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
                && INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
            .verify());
}
/**
 * Verifies that a bad request exception is returned for input documents with the same IDs.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Duplicate document IDs surface as an HttpResponseException from the batch call.
    detectLanguageDuplicateIdRunner((inputs, options) ->
        StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, options))
            .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that an invalid document exception is returned for input documents with an empty ID.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    detectLanguageInputEmptyIdRunner(inputs ->
        StepVerifier.create(client.detectLanguageBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> {
                // Expect HTTP 400 carrying a TextAnalyticsError with code INVALID_DOCUMENT.
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verifies that a countryHint of empty string does not throw an exception.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageEmptyCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // The runner's document is expected to be detected as Spanish.
    detectLanguageEmptyCountryHintRunner((input, countryHint) ->
        StepVerifier.create(client.detectLanguage(input, countryHint))
            .assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
            .verifyComplete());
}
/**
 * Verifies that a countryHint of "none" does not throw an exception.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void detectLanguageNoneCountryHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // The runner's document is expected to be detected as Spanish.
    detectLanguageNoneCountryHintRunner((input, countryHint) ->
        StepVerifier.create(client.detectLanguage(input, countryHint))
            .assertNext(response -> validatePrimaryLanguage(getDetectedLanguageSpanish(), response))
            .verifyComplete());
}
/**
 * Verifies that categorized entities are returned for a single text input.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeCategorizedEntitiesForSingleTextInputRunner(input ->
        StepVerifier.create(client.recognizeEntities(input))
            // The paged response is materialized into a list before validation.
            .assertNext(response -> validateCategorizedEntities(response.stream().collect(Collectors.toList())))
            .verifyComplete());
}
/**
 * Verifies that a TextAnalyticsException with error code INVALID_DOCUMENT is raised for an
 * empty document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyTextRunner(input ->
        StepVerifier.create(client.recognizeEntities(input))
            .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
                && INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
            .verify()
    );
}
/**
 * Verifies that a batch with duplicate document IDs fails with an HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    duplicateIdRunner(inputs ->
        StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that a batch containing a document with an empty ID fails with HTTP 400 and
 * error code INVALID_DOCUMENT.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyDocumentIdRunner(inputs ->
        StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verifies that a per-document error in the batch response makes getEntities() throw a
 * TextAnalyticsException with the batch-error message for RecognizeEntitiesResult.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeBatchCategorizedEntitySingleErrorRunner((inputs) ->
        StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
            .assertNext(resultCollection -> resultCollection.getValue().forEach(recognizeEntitiesResult -> {
                // Accessing entities of an errored document must throw, not return partial data.
                Exception exception = assertThrows(TextAnalyticsException.class, recognizeEntitiesResult::getEntities);
                assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizeEntitiesResult"), exception.getMessage());
            })).verifyComplete());
}
/**
 * Verifies categorized entities for a batch of TextDocumentInput, without statistics.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeBatchCategorizedEntityRunner((inputs) ->
        StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, null))
            .assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(false, getExpectedBatchCategorizedEntities(), 200, response))
            .verifyComplete());
}
/**
 * Verifies categorized entities for a batch input when statistics are requested via options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Validator's first argument (true) asserts that statistics are present in the response.
    recognizeBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.recognizeEntitiesBatchWithResponse(inputs, options))
            .assertNext(response -> validateCategorizedEntitiesResultCollectionWithResponse(true, getExpectedBatchCategorizedEntities(), 200, response))
            .verifyComplete());
}
/**
 * Verifies categorized entities for a batch of plain-string documents.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeCategorizedEntityStringInputRunner((inputs) ->
        StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
            .assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
            .verifyComplete());
}
/**
 * Verifies categorized entities for a list of string documents with an explicit language hint.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeCategorizedEntitiesLanguageHintRunner((inputs, language) ->
        StepVerifier.create(client.recognizeEntitiesBatch(inputs, language, null))
            .assertNext(response -> validateCategorizedEntitiesResultCollection(false, getExpectedBatchCategorizedEntities(), response))
            .verifyComplete());
}
/**
 * Verifies categorized entities for a list of string documents when options (with statistics)
 * are supplied.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesForListWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeStringBatchCategorizedEntitiesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
            .assertNext(response -> validateCategorizedEntitiesResultCollection(true, getExpectedBatchCategorizedEntities(), response))
            .verifyComplete());
}
/**
 * Verifies that exceeding the service's per-request document limit fails with HTTP 400 and
 * error code INVALID_DOCUMENT_BATCH.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    tooManyDocumentsRunner(inputs ->
        StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verifies entity length/offset for a document prefixed with an emoji.
 * NOTE(review): the asserted numbers presumably count UTF-16 code units — confirm against
 * the service's default string-index type.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(13, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Disabled test; body intentionally empty. See the issue link in the {@code @Disabled}
 * annotation for the reason.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
@Disabled("https:
public void recognizeEntitiesBatchWithResponseEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
}
/**
 * Verifies entity length/offset for a document whose emoji carries a skin-tone modifier;
 * the expected offset (15) is larger than the plain-emoji case.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(15, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing a multi-codepoint family emoji
 * (expected offset 22).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(22, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a family emoji with skin-tone modifiers
 * (expected offset 30).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(30, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing NFC-normalized diacritics
 * (expected offset 14).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfcRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(14, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing NFD-decomposed diacritics;
 * the decomposed form shifts the expected offset to 15.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfdRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(15, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing NFC-normalized Korean text
 * (expected offset 13).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfcRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(13, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing NFD-decomposed Korean text;
 * expected offset matches the NFC case (13) here.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfdRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(13, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies entity length/offset for a document containing zalgo (heavily combining-marked)
 * text; the combining marks push the expected offset to 126.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    zalgoTextRunner(document ->
        StepVerifier.create(client.recognizeEntities(document))
            .assertNext(result -> result.forEach(categorizedEntity -> {
                assertEquals(9, categorizedEntity.getLength());
                assertEquals(126, categorizedEntity.getOffset());
            })).verifyComplete(), CATEGORIZED_ENTITY_INPUTS.get(1)
    );
}
/**
 * Verifies that entity resolutions are returned when the runner's options request them.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeEntitiesResolutions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeEntitiesBatchResolutionRunner((inputs, options) ->
        StepVerifier.create(client.recognizeEntitiesBatch(inputs, null, options))
            .assertNext(recognizeEntitiesResults -> validateEntityResolutions(recognizeEntitiesResults))
            .verifyComplete());
}
/**
 * Verifies PII entities are returned for a single text input.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizePiiSingleDocumentRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(response -> validatePiiEntities(getPiiEntitiesList1(), response.stream().collect(Collectors.toList())))
            .verifyComplete());
}
/**
 * Verifies that a TextAnalyticsException with error code INVALID_DOCUMENT is raised for an
 * empty document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyTextRunner(document -> StepVerifier.create(client.recognizePiiEntities(document))
        .expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
            && INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
        .verify());
}
/**
 * Verifies that a batch with duplicate document IDs fails with an HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    duplicateIdRunner(inputs ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that a batch containing a document with an empty ID fails with HTTP 400 and
 * error code INVALID_DOCUMENT.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emptyDocumentIdRunner(inputs ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verifies that a per-document error in the batch response makes getEntities() throw a
 * TextAnalyticsException with the batch-error message for RecognizePiiEntitiesResult.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchInputSingleError(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeBatchPiiEntitySingleErrorRunner((inputs) ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
            .assertNext(resultCollection -> resultCollection.getValue().forEach(recognizePiiEntitiesResult -> {
                // Accessing entities of an errored document must throw, not return partial data.
                Exception exception = assertThrows(TextAnalyticsException.class, recognizePiiEntitiesResult::getEntities);
                assertEquals(String.format(BATCH_ERROR_EXCEPTION_MESSAGE, "RecognizePiiEntitiesResult"), exception.getMessage());
            })).verifyComplete());
}
/**
 * Verifies PII entities for a batch of TextDocumentInput, without statistics.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeBatchPiiEntitiesRunner((inputs) ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, null))
            .assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntities(), 200, response))
            .verifyComplete());
}
/**
 * Verifies PII entities for a batch input when statistics are requested via options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    // Validator's first argument (true) asserts that statistics are present in the response.
    recognizeBatchPiiEntitiesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs, options))
            .assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(true, getExpectedBatchPiiEntities(), 200, response))
            .verifyComplete());
}
/**
 * Verifies PII entities for a list of string documents with an explicit language hint.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizePiiLanguageHintRunner((inputs, language) ->
        StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language, null))
            .assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntities(), response))
            .verifyComplete());
}
/**
 * Verifies PII entities for a list of string documents when options (with statistics)
 * are supplied.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeStringBatchPiiEntitiesShowStatsRunner((inputs, options) ->
        StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, options))
            .assertNext(response -> validatePiiEntitiesResultCollection(true, getExpectedBatchPiiEntities(), response))
            .verifyComplete());
}
/**
 * Verifies that exceeding the service's per-request document limit fails with HTTP 400 and
 * error code INVALID_DOCUMENT_BATCH.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    tooManyDocumentsRunner(inputs ->
        StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, null, null))
            .verifyErrorSatisfies(ex -> {
                final HttpResponseException httpResponseException = (HttpResponseException) ex;
                assertEquals(400, httpResponseException.getResponse().getStatusCode());
                final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
                assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
            }));
}
/**
 * Verifies PII entity length/offset for a document prefixed with an emoji.
 * NOTE(review): the asserted numbers presumably count UTF-16 code units — confirm against
 * the service's default string-index type.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(8, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for an emoji with a skin-tone modifier
 * (expected offset 10).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(10, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for a multi-codepoint family emoji
 * (expected offset 17).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(17, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for a family emoji with skin-tone modifiers
 * (expected offset 25).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    emojiFamilyWithSkinToneModifierRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(25, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for NFC-normalized diacritics (expected offset 9).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfcRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(9, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for NFD-decomposed diacritics; the decomposed form
 * shifts the expected offset to 10.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    diacriticsNfdRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(10, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for NFC-normalized Korean text (expected offset 8).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfcRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(8, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for NFD-decomposed Korean text; expected offset matches
 * the NFC case (8) here.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    koreanNfdRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(8, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entity length/offset for zalgo (heavily combining-marked) text; the
 * combining marks push the expected offset to 121.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    zalgoTextRunner(document ->
        StepVerifier.create(client.recognizePiiEntities(document))
            .assertNext(result -> result.forEach(piiEntity -> {
                assertEquals(11, piiEntity.getLength());
                assertEquals(121, piiEntity.getOffset());
            })).verifyComplete(), PII_ENTITY_OFFSET_INPUT
    );
}
/**
 * Verifies PII entities for a single document when a domain filter is set via options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizePiiDomainFilterRunner((document, options) ->
        StepVerifier.create(client.recognizePiiEntities(document, "en", options))
            .assertNext(response -> validatePiiEntities(getPiiEntitiesList1ForDomainFilter(),
                response.stream().collect(Collectors.toList())))
            .verifyComplete());
}
/**
 * Verifies PII entities for a batch of string documents when the protected-health-information
 * domain filter is applied.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputStringForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizePiiLanguageHintRunner((inputs, language) ->
        StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, language,
            new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomain.PROTECTED_HEALTH_INFORMATION)))
            .assertNext(response -> validatePiiEntitiesResultCollection(false, getExpectedBatchPiiEntitiesForDomainFilter(), response))
            .verifyComplete());
}
/**
 * Verifies PII entities for a batch of TextDocumentInput when the protected-health-information
 * domain filter is applied.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForDomainFilter(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeBatchPiiEntitiesRunner((inputs) ->
        StepVerifier.create(client.recognizePiiEntitiesBatchWithResponse(inputs,
            new RecognizePiiEntitiesOptions().setDomainFilter(PiiEntityDomain.PROTECTED_HEALTH_INFORMATION)))
            .assertNext(response -> validatePiiEntitiesResultCollectionWithResponse(false, getExpectedBatchPiiEntitiesForDomainFilter(), 200, response))
            .verifyComplete());
}
/**
 * Verifies PII entities for a batch of string documents when a categories filter is set by
 * the runner's options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntitiesForBatchInputForCategoriesFilter(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeStringBatchPiiEntitiesForCategoriesFilterRunner(
        (inputs, options) ->
            StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
                .assertNext(
                    resultCollection -> validatePiiEntitiesResultCollection(false,
                        getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection))
                .verifyComplete());
}
/**
 * Verifies that PiiEntityCategory values collected from a previous result can be fed back
 * as a categories filter: a first call harvests the categories present in the response, a
 * second call re-runs the batch filtered to exactly those categories and must produce the
 * same expected result collection.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizePiiEntityWithCategoriesFilterFromOtherResult(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeStringBatchPiiEntitiesForCategoriesFilterRunner(
        (inputs, options) -> {
            // Collected from the first pass; mutated inside the reactive assertion below.
            List<PiiEntityCategory> categories = new ArrayList<>();
            StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
                .assertNext(
                    resultCollection -> {
                        // Harvest only the two categories this test cares about.
                        resultCollection.forEach(result -> result.getEntities().forEach(piiEntity -> {
                            final PiiEntityCategory category = piiEntity.getCategory();
                            if (PiiEntityCategory.ABA_ROUTING_NUMBER == category
                                || PiiEntityCategory.US_SOCIAL_SECURITY_NUMBER == category) {
                                categories.add(category);
                            }
                        }));
                        validatePiiEntitiesResultCollection(false,
                            getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection);
                    })
                .verifyComplete();
            // Second pass: filter by the categories harvested above and expect the same results.
            final PiiEntityCategory[] piiEntityCategories = categories.toArray(new PiiEntityCategory[categories.size()]);
            options.setCategoriesFilter(piiEntityCategories);
            StepVerifier.create(client.recognizePiiEntitiesBatch(inputs, "en", options))
                .assertNext(
                    resultCollection -> validatePiiEntitiesResultCollection(false,
                        getExpectedBatchPiiEntitiesForCategoriesFilter(), resultCollection))
                .verifyComplete();
        });
}
/**
 * Verifies that linked entities are returned for a single text input; only the first
 * linked entity of the response is validated.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    recognizeLinkedEntitiesForSingleTextInputRunner(input ->
        StepVerifier.create(client.recognizeLinkedEntities(input))
            .assertNext(response -> validateLinkedEntity(getLinkedEntitiesList1().get(0), response.iterator().next()))
            .verifyComplete());
}
/**
 * Verifies that recognizing linked entities for an empty document fails with a
 * TextAnalyticsException whose error code is InvalidDocument.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.recognizeLinkedEntities(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
 * Verifies that a batch containing duplicate document IDs is rejected with an
 * HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    duplicateIdRunner(documents -> {
        StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(documents, null))
            .verifyErrorSatisfies(error -> assertEquals(HttpResponseException.class, error.getClass()));
    });
}
/**
 * Verifies that a batch containing an empty document ID is rejected with HTTP 400 and the
 * InvalidDocument error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
 * Recognizes linked entities for a batch of TextDocumentInput and validates the response
 * (statistics disabled, HTTP 200).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchLinkedEntityRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, null))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(false,
getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
/**
 * Recognizes linked entities for a batch with request statistics enabled and validates the
 * statistics-bearing response.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatchWithResponse(inputs, options))
.assertNext(response -> validateLinkedEntitiesResultCollectionWithResponse(true, getExpectedBatchLinkedEntities(), 200, response))
.verifyComplete());
}
/**
 * Recognizes linked entities for a batch of plain strings with default language and null options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeLinkedStringInputRunner((inputs) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
/**
 * Recognizes linked entities for a batch of strings using an explicit language hint.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeLinkedLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, language, null))
.assertNext(response -> validateLinkedEntitiesResultCollection(false, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
/**
 * Recognizes linked entities for a batch of strings with request statistics enabled via options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
recognizeBatchStringLinkedEntitiesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, options))
.assertNext(response -> validateLinkedEntitiesResultCollection(true, getExpectedBatchLinkedEntities(), response))
.verifyComplete());
}
/**
 * Verifies that exceeding the maximum number of documents per batch fails with HTTP 400 and the
 * InvalidDocumentBatch error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.recognizeLinkedEntitiesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
/**
 * Verifies linked-entity match offset/length when the document contains an emoji.
 * NOTE(review): the expected offset 13 presumably reflects the service's default string index
 * type (UTF-16 code units) — confirm against the Text Analytics offset documentation.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains an emoji with a skin-tone
 * modifier; the larger expected offset (15 vs. 13 for the plain emoji) accounts for the extra
 * modifier code units.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains a family emoji
 * (presumably a multi-code-point ZWJ sequence, hence the larger expected offset of 22).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(22, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains a family emoji with
 * skin-tone modifiers (expected offset 30 — the longest of the emoji variants tested here).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesEmojiFamilyWIthSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(30, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains diacritics in NFC
 * (precomposed) normalization form.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(14, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains diacritics in NFD
 * (decomposed) normalization form; the offset is one larger than the NFC case because the
 * combining mark occupies an extra code unit.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(15, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains Korean text in NFC
 * normalization form.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains Korean text in NFD
 * normalization form (same expected offset as NFC here).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(13, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Verifies linked-entity match offset/length when the document contains "Zalgo" text — runs of
 * combining marks — which pushes the expected offset to 126.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeLinkedEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(document ->
StepVerifier.create(client.recognizeLinkedEntities(document))
.assertNext(result -> result.forEach(linkedEntity -> {
linkedEntity.getMatches().forEach(linkedEntityMatch -> {
assertEquals(9, linkedEntityMatch.getLength());
assertEquals(126, linkedEntityMatch.getOffset());
});
})).verifyComplete(), LINKED_ENTITY_INPUTS.get(1)
);
}
/**
 * Extracts key phrases from a single document and expects the phrase "monde".
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractKeyPhrasesForSingleTextInputRunner(document -> {
        StepVerifier.create(client.extractKeyPhrases(document))
            .assertNext(phrases -> {
                final List<String> actualPhrases = phrases.stream().collect(Collectors.toList());
                validateKeyPhrases(asList("monde"), actualPhrases);
            })
            .verifyComplete();
    });
}
/**
 * Verifies that extracting key phrases from an empty document fails with a TextAnalyticsException
 * whose error code is InvalidDocument.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(input ->
StepVerifier.create(client.extractKeyPhrases(input))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify());
}
/**
 * Verifies that a key-phrase batch containing duplicate document IDs is rejected with an
 * HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that a key-phrase batch containing an empty document ID fails with HTTP 400 and the
 * InvalidDocument error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
 * Extracts key phrases for a batch of TextDocumentInput and validates the response
 * (statistics disabled, HTTP 200).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchKeyPhrasesRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(false, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
/**
 * Extracts key phrases for a batch with request statistics enabled and validates the
 * statistics-bearing response.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollectionWithResponse(true, getExpectedBatchKeyPhrases(), 200, response))
.verifyComplete());
}
/**
 * Extracts key phrases for a batch of plain strings with default language and null options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesStringInputRunner((inputs) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
/**
 * Extracts key phrases for a batch of strings using an explicit language hint.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, language, null))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(false, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
/**
 * Extracts key phrases for a batch of strings with request statistics enabled via options.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesForListStringWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractBatchStringKeyPhrasesShowStatsRunner((inputs, options) ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, options))
.assertNext(response -> validateExtractKeyPhrasesResultCollection(true, getExpectedBatchKeyPhrases(), response))
.verifyComplete());
}
/**
 * Verifies that extracting key phrases from an over-long document surfaces the expected
 * "document too long" warning message and warning code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractKeyPhrasesWarningRunner(
        input -> StepVerifier.create(client.extractKeyPhrases(input))
            .assertNext(keyPhrasesResult -> {
                keyPhrasesResult.getWarnings().forEach(warning -> {
                    // assertEquals reports expected-vs-actual on failure, unlike the
                    // original assertTrue(a.equals(b)) which only says "expected true".
                    assertEquals(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE, warning.getMessage());
                    assertEquals(LONG_WORDS_IN_DOCUMENT, warning.getWarningCode());
                });
            })
            .verifyComplete()
    );
}
/**
 * Verifies that every key-phrase result in a batch of over-long documents carries the expected
 * "document too long" warning message and warning code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchWarning(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractKeyPhrasesBatchWarningRunner(
        inputs -> StepVerifier.create(client.extractKeyPhrasesBatchWithResponse(inputs, null))
            .assertNext(response -> response.getValue().forEach(keyPhrasesResult ->
                keyPhrasesResult.getKeyPhrases().getWarnings().forEach(warning -> {
                    // assertEquals reports expected-vs-actual on failure, unlike the
                    // original assertTrue(a.equals(b)) which only says "expected true".
                    assertEquals(WARNING_TOO_LONG_DOCUMENT_INPUT_MESSAGE, warning.getMessage());
                    assertEquals(LONG_WORDS_IN_DOCUMENT, warning.getWarningCode());
                })
            ))
            .verifyComplete()
    );
}
/**
 * Verifies that exceeding the maximum number of documents per key-phrase batch fails with
 * HTTP 400 and the InvalidDocumentBatch error code.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void extractKeyPhrasesBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.extractKeyPhrasesBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
/**
 * Test analyzing sentiment for a single string input, expecting the known document sentiment
 * without opinion-mining details.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    analyzeSentimentForSingleTextInputRunner(document -> {
        StepVerifier.create(client.analyzeSentiment(document))
            .assertNext(sentiment ->
                validateDocumentSentiment(false, getExpectedDocumentSentiment(), sentiment))
            .verifyComplete();
    });
}
/**
 * Test analyzing sentiment for a string input with a null (default) language hint.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithDefaultLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentForSingleTextInputRunner(input ->
StepVerifier.create(client.analyzeSentiment(input, null))
.assertNext(response -> validateDocumentSentiment(false, getExpectedDocumentSentiment(), response))
.verifyComplete()
);
}
/**
 * Test analyzing sentiment for a string input and verifying the result of opinion mining
 * (the {@code true} flag passed to the validator enables opinion-mining checks).
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForTextInputWithOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentForTextInputWithOpinionMiningRunner((input, options) ->
StepVerifier.create(client.analyzeSentiment(input, "en", options))
.assertNext(response -> validateDocumentSentiment(true, getExpectedDocumentSentiment(), response))
.verifyComplete());
}
/**
 * Verifies that a TextAnalyticsException with the InvalidDocument error code is thrown for an
 * empty document.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForEmptyText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyTextRunner(document ->
StepVerifier.create(client.analyzeSentiment(document))
.expectErrorMatches(throwable -> throwable instanceof TextAnalyticsException
&& INVALID_DOCUMENT.equals(((TextAnalyticsException) throwable).getErrorCode()))
.verify()
);
}
/**
 * Test analyzing sentiment for a batch with duplicate document IDs; the request is rejected with
 * an HttpResponseException.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDuplicateIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
duplicateIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, new TextAnalyticsRequestOptions()))
.verifyErrorSatisfies(ex -> assertEquals(HttpResponseException.class, ex.getClass())));
}
/**
 * Verifies that an invalid-document error (HTTP 400, InvalidDocument) is returned for input
 * documents with an empty ID.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
}));
}
/**
 * Verify that the collection result excludes request statistics and sentence options when given a
 * batch of String documents with a null-equivalent TextAnalyticsRequestOptions and a null language
 * code, which falls back to the default language code, 'en'.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentStringInputRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, false,
getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and sentence options when given a
 * batch of String documents with default TextAnalyticsRequestOptions and an explicit language code.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, TextAnalyticsRequestOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringWithLanguageHint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentLanguageHintRunner((inputs, language) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, language, new TextAnalyticsRequestOptions()))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * Verify that the collection result includes request statistics but not sentence options when
 * given a batch of String documents with AnalyzeSentimentOptions (opinion mining explicitly
 * turned off).
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateAnalyzeSentimentResultCollection(true, false, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * Verify that the collection result includes sentence options but not request statistics when
 * given a batch of String documents with AnalyzeSentimentOptions (statistics explicitly turned
 * off, opinion mining left on by the runner).
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateAnalyzeSentimentResultCollection(false, true, getExpectedBatchTextSentiment(), response))
.verifyComplete();
});
}
/**
 * Verify that the collection result includes both sentence options and request statistics when
 * given a batch of String documents with AnalyzeSentimentOptions.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatch(Iterable, String, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForListStringShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchStringSentimentShowStatsAndIncludeOpinionMiningRunner((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, options))
.assertNext(response -> validateAnalyzeSentimentResultCollection(true, true, getExpectedBatchTextSentiment(), response))
.verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and sentence options when given a
 * batch of TextDocumentInput documents with a null TextAnalyticsRequestOptions.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullRequestOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (TextAnalyticsRequestOptions) null))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Verify that we can get statistics on the collection result when given a batch of
 * TextDocumentInput documents with TextAnalyticsRequestOptions that enables request statistics.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, TextAnalyticsRequestOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatistics(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentShowStatsRunner((inputs, requestOptions) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, requestOptions))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Verify that the collection result excludes request statistics and sentence options when given a
 * batch of TextDocumentInput documents with a null AnalyzeSentimentOptions.
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputWithNullAnalyzeSentimentOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, (AnalyzeSentimentOptions) null))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(false, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Verify that the collection result includes request statistics but not sentence options when
 * given a batch of TextDocumentInput documents with AnalyzeSentimentOptions (opinion mining
 * explicitly turned off).
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsExcludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options.setIncludeOpinionMining(false)))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, false, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Verify that the collection result includes sentence options but not request statistics when
 * given a batch of TextDocumentInput documents with AnalyzeSentimentOptions (statistics
 * explicitly turned off, opinion mining left on by the runner).
 *
 * Uses {@link TextAnalyticsAsyncClient#analyzeSentimentBatchWithResponse(Iterable, AnalyzeSentimentOptions)}.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputNotShowStatisticsButIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) -> {
options.setIncludeStatistics(false);
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response ->
validateAnalyzeSentimentResultCollectionWithResponse(false, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete();
});
}
/**
 * Verify that the collection result includes both sentence opinions and request statistics when given a batch of
 * TextDocumentInput documents with AnalyzeSentimentOptions.
 *
 * {@link TextAnalyticsAsyncClient
 * which AnalyzeSentimentOptions includes opinion mining and request statistics.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentForBatchInputShowStatisticsAndIncludeOpinionMining(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchSentimentOpinionMining((inputs, options) ->
// Options used as supplied by the runner; validation expects statistics=true, opinions=true.
StepVerifier.create(client.analyzeSentimentBatchWithResponse(inputs, options))
.assertNext(response -> validateAnalyzeSentimentResultCollectionWithResponse(true, true, getExpectedBatchTextSentiment(), 200, response))
.verifyComplete());
}
/**
 * Verifies that an InvalidDocumentBatch error (HTTP 400) is returned when the input batch contains
 * more documents than the service allows.
 */
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentBatchTooManyDocuments(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs ->
StepVerifier.create(client.analyzeSentimentBatch(inputs, null, null))
.verifyErrorSatisfies(ex -> {
// Service rejects the oversized batch with a 400 carrying an INVALID_DOCUMENT_BATCH error code.
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
}));
}
// Offset/length test: an emoji (surrogate pair in UTF-16) preceding the text shifts the expected
// offsets of the sentence, target, and assessment spans in SENTIMENT_OFFSET_INPUT.
// NOTE(review): the @Disabled reason URL is truncated in this view — confirm against the repository.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(opinionSentiment -> {
assertEquals(7, opinionSentiment.getLength());
assertEquals(17, opinionSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: an emoji plus a skin-tone modifier (two surrogate pairs) widens the prefix,
// so sentence length grows to 27 and target/assessment offsets shift by 2 versus the plain-emoji case.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiWithSkinToneModifier(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(27, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(19, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(9, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: a multi-codepoint "family" emoji (ZWJ sequence) as prefix — sentence length 34,
// target at offset 16, assessment at offset 26 within SENTIMENT_OFFSET_INPUT.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(
result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(34, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(26, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(16, targetSentiment.getOffset());
});
})
)
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: family emoji with skin-tone modifiers — the longest prefix in this series
// (sentence length 42; target offset 24, assessment offset 34).
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentEmojiFamilyWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(
result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(42, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(34, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(24, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: NFC-normalized diacritics prefix (precomposed characters) —
// sentence length 26; target offset 8, assessment offset 18. Not disabled, unlike the NFD variant.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(26, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(18, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(8, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: NFD-normalized diacritics (decomposed combining marks add one UTF-16 unit
// versus NFC) — sentence length 27; target offset 9, assessment offset 19.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentDiacriticsNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(
sentenceSentiment -> {
assertEquals(27, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(19, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(9, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: Korean text in NFC form (precomposed Hangul syllables) —
// sentence length 25; target offset 7, assessment offset 17.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(17, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: Korean text in NFD form — per these assertions the expected offsets match the
// NFC case exactly (length 25; target offset 7, assessment offset 17), i.e. normalization form does
// not change the reported UTF-16 offsets for this input.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(25, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(17, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(7, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Offset/length test: "zalgo" text (heavy stacks of combining marks) inflates the UTF-16 prefix —
// sentence length 138; target offset 120, assessment offset 130.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(
document ->
StepVerifier.create(client.analyzeSentiment(document, null,
new AnalyzeSentimentOptions().setIncludeOpinionMining(true)))
.assertNext(result -> result.getSentences().forEach(sentenceSentiment -> {
assertEquals(138, sentenceSentiment.getLength());
assertEquals(0, sentenceSentiment.getOffset());
sentenceSentiment.getOpinions().forEach(opinion -> {
opinion.getAssessments().forEach(assessmentSentiment -> {
assertEquals(7, assessmentSentiment.getLength());
assertEquals(130, assessmentSentiment.getOffset());
});
final TargetSentiment targetSentiment = opinion.getTarget();
assertEquals(5, targetSentiment.getLength());
assertEquals(120, targetSentiment.getOffset());
});
}))
.verifyComplete(),
SENTIMENT_OFFSET_INPUT
);
}
// Healthcare LRO happy path with string input and no options: polls to completion and validates the
// single-page result collection with includeStatistics=false.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareStringInputWithoutOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareStringInputRunner((documents, dummyOptions) -> {
// dummyOptions intentionally unused — this exercises the options-less overload.
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
false,
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// Healthcare LRO with string input, language hint "en", and caller-supplied options. The displayName
// option is only set (and asserted) on API versions newer than v3.0/v3.1, which do not support it.
// Expected results include FHIR output — presumably options enable FHIR; confirm in the runner.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareStringInputWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareStringInputRunner((documents, options) -> {
boolean isValidApiVersionForDisplayName = serviceVersion != TextAnalyticsServiceVersion.V3_0
&& serviceVersion != TextAnalyticsServiceVersion.V3_1;
if (isValidApiVersionForDisplayName) {
options.setDisplayName("operationName");
}
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<AnalyzeHealthcareEntitiesOperationDetail> pollResponse = syncPoller.waitForCompletion();
if (isValidApiVersionForDisplayName) {
// Round-trip check: the operation metadata echoes the display name we set.
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
}
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePageWithFhir(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// Healthcare LRO via the maximal overload (TextDocumentInput batch + options, no language hint).
// Same displayName gating as healthcareStringInputWithOptions: only set/asserted past v3.0/v3.1.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareMaxOverload(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareLroRunner((documents, options) -> {
boolean isValidApiVersionForDisplayName = serviceVersion != TextAnalyticsServiceVersion.V3_0
&& serviceVersion != TextAnalyticsServiceVersion.V3_1;
if (isValidApiVersionForDisplayName) {
options.setDisplayName("operationName");
}
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<AnalyzeHealthcareEntitiesOperationDetail> pollResponse = syncPoller.waitForCompletion();
if (isValidApiVersionForDisplayName) {
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
}
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForSinglePage(),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
});
}
// Healthcare LRO pagination: the runner feeds 10 documents so the final result spans multiple pages;
// expected pages are built via getExpected...ForMultiplePages(0, 10, 0).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
healthcareLroPaginationRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
validateAnalyzeHealthcareEntitiesResultCollectionList(
options.isIncludeStatistics(),
getExpectedAnalyzeHealthcareEntitiesResultCollectionListForMultiplePages(0, 10, 0),
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList()));
}, 10);
}
// Verifies that starting the healthcare LRO with an empty document list fails fast with an
// IllegalArgumentException carrying the runner-provided error message.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void healthcareLroEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyListRunner((documents, errorMessage) -> {
StepVerifier.create(client.beginAnalyzeHealthcareEntities(documents, null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify();
});
}
// Disabled placeholder: body only constructs the client and performs no assertions.
// NOTE(review): the @Disabled reason URL is truncated in this view — confirm against the repository.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
@Disabled("https:
public void analyzeHealthcareEntitiesEmojiUnicodeCodePoint(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
}
// Healthcare entity offset test: emoji prefix in HEALTHCARE_ENTITY_OFFSET_INPUT —
// every recognized entity is expected at length 11, offset 20.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmoji(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(20, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: emoji + skin-tone modifier prefix — entity at length 11, offset 22
// (2 more UTF-16 units than the plain-emoji case).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiWithSkinToneModifierRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(22, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: family emoji (ZWJ sequence) prefix — entity at length 11, offset 29.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiFamily(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(29, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: family emoji with skin-tone modifiers — entity at length 11, offset 37.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesEmojiFamilyWithSkinToneModifier(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emojiFamilyWithSkinToneModifierRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(37, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: NFC diacritics prefix — entity at length 11, offset 21.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesDiacriticsNfc(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfcRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(21, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: NFD diacritics (decomposed, one extra UTF-16 unit vs NFC) —
// entity at length 11, offset 22.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesDiacriticsNfd(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
diacriticsNfdRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(22, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: Korean NFC prefix — entity at length 11, offset 20.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesKoreanNfc(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfcRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(20, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: Korean NFD prefix — expected offsets match the NFC case
// (entity at length 11, offset 20).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesKoreanNfd(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
koreanNfdRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(20, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Healthcare entity offset test: zalgo text prefix (combining-mark heavy) — entity at length 11, offset 133.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesZalgoText(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
zalgoTextRunner(
document -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(
Collections.singletonList(new TextDocumentInput("0", document)), null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
analyzeHealthcareEntitiesPagedFlux.toStream().forEach(result -> result.forEach(
entitiesResult -> entitiesResult.getEntities().forEach(
entity -> {
assertEquals(11, entity.getLength());
assertEquals(133, entity.getOffset());
})));
},
HEALTHCARE_ENTITY_OFFSET_INPUT);
}
// Verifies assertion detection: the second entity of the first document in the first page carries a
// HYPOTHETICAL conditionality and no association/certainty. The chained get(0)/get(1) navigation
// hard-codes that result shape — brittle if the runner's input changes.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareEntitiesForAssertion(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeHealthcareEntitiesForAssertionRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeHealthcareEntitiesPagedFlux analyzeHealthcareEntitiesPagedFlux = syncPoller.getFinalResult();
// page 0 -> document 0 -> entity 1 -> its assertion
final HealthcareEntityAssertion assertion =
analyzeHealthcareEntitiesPagedFlux.toStream().collect(Collectors.toList())
.get(0).stream().collect(Collectors.toList())
.get(0).getEntities().stream().collect(Collectors.toList())
.get(1)
.getAssertion();
assertEquals(EntityConditionality.HYPOTHETICAL, assertion.getConditionality());
assertNull(assertion.getAssociation());
assertNull(assertion.getCertainty());
});
}
// Verifies user cancellation of the healthcare LRO: after cancelOperation(), polls until the status
// becomes USER_CANCELLED. Note the busy-poll loop has no timeout — it spins forever if cancellation
// never lands, which may be why this is disabled for green runs.
@Disabled("Temporary disable it for green test")
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void cancelHealthcareLro(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
cancelHealthcareLroRunner((documents, options) -> {
SyncPoller<AnalyzeHealthcareEntitiesOperationDetail, AnalyzeHealthcareEntitiesPagedFlux>
syncPoller = client.beginAnalyzeHealthcareEntities(documents, options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.cancelOperation();
LongRunningOperationStatus operationStatus = syncPoller.poll().getStatus();
while (!LongRunningOperationStatus.USER_CANCELLED.equals(operationStatus)) {
operationStatus = syncPoller.poll().getStatus();
}
syncPoller.waitForCompletion();
Assertions.assertEquals(LongRunningOperationStatus.USER_CANCELLED, operationStatus);
});
}
// beginAnalyzeActions with string input and runner-built actions: validates one expected action result
// per action family (entities, linked entities, PII, key phrases, sentiment), with empty slots
// (IterableStream.of(null)) for the families the runner does not configure.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsStringInputRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForActions(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW,
getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getAnalyzeSentimentResultCollectionForActions(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Same expected shape as analyzeActionsStringInput but exercises the overload that takes an explicit
// AnalyzeActionsOptions (statistics disabled) alongside the document batch.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithOptions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollectionForActions(), null))),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW,
getRecognizePiiEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getAnalyzeSentimentResultCollectionForActions(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Verifies that submitting two actions of the same kind yields two results per action family:
// each AnalyzeActionsResult must contain exactly 2 entries in all five result streams.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithMultiSameKindActions(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsWithMultiSameKindActionsRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
assertEquals(2, actionsResult.getRecognizeEntitiesResults().stream().count());
assertEquals(2, actionsResult.getRecognizePiiEntitiesResults().stream().count());
assertEquals(2, actionsResult.getRecognizeLinkedEntitiesResults().stream().count());
assertEquals(2, actionsResult.getAnalyzeSentimentResults().stream().count());
assertEquals(2, actionsResult.getExtractKeyPhrasesResults().stream().count());
});
});
}
// Verifies that user-supplied action names round-trip: every action result's
// getActionName() equals CUSTOM_ACTION_NAME set by the runner.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsWithActionNames(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsWithActionNamesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
// Only the first result of each action kind is checked; one action per kind was submitted.
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizeEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizePiiEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getRecognizeLinkedEntitiesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getAnalyzeSentimentResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
assertEquals(CUSTOM_ACTION_NAME, actionsResult.getExtractKeyPhrasesResults().stream()
.collect(Collectors.toList()).get(0).getActionName());
});
});
}
// Verifies automatic language detection inside analyze-actions: for each action kind,
// document 0 is expected to be detected as English and document 1 as Spanish
// (documents supplied by analyzeActionsAutoDetectedLanguageRunner, defined elsewhere).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsAutoDetectedLanguage(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeActionsAutoDetectedLanguageRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
// Entities action: per-document detected language.
List<RecognizeEntitiesResult> recognizeEntitiesResults = actionsResult.getRecognizeEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizeEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizeEntitiesResults.get(1).getDetectedLanguage());
// PII action.
List<RecognizePiiEntitiesResult> recognizePiiEntitiesResults =
actionsResult.getRecognizePiiEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizePiiEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizePiiEntitiesResults.get(1).getDetectedLanguage());
// Linked-entities action.
List<RecognizeLinkedEntitiesResult> recognizeLinkedEntitiesResults =
actionsResult.getRecognizeLinkedEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
recognizeLinkedEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
recognizeLinkedEntitiesResults.get(1).getDetectedLanguage());
// Sentiment action.
List<AnalyzeSentimentResult> analyzeSentimentResults = actionsResult.getAnalyzeSentimentResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
analyzeSentimentResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
analyzeSentimentResults.get(1).getDetectedLanguage());
// Key-phrases action.
List<ExtractKeyPhraseResult> keyPhraseResults = actionsResult.getExtractKeyPhrasesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
keyPhraseResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
keyPhraseResults.get(1).getDetectedLanguage());
});
});
}
// Same auto-language-detection check as above, but for the custom-model actions
// (custom entities, single-label and multi-label classification); uses the custom-text
// endpoint client (third constructor arg true). Doc 0 expected English, doc 1 Spanish.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsAutoDetectedLanguageCustomTexts(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
analyzeActionsAutoDetectedLanguageCustomTextRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(actionsResult -> {
// Custom entity recognition action.
List<RecognizeEntitiesResult> customEntitiesResults = actionsResult.getRecognizeCustomEntitiesResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
customEntitiesResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
customEntitiesResults.get(1).getDetectedLanguage());
// Single-label classification action.
List<ClassifyDocumentResult> singleLabelResults = actionsResult.getSingleLabelClassifyResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
singleLabelResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
singleLabelResults.get(1).getDetectedLanguage());
// Multi-label classification action.
List<ClassifyDocumentResult> multiLabelResults = actionsResult.getMultiLabelClassifyResults()
.stream().collect(Collectors.toList()).get(0).getDocumentsResults()
.stream().collect(Collectors.toList());
validatePrimaryLanguage(DETECTED_LANGUAGE_ENGLISH,
multiLabelResults.get(0).getDetectedLanguage());
validatePrimaryLanguage(DETECTED_LANGUAGE_SPANISH,
multiLabelResults.get(1).getDetectedLanguage());
});
});
}
// Verifies server-side pagination of analyze-actions results: submits 22 documents and
// expects results split across pages of 20 (expected list built for offset 0, page size 20,
// 2 pages by the TestUtils helper).
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsPagination(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeBatchActionsPaginationRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux>
syncPoller = client.beginAnalyzeActions(
documents, tasks, new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
getExpectedAnalyzeActionsResultListForMultiplePages(0, 20, 2),
result.toStream().collect(Collectors.toList()));
}, 22);
}
// Verifies input validation: an empty document list must fail fast with
// IllegalArgumentException carrying the runner-supplied error message (no LRO started).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeActionsEmptyInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyListRunner((documents, errorMessage) ->
StepVerifier.create(client.beginAnalyzeActions(documents,
new TextAnalyticsActions()
.setRecognizeEntitiesActions(new RecognizeEntitiesAction()), null))
.expectErrorMatches(throwable -> throwable instanceof IllegalArgumentException
&& errorMessage.equals(throwable.getMessage()))
.verify());
}
// Verifies an analyze-actions batch containing ONLY an entity-recognition action:
// the first action slot is populated, every other action slot is an empty stream.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeEntitiesRecognitionAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeEntitiesRecognitionRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(asList(getExpectedRecognizeEntitiesActionResult(false, null,
TIME_NOW, getRecognizeEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
// Verifies entity resolutions returned by the entity-recognition action: delegates
// per-document checks to validateEntityResolutions (defined elsewhere in this class).
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeEntitiesRecognitionActionResolution(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeEntitiesRecognitionResolutionRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
result.toStream().forEach(actionsResult -> {
actionsResult.getRecognizeEntitiesResults().forEach(nerActionResult -> {
validateEntityResolutions(nerActionResult.getDocumentsResults());
});
});
}
);
}
// Verifies PII recognition with category filters: only the PII action slot is populated,
// and results match the categories-filtered expected collection.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzePiiEntityRecognitionWithCategoriesFilters(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzePiiEntityRecognitionWithCategoriesFiltersRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW, getExpectedBatchPiiEntitiesForCategoriesFilter(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
// Verifies PII recognition with a domain filter (e.g. PHI): only the PII action slot is
// populated, and results match the domain-filtered expected collection.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzePiiEntityRecognitionWithDomainFilters(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzePiiEntityRecognitionWithDomainFiltersRunner(
(documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks,
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
Arrays.asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizePiiEntitiesActionResult(false, null,
TIME_NOW, getExpectedBatchPiiEntitiesForDomainFilter(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}
);
}
// Verifies an analyze-actions batch with only a linked-entity recognition action and an
// explicit "en" language hint; only the linked-entities slot is populated.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeLinkedEntityActions(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeLinkedEntityRecognitionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(
false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(asList(getExpectedRecognizeLinkedEntitiesActionResult(false, null,
TIME_NOW, getRecognizeLinkedEntitiesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Verifies an analyze-actions batch with only a key-phrase extraction action ("en" hint);
// only the key-phrases slot is populated.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeKeyPhrasesExtractionAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractKeyPhrasesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedExtractKeyPhrasesActionResult(false, null,
TIME_NOW, getExtractKeyPhrasesResultCollection(), null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Verifies an analyze-actions batch with only a sentiment-analysis action ("en" hint);
// only the sentiment slot is populated.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeSentimentAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeSentimentRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en",
new AnalyzeActionsOptions().setIncludeStatistics(false)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedAnalyzeSentimentActionResult(false, null,
TIME_NOW, getExpectedBatchTextSentiment(), null))),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Verifies an analyze-actions batch with only a healthcare-entities action ("en" hint);
// only the healthcare slot is populated, with FHIR-enabled expected results for 2 documents.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeHealthcareAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
analyzeHealthcareEntitiesRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExpectedAnalyzeHealthcareEntitiesActionResult(false, null, TIME_NOW,
getExpectedAnalyzeHealthcareEntitiesResultCollection(2,
asList(
getRecognizeHealthcareEntitiesResultWithFhir1("0"),
getRecognizeHealthcareEntitiesResultWithFhir2())),
null))),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
});
}
// Verifies the custom entity-recognition action (custom-text client): validates the
// categorized entities of every document result via validateCategorizedEntities.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntitiesAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
recognizeCustomEntitiesActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(
actionsResult -> actionsResult.getRecognizeCustomEntitiesResults().forEach(
customEntitiesActionResult -> customEntitiesActionResult.getDocumentsResults().forEach(
documentResult -> validateCategorizedEntities(
documentResult.getEntities().stream().collect(Collectors.toList())))));
});
}
// Verifies the single-label classification action (custom-text client): validates every
// document's classification via validateLabelClassificationResult.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassificationAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomSingleCategoryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(
actionsResult -> actionsResult.getSingleLabelClassifyResults().forEach(
customSingleCategoryActionResult -> customSingleCategoryActionResult.getDocumentsResults().forEach(
documentResult -> validateLabelClassificationResult(documentResult))));
});
}
// Verifies the multi-label classification action (custom-text client): validates every
// document's classification via validateLabelClassificationResult.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiCategoryClassifyAction(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomMultiCategoryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", null).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
actionsResults.forEach(
actionsResult -> actionsResult.getMultiLabelClassifyResults().forEach(
customMultiCategoryActionResult -> customMultiCategoryActionResult.getDocumentsResults().forEach(
documentResult -> validateLabelClassificationResult(documentResult))));
});
}
// Verifies the dedicated beginRecognizeCustomEntities LRO with plain-string input
// (parameters 0/1 are presumably project name and deployment name — confirm with runner).
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntitiesStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
recognizeCustomEntitiesRunner((documents, parameters) -> {
SyncPoller<RecognizeCustomEntitiesOperationDetail, RecognizeCustomEntitiesPagedFlux> syncPoller =
client.beginRecognizeCustomEntities(documents, parameters.get(0), parameters.get(1)).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
RecognizeCustomEntitiesPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult ->
validateCategorizedEntities(documentResult.getEntities().stream().collect(Collectors.toList()))));
});
}
// Verifies beginRecognizeCustomEntities with options: the displayName set on the options
// must be echoed back on the operation detail returned by the poller.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void recognizeCustomEntities(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
recognizeCustomEntitiesRunner((documents, parameters) -> {
RecognizeCustomEntitiesOptions options = new RecognizeCustomEntitiesOptions()
.setDisplayName("operationName");
SyncPoller<RecognizeCustomEntitiesOperationDetail, RecognizeCustomEntitiesPagedFlux> syncPoller =
client.beginRecognizeCustomEntities(documents, parameters.get(0), parameters.get(1), "en", options)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<RecognizeCustomEntitiesOperationDetail> pollResponse = syncPoller.waitForCompletion();
// Display name round-trips through the service.
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
RecognizeCustomEntitiesPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult ->
validateCategorizedEntities(documentResult.getEntities().stream().collect(Collectors.toList()))));
});
}
// Verifies the dedicated beginSingleLabelClassify LRO with plain-string input;
// validates each document's classification result.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassificationStringInput(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomSingleLabelRunner((documents, parameters) -> {
SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> syncPoller =
client.beginSingleLabelClassify(documents, parameters.get(0), parameters.get(1))
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
ClassifyDocumentPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
});
}
// Verifies beginSingleLabelClassify with options: the configured displayName must be
// echoed back on the operation detail, then document classifications are validated.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void singleLabelClassification(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomSingleLabelRunner((documents, parameters) -> {
SingleLabelClassifyOptions options = new SingleLabelClassifyOptions().setDisplayName("operationName");
SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> syncPoller =
client.beginSingleLabelClassify(documents, parameters.get(0), parameters.get(1), "en", options)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<ClassifyDocumentOperationDetail> pollResponse = syncPoller.waitForCompletion();
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
ClassifyDocumentPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
});
}
// Verifies the dedicated beginMultiLabelClassify LRO with plain-string input;
// validates each document's classification result.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiLabelClassificationStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomMultiLabelRunner((documents, parameters) -> {
SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> syncPoller =
client.beginMultiLabelClassify(documents, parameters.get(0), parameters.get(1))
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
ClassifyDocumentPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
});
}
// Verifies beginMultiLabelClassify with options: the configured displayName must be
// echoed back on the operation detail, then document classifications are validated.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void multiLabelClassification(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, true);
classifyCustomMultiLabelRunner((documents, parameters) -> {
MultiLabelClassifyOptions options = new MultiLabelClassifyOptions().setDisplayName("operationName");
SyncPoller<ClassifyDocumentOperationDetail, ClassifyDocumentPagedFlux> syncPoller =
client.beginMultiLabelClassify(documents, parameters.get(0), parameters.get(1), "en", options)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
PollResponse<ClassifyDocumentOperationDetail> pollResponse = syncPoller.waitForCompletion();
assertEquals(options.getDisplayName(), pollResponse.getValue().getDisplayName());
ClassifyDocumentPagedFlux pagedFlux = syncPoller.getFinalResult();
pagedFlux.toStream().collect(Collectors.toList()).forEach(resultCollection ->
resultCollection.forEach(documentResult -> validateLabelClassificationResult(documentResult)));
});
}
// Verifies the extractive-summarization action with default parameters (runner called with
// null maxSentenceCount and null order); default ordering is expected to be by offset.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithDefaultParameterValues(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getExtractSummaryActionResult(false, null,
TIME_NOW,
getExpectedExtractSummaryResultCollection(getExpectedExtractSummaryResultSortByOffset()),
null))),
IterableStream.of(null)
)),
result.toStream().collect(Collectors.toList()));
}, null, null);
}
// Verifies extractive summarization with SummarySentencesOrder.OFFSET (maxSentenceCount 4):
// each document's sentences must come back in ascending offset order.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionSortedByOffset(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
actionsResult -> actionsResult.getExtractSummaryResults().forEach(
extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
documentResult -> assertTrue(isAscendingOrderByOffSet(
documentResult.getSentences().stream().collect(Collectors.toList()))))));
}, 4, SummarySentencesOrder.OFFSET);
}
// Verifies extractive summarization with SummarySentencesOrder.RANK (maxSentenceCount 4):
// each document's sentences must come back in descending rank-score order.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionSortedByRankScore(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
actionsResult -> actionsResult.getExtractSummaryResults().forEach(
extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
documentResult -> assertTrue(isDescendingOrderByRankScore(
documentResult.getSentences().stream().collect(Collectors.toList()))))));
}, 4, SummarySentencesOrder.RANK);
}
// Verifies that when maxSentenceCount (20) exceeds the number of sentences available,
// the returned sentence count stays below the requested maximum.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithSentenceCountLessThanMaxCount(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
actionsResult -> actionsResult.getExtractSummaryResults().forEach(
extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
documentResult -> assertTrue(
documentResult.getSentences().stream().collect(Collectors.toList()).size() < 20))));
}, 20, null);
}
// Verifies that a non-default maxSentenceCount of 5 yields exactly 5 extracted sentences
// per document result.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionWithNonDefaultSentenceCount(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryActionRunner((documents, tasks) -> {
        SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
            client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
        final List<AnalyzeActionsResult> actionsResults = result.toStream().collect(Collectors.toList());
        actionsResults.forEach(
            actionsResult -> actionsResult.getExtractSummaryResults().forEach(
                extractSummaryActionResult -> extractSummaryActionResult.getDocumentsResults().forEach(
                    // BUGFIX: JUnit 5's assertEquals takes (expected, actual); the arguments were
                    // swapped, which produces misleading "expected X but was 5" failure messages.
                    documentResult -> assertEquals(5,
                        documentResult.getSentences().stream().collect(Collectors.toList()).size()))));
    }, 5, null);
}
// Verifies the service rejects an extract-summary action whose maxSentenceCount is out of range:
// both 0 and 21 must fail with an HttpResponseException carrying INVALID_PARAMETER_VALUE.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeExtractSummaryActionMaxSentenceCountInvalidRangeException(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
int[] invalidMaxSentenceCounts = {0, 21};
for (int invalidCount: invalidMaxSentenceCounts) {
extractSummaryActionRunner(
(documents, tasks) -> {
// Running the full poll cycle with an out-of-range count must throw.
HttpResponseException exception = assertThrows(HttpResponseException.class, () -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions())
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
// `result` is intentionally unused; getFinalResult() is invoked for its throwing side effect.
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
});
assertEquals(
TextAnalyticsErrorCode.INVALID_PARAMETER_VALUE,
((TextAnalyticsError) exception.getValue()).getErrorCode());
}, invalidCount, null);
}
}
// Verifies an abstractive-summary action run with default parameter values (null options)
// produces the expected batch-actions result; all other action-result streams are expected empty.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void analyzeAbstractiveSummaryActionWithDefaultParameterValues(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
abstractSummaryActionRunner((documents, tasks) -> {
SyncPoller<AnalyzeActionsOperationDetail, AnalyzeActionsResultPagedFlux> syncPoller =
client.beginAnalyzeActions(documents, tasks, "en", new AnalyzeActionsOptions()).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AnalyzeActionsResultPagedFlux result = syncPoller.getFinalResult();
// The first eight IterableStream.of(null) slots are the other (unused) action result kinds;
// only the abstract-summary slot carries an expected result.
validateAnalyzeBatchActionsResultList(false, false,
asList(getExpectedAnalyzeBatchActionsResult(
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(null),
IterableStream.of(asList(getAbstractSummaryActionResult(false, null,
TIME_NOW,
new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
null
)))
)),
result.toStream().collect(Collectors.toList()));
}, null);
}
// Verifies that submitting documents with duplicate IDs to beginAbstractSummary fails the
// reactive pipeline with an HttpResponseException.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryDuplicateIdInput(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    duplicateIdRunner(inputs ->
        StepVerifier.create(client.beginAbstractSummary(inputs, null))
            .verifyErrorSatisfies(error -> assertEquals(HttpResponseException.class, error.getClass())));
}
// Disabled test: submitting a document with an empty ID must fail with HTTP 400 and the
// INVALID_DOCUMENT error code.
// NOTE(review): the @Disabled URL literal appears truncated in this dump — confirm against the
// original file.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryEmptyIdInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
emptyDocumentIdRunner(inputs -> {
StepVerifier.create(client.beginAbstractSummary(inputs, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT, textAnalyticsError.getErrorCode());
});
});
}
// Disabled test: submitting more documents than the service's batch limit must fail with
// HTTP 400 and the INVALID_DOCUMENT_BATCH error code.
@Disabled("https:
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryTooManyDocuments(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
tooManyDocumentsRunner(inputs -> {
StepVerifier.create(client.beginAbstractSummary(inputs, null, null))
.verifyErrorSatisfies(ex -> {
final HttpResponseException httpResponseException = (HttpResponseException) ex;
assertEquals(400, httpResponseException.getResponse().getStatusCode());
final TextAnalyticsError textAnalyticsError = (TextAnalyticsError) httpResponseException.getValue();
assertEquals(INVALID_DOCUMENT_BATCH, textAnalyticsError.getErrorCode());
});
});
}
// Verifies the plain-string-input overload of beginAbstractSummary produces the expected
// abstractive summary result collection for each page of results.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryStringInput(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
abstractSummaryRunner((documents, options) -> {
SyncPoller<AbstractSummaryOperationDetail, AbstractSummaryPagedFlux> syncPoller =
client.beginAbstractSummary(documents)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AbstractSummaryPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
documentResult -> validateAbstractiveSummaryResultCollection(false,
new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
documentResult));
}, 4);
}
// Verifies the maximal overload of beginAbstractSummary (documents + options) produces the
// expected abstractive summary result collection for each page of results.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginAbstractSummaryMaxOverload(HttpClient httpClient, TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
abstractSummaryMaxOverloadRunner((documents, options) -> {
SyncPoller<AbstractSummaryOperationDetail, AbstractSummaryPagedFlux> syncPoller =
client.beginAbstractSummary(documents, options)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
AbstractSummaryPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
documentResult -> validateAbstractiveSummaryResultCollection(false,
new AbstractSummaryResultCollection(asList(getExpectedAbstractiveSummaryResult())),
documentResult));
}, 4);
}
// Verifies that beginExtractSummary with SummarySentencesOrder.OFFSET returns each document's
// sentences in ascending offset (document) order.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySortedByOffset(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryRunner((documents, options) -> {
SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
client.beginExtractSummary(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
documentResultCollection -> documentResultCollection.forEach(
documentResult -> assertTrue(
isAscendingOrderByOffSet(documentResult.getSentences().stream().collect(Collectors.toList())))
));
}, 4, SummarySentencesOrder.OFFSET);
}
// Verifies that beginExtractSummary with SummarySentencesOrder.RANK returns each document's
// sentences in descending rank-score order.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySortedByRankScore(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryRunner((documents, options) -> {
SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
client.beginExtractSummary(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
documentResultCollection -> documentResultCollection.forEach(
documentResult -> assertTrue(
isDescendingOrderByRankScore(documentResult.getSentences().stream().collect(Collectors.toList())))
));
}, 4, SummarySentencesOrder.RANK);
}
// Verifies that with maxSentenceCount of 20, each extract-summary document result contains
// fewer than 20 sentences.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummarySentenceCountLessThanMaxCount(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
extractSummaryRunner((documents, options) -> {
SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
client.beginExtractSummary(documents, "en", options).getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
result.toStream().collect(Collectors.toList()).forEach(
documentResultCollection -> documentResultCollection.forEach(
documentResult -> assertTrue(
documentResult.getSentences().stream().collect(Collectors.toList()).size() < 20)));
}, 20, null);
}
// Verifies that a non-default maxSentenceCount of 5 yields exactly 5 sentences per
// extract-summary document result.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummaryNonDefaultSentenceCount(HttpClient httpClient,
    TextAnalyticsServiceVersion serviceVersion) {
    client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
    extractSummaryRunner((documents, options) -> {
        SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
            client.beginExtractSummary(documents, "en", options).getSyncPoller();
        syncPoller = setPollInterval(syncPoller);
        syncPoller.waitForCompletion();
        ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
        result.toStream().collect(Collectors.toList()).forEach(
            documentResultCollection -> documentResultCollection.forEach(
                // BUGFIX: JUnit 5's assertEquals takes (expected, actual); the arguments were
                // swapped, which produces misleading failure messages.
                documentResult -> assertEquals(5,
                    documentResult.getSentences().stream().collect(Collectors.toList()).size())));
    }, 5, null);
}
// Verifies the service rejects beginExtractSummary when maxSentenceCount is out of range:
// both 0 and 21 must fail with an HttpResponseException carrying INVALID_PARAMETER_VALUE.
@ParameterizedTest(name = DISPLAY_NAME_WITH_ARGUMENTS)
@MethodSource("com.azure.ai.textanalytics.TestUtils
public void beginExtractSummaryMaxSentenceCountInvalidRangeException(HttpClient httpClient,
TextAnalyticsServiceVersion serviceVersion) {
client = getTextAnalyticsAsyncClient(httpClient, serviceVersion, false);
int[] invalidMaxSentenceCounts = {0, 21};
for (int invalidCount: invalidMaxSentenceCounts) {
extractSummaryRunner(
(documents, options) -> {
// Running the full poll cycle with an out-of-range count must throw.
HttpResponseException exception = assertThrows(HttpResponseException.class, () -> {
SyncPoller<ExtractSummaryOperationDetail, ExtractSummaryPagedFlux> syncPoller =
client.beginExtractSummary(documents, "en", options)
.getSyncPoller();
syncPoller = setPollInterval(syncPoller);
syncPoller.waitForCompletion();
// `result` is intentionally unused; getFinalResult() is invoked for its throwing side effect.
ExtractSummaryPagedFlux result = syncPoller.getFinalResult();
});
assertEquals(
TextAnalyticsErrorCode.INVALID_PARAMETER_VALUE,
((TextAnalyticsError) exception.getValue()).getErrorCode());
}, invalidCount, null);
}
}
} |
Will need a design or documentation for the case where a row does not match the schema. If you check the Pub/Sub JSON support, it supports a dead-letter queue that receives rows that do not match the schema. You might reuse the same design. | public PCollection<Row> expand(PCollection<String> input) {
return input
.apply(
ParDo.of(
new DoFn<String, Row>() {
@ProcessElement
public void processElement(ProcessContext context) {
context.output(jsonToRow(objectMapper, context.element()));
}
}))
.setRowSchema(schema);
} | public void processElement(ProcessContext context) { | public PCollection<Row> expand(PCollection<String> input) {
return input
.apply(
"linesToRows",
MapElements.into(TypeDescriptors.rows())
.via(s -> Row.withSchema(SCHEMA).addValue(s).build()))
.setRowSchema(SCHEMA);
} | class LinesReadConverter extends PTransform<PCollection<String>, PCollection<Row>>
implements Serializable {
private static final Schema SCHEMA = Schema.builder().addStringField("line").build();
public LinesReadConverter() {}
@Override
} | class LinesReadConverter extends PTransform<PCollection<String>, PCollection<Row>>
implements Serializable {
private static final Schema SCHEMA = Schema.builder().addStringField("line").build();
public LinesReadConverter() {}
@Override
} |
Could we add a requestId generator? There is a lot of hard-coded code for now | public void userEventTriggered(final ChannelHandlerContext ctx, final Object evt) {
if (evt instanceof CreateSubscriptionEvent) {
Builder builder = CDCRequest.newBuilder();
builder.setCreateSubscription(buildCreateSubscriptionRequest());
builder.setRequestId(UUID.randomUUID().toString());
ctx.writeAndFlush(builder.build());
}
} | builder.setRequestId(UUID.randomUUID().toString()); | public void userEventTriggered(final ChannelHandlerContext ctx, final Object evt) {
if (evt instanceof CreateSubscriptionEvent) {
CDCRequest request = CDCRequest.newBuilder().setCreateSubscription(buildCreateSubscriptionRequest()).setRequestId(RequestIdUtil.generateRequestId()).build();
ctx.writeAndFlush(request);
}
} | class SubscriptionRequestHandler extends ChannelInboundHandlerAdapter {
@Override
private CreateSubscriptionRequest buildCreateSubscriptionRequest() {
return CreateSubscriptionRequest.newBuilder().setSubscriptionMode(SubscriptionMode.INCREMENTAL).setSubscriptionName("sharding_db").setDatabase("sharding_db")
.addAllTableNames(Arrays.asList("t_order", "t_order_item")).build();
}
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
CDCResponse response = (CDCResponse) msg;
if (Status.SUCCEED == response.getStatus()) {
processSucceed(ctx, response);
} else {
log.error("subscription response error {}", msg);
}
}
private void processSucceed(final ChannelHandlerContext ctx, final CDCResponse response) {
if (response.hasCreateSubscriptionResult()) {
log.info("create subscription succeed, subcrption name {}", response.getCreateSubscriptionResult().getSubscriptionName());
Builder builder = CDCRequest.newBuilder();
builder.setStartSubscription(buildStartSubscriptionRequest(response.getCreateSubscriptionResult().getSubscriptionName()));
builder.setRequestId(UUID.randomUUID().toString());
ctx.writeAndFlush(builder.build());
}
}
private StartSubscriptionRequest buildStartSubscriptionRequest(final String subscriptionName) {
return StartSubscriptionRequest.newBuilder().setSubscriptionName(subscriptionName).build();
}
} | class SubscriptionRequestHandler extends ChannelInboundHandlerAdapter {
@Override
private CreateSubscriptionRequest buildCreateSubscriptionRequest() {
TableName tableName = TableName.newBuilder().build();
return CreateSubscriptionRequest.newBuilder().setSubscriptionMode(SubscriptionMode.INCREMENTAL).setSubscriptionName("sharding_db").setDatabase("sharding_db")
.addTableNames(tableName).build();
}
@Override
public void channelRead(final ChannelHandlerContext ctx, final Object msg) {
CDCResponse response = (CDCResponse) msg;
if (Status.SUCCEED == response.getStatus()) {
processSucceed(ctx, response);
} else {
log.error("subscription response error {}", msg);
}
}
private void processSucceed(final ChannelHandlerContext ctx, final CDCResponse response) {
if (response.hasCreateSubscriptionResult()) {
log.info("create subscription succeed, subcrption name {}", response.getCreateSubscriptionResult().getSubscriptionName());
Builder builder = CDCRequest.newBuilder();
builder.setStartSubscription(buildStartSubscriptionRequest(response.getCreateSubscriptionResult().getSubscriptionName()));
builder.setRequestId(RequestIdUtil.generateRequestId());
ctx.writeAndFlush(builder.build());
}
}
private StartSubscriptionRequest buildStartSubscriptionRequest(final String subscriptionName) {
return StartSubscriptionRequest.newBuilder().setSubscriptionName(subscriptionName).build();
}
@Override
public void exceptionCaught(final ChannelHandlerContext ctx, final Throwable cause) {
log.error("subscription handler error", cause);
}
} |
According to your comment, this has been fixed in the new version. | public void run() throws InterruptedException {
ServiceBusSenderClient senderClient = new ServiceBusClientBuilder()
.connectionString(connectionString)
.sender()
.queueName(queueName)
.buildClient();
sendMessagesAsync(senderClient, 1);
deadLetterByExceedingMaxDelivery(connectionString, queueName);
sendMessagesAsync(senderClient, Integer.MAX_VALUE);
this.receiveAndDeadletterMessagesAsync(connectionString, queueName);
this.pickUpAndFixDeadletters(connectionString, queueName, senderClient);
senderClient.close();
} | sendMessagesAsync(senderClient, 1); | public void run() {
final String connectionString = System.getenv("AZURE_SERVICEBUS_NAMESPACE_CONNECTION_STRING");
final String queueName = System.getenv("AZURE_SERVICEBUS_SAMPLE_QUEUE_NAME");
final ServiceBusClientBuilder builder = new ServiceBusClientBuilder().connectionString(connectionString);
try (ServiceBusSenderClient sender = builder.sender().queueName(queueName).buildClient()) {
sendMessages(sender, 1);
deadLetterByExceedingMaxDelivery(builder, queueName).block();
receiveAndCompleteDeadLetterQueueMessages(builder, queueName).block();
sendMessages(sender, personList.size());
receiveAndDeadletterMessages(builder, queueName).block();
receiveAndFixDeadLetterQueueMessages(builder, queueName, sender).block();
}
} | class DeadletterQueueSample {
String connectionString = System.getenv("AZURE_SERVICEBUS_NAMESPACE_CONNECTION_STRING");
String queueName = System.getenv("AZURE_SERVICEBUS_SAMPLE_QUEUE_NAME");
/**
* Main method to show how to dead letter within an Azure Service Bus Queue.
*
* @param args Unused arguments to the program.
* @throws InterruptedException If the program is unable to sleep while waiting for the receive to complete.
*/
public static void main(String[] args) throws InterruptedException {
DeadletterQueueSample sample = new DeadletterQueueSample();
sample.run();
}
/**
* Run method to invoke this demo on how to dead letter within an Azure Service Bus Queue.
*
* @throws InterruptedException If the program is unable to sleep while waiting for the receive to complete.
*/
@Test
/**
* Send {@link ServiceBusMessage messages} to an Azure Service Bus Queue.
*
* @Param senderAsyncClient Service Bus Sender Client
* @Param maxMessages Maximum Number Of Messages
*/
void sendMessagesAsync(ServiceBusSenderClient senderClient, int maxMessages) {
List<ServiceBusMessage> messageList = new ArrayList<ServiceBusMessage>();
messageList.add(createServiceBusMessage("{\"name\" : \"Einstein\", \"firstName\" : \"Albert\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Heisenberg\", \"firstName\" : \"Werner\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Curie\", \"firstName\" : \"Marie\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Hawking\", \"firstName\" : \"Steven\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Newton\", \"firstName\" : \"Isaac\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Bohr\", \"firstName\" : \"Niels\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Faraday\", \"firstName\" : \"Michael\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Galilei\", \"firstName\" : \"Galileo\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Kepler\", \"firstName\" : \"Johannes\"}"));
messageList.add(createServiceBusMessage("{\"name\" : \"Kopernikus\", \"firstName\" : \"Nikolaus\"}"));
for (int i = 0; i < Math.min(messageList.size(), maxMessages); i++) {
final String messageId = Integer.toString(i);
ServiceBusMessage message = messageList.get(i);
message.setContentType("application/json");
message.setSubject(i % 2 == 0 ? "Scientist" : "Physicist");
message.setMessageId(messageId);
message.setTimeToLive(Duration.ofMinutes(2));
System.out.printf("\tMessage sending: Id = %s\n", message.getMessageId());
senderClient.sendMessage(message);
System.out.printf("\tMessage acknowledged: Id = %s\n", message.getMessageId());
}
}
/**
* Receive {@link ServiceBusMessage messages} and return {@link ServiceBusMessage messages} back to the queue.
* When the time to life of the {@link ServiceBusMessage messages} expires,
* the {@link ServiceBusMessage messages} will be dumped as dead letters into the dead letter queue.
* We can receive these {@link ServiceBusMessage messages} from the dead letter queue.
*
* @Param connectionString Service Bus Connection String
* @Param queueName Queue Name
* @throws InterruptedException If the program is unable to sleep while waiting for the receive to complete.
*/
void deadLetterByExceedingMaxDelivery(String connectionString, String queueName) throws InterruptedException {
ServiceBusReceiverAsyncClient receiverAsyncClient
= new ServiceBusClientBuilder()
.connectionString(connectionString)
.receiver()
.queueName(queueName)
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.buildAsyncClient();
receiverAsyncClient.receiveMessages().subscribe(receiveMessage -> {
System.out.printf("Picked up message; DeliveryCount %d\n", receiveMessage.getDeliveryCount());
receiverAsyncClient.abandon(receiveMessage);
});
Thread.sleep(10000);
receiverAsyncClient.close();
Thread.sleep(120000);
ServiceBusReceiverAsyncClient deadletterReceiverAsyncClient
= new ServiceBusClientBuilder()
.connectionString(connectionString)
.receiver()
.queueName(queueName.concat("/$deadletterqueue"))
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.buildAsyncClient();
deadletterReceiverAsyncClient.receiveMessages().subscribe(receiveMessage -> {
System.out.printf("\nDeadletter message:\n");
receiveMessage.getApplicationProperties().keySet().forEach(key -> System.out.printf("\t%s=%s\n", key, receiveMessage.getApplicationProperties().get(key)));
deadletterReceiverAsyncClient.complete(receiveMessage);
});
Thread.sleep(10000);
deadletterReceiverAsyncClient.close();
}
/**
* Receive {@link ServiceBusMessage messages} and transfer to the dead letter queue as a dead letter.
*
* @Param connectionString Service Bus Connection String
* @Param queueName Queue Name
*/
void receiveAndDeadletterMessagesAsync(String connectionString, String queueName) {
ServiceBusReceiverAsyncClient receiverAsyncClient
= new ServiceBusClientBuilder()
.connectionString(connectionString)
.receiver()
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.queueName(queueName)
.buildAsyncClient();
receiverAsyncClient.receiveMessages().subscribe(receiveMessage -> {
if (receiveMessage.getSubject() != null
&& receiveMessage.getContentType() != null
&& receiveMessage.getSubject().contentEquals("Scientist")
&& receiveMessage.getContentType().contentEquals("application/json")) {
byte[] body = receiveMessage.getBody().toBytes();
JSONObject jsonObject = null;
try {
jsonObject = JSONObjectUtils.parse(new String(body, UTF_8));
} catch (ParseException e) {
e.printStackTrace();
}
System.out.printf(
"\n\t\t\t\tMessage received: \n\t\t\t\t\t\tMessageId = %s, \n\t\t\t\t\t\tSequenceNumber = %s, \n\t\t\t\t\t\tEnqueuedTimeUtc = %s,"
+ "\n\t\t\t\t\t\tExpiresAtUtc = %s, \n\t\t\t\t\t\tContentType = \"%s\", \n\t\t\t\t\t\tContent: [ firstName = %s, name = %s ]\n",
receiveMessage.getMessageId(),
receiveMessage.getSequenceNumber(),
receiveMessage.getEnqueuedTime(),
receiveMessage.getExpiresAt(),
receiveMessage.getContentType(),
jsonObject != null ? jsonObject.get("firstName") : "",
jsonObject != null ? jsonObject.get("name") : "");
} else {
receiverAsyncClient.deadLetter(receiveMessage);
}
receiverAsyncClient.complete(receiveMessage);
});
}
/**
* Receive dead letter {@link ServiceBusMessage messages} and resend its.
*
* @Param connectionString Service Bus Connection String
* @Param queueName Queue Name
* @Param resubmitSender Service Bus Send Client
*/
void pickUpAndFixDeadletters(String connectionString, String queueName, ServiceBusSenderClient resubmitSender) {
ServiceBusReceiverAsyncClient receiverAsyncClient
= new ServiceBusClientBuilder()
.connectionString(connectionString)
.receiver()
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.queueName(queueName.concat("/$deadletterqueue"))
.buildAsyncClient();
receiverAsyncClient.receiveMessages().subscribe(receiveMessage -> {
if (receiveMessage.getSubject() != null && receiveMessage.getSubject().contentEquals("Physicist")) {
ServiceBusMessage resubmitMessage = new ServiceBusMessage(receiveMessage.getBody());
System.out.printf(
"\n\t\tFixing: \n\t\t\tMessageId = %s, \n\t\t\tSequenceNumber = %s, \n\t\t\tLabel = %s\n",
receiveMessage.getMessageId(),
receiveMessage.getSequenceNumber(),
receiveMessage.getSubject());
resubmitMessage.setMessageId(receiveMessage.getMessageId());
resubmitMessage.setSubject("Scientist");
resubmitMessage.setContentType(receiveMessage.getContentType());
resubmitMessage.setTimeToLive(Duration.ofMinutes(2));
resubmitSender.sendMessage(resubmitMessage);
}
receiverAsyncClient.complete(receiveMessage);
});
}
/**
* Create a {@link ServiceBusMessage} for add to a {@link ServiceBusMessageBatch}.
*/
static ServiceBusMessage createServiceBusMessage(String label) {
ServiceBusMessage message = new ServiceBusMessage(label.getBytes(UTF_8));
return message;
}
} | class DeadletterQueueSample {
private final List<Person> personList = Arrays.asList(
new Person("Einstein", "Albert"),
new Person("Heisenberg", "Werner"),
new Person("Curie", "Marie"),
new Person("Hawking", "Steven"),
new Person("Newton", "Isaac"),
new Person("Bohr", "Niels"),
new Person("Faraday", "Michael"),
new Person("Galilei", "Galileo"),
new Person("Kepler", "Johannes"),
new Person("Kopernikus", "Nikolaus")
);
/**
* Main method to show how to dead letter within an Azure Service Bus Queue.
*
* @param args Unused arguments to the program.
*/
public static void main(String[] args) {
final DeadletterQueueSample sample = new DeadletterQueueSample();
sample.run();
}
/**
* Run method to invoke this demo on how to dead letter within an Azure Service Bus Queue.
*/
@Test
/**
* Sends {@link ServiceBusMessage messages} to an Azure Service Bus Queue.
*
* @param sender Sender client.
* @param maxMessages Maximum number of messages to send.
*/
private void sendMessages(ServiceBusSenderClient sender, int maxMessages) {
// Cap the number of messages at the size of the sample data set.
final int numberOfMessages = Math.min(personList.size(), maxMessages);
final List<ServiceBusMessage> serviceBusMessages = IntStream.range(0, numberOfMessages)
.mapToObj(index -> {
final Person person = personList.get(index);
// Alternate the subject so later scenarios can dead-letter/fix by subject;
// a 2-minute TTL lets the max-delivery scenario expire messages quickly.
return new ServiceBusMessage(person.toJson())
.setContentType("application/json")
.setSubject(index % 2 == 0 ? "Scientist" : "Physicist")
.setMessageId(Integer.toString(index))
.setTimeToLive(Duration.ofMinutes(2));
}).collect(Collectors.toList());
sender.sendMessages(serviceBusMessages);
}
/**
* <strong>Scenario 1: Part 1</strong>
*
* <p>
* Receive {@link ServiceBusMessage messages} and return the {@link ServiceBusMessage messages} back to the queue.
* When the max number of deliveries for each {@link ServiceBusMessage message} expires, then it is moved into the
* dead letter queue.
* </p>
*
* @param builder Service Bus client builder.
* @param queueName Name of the queue to receive from.
*
* @return A Mono that completes when all messages in queue have been processed.(because a message has not been
* received in the last 30 seconds).
*/
private Mono<Void> deadLetterByExceedingMaxDelivery(ServiceBusClientBuilder builder, String queueName) {
return Mono.using(() -> {
return builder.receiver()
.queueName(queueName)
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.buildAsyncClient();
}, receiver -> {
return receiver.receiveMessages()
.timeout(Duration.ofSeconds(30))
.flatMap(message -> {
System.out.printf("Received message. Sequence
message.getDeliveryCount());
return receiver.abandon(message);
})
.onErrorResume(TimeoutException.class, exception -> {
System.out.println("No messages received after 30 seconds. Queue is empty.");
return Mono.empty();
})
.then();
}, receiver -> {
receiver.close();
});
}
/**
* <strong>Scenario 1: Part 2</strong>
*
* <p>
* This method continues to receive messages from the dead letter queue, then completes them.
* </p>
*
* @param builder Service Bus client builder.
* @param queueName Name of the queue to receive from.
*
* @return A Mono that completes when all messages in queue have been processed (because a message has not been
* received in the last 30 seconds).
*/
private Mono<Void> receiveAndCompleteDeadLetterQueueMessages(ServiceBusClientBuilder builder, String queueName) {
return Mono.using(() -> {
return builder.receiver()
.queueName(queueName)
.subQueue(SubQueue.DEAD_LETTER_QUEUE)
.buildAsyncClient();
}, deadLetterQueueReceiver -> {
return deadLetterQueueReceiver.receiveMessages()
.timeout(Duration.ofSeconds(30))
.flatMap(message -> {
System.out.printf("Received message from dead-letter queue. Sequence
message.getSequenceNumber(), message.getDeliveryCount());
System.out.printf("Dead-Letter Reason: %s. Description: %s. Source: %s%n",
message.getDeadLetterReason(), message.getDeadLetterErrorDescription(),
message.getDeadLetterSource());
System.out.println("Application properties:");
message.getApplicationProperties().forEach((key, value) ->
System.out.printf("\t%s=%s%n", key, value));
return deadLetterQueueReceiver.complete(message);
})
.onErrorResume(TimeoutException.class, exception -> {
System.out.println("No messages received after 30 seconds. Dead-letter queue is empty.");
return Mono.empty();
})
.then();
}, deadLetterQueueReceiver -> {
deadLetterQueueReceiver.close();
});
}
/**
* <strong>Scenario 2: Part 1</strong>
*
* <p>
* Receives {@link ServiceBusMessage messages} and dead letters them if it has a subject of "Scientist" and content
* type of "application/json". This is to simulate that the message may be malformed or didn't contain the right
* content, so we dead letter it so other receivers don't process this message.
* </p>
*
* @param builder Service Bus client builder.
* @param queueName Name of queue to receive messages from.
*
* @return A Mono that completes when all the messages in the queue have been processed (because a message has not
* been received in the last 30 seconds).
*/
private Mono<Void> receiveAndDeadletterMessages(ServiceBusClientBuilder builder, String queueName) {
return Mono.using(
() -> {
return builder.receiver()
.queueName(queueName)
.receiveMode(ServiceBusReceiveMode.PEEK_LOCK)
.buildAsyncClient();
},
receiver -> {
return receiver.receiveMessages()
.timeout(Duration.ofSeconds(30))
.flatMap(message -> {
final String subject = message.getSubject();
final String contentType = message.getContentType();
final Person person;
try {
person = Person.fromJson(message.getBody().toString());
} catch (RuntimeException e) {
return Mono.error(new RuntimeException("Could not deserialize message: "
+ message.getSequenceNumber(), e));
}
System.out.printf("Received message. SequenceNumber = %s. EnqueuedTimeUtc = %s. "
+ "ExpiresAtUtc = %s. ContentType = %s. Content: [ %s ]%n",
message.getSequenceNumber(), message.getEnqueuedTime(), message.getExpiresAt(),
message.getContentType(), person);
if ("Scientist".equals(subject) && "application/json".equals(contentType)) {
return receiver.complete(message);
} else {
return receiver.deadLetter(message);
}
})
.onErrorResume(TimeoutException.class, exception -> {
System.out.println("No messages received after 30 seconds. Queue is empty.");
return Mono.empty();
})
.then();
},
receiver -> {
receiver.close();
});
}
/**
* <strong>Scenario 2: Part 2</strong>
*
* <p>
* Receives {@link ServiceBusMessage messages} from the dead letter queue, it fixes up the message then resends the
* fixed message to the queue again. This simulates messages that may have errors in them, were dead-lettered,
* and reprocessed in the dead-letter queue so the data is correct again.
* </p>
*
* @param builder Service Bus client builder.
* @param queueName Name of queue to receive messages from.
* @param resubmitSender Service Bus sender client. When messages are fixed, they are published via this sender.
*
* @return A Mono that completes when all the messages in the queue have been processed (because a message has not
* been received in the last 30 seconds).
*/
private Mono<Void> receiveAndFixDeadLetterQueueMessages(ServiceBusClientBuilder builder, String queueName,
    ServiceBusSenderClient resubmitSender) {
    return Mono.using(() -> {
        // Resource factory: a receiver bound to the queue's dead-letter sub-queue.
        return builder.receiver()
            .queueName(queueName)
            .subQueue(SubQueue.DEAD_LETTER_QUEUE)
            .buildAsyncClient();
    }, deadLetterQueueReceiver -> {
        // Process dead-lettered messages until none arrive for 30 seconds.
        return deadLetterQueueReceiver.receiveMessages()
            .timeout(Duration.ofSeconds(30))
            .flatMap(message -> {
                if ("Physicist".equals(message.getSubject())) {
                    // "Physicist" is the simulated data error; fix it up by re-publishing a
                    // copy of the message with the corrected "Scientist" subject.
                    System.out.printf("Fixing DLQ message. MessageId = %s. SequenceNumber = %s. Subject = %s%n",
                        message.getMessageId(), message.getSequenceNumber(), message.getSubject());
                    ServiceBusMessage resubmitMessage = new ServiceBusMessage(message)
                        .setSubject("Scientist");
                    // NOTE(review): sendMessage is a synchronous, blocking call inside a
                    // reactive flatMap — presumably acceptable for a sample, but confirm;
                    // an async sender would avoid blocking the pipeline.
                    resubmitSender.sendMessage(resubmitMessage);
                } else {
                    System.out.printf("Message resubmission is not required. MessageId = %s. SequenceNumber = %s. "
                            + "Subject = %s%n",
                        message.getMessageId(), message.getSequenceNumber(), message.getSubject());
                }
                // Either way, complete the message so it is removed from the dead-letter queue.
                return deadLetterQueueReceiver.complete(message);
            })
            // Timeout means the dead-letter queue is drained; complete normally.
            .onErrorResume(TimeoutException.class, exception -> {
                System.out.println("No messages received after 30 seconds. Dead-letter queue is empty.");
                return Mono.empty();
            })
            .then();
    }, deadLetterQueueReceiver -> {
        // Cleanup: dispose of the dead-letter receiver when the pipeline terminates.
        deadLetterQueueReceiver.close();
    });
}
/**
 * Immutable data holder for the sample, serialized to and from JSON with Jackson.
 *
 * <p>The creator parameters carry explicit {@code @JsonProperty} names so deserialization
 * binds correctly even when parameter names are not retained at compile time (no
 * {@code -parameters} flag), and the getters are public so Jackson's default visibility
 * rules include both properties when serializing.</p>
 */
private static final class Person {
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    private final String lastName;
    private final String firstName;

    // Explicit property names: a bare @JsonProperty on a constructor argument gives
    // Jackson nothing to bind against unless parameter names survive compilation.
    Person(@JsonProperty("lastName") String lastName, @JsonProperty("firstName") String firstName) {
        this.lastName = lastName;
        this.firstName = firstName;
    }

    // Public so the default (public-getter) visibility picks these up; with the previous
    // package-private getters and private fields, toJson() produced an empty object.
    public String getLastName() {
        return lastName;
    }

    public String getFirstName() {
        return firstName;
    }

    /**
     * Serializes an item into its JSON string equivalent.
     *
     * @return The JSON representation.
     *
     * @throws RuntimeException if the person could not be serialized.
     */
    String toJson() {
        try {
            return OBJECT_MAPPER.writeValueAsString(this);
        } catch (JsonProcessingException e) {
            throw new RuntimeException("Could not serialize object.", e);
        }
    }

    /**
     * Deserializes a JSON string into a Person.
     *
     * @return The corresponding person.
     *
     * @throws RuntimeException if the JSON string could not be deserialized.
     */
    private static Person fromJson(String json) {
        try {
            return OBJECT_MAPPER.readValue(json, Person.class);
        } catch (JsonProcessingException e) {
            throw new RuntimeException("Could not deserialize object.", e);
        }
    }

    /** Human-readable form so log output shows the data instead of an object hash. */
    @Override
    public String toString() {
        return "Person{firstName='" + firstName + "', lastName='" + lastName + "'}";
    }
}
} |
Thanks @snuyanzin, I'm trying to support `RESPECT NULLS | IGNORE NULLS` syntax. | Stream<TestSpec> getTestCaseSpecs() {
// Changelog source for ARRAY_AGG: INSERT/DELETE/UPDATE_BEFORE/UPDATE_AFTER rows keyed by f0.
// Retractions remove previously accumulated f1 values from the per-key aggregate.
return Stream.of(
        TestSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_AGG)
                .withDescription("ARRAY changelog stream aggregation")
                .withSource(
                        ROW(STRING(), INT()),
                        Arrays.asList(
                                Row.ofKind(INSERT, "A", 1),
                                Row.ofKind(INSERT, "A", 2),
                                Row.ofKind(INSERT, "B", 2),
                                Row.ofKind(INSERT, "B", 2),
                                Row.ofKind(INSERT, "B", 3),
                                Row.ofKind(INSERT, "C", 3),
                                Row.ofKind(INSERT, "C", null),
                                Row.ofKind(INSERT, "D", null),
                                Row.ofKind(INSERT, "E", 4),
                                Row.ofKind(INSERT, "E", 5),
                                Row.ofKind(DELETE, "E", 5),
                                Row.ofKind(UPDATE_BEFORE, "E", 4),
                                Row.ofKind(UPDATE_AFTER, "E", 6)))
                // Plain ARRAY_AGG, checked through both SQL and the Table API.
                .testResult(
                        source ->
                                "SELECT f0, array_agg(f1) FROM " + source + " GROUP BY f0",
                        TableApiAggSpec.groupBySelect(
                                Collections.singletonList($("f0")),
                                $("f0"),
                                $("f1").arrayAgg()),
                        ROW(STRING(), ARRAY(INT())),
                        ROW(STRING(), ARRAY(INT())),
                        Arrays.asList(
                                Row.of("A", new Integer[] {1, 2}),
                                Row.of("B", new Integer[] {2, 2, 3}),
                                Row.of("C", new Integer[] {3}),
                                Row.of("D", null),
                                Row.of("E", new Integer[] {6})))
                // DISTINCT variant: the duplicate 2s for key "B" collapse to a single value.
                .testResult(
                        source ->
                                "SELECT f0, array_agg(DISTINCT f1) FROM "
                                        + source
                                        + " GROUP BY f0",
                        TableApiAggSpec.groupBySelect(
                                Collections.singletonList($("f0")),
                                $("f0"),
                                $("f1").arrayAgg().distinct()),
                        ROW(STRING(), ARRAY(INT())),
                        ROW(STRING(), ARRAY(INT())),
                        Arrays.asList(
                                Row.of("A", new Integer[] {1, 2}),
                                Row.of("B", new Integer[] {2, 3}),
                                Row.of("C", new Integer[] {3}),
                                Row.of("D", null),
                                Row.of("E", new Integer[] {6}))));
} | Row.of("D", null), | Stream<TestSpec> getTestCaseSpecs() {
// Changelog source for ARRAY_AGG. Compared with the earlier variant, key "C" also retracts
// its null (leaving only 3), so this exercises retraction of null elements as well.
return Stream.of(
        TestSpec.forFunction(BuiltInFunctionDefinitions.ARRAY_AGG)
                .withDescription("ARRAY changelog stream aggregation")
                .withSource(
                        ROW(STRING(), INT()),
                        Arrays.asList(
                                Row.ofKind(INSERT, "A", 1),
                                Row.ofKind(INSERT, "A", 2),
                                Row.ofKind(INSERT, "B", 2),
                                Row.ofKind(INSERT, "B", 2),
                                Row.ofKind(INSERT, "B", 3),
                                Row.ofKind(INSERT, "C", 3),
                                Row.ofKind(INSERT, "C", null),
                                Row.ofKind(DELETE, "C", null),
                                Row.ofKind(INSERT, "D", null),
                                Row.ofKind(INSERT, "E", 4),
                                Row.ofKind(INSERT, "E", 5),
                                Row.ofKind(DELETE, "E", 5),
                                Row.ofKind(UPDATE_BEFORE, "E", 4),
                                Row.ofKind(UPDATE_AFTER, "E", 6)))
                // Default ARRAY_AGG keeps nulls: key "D" yields an array containing null.
                .testResult(
                        source ->
                                "SELECT f0, array_agg(f1) FROM " + source + " GROUP BY f0",
                        TableApiAggSpec.groupBySelect(
                                Collections.singletonList($("f0")),
                                $("f0"),
                                $("f1").arrayAgg()),
                        ROW(STRING(), ARRAY(INT())),
                        ROW(STRING(), ARRAY(INT())),
                        Arrays.asList(
                                Row.of("A", new Integer[] {1, 2}),
                                Row.of("B", new Integer[] {2, 2, 3}),
                                Row.of("C", new Integer[] {3}),
                                Row.of("D", new Integer[] {null}),
                                Row.of("E", new Integer[] {6})))
                // SQL-only check of the IGNORE NULLS syntax: key "D" (only nulls) yields null.
                .testSqlResult(
                        source ->
                                "SELECT f0, array_agg(DISTINCT f1 IGNORE NULLS) FROM "
                                        + source
                                        + " GROUP BY f0",
                        ROW(STRING(), ARRAY(INT())),
                        Arrays.asList(
                                Row.of("A", new Integer[] {1, 2}),
                                Row.of("B", new Integer[] {2, 3}),
                                Row.of("C", new Integer[] {3}),
                                Row.of("D", null),
                                Row.of("E", new Integer[] {6}))));
} | class ArrayAggFunctionITCase extends BuiltInAggregateFunctionTestBase {
@Override
} | class ArrayAggFunctionITCase extends BuiltInAggregateFunctionTestBase {
@Override
} |
The main part of the fix is `GeneratedClassGizmoAdaptor.isApplicationClass(name)` -> `GeneratedClassGizmoAdaptor.isApplicationClass(className)`! | public boolean test(String name) {
// Strip the generated-class suffix, then convert the JVM internal name to a dotted class name.
int idx = name.lastIndexOf(SUFFIX);
String className = name.substring(0, idx).replace("/", ".");
// Generated value resolvers encode nested classes with a custom separator; restore '$'.
if (className.contains(ValueResolverGenerator.NESTED_SEPARATOR)) {
    className = className.replace(ValueResolverGenerator.NESTED_SEPARATOR, "$");
}
// Apply any caller-supplied name normalization before classifying.
className = additionalClassNameSanitizer.apply(className);
// Classify using the sanitized class name (not the raw resource name).
return GeneratedClassGizmoAdaptor.isApplicationClass(className);
} | return GeneratedClassGizmoAdaptor.isApplicationClass(className); | public boolean test(String name) {
// Strip the generated-class suffix, then convert the JVM internal name to a dotted class name.
int idx = name.lastIndexOf(SUFFIX);
String className = name.substring(0, idx).replace("/", ".");
// Generated value resolvers encode nested classes with a custom separator; restore '$'.
if (className.contains(ValueResolverGenerator.NESTED_SEPARATOR)) {
    className = className.replace(ValueResolverGenerator.NESTED_SEPARATOR, "$");
}
// Apply any caller-supplied name normalization before classifying.
className = additionalClassNameSanitizer.apply(className);
// Classify using the sanitized class name (not the raw resource name).
return GeneratedClassGizmoAdaptor.isApplicationClass(className);
} | class AppClassPredicate implements Predicate<String> {
private final Function<String, String> additionalClassNameSanitizer;
public AppClassPredicate() {
this(Function.identity());
}
public AppClassPredicate(Function<String, String> additionalClassNameSanitizer) {
this.additionalClassNameSanitizer = additionalClassNameSanitizer;
}
@Override
} | class AppClassPredicate implements Predicate<String> {
private final Function<String, String> additionalClassNameSanitizer;
public AppClassPredicate() {
this(Function.identity());
}
public AppClassPredicate(Function<String, String> additionalClassNameSanitizer) {
this.additionalClassNameSanitizer = additionalClassNameSanitizer;
}
@Override
} |
We could make this a normal GET that just uses the values from the persisted settings. | private HttpResponse handlePOST(Path path, HttpRequest request) {
// Route table for POST requests; first matching pattern wins.
// Tenant-level operations.
if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/key")) return addDeveloperKey(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/secret-store/{name}/validate")) return validateSecretStore(path.get("tenant"), path.get("name"), request);
// Application-level operations (implicit "default" instance).
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/platform")) return deployPlatform(path.get("tenant"), path.get("application"), "default", false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/pin")) return deployPlatform(path.get("tenant"), path.get("application"), "default", true, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/application")) return deployApplication(path.get("tenant"), path.get("application"), "default", request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/key")) return addDeployKey(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
// Instance-level deployment and job operations.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return createInstance(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploy/{jobtype}")) return jobDeploy(appIdFromPath(path), jobTypeFromPath(path), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/platform")) return deployPlatform(path.get("tenant"), path.get("application"), path.get("instance"), false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/pin")) return deployPlatform(path.get("tenant"), path.get("application"), path.get("instance"), true, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/application")) return deployApplication(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/submit")) return submit(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return trigger(appIdFromPath(path), jobTypeFromPath(path), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/pause")) return pause(appIdFromPath(path), jobTypeFromPath(path));
// Zone-level (environment/region) operations on an instance.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindex")) return reindex(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return enableReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspend")) return suspend(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true);
// Legacy path layout with the instance segment last.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
return ErrorResponse.notFoundError("Nothing at " + path);
} | if (path.matches("/application/v4/tenant/{tenant}/secret-store/{name}/validate")) return validateSecretStore(path.get("tenant"), path.get("name"), request); | private HttpResponse handlePOST(Path path, HttpRequest request) {
// Route table for POST requests; first matching pattern wins. (Secret-store validation
// is no longer a POST route in this version.)
// Tenant-level operations.
if (path.matches("/application/v4/tenant/{tenant}")) return createTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/key")) return addDeveloperKey(path.get("tenant"), request);
// Application-level operations (implicit "default" instance).
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return createApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/platform")) return deployPlatform(path.get("tenant"), path.get("application"), "default", false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/pin")) return deployPlatform(path.get("tenant"), path.get("application"), "default", true, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/application")) return deployApplication(path.get("tenant"), path.get("application"), "default", request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/key")) return addDeployKey(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/submit")) return submit(path.get("tenant"), path.get("application"), request);
// Instance-level deployment and job operations.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return createInstance(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploy/{jobtype}")) return jobDeploy(appIdFromPath(path), jobTypeFromPath(path), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/platform")) return deployPlatform(path.get("tenant"), path.get("application"), path.get("instance"), false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/pin")) return deployPlatform(path.get("tenant"), path.get("application"), path.get("instance"), true, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/application")) return deployApplication(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/submit")) return submit(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return trigger(appIdFromPath(path), jobTypeFromPath(path), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/pause")) return pause(appIdFromPath(path), jobTypeFromPath(path));
// Zone-level (environment/region) operations on an instance.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindex")) return reindex(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return enableReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspend")) return suspend(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true);
// Legacy path layout with the instance segment last.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/deploy")) return deploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/restart")) return restart(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
return ErrorResponse.notFoundError("Nothing at " + path);
} | class ApplicationApiHandler extends LoggingRequestHandler {
private static final ObjectMapper jsonMapper = new ObjectMapper();
private static final String OPTIONAL_PREFIX = "/api";
private final Controller controller;
private final AccessControlRequests accessControlRequests;
private final TestConfigSerializer testConfigSerializer;
@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
Controller controller,
AccessControlRequests accessControlRequests) {
super(parentCtx);
this.controller = controller;
this.accessControlRequests = accessControlRequests;
this.testConfigSerializer = new TestConfigSerializer(controller.system());
}
@Override
public Duration getTimeout() {
return Duration.ofMinutes(20);
}
@Override
public HttpResponse handle(HttpRequest request) {
try {
Path path = new Path(request.getUri(), OPTIONAL_PREFIX);
switch (request.getMethod()) {
case GET: return handleGET(path, request);
case PUT: return handlePUT(path, request);
case POST: return handlePOST(path, request);
case PATCH: return handlePATCH(path, request);
case DELETE: return handleDELETE(path, request);
case OPTIONS: return handleOPTIONS();
default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
}
}
catch (ForbiddenException e) {
return ErrorResponse.forbidden(Exceptions.toMessageString(e));
}
catch (NotAuthorizedException e) {
return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
}
catch (NotExistsException e) {
return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
}
catch (IllegalArgumentException e) {
return ErrorResponse.badRequest(Exceptions.toMessageString(e));
}
catch (ConfigServerException e) {
switch (e.getErrorCode()) {
case NOT_FOUND:
return new ErrorResponse(NOT_FOUND, e.getErrorCode().name(), Exceptions.toMessageString(e));
case ACTIVATION_CONFLICT:
return new ErrorResponse(CONFLICT, e.getErrorCode().name(), Exceptions.toMessageString(e));
case INTERNAL_SERVER_ERROR:
return new ErrorResponse(INTERNAL_SERVER_ERROR, e.getErrorCode().name(), Exceptions.toMessageString(e));
default:
return new ErrorResponse(BAD_REQUEST, e.getErrorCode().name(), Exceptions.toMessageString(e));
}
}
catch (RuntimeException e) {
log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
}
}
private HttpResponse handleGET(Path path, HttpRequest request) {
if (path.matches("/application/v4/")) return root(request);
if (path.matches("/application/v4/tenant")) return tenants(request);
if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/info")) return tenantInfo(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), Optional.empty(), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/compile-version")) return compileVersion(path.get("tenant"), path.get("application"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deployment")) return JobControllerApiHandlerHelper.overviewResponse(controller, TenantAndApplicationId.from(path.get("tenant"), path.get("application")), request.getUri());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/package")) return applicationPackage(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploying(path.get("tenant"), path.get("application"), "default", request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/pin")) return deploying(path.get("tenant"), path.get("application"), "default", request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/metering")) return metering(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance")) return applications(path.get("tenant"), Optional.of(path.get("application")), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return instance(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying")) return deploying(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/pin")) return deploying(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/package")) return devApplicationPackage(appIdFromPath(path), jobTypeFromPath(path));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/test-config")) return testConfig(appIdFromPath(path), jobTypeFromPath(path));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return getReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspended")) return suspended(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/nodes")) return nodes(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/clusters")) return clusters(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/content/{*}")) return content(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.getRest(), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.propertyMap());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/metrics")) return metrics(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), Optional.ofNullable(request.getProperty("endpointId")));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/suspended")) return suspended(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/nodes")) return nodes(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/clusters")) return clusters(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.propertyMap());
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), Optional.ofNullable(request.getProperty("endpointId")));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
return ErrorResponse.notFoundError("Nothing at " + path);
}
private HttpResponse handlePUT(Path path, HttpRequest request) {
if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/info")) return updateTenantInfo(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/secret-store/{name}")) return addSecretStore(path.get("tenant"), path.get("name"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
// Routes PATCH requests. Both the application-level and the instance-level path
// delegate to the same application-level patch (the instance segment is ignored).
private HttpResponse handlePATCH(Path path, HttpRequest request) {
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return patchApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return patchApplication(path.get("tenant"), path.get("application"), request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
// Routes DELETE requests to the matching handler based on the request path.
// Deactivation and global-rotation override removal are registered under both
// path orders (/instance/.../environment/... and /environment/.../instance/...).
private HttpResponse handleDELETE(Path path, HttpRequest request) {
    if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/key")) return removeDeveloperKey(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deployment")) return removeAllProdDeployments(path.get("tenant"), path.get("application"));
    // Without an instance segment, cancellation applies to the "default" instance.
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"), "default", "all");
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/{choice}")) return cancelDeploy(path.get("tenant"), path.get("application"), "default", path.get("choice"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/key")) return removeDeployKey(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return deleteInstance(path.get("tenant"), path.get("application"), path.get("instance"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"), path.get("instance"), "all");
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/{choice}")) return cancelDeploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("choice"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.abortJobResponse(controller.jobController(), appIdFromPath(path), jobTypeFromPath(path));
    // DELETE on .../pause removes the pause, i.e. resumes the job.
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/pause")) return resume(appIdFromPath(path), jobTypeFromPath(path));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return disableReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspend")) return suspend(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
    return ErrorResponse.notFoundError("Nothing at " + path);
}
// Answers OPTIONS with an empty body and an Allow header listing the supported methods.
private HttpResponse handleOPTIONS() {
    EmptyResponse optionsResponse = new EmptyResponse();
    optionsResponse.headers().put("Allow", "GET,PUT,POST,PATCH,DELETE,OPTIONS");
    return optionsResponse;
}
// Lists every tenant, fully serialized, as a JSON array.
private HttpResponse recursiveRoot(HttpRequest request) {
    Slime slime = new Slime();
    Cursor tenantArray = slime.setArray();
    for (Tenant tenant : controller.tenants().asList())
        toSlime(tenantArray.addObject(), tenant, request);
    return new SlimeJsonResponse(slime);
}
// Root of the API: a full recursive tenant listing when recursion is requested,
// otherwise just a resource listing pointing at "tenant".
private HttpResponse root(HttpRequest request) {
    if (recurseOverTenants(request))
        return recursiveRoot(request);
    return new ResourceResponse(request, "tenant");
}
// Lists every tenant in the abbreviated "tenants list" form as a JSON array.
private HttpResponse tenants(HttpRequest request) {
    Slime slime = new Slime();
    Cursor response = slime.setArray();
    for (Tenant tenant : controller.tenants().asList())
        tenantInTenantsListToSlime(tenant, request.getUri(), response.addObject());
    return new SlimeJsonResponse(slime);
}
// Serializes the named tenant, or 404 if it does not exist.
private HttpResponse tenant(String tenantName, HttpRequest request) {
    return controller.tenants().get(TenantName.from(tenantName))
                     .map(tenant -> tenant(tenant, request))
                     .orElseGet(() -> ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist"));
}
// Serializes the given tenant as a JSON object response.
private HttpResponse tenant(Tenant tenant, HttpRequest request) {
    Slime root = new Slime();
    toSlime(root.setObject(), tenant, request);
    return new SlimeJsonResponse(root);
}
// Serializes the tenant's extended info. Only cloud tenants carry info;
// other tenant types (and unknown tenants) yield 404.
private HttpResponse tenantInfo(String tenantName, HttpRequest request) {
    return controller.tenants().get(TenantName.from(tenantName))
                     .filter(tenant -> tenant.type() == Tenant.Type.cloud)
                     .map(tenant -> tenantInfo(((CloudTenant)tenant).info(), request))
                     .orElseGet(() -> ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist or does not support this"));
}
// Serializes tenant info to JSON. An empty info yields an empty JSON object.
private SlimeJsonResponse tenantInfo(TenantInfo info, HttpRequest request) {
    Slime slime = new Slime();
    Cursor infoCursor = slime.setObject();
    if (!info.isEmpty()) {
        infoCursor.setString("name", info.name());
        infoCursor.setString("email", info.email());
        infoCursor.setString("website", info.website());
        infoCursor.setString("invoiceEmail", info.invoiceEmail());
        infoCursor.setString("contactName", info.contactName());
        infoCursor.setString("contactEmail", info.contactEmail());
        toSlime(info.address(), infoCursor);
        toSlime(info.billingContact(), infoCursor);
    }
    return new SlimeJsonResponse(slime);
}
// Adds an "address" object under the given cursor; skipped entirely when the address is empty.
private void toSlime(TenantInfoAddress address, Cursor parentCursor) {
    if (address.isEmpty()) return;
    Cursor addressCursor = parentCursor.setObject("address");
    addressCursor.setString("addressLines", address.addressLines());
    addressCursor.setString("postalCodeOrZip", address.postalCodeOrZip());
    addressCursor.setString("city", address.city());
    addressCursor.setString("stateRegionProvince", address.stateRegionProvince());
    addressCursor.setString("country", address.country());
}
// Adds a "billingContact" object (including its nested address) under the given
// cursor; skipped entirely when the contact is empty.
private void toSlime(TenantInfoBillingContact billingContact, Cursor parentCursor) {
    if (billingContact.isEmpty()) return;
    Cursor addressCursor = parentCursor.setObject("billingContact");
    addressCursor.setString("name", billingContact.name());
    addressCursor.setString("email", billingContact.email());
    addressCursor.setString("phone", billingContact.phone());
    toSlime(billingContact.address(), addressCursor);
}
// Updates extended info for the named tenant. Only cloud tenants carry info;
// other tenant types (and unknown tenants) yield 404.
private HttpResponse updateTenantInfo(String tenantName, HttpRequest request) {
    return controller.tenants().get(TenantName.from(tenantName))
                     .filter(tenant -> tenant.type() == Tenant.Type.cloud)
                     .map(tenant -> updateTenantInfo(((CloudTenant)tenant), request))
                     .orElseGet(() -> ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist or does not support this"));
}
/** Returns the field's string value when the field is present, otherwise the given default. */
private String getString(Inspector field, String defaultValue) {
    // Fixed misspelled parameter name ("defaultVale").
    return field.valid() ? field.asString() : defaultValue;
}
/**
 * Merges the JSON payload of the request into the tenant's existing info and stores
 * the result. Fields absent from the payload keep their previous values.
 */
private SlimeJsonResponse updateTenantInfo(CloudTenant tenant, HttpRequest request) {
    TenantInfo oldInfo = tenant.info();
    Inspector insp = toSlime(request.getData()).get();
    TenantInfo mergedInfo = TenantInfo.EMPTY
            .withName(getString(insp.field("name"), oldInfo.name()))
            .withEmail(getString(insp.field("email"), oldInfo.email()))
            // BUGFIX: default for "website" was oldInfo.email() (copy-paste error).
            .withWebsite(getString(insp.field("website"), oldInfo.website()))
            .withInvoiceEmail(getString(insp.field("invoiceEmail"), oldInfo.invoiceEmail()))
            .withContactName(getString(insp.field("contactName"), oldInfo.contactName()))
            // BUGFIX: default for "contactEmail" was oldInfo.contactName() (copy-paste error).
            .withContactEmail(getString(insp.field("contactEmail"), oldInfo.contactEmail()))
            .withAddress(updateTenantInfoAddress(insp.field("address"), oldInfo.address()))
            .withBillingContact(updateTenantInfoBillingContact(insp.field("billingContact"), oldInfo.billingContact()));

    // Persist under the tenant lock to avoid losing concurrent updates.
    controller.tenants().lockOrThrow(tenant.name(), LockedTenant.Cloud.class, lockedTenant -> {
        lockedTenant = lockedTenant.withInfo(mergedInfo);
        controller.tenants().store(lockedTenant);
    });
    return new MessageResponse("Tenant info updated");
}
// Merges an "address" JSON object into the old address; when the object is absent
// the old address is returned unchanged. Missing fields keep their old values.
private TenantInfoAddress updateTenantInfoAddress(Inspector insp, TenantInfoAddress oldAddress) {
    if (!insp.valid()) return oldAddress;
    return TenantInfoAddress.EMPTY
            .withCountry(getString(insp.field("country"), oldAddress.country()))
            .withStateRegionProvince(getString(insp.field("stateRegionProvince"), oldAddress.stateRegionProvince()))
            .withCity(getString(insp.field("city"), oldAddress.city()))
            .withPostalCodeOrZip(getString(insp.field("postalCodeOrZip"), oldAddress.postalCodeOrZip()))
            .withAddressLines(getString(insp.field("addressLines"), oldAddress.addressLines()));
}
// Merges a "billingContact" JSON object into the old contact; when the object is
// absent the old contact is returned unchanged. Missing fields keep their old values.
private TenantInfoBillingContact updateTenantInfoBillingContact(Inspector insp, TenantInfoBillingContact oldContact) {
    if (!insp.valid()) return oldContact;
    return TenantInfoBillingContact.EMPTY
            .withName(getString(insp.field("name"), oldContact.name()))
            .withEmail(getString(insp.field("email"), oldContact.email()))
            .withPhone(getString(insp.field("phone"), oldContact.phone()))
            .withAddress(updateTenantInfoAddress(insp.field("address"), oldContact.address()));
}
// Lists the tenant's applications (optionally filtered to one application name),
// each with its instances and API URLs. 404 when the tenant does not exist.
private HttpResponse applications(String tenantName, Optional<String> applicationName, HttpRequest request) {
    TenantName tenant = TenantName.from(tenantName);
    if (controller.tenants().get(tenantName).isEmpty())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");

    Slime slime = new Slime();
    Cursor applicationArray = slime.setArray();
    for (com.yahoo.vespa.hosted.controller.Application application : controller.applications().asList(tenant)) {
        // Empty filter means "all applications".
        if (applicationName.map(application.id().application().value()::equals).orElse(true)) {
            Cursor applicationObject = applicationArray.addObject();
            applicationObject.setString("tenant", application.id().tenant().value());
            applicationObject.setString("application", application.id().application().value());
            applicationObject.setString("url", withPath("/application/v4" +
                                                        "/tenant/" + application.id().tenant().value() +
                                                        "/application/" + application.id().application().value(),
                                                        request.getUri()).toString());
            Cursor instanceArray = applicationObject.setArray("instances");
            // Optionally restrict to production instances, per request parameter.
            for (InstanceName instance : showOnlyProductionInstances(request) ? application.productionInstances().keySet()
                                                                              : application.instances().keySet()) {
                Cursor instanceObject = instanceArray.addObject();
                instanceObject.setString("instance", instance.value());
                instanceObject.setString("url", withPath("/application/v4" +
                                                         "/tenant/" + application.id().tenant().value() +
                                                         "/application/" + application.id().application().value() +
                                                         "/instance/" + instance.value(),
                                                         request.getUri()).toString());
            }
        }
    }
    return new SlimeJsonResponse(slime);
}
// Returns the dev application package for the given instance and job type as a zip.
// Only manually deployed environments store dev packages.
private HttpResponse devApplicationPackage(ApplicationId id, JobType type) {
    if ( ! type.environment().isManuallyDeployed())
        throw new IllegalArgumentException("Only manually deployed zones have dev packages");

    ZoneId zone = type.zone(controller.system());
    byte[] applicationPackage = controller.applications().applicationStore().getDev(id, zone);
    return new ZipResponse(id.toFullString() + "." + zone.value() + ".zip", applicationPackage);
}
// Returns the application package for a given build number (or the latest submitted
// build when no "build" query parameter is given) as a zip download.
private HttpResponse applicationPackage(String tenantName, String applicationName, HttpRequest request) {
    var tenantAndApplication = TenantAndApplicationId.from(tenantName, applicationName);
    var applicationId = ApplicationId.from(tenantName, applicationName, InstanceName.defaultName().value());

    long buildNumber;
    var requestedBuild = Optional.ofNullable(request.getProperty("build")).map(build -> {
        try {
            return Long.parseLong(build);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid build number", e);
        }
    });
    if (requestedBuild.isEmpty()) { // Fall back to the latest submitted build.
        var application = controller.applications().requireApplication(tenantAndApplication);
        var latestBuild = application.latestVersion().map(ApplicationVersion::buildNumber).orElse(OptionalLong.empty());
        if (latestBuild.isEmpty()) {
            throw new NotExistsException("No application package has been submitted for '" + tenantAndApplication + "'");
        }
        buildNumber = latestBuild.getAsLong();
    } else {
        buildNumber = requestedBuild.get();
    }

    var applicationPackage = controller.applications().applicationStore().find(tenantAndApplication.tenant(), tenantAndApplication.application(), buildNumber);
    var filename = tenantAndApplication + "-build" + buildNumber + ".zip";
    if (applicationPackage.isEmpty()) {
        throw new NotExistsException("No application package found for '" +
                                     tenantAndApplication +
                                     "' with build number " + buildNumber);
    }
    return new ZipResponse(filename, applicationPackage.get());
}
// Serializes the named application, throwing NotExistsException when absent.
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
    Slime slime = new Slime();
    toSlime(slime.setObject(), getApplication(tenantName, applicationName), request);
    return new SlimeJsonResponse(slime);
}
// Returns the application's current compile version as {"compileVersion": "..."}.
private HttpResponse compileVersion(String tenantName, String applicationName) {
    Slime slime = new Slime();
    slime.setObject().setString("compileVersion",
                                compileVersion(TenantAndApplicationId.from(tenantName, applicationName)).toFullString());
    return new SlimeJsonResponse(slime);
}
// Serializes the named instance together with its application's deployment status.
private HttpResponse instance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    Slime slime = new Slime();
    toSlime(slime.setObject(), getInstance(tenantName, applicationName, instanceName),
            controller.jobController().deploymentStatus(getApplication(tenantName, applicationName)), request);
    return new SlimeJsonResponse(slime);
}
// Registers the requesting user's PEM-encoded public key as a developer key on a
// cloud tenant, and returns the resulting key list.
private HttpResponse addDeveloperKey(String tenantName, HttpRequest request) {
    if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
        throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");

    Principal user = request.getJDiscRequest().getUserPrincipal();
    String pemDeveloperKey = toSlime(request.getData()).get().field("key").asString();
    PublicKey developerKey = KeyUtils.fromPemEncodedPublicKey(pemDeveloperKey);
    Slime root = new Slime();
    // Mutate and serialize under the tenant lock so the returned list reflects the stored state.
    controller.tenants().lockOrThrow(TenantName.from(tenantName), LockedTenant.Cloud.class, tenant -> {
        tenant = tenant.withDeveloperKey(developerKey, user);
        toSlime(root.setObject().setArray("keys"), tenant.get().developerKeys());
        controller.tenants().store(tenant);
    });
    return new SlimeJsonResponse(root);
}
// Validates the named secret store against one of the tenant's active deployments
// via the config server. Requires a cloud tenant, a configured store with that
// name, and at least one active deployment.
private HttpResponse validateSecretStore(String tenantName, String name, HttpRequest request) {
    var tenant = TenantName.from(tenantName);
    if (controller.tenants().require(tenant).type() != Tenant.Type.cloud)
        return ErrorResponse.badRequest("Tenant '" + tenant + "' is not a cloud tenant");

    var cloudTenant = (CloudTenant)controller.tenants().require(tenant);
    var tenantSecretStore = cloudTenant.tenantSecretStores()
            .stream()
            .filter(secretStore -> secretStore.getName().equals(name))
            .findFirst();
    var deployment = getActiveDeployment(tenant);

    if (deployment.isEmpty())
        return ErrorResponse.badRequest("Tenant '" + tenantName + "' has no active deployments");
    if (tenantSecretStore.isEmpty())
        return ErrorResponse.notFoundError("No secret store '" + name + "' configured for tenant '" + tenantName + "'");

    var response = controller.serviceRegistry().configServer().validateSecretStore(deployment.get(), tenantSecretStore.get());
    return new MessageResponse(response);
}
/**
 * Returns an active deployment for the tenant, if any: the first deployment of the
 * first instance, across the tenant's applications, which has at least one deployment.
 */
private Optional<DeploymentId> getActiveDeployment(TenantName tenant) {
    return controller.applications().asList(tenant).stream()
                     .flatMap(application -> application.instances().values().stream())
                     .filter(instance -> ! instance.deployments().isEmpty())
                     .findFirst()
                     .map(instance -> new DeploymentId(instance.id(),
                                                       instance.deployments().keySet().iterator().next()));
}
// Removes a PEM-encoded developer key from a cloud tenant and returns the remaining keys.
private HttpResponse removeDeveloperKey(String tenantName, HttpRequest request) {
    if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
        throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");

    String pemDeveloperKey = toSlime(request.getData()).get().field("key").asString();
    PublicKey developerKey = KeyUtils.fromPemEncodedPublicKey(pemDeveloperKey);
    // NOTE(review): 'user' is looked up but never used — presumably a leftover; confirm before removing.
    Principal user = ((CloudTenant) controller.tenants().require(TenantName.from(tenantName))).developerKeys().get(developerKey);
    Slime root = new Slime();
    controller.tenants().lockOrThrow(TenantName.from(tenantName), LockedTenant.Cloud.class, tenant -> {
        tenant = tenant.withoutDeveloperKey(developerKey);
        toSlime(root.setObject().setArray("keys"), tenant.get().developerKeys());
        controller.tenants().store(tenant);
    });
    return new SlimeJsonResponse(root);
}
// Serializes each (public key, owner) pair as {"key": <PEM>, "user": <name>} into the array.
private void toSlime(Cursor keysArray, Map<PublicKey, Principal> keys) {
    for (Map.Entry<PublicKey, Principal> entry : keys.entrySet()) {
        Cursor keyObject = keysArray.addObject();
        keyObject.setString("key", KeyUtils.toPem(entry.getKey()));
        keyObject.setString("user", entry.getValue().getName());
    }
}
// Adds a PEM-encoded deploy key to the application and returns the resulting key list.
private HttpResponse addDeployKey(String tenantName, String applicationName, HttpRequest request) {
    String pemDeployKey = toSlime(request.getData()).get().field("key").asString();
    PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(pemDeployKey);
    Slime root = new Slime();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        application = application.withDeployKey(deployKey);
        application.get().deployKeys().stream()
                   .map(KeyUtils::toPem)
                   .forEach(root.setObject().setArray("keys")::addString);
        controller.applications().store(application);
    });
    return new SlimeJsonResponse(root);
}
// Removes a PEM-encoded deploy key from the application and returns the remaining keys.
private HttpResponse removeDeployKey(String tenantName, String applicationName, HttpRequest request) {
    String pemDeployKey = toSlime(request.getData()).get().field("key").asString();
    PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(pemDeployKey);
    Slime root = new Slime();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        application = application.withoutDeployKey(deployKey);
        application.get().deployKeys().stream()
                   .map(KeyUtils::toPem)
                   .forEach(root.setObject().setArray("keys")::addString);
        controller.applications().store(application);
    });
    return new SlimeJsonResponse(root);
}
/**
 * Configures a new tenant secret store for a cloud tenant: creates the tenant policy,
 * registers the store with the tenant secret service, and persists it on the tenant.
 * Fails with 400 when the store definition is invalid or already configured.
 */
private HttpResponse addSecretStore(String tenantName, String name, HttpRequest request) {
    if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
        throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");

    var data = toSlime(request.getData()).get();
    var awsId = mandatory("awsId", data).asString();
    var externalId = mandatory("externalId", data).asString();
    var role = mandatory("role", data).asString();

    var tenant = (CloudTenant) controller.tenants().require(TenantName.from(tenantName));
    var tenantSecretStore = new TenantSecretStore(name, awsId, role);

    // BUGFIX: dropped the redundant String.format around plain concatenation — it added
    // nothing, and would throw IllegalFormatException if the message ever contained '%'.
    if (!tenantSecretStore.isValid()) {
        return ErrorResponse.badRequest("Secret store " + tenantSecretStore + " is invalid");
    }
    if (tenant.tenantSecretStores().contains(tenantSecretStore)) {
        return ErrorResponse.badRequest("Secret store " + tenantSecretStore + " is already configured");
    }

    controller.serviceRegistry().roleService().createTenantPolicy(TenantName.from(tenantName), name, awsId, role);
    controller.serviceRegistry().tenantSecretService().addSecretStore(tenant.name(), tenantSecretStore, externalId);
    // Store the secret store on the tenant under lock.
    controller.tenants().lockOrThrow(tenant.name(), LockedTenant.Cloud.class, lockedTenant -> {
        lockedTenant = lockedTenant.withSecretStore(tenantSecretStore);
        controller.tenants().store(lockedTenant);
    });
    return new MessageResponse("Configured secret store: " + tenantSecretStore);
}
// Applies a partial update to the application: an optional "majorVersion" pin
// (0 clears the pin) and an optional "pemDeployKey" to add. Returns a message
// describing the changes actually applied.
private HttpResponse patchApplication(String tenantName, String applicationName, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    StringJoiner messageBuilder = new StringJoiner("\n").setEmptyValue("No applicable changes.");
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        Inspector majorVersionField = requestObject.field("majorVersion");
        if (majorVersionField.valid()) {
            // A value of 0 means "remove the major version pin".
            Integer majorVersion = majorVersionField.asLong() == 0 ? null : (int) majorVersionField.asLong();
            application = application.withMajorVersion(majorVersion);
            messageBuilder.add("Set major version to " + (majorVersion == null ? "empty" : majorVersion));
        }

        Inspector pemDeployKeyField = requestObject.field("pemDeployKey");
        if (pemDeployKeyField.valid()) {
            String pemDeployKey = pemDeployKeyField.asString();
            PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(pemDeployKey);
            application = application.withDeployKey(deployKey);
            messageBuilder.add("Added deploy key " + pemDeployKey);
        }

        controller.applications().store(application);
    });
    return new MessageResponse(messageBuilder.toString());
}
// Looks up the application, throwing NotExistsException when it is not found.
private com.yahoo.vespa.hosted.controller.Application getApplication(String tenantName, String applicationName) {
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    return controller.applications().getApplication(id)
                     .orElseThrow(() -> new NotExistsException(id + " not found"));
}
// Looks up the instance, throwing NotExistsException when it is not found.
private Instance getInstance(String tenantName, String applicationName, String instanceName) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    return controller.applications().getInstance(id)
                     .orElseThrow(() -> new NotExistsException(id + " not found"));
}
// Lists the nodes allocated to the given deployment, as reported by the node
// repository, with state, version, flavor, resources and cluster membership.
private HttpResponse nodes(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, id);

    Slime slime = new Slime();
    Cursor nodesArray = slime.setObject().setArray("nodes");
    for (Node node : nodes) {
        Cursor nodeObject = nodesArray.addObject();
        nodeObject.setString("hostname", node.hostname().value());
        nodeObject.setString("state", valueOf(node.state()));
        node.reservedTo().ifPresent(tenant -> nodeObject.setString("reservedTo", tenant.value()));
        nodeObject.setString("orchestration", valueOf(node.serviceState()));
        nodeObject.setString("version", node.currentVersion().toString());
        nodeObject.setString("flavor", node.flavor());
        toSlime(node.resources(), nodeObject);
        nodeObject.setBool("fastDisk", node.resources().diskSpeed() == NodeResources.DiskSpeed.fast);
        nodeObject.setString("clusterId", node.clusterId());
        nodeObject.setString("clusterType", valueOf(node.clusterType()));
    }
    return new SlimeJsonResponse(slime);
}
// Serializes the deployment's clusters with their resource limits, current/target/
// suggested allocations, utilization, scaling events and autoscaling status.
private HttpResponse clusters(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    Application application = controller.serviceRegistry().configServer().nodeRepository().getApplication(zone, id);

    Slime slime = new Slime();
    Cursor clustersObject = slime.setObject().setObject("clusters");
    for (Cluster cluster : application.clusters().values()) {
        Cursor clusterObject = clustersObject.setObject(cluster.id().value());
        toSlime(cluster.min(), clusterObject.setObject("min"));
        toSlime(cluster.max(), clusterObject.setObject("max"));
        toSlime(cluster.current(), clusterObject.setObject("current"));
        // Only emit "target" when it differs from the current allocation.
        if (cluster.target().isPresent()
            && ! cluster.target().get().justNumbers().equals(cluster.current().justNumbers()))
            toSlime(cluster.target().get(), clusterObject.setObject("target"));
        cluster.suggested().ifPresent(suggested -> toSlime(suggested, clusterObject.setObject("suggested")));
        utilizationToSlime(cluster.utilization(), clusterObject.setObject("utilization"));
        scalingEventsToSlime(cluster.scalingEvents(), clusterObject.setArray("scalingEvents"));
        clusterObject.setString("autoscalingStatus", cluster.autoscalingStatus());
    }
    return new SlimeJsonResponse(slime);
}
// Maps a node state to its wire-format string; throws on unknown states so new
// enum values cannot silently leak an unintended name into the API.
private static String valueOf(Node.State state) {
    switch (state) {
        case failed: return "failed";
        case parked: return "parked";
        case dirty: return "dirty";
        case ready: return "ready";
        case active: return "active";
        case inactive: return "inactive";
        case reserved: return "reserved";
        case provisioned: return "provisioned";
        default: throw new IllegalArgumentException("Unexpected node state '" + state + "'.");
    }
}
// Maps an orchestration state to its wire-format string; unlike the other
// valueOf overloads this falls back to "unknown" instead of throwing.
static String valueOf(Node.ServiceState state) {
    switch (state) {
        case expectedUp: return "expectedUp";
        case allowedDown: return "allowedDown";
        case permanentlyDown: return "permanentlyDown";
        case unorchestrated: return "unorchestrated";
        case unknown: break;
    }
    return "unknown";
}
// Maps a node cluster type to its wire-format string; throws on unknown types.
private static String valueOf(Node.ClusterType type) {
    switch (type) {
        case admin: return "admin";
        case content: return "content";
        case container: return "container";
        case combined: return "combined";
        default: throw new IllegalArgumentException("Unexpected node cluster type '" + type + "'.");
    }
}
// Maps a disk speed to its wire-format string; throws on unknown values.
private static String valueOf(NodeResources.DiskSpeed diskSpeed) {
    switch (diskSpeed) {
        case fast : return "fast";
        case slow : return "slow";
        case any  : return "any";
        default: throw new IllegalArgumentException("Unknown disk speed '" + diskSpeed.name() + "'");
    }
}
// Maps a storage type to its wire-format string; throws on unknown values.
private static String valueOf(NodeResources.StorageType storageType) {
    switch (storageType) {
        case remote : return "remote";
        case local  : return "local";
        case any    : return "any";
        default: throw new IllegalArgumentException("Unknown storage type '" + storageType.name() + "'");
    }
}
// Streams deployment logs from the config server straight to the client; query
// parameters are passed through unchanged as log filters.
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, Map<String, String> queryParameters) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    InputStream logStream = controller.serviceRegistry().configServer().getLogs(deployment, queryParameters);
    return new HttpResponse(200) {
        @Override
        public void render(OutputStream outputStream) throws IOException {
            // Copy the upstream log stream directly into the response body.
            logStream.transferTo(outputStream);
        }
    };
}
// Fetches proton (content node) metrics for the deployment and returns them as JSON.
private HttpResponse metrics(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    List<ProtonMetrics> protonMetrics = controller.serviceRegistry().configServer().getProtonMetrics(deployment);
    return buildResponseFromProtonMetrics(protonMetrics);
}
// Wraps the metrics in a {"metrics": [...]} JSON document. Serialization failures
// are logged and reported as an empty 500 response rather than propagated.
private JsonResponse buildResponseFromProtonMetrics(List<ProtonMetrics> protonMetrics) {
    try {
        var jsonObject = jsonMapper.createObjectNode();
        var jsonArray = jsonMapper.createArrayNode();
        for (ProtonMetrics metrics : protonMetrics) {
            jsonArray.add(metrics.toJson());
        }
        jsonObject.set("metrics", jsonArray);
        return new JsonResponse(200, jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonObject));
    } catch (JsonProcessingException e) {
        log.log(Level.SEVERE, "Unable to build JsonResponse with Proton data: " + e.getMessage(), e);
        return new JsonResponse(500, "");
    }
}
// Triggers a deployment job. "reTrigger" re-runs the job as-is; otherwise the job
// is force-triggered (optionally skipping tests via "skipTests") on behalf of the
// requesting user. The response names the jobs actually triggered, if any.
private HttpResponse trigger(ApplicationId id, JobType type, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    boolean requireTests = ! requestObject.field("skipTests").asBool();
    boolean reTrigger = requestObject.field("reTrigger").asBool();
    String triggered = reTrigger
                       ? controller.applications().deploymentTrigger()
                                   .reTrigger(id, type).type().jobName()
                       : controller.applications().deploymentTrigger()
                                   .forceTrigger(id, type, request.getJDiscRequest().getUserPrincipal().getName(), requireTests)
                                   .stream().map(job -> job.type().jobName()).collect(joining(", "));
    return new MessageResponse(triggered.isEmpty() ? "Job " + type.jobName() + " for " + id + " not triggered"
                                                   : "Triggered " + triggered + " for " + id);
}
// Pauses the given job for the maximum pause duration, measured from now.
private HttpResponse pause(ApplicationId id, JobType type) {
    Instant pauseUntil = controller.clock().instant().plus(DeploymentTrigger.maxPause);
    controller.applications().deploymentTrigger().pauseJob(id, type, pauseUntil);
    return new MessageResponse(type.jobName() + " for " + id + " paused for " + DeploymentTrigger.maxPause);
}
// Lifts any pause on the given job, letting the trigger run it again.
private HttpResponse resume(ApplicationId id, JobType type) {
    controller.applications().deploymentTrigger().resumeJob(id, type);
    return new MessageResponse(type.jobName() + " for " + id + " resumed");
}
// Serializes an application overview: identity, latest version, project id,
// change status, instances (production-only if requested), deploy keys, metrics,
// activity and ownership/issue references.
private void toSlime(Cursor object, com.yahoo.vespa.hosted.controller.Application application, HttpRequest request) {
    object.setString("tenant", application.id().tenant().value());
    object.setString("application", application.id().application().value());
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + application.id().tenant().value() +
                                             "/application/" + application.id().application().value() +
                                             "/job/",
                                             request.getUri()).toString());

    DeploymentStatus status = controller.jobController().deploymentStatus(application);
    application.latestVersion().ifPresent(version -> toSlime(version, object.setObject("latestVersion")));

    application.projectId().ifPresent(id -> object.setLong("projectId", id));

    // NOTE(review): application-level "deploying"/"outstandingChange" mirror only the
    // first instance found — presumably a legacy field; confirm against API consumers.
    application.instances().values().stream().findFirst().ifPresent(instance -> {
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));
    });

    application.majorVersion().ifPresent(majorVersion -> object.setLong("majorVersion", majorVersion));

    Cursor instancesArray = object.setArray("instances");
    for (Instance instance : showOnlyProductionInstances(request) ? application.productionInstances().values()
                                                                  : application.instances().values())
        toSlime(instancesArray.addObject(), status, instance, application.deploymentSpec(), request);

    application.deployKeys().stream().map(KeyUtils::toPem).forEach(object.setArray("pemDeployKeys")::addString);

    // Metrics
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());

    // Activity — fields are only emitted when a value is known.
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));

    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.owner().ifPresent(owner -> object.setString("owner", owner.username()));
    application.deploymentIssueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/**
 * Serializes the given instance — its ongoing and outstanding changes, change blockers,
 * global endpoints and deployments — into the given Slime object.
 *
 * Fix: removed a local {@code List<JobStatus> jobStatus} that was computed from the
 * deployment trigger but never read (dead work on every request).
 */
private void toSlime(Cursor object, DeploymentStatus status, Instance instance, DeploymentSpec deploymentSpec, HttpRequest request) {
    object.setString("instance", instance.name().value());
    if (deploymentSpec.instance(instance.name()).isPresent()) {
        // Change currently being deployed, if any.
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());
        // Change not yet started, if any.
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));
        // Time windows in which upgrades and/or revisions are blocked.
        Cursor changeBlockers = object.setArray("changeBlockers");
        deploymentSpec.instance(instance.name()).ifPresent(spec -> spec.changeBlocker().forEach(changeBlocker -> {
            Cursor changeBlockerObject = changeBlockers.addObject();
            changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
            changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
            changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
            Cursor days = changeBlockerObject.setArray("days");
            changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
            Cursor hours = changeBlockerObject.setArray("hours");
            changeBlocker.window().hours().forEach(hours::addLong);
        }));
    }
    globalEndpointsToSlime(object, instance);
    // Deployments in deployment-spec order when the instance is declared there, else in map order.
    List<Deployment> deployments = deploymentSpec.instance(instance.name())
                                                 .map(spec -> new DeploymentSteps(spec, controller::system))
                                                 .map(steps -> steps.sortedDeployments(instance.deployments().values()))
                                                 .orElse(List.copyOf(instance.deployments().values()));
    Cursor deploymentsArray = object.setArray("deployments");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = deploymentsArray.addObject();
        if (deployment.zone().environment() == Environment.prod && ! instance.rotations().isEmpty())
            toSlime(instance.rotations(), instance.rotationStatus(), deployment, deploymentObject);
        if (recurseOverDeployments(request)) // Include full deployment information when recursion is requested.
            toSlime(deploymentObject, new DeploymentId(instance.id(), deployment.zone()), deployment, request);
        else { // Otherwise a summary with a URL pointing at the deployment resource.
            deploymentObject.setString("environment", deployment.zone().environment().value());
            deploymentObject.setString("region", deployment.zone().region().value());
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/instance/" + instance.name().value() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value(),
                                                       request.getUri()).toString());
        }
    }
}
/** Adds the instance's global (rotation-backed, non-legacy) endpoint URLs, and its first rotation ID if any. */
private void globalEndpointsToSlime(Cursor object, Instance instance) {
    var endpointUrls = new LinkedHashSet<String>();
    for (var endpoint : controller.routing().endpointsOf(instance.id())
                                  .requiresRotation()
                                  .not().legacy()
                                  .asList()) {
        endpointUrls.add(endpoint.url().toString());
    }
    var rotationsArray = object.setArray("globalRotations");
    for (String url : endpointUrls)
        rotationsArray.addString(url);
    if ( ! instance.rotations().isEmpty())
        object.setString("rotationId", instance.rotations().get(0).rotationId().asString());
}
/**
 * Serializes the given instance — changes, change blockers, endpoints, deployments, deploy keys,
 * metrics, activity and issue IDs — into the given object.
 *
 * Fix: removed a local {@code List<JobStatus> jobStatus} that was computed from the
 * deployment trigger but never read (dead work on every request).
 */
private void toSlime(Cursor object, Instance instance, DeploymentStatus status, HttpRequest request) {
    com.yahoo.vespa.hosted.controller.Application application = status.application();
    object.setString("tenant", instance.id().tenant().value());
    object.setString("application", instance.id().application().value());
    object.setString("instance", instance.id().instance().value());
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + instance.id().tenant().value() +
                                             "/application/" + instance.id().application().value() +
                                             "/instance/" + instance.id().instance().value() + "/job/",
                                             request.getUri()).toString());

    // Latest submitted application package, if any.
    application.latestVersion().ifPresent(version -> {
        sourceRevisionToSlime(version.source(), object.setObject("source"));
        version.sourceUrl().ifPresent(url -> object.setString("sourceUrl", url));
        version.commit().ifPresent(commit -> object.setString("commit", commit));
    });

    application.projectId().ifPresent(id -> object.setLong("projectId", id));

    if (application.deploymentSpec().instance(instance.name()).isPresent()) {
        // Change currently being deployed, if any.
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());

        // Change not yet started, if any.
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));

        // Time windows in which upgrades and/or revisions are blocked.
        Cursor changeBlockers = object.setArray("changeBlockers");
        application.deploymentSpec().instance(instance.name()).ifPresent(spec -> spec.changeBlocker().forEach(changeBlocker -> {
            Cursor changeBlockerObject = changeBlockers.addObject();
            changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
            changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
            changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
            Cursor days = changeBlockerObject.setArray("days");
            changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
            Cursor hours = changeBlockerObject.setArray("hours");
            changeBlocker.window().hours().forEach(hours::addLong);
        }));
    }

    application.majorVersion().ifPresent(majorVersion -> object.setLong("majorVersion", majorVersion));

    globalEndpointsToSlime(object, instance);

    // Deployments in deployment-spec order when the instance is declared there, else in map order.
    List<Deployment> deployments =
            application.deploymentSpec().instance(instance.name())
                       .map(spec -> new DeploymentSteps(spec, controller::system))
                       .map(steps -> steps.sortedDeployments(instance.deployments().values()))
                       .orElse(List.copyOf(instance.deployments().values()));

    Cursor instancesArray = object.setArray("instances");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = instancesArray.addObject();

        if (deployment.zone().environment() == Environment.prod) {
            // Single-rotation status — presumably kept for older clients; verify before removing.
            if (instance.rotations().size() == 1) {
                toSlime(instance.rotationStatus().of(instance.rotations().get(0).rotationId(), deployment),
                        deploymentObject);
            }
            if ( ! recurseOverDeployments(request) && ! instance.rotations().isEmpty()) {
                toSlime(instance.rotations(), instance.rotationStatus(), deployment, deploymentObject);
            }
        }

        if (recurseOverDeployments(request)) // Include full deployment information when recursion is requested.
            toSlime(deploymentObject, new DeploymentId(instance.id(), deployment.zone()), deployment, request);
        else { // Otherwise a summary with a URL pointing at the deployment resource.
            deploymentObject.setString("environment", deployment.zone().environment().value());
            deploymentObject.setString("region", deployment.zone().region().value());
            deploymentObject.setString("instance", instance.id().instance().value());
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value(),
                                                       request.getUri()).toString());
        }
    }

    // Empty entries for planned production deployment zones not yet deployed to.
    status.jobSteps().keySet().stream()
          .filter(job -> job.application().instance().equals(instance.name()))
          .filter(job -> job.type().isProduction() && job.type().isDeployment())
          .map(job -> job.type().zone(controller.system()))
          .filter(zone -> ! instance.deployments().containsKey(zone))
          .forEach(zone -> {
              Cursor deploymentObject = instancesArray.addObject();
              deploymentObject.setString("environment", zone.environment().value());
              deploymentObject.setString("region", zone.region().value());
          });

    // Both the singular legacy field and the full list are emitted — presumably for client
    // backwards compatibility; confirm before consolidating.
    application.deployKeys().stream().findFirst().ifPresent(key -> object.setString("pemDeployKey", KeyUtils.toPem(key)));
    application.deployKeys().stream().map(KeyUtils::toPem).forEach(object.setArray("pemDeployKeys")::addString);

    // Metrics.
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());

    // Activity.
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));

    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.owner().ifPresent(owner -> object.setString("owner", owner.username()));
    application.deploymentIssueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Returns the serialized deployment of the given instance in the given zone, or 404 if absent. */
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment,
                                String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    Instance instance = controller.applications().getInstance(applicationId)
                                  .orElseThrow(() -> new NotExistsException(applicationId + " not found"));
    ZoneId zone = requireZone(environment, region);
    Deployment deployment = instance.deployments().get(zone);
    if (deployment == null)
        throw new NotExistsException(instance + " is not deployed in " + zone);
    Slime slime = new Slime();
    toSlime(slime.setObject(), new DeploymentId(instance.id(), zone), deployment, request);
    return new SlimeJsonResponse(slime);
}
/** Serializes the platform and (known) application parts of the given change into the given object. */
private void toSlime(Cursor object, Change change) {
    if (change.platform().isPresent())
        object.setString("version", change.platform().get().toString());
    change.application().ifPresent(version -> {
        if ( ! version.isUnknown())
            toSlime(version, object.setObject("revision"));
    });
}
/** Serializes the given endpoint — cluster, TLS flag, URL, scope and routing method — into the given object. */
private void toSlime(Endpoint endpoint, Cursor object) {
    object.setString("cluster", endpoint.cluster().value());
    object.setBool("tls", endpoint.tls());
    object.setString("url", endpoint.url().toString());
    object.setString("scope", endpointScopeString(endpoint.scope()));
    object.setString("routingMethod", routingMethodString(endpoint.routingMethod()));
}
/**
 * Serializes the full details of a single deployment — identity, endpoints, config-server links,
 * versions, rotation status, job status, activity and metrics — into the given response object.
 */
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
    response.setString("tenant", deploymentId.applicationId().tenant().value());
    response.setString("application", deploymentId.applicationId().application().value());
    response.setString("instance", deploymentId.applicationId().instance().value());
    response.setString("environment", deploymentId.zoneId().environment().value());
    response.setString("region", deploymentId.zoneId().region().value());
    var application = controller.applications().requireApplication(TenantAndApplicationId.from(deploymentId.applicationId()));

    // Zone-scoped (non-legacy) endpoints for this deployment, then global endpoints targeting this zone.
    var endpointArray = response.setArray("endpoints");
    EndpointList zoneEndpoints = controller.routing().endpointsOf(deploymentId)
                                           .scope(Endpoint.Scope.zone)
                                           .not().legacy();
    for (var endpoint : controller.routing().directEndpoints(zoneEndpoints, deploymentId.applicationId())) {
        toSlime(endpoint, endpointArray.addObject());
    }
    EndpointList globalEndpoints = controller.routing().endpointsOf(application, deploymentId.applicationId().instance())
                                             .not().legacy()
                                             .targets(deploymentId.zoneId());
    for (var endpoint : controller.routing().directEndpoints(globalEndpoints, deploymentId.applicationId())) {
        toSlime(endpoint, endpointArray.addObject());
    }

    // Links into this API and the zone's node repository for further detail.
    response.setString("clusters", withPath(toPath(deploymentId) + "/clusters", request.getUri()).toString());
    response.setString("nodes", withPathAndQuery("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/", "recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
    response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());

    // Deployed platform and application versions, and deployment time / expiry.
    response.setString("version", deployment.version().toFullString());
    response.setString("revision", deployment.applicationVersion().id());
    response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
    controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
              .ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));

    DeploymentStatus status = controller.jobController().deploymentStatus(application);
    application.projectId().ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
    sourceRevisionToSlime(deployment.applicationVersion().source(), response);

    var instance = application.instances().get(deploymentId.applicationId().instance());
    if (instance != null) {
        // Rotation status only applies to production deployments with assigned rotations.
        if (!instance.rotations().isEmpty() && deployment.zone().environment() == Environment.prod)
            toSlime(instance.rotations(), instance.rotationStatus(), deployment, response);
        // Status of the deployment job for this zone: complete, pending (not yet ready), or running.
        JobType.from(controller.system(), deployment.zone())
               .map(type -> new JobId(instance.id(), type))
               .map(status.jobSteps()::get)
               .ifPresent(stepStatus -> {
                   JobControllerApiHandlerHelper.applicationVersionToSlime(
                           response.setObject("applicationVersion"), deployment.applicationVersion());
                   if (!status.jobsToRun().containsKey(stepStatus.job().get()))
                       response.setString("status", "complete");
                   else if (stepStatus.readyAt(instance.change()).map(controller.clock().instant()::isBefore).orElse(true))
                       response.setString("status", "pending");
                   else response.setString("status", "running");
               });
    }

    // Activity timestamps and rates, when known.
    Cursor activity = response.setObject("activity");
    deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));

    // Serving metrics for this deployment.
    DeploymentMetrics metrics = deployment.metrics();
    Cursor metricsObject = response.setObject("metrics");
    metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
    metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
    metricsObject.setDouble("documentCount", metrics.documentCount());
    metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
    metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
    metrics.instant().ifPresent(instant -> metricsObject.setLong("lastUpdated", instant.toEpochMilli()));
}
/** Serializes the given application version into the given object; unknown versions are serialized as empty. */
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
    if (applicationVersion.isUnknown()) return;
    object.setLong("buildNumber", applicationVersion.buildNumber().getAsLong());
    object.setString("hash", applicationVersion.id());
    sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
    applicationVersion.sourceUrl().ifPresent(url -> object.setString("sourceUrl", url));
    applicationVersion.commit().ifPresent(commit -> object.setString("commit", commit));
}
/** Serializes the git repository, branch and commit of the given source revision, when present. */
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
    revision.ifPresent(source -> {
        object.setString("gitRepository", source.repository());
        object.setString("gitBranch", source.branch());
        object.setString("gitCommit", source.commit());
    });
}
/** Serializes the given rotation state as a "bcpStatus" object on the given object. */
private void toSlime(RotationState state, Cursor object) {
    object.setObject("bcpStatus").setString("rotationStatus", rotationStateString(state));
}
/** Serializes the status of each assigned rotation, for the given deployment, into an "endpointStatus" array. */
private void toSlime(List<AssignedRotation> rotations, RotationStatus status, Deployment deployment, Cursor object) {
    var endpointStatusArray = object.setArray("endpointStatus");
    for (var assignedRotation : rotations) {
        var entry = endpointStatusArray.addObject();
        var targets = status.of(assignedRotation.rotationId());
        entry.setString("endpointId", assignedRotation.endpointId().id());
        entry.setString("rotationId", assignedRotation.rotationId().asString());
        entry.setString("clusterId", assignedRotation.clusterId().value());
        entry.setString("status", rotationStateString(status.of(assignedRotation.rotationId(), deployment)));
        entry.setLong("lastUpdated", targets.lastUpdated().toEpochMilli());
    }
}
/** Returns the monitoring-system URI for the given deployment, as resolved by the zone registry. */
private URI monitoringSystemUri(DeploymentId deploymentId) {
    return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
/**
 * Returns a non-broken, released version at least as old as the oldest platform the given application is on.
 *
 * If no known version is applicable, the newest version at least as old as the oldest platform is selected,
 * among all versions released for this system. If no such versions exists, throws an IllegalStateException.
 *
 * Fix: the set of versions known to this system is now built once, instead of being rebuilt
 * inside the filter for every candidate version from the maven repository (accidental O(n*m)).
 */
private Version compileVersion(TenantAndApplicationId id) {
    Version oldestPlatform = controller.applications().oldestInstalledPlatform(id);
    VersionStatus versionStatus = controller.readVersionStatus();
    return versionStatus.versions().stream()
                        .filter(version -> version.confidence().equalOrHigherThan(VespaVersion.Confidence.low))
                        .filter(VespaVersion::isReleased)
                        .map(VespaVersion::versionNumber)
                        .filter(version -> ! version.isAfter(oldestPlatform))
                        .max(Comparator.naturalOrder())
                        .orElseGet(() -> {
                            // Fall back to released versions not yet known to this system.
                            var knownVersions = versionStatus.versions().stream()
                                                             .map(VespaVersion::versionNumber)
                                                             .collect(Collectors.toSet());
                            return controller.mavenRepository().metadata().versions().stream()
                                             .filter(version -> ! version.isAfter(oldestPlatform))
                                             .filter(version -> ! knownVersions.contains(version))
                                             .max(Comparator.naturalOrder())
                                             .orElseThrow(() -> new IllegalStateException("No available releases of " +
                                                                                          controller.mavenRepository().artifactId()));
                        });
}
/** Sets the given deployment in or out of service for both rotation-backed and policy-backed global endpoints. */
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
    Instance instance = controller.applications().requireInstance(ApplicationId.from(tenantName, applicationName, instanceName));
    ZoneId zone = requireZone(environment, region);
    if ( ! instance.deployments().containsKey(zone))
        throw new NotExistsException(instance + " has no deployment in " + zone);
    DeploymentId deploymentId = new DeploymentId(instance.id(), zone);
    setGlobalRotationStatus(deploymentId, inService, request);
    setGlobalEndpointStatus(deploymentId, inService, request);
    return new MessageResponse(String.format("Successfully set %s in %s %s service",
                                             instance.id().toShortString(), zone, inService ? "in" : "out of"));
}
/** Set the global endpoint status for given deployment. This only applies to global endpoints backed by a cloud service */
private void setGlobalEndpointStatus(DeploymentId deployment, boolean inService, HttpRequest request) {
    GlobalRouting.Status status = inService ? GlobalRouting.Status.in : GlobalRouting.Status.out;
    GlobalRouting.Agent agent = isOperator(request) ? GlobalRouting.Agent.operator : GlobalRouting.Agent.tenant;
    controller.routing().policies().setGlobalRoutingStatus(deployment, status, agent);
}
/** Set the global rotation status for given deployment. This only applies to global endpoints backed by a rotation */
private void setGlobalRotationStatus(DeploymentId deployment, boolean inService, HttpRequest request) {
    var requestData = toSlime(request.getData()).get();
    var reason = mandatory("reason", requestData).asString();
    GlobalRouting.Agent agent = isOperator(request) ? GlobalRouting.Agent.operator : GlobalRouting.Agent.tenant;
    EndpointStatus.Status status = inService ? EndpointStatus.Status.in : EndpointStatus.Status.out;
    long timestamp = controller.clock().instant().getEpochSecond();
    controller.routing().setGlobalRotationStatus(deployment, new EndpointStatus(status, reason, agent.name(), timestamp));
}
/** Returns the global rotation status overrides for the given deployment. */
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    Slime slime = new Slime();
    Cursor array = slime.setObject().setArray("globalrotationoverride");
    controller.routing().globalRotationStatus(deploymentId)
              .forEach((endpoint, endpointStatus) -> {
                  array.addString(endpoint.upstreamIdOf(deploymentId));
                  Cursor statusObject = array.addObject();
                  statusObject.setString("status", endpointStatus.getStatus().name());
                  // Reason and agent may be unset; serialize those as empty strings.
                  statusObject.setString("reason", endpointStatus.getReason() == null ? "" : endpointStatus.getReason());
                  statusObject.setString("agent", endpointStatus.getAgent() == null ? "" : endpointStatus.getAgent());
                  statusObject.setLong("timestamp", endpointStatus.getEpoch());
              });
    return new SlimeJsonResponse(slime);
}
/** Returns the rotation status of the given (optionally named) endpoint, for the given deployment. */
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region, Optional<String> endpointId) {
    Instance instance = controller.applications().requireInstance(ApplicationId.from(tenantName, applicationName, instanceName));
    ZoneId zone = requireZone(environment, region);
    RotationId rotation = findRotationId(instance, endpointId);
    Deployment deployment = instance.deployments().get(zone);
    if (deployment == null)
        throw new NotExistsException(instance + " has no deployment in " + zone);
    Slime slime = new Slime();
    toSlime(instance.rotationStatus().of(rotation, deployment), slime.setObject());
    return new SlimeJsonResponse(slime);
}
/**
 * Returns a resource metering summary for the given application: the current allocation rate,
 * this month's and last month's totals, and per-instance CPU/memory/disk snapshot history.
 */
private HttpResponse metering(String tenant, String application, HttpRequest request) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    MeteringData meteringData = controller.serviceRegistry()
            .meteringService()
            .getMeteringData(TenantName.from(tenant), ApplicationName.from(application));

    // Most recent snapshot of allocated resources.
    ResourceAllocation currentSnapshot = meteringData.getCurrentSnapshot();
    Cursor currentRate = root.setObject("currentrate");
    currentRate.setDouble("cpu", currentSnapshot.getCpuCores());
    currentRate.setDouble("mem", currentSnapshot.getMemoryGb());
    currentRate.setDouble("disk", currentSnapshot.getDiskGb());

    // Aggregated allocation for the current month.
    ResourceAllocation thisMonth = meteringData.getThisMonth();
    Cursor thismonth = root.setObject("thismonth");
    thismonth.setDouble("cpu", thisMonth.getCpuCores());
    thismonth.setDouble("mem", thisMonth.getMemoryGb());
    thismonth.setDouble("disk", thisMonth.getDiskGb());

    // Aggregated allocation for the previous month.
    ResourceAllocation lastMonth = meteringData.getLastMonth();
    Cursor lastmonth = root.setObject("lastmonth");
    lastmonth.setDouble("cpu", lastMonth.getCpuCores());
    lastmonth.setDouble("mem", lastMonth.getMemoryGb());
    lastmonth.setDouble("disk", lastMonth.getDiskGb());

    // Snapshot history, grouped per instance, as parallel time series for cpu, mem and disk.
    Map<ApplicationId, List<ResourceSnapshot>> history = meteringData.getSnapshotHistory();
    Cursor details = root.setObject("details");
    Cursor detailsCpu = details.setObject("cpu");
    Cursor detailsMem = details.setObject("mem");
    Cursor detailsDisk = details.setObject("disk");

    history.forEach((applicationId, resources) -> {
        String instanceName = applicationId.instance().value();
        Cursor detailsCpuApp = detailsCpu.setObject(instanceName);
        Cursor detailsMemApp = detailsMem.setObject(instanceName);
        Cursor detailsDiskApp = detailsDisk.setObject(instanceName);
        Cursor detailsCpuData = detailsCpuApp.setArray("data");
        Cursor detailsMemData = detailsMemApp.setArray("data");
        Cursor detailsDiskData = detailsDiskApp.setArray("data");

        // One {unixms, value} point per snapshot, in each of the three series.
        resources.forEach(resourceSnapshot -> {
            Cursor cpu = detailsCpuData.addObject();
            cpu.setLong("unixms", resourceSnapshot.getTimestamp().toEpochMilli());
            cpu.setDouble("value", resourceSnapshot.getCpuCores());

            Cursor mem = detailsMemData.addObject();
            mem.setLong("unixms", resourceSnapshot.getTimestamp().toEpochMilli());
            mem.setDouble("value", resourceSnapshot.getMemoryGb());

            Cursor disk = detailsDiskData.addObject();
            disk.setLong("unixms", resourceSnapshot.getTimestamp().toEpochMilli());
            disk.setDouble("value", resourceSnapshot.getDiskGb());
        });
    });

    return new SlimeJsonResponse(slime);
}
/** Returns the change currently rolling out for the given instance, if any. */
private HttpResponse deploying(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    Instance instance = controller.applications().requireInstance(ApplicationId.from(tenantName, applicationName, instanceName));
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    Change change = instance.change();
    if ( ! change.isEmpty()) {
        change.platform().ifPresent(version -> root.setString("platform", version.toString()));
        change.application().ifPresent(version -> root.setString("application", version.id()));
        root.setBool("pinned", change.isPinned());
    }
    return new SlimeJsonResponse(slime);
}
/** Returns whether the given deployment is currently suspended (i.e. not serving). */
private HttpResponse suspended(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    Slime slime = new Slime();
    slime.setObject().setBool("suspended", controller.applications().isSuspended(deploymentId));
    return new SlimeJsonResponse(slime);
}
/** Returns the service view of the given deployment, as reported by the config servers. */
private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
    ZoneId zone = requireZone(environment, region);
    ApplicationId applicationId = new ApplicationId.Builder().tenant(tenantName)
                                                             .applicationName(applicationName)
                                                             .instanceName(instanceName)
                                                             .build();
    ServiceApiResponse response = new ServiceApiResponse(zone,
                                                         applicationId,
                                                         controller.zoneRegistry().getConfigServerApiUris(zone),
                                                         request.getUri());
    response.setResponse(applicationView);
    return response;
}
/** Proxies a service API request for the given deployment; cluster-controller status pages are returned as HTML. */
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName), requireZone(environment, region));

    if ("container-clustercontroller".equals(serviceName) && restPath.contains("/status/")) {
        String[] parts = restPath.split("/status/");
        return new HtmlResponse(controller.serviceRegistry().configServer()
                                          .getClusterControllerStatus(deploymentId, parts[0], parts[1]));
    }

    Map<?,?> result = controller.serviceRegistry().configServer().getServiceApiResponse(deploymentId, serviceName, restPath);
    ServiceApiResponse response = new ServiceApiResponse(deploymentId.zoneId(),
                                                         deploymentId.applicationId(),
                                                         controller.zoneRegistry().getConfigServerApiUris(deploymentId.zoneId()),
                                                         request.getUri());
    response.setResponse(result, serviceName, restPath);
    return response;
}
/** Proxies a request for application package content at the given path to the config server of the given deployment. */
private HttpResponse content(String tenantName, String applicationName, String instanceName, String environment, String region, String restPath, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName), requireZone(environment, region));
    return controller.serviceRegistry().configServer().getApplicationPackageContent(deploymentId, "/" + restPath, request.getUri());
}
/** Updates an existing tenant from the request body, and returns its serialized form. */
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
    getTenantOrThrow(tenantName); // 404 if the tenant does not exist.
    TenantName tenant = TenantName.from(tenantName);
    Inspector requestObject = toSlime(request.getData()).get();
    Credentials credentials = accessControlRequests.credentials(tenant, requestObject, request.getJDiscRequest());
    controller.tenants().update(accessControlRequests.specification(tenant, requestObject), credentials);
    return tenant(controller.tenants().require(tenant), request);
}
/** Creates a tenant from the request body, and returns its serialized form. */
private HttpResponse createTenant(String tenantName, HttpRequest request) {
    TenantName tenant = TenantName.from(tenantName);
    Inspector requestObject = toSlime(request.getData()).get();
    Credentials credentials = accessControlRequests.credentials(tenant, requestObject, request.getJDiscRequest());
    controller.tenants().create(accessControlRequests.specification(tenant, requestObject), credentials);
    return tenant(controller.tenants().require(tenant), request);
}
/**
 * Creates an application under the given tenant, and returns its serialized identity.
 *
 * Fix: the created {@code Application} was assigned to a local variable that was never
 * read; the creation call is kept for its side effect, without the dead assignment.
 */
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    Credentials credentials = accessControlRequests.credentials(id.tenant(), requestObject, request.getJDiscRequest());
    controller.applications().createApplication(id, credentials); // Side effect only; result unused here.
    Slime slime = new Slime();
    toSlime(id, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Creates an instance of the given application, creating the application itself first if needed. */
private HttpResponse createInstance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    TenantAndApplicationId applicationId = TenantAndApplicationId.from(tenantName, applicationName);
    if (controller.applications().getApplication(applicationId).isEmpty())
        createApplication(tenantName, applicationName, request);

    var instanceId = applicationId.instance(instanceName);
    controller.applications().createInstance(instanceId);

    Slime slime = new Slime();
    toSlime(instanceId, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Trigger deployment of the given Vespa version if a valid one is given, e.g., "7.8.9". */
private HttpResponse deployPlatform(String tenantName, String applicationName, String instanceName, boolean pin, HttpRequest request) {
    request = controller.auditLogger().log(request);
    String versionString = readToString(request.getData());
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    StringBuilder response = new StringBuilder();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        VersionStatus versionStatus = controller.readVersionStatus();
        // An empty version means "the current system version".
        Version requested = Version.fromString(versionString);
        Version version = requested.equals(Version.emptyVersion) ? controller.systemVersion(versionStatus)
                                                                 : requested;
        if ( ! versionStatus.isActive(version)) {
            String activeVersions = versionStatus.versions()
                                                 .stream()
                                                 .map(VespaVersion::versionNumber)
                                                 .map(Version::toString)
                                                 .collect(joining(", "));
            throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
                                               "Version is not active in this system. " +
                                               "Active versions: " + activeVersions);
        }
        Change change = pin ? Change.of(version).withPin() : Change.of(version);
        controller.applications().deploymentTrigger().forceChange(id, change);
        response.append("Triggered ").append(change).append(" for ").append(id);
    });
    return new MessageResponse(response.toString());
}
/**
 * Trigger deployment to the last known application package for the given application.
 *
 * Fix: {@code latestVersion().get()} could fail with a bare {@code NoSuchElementException}
 * when no application package has ever been submitted; now throws with a descriptive message.
 */
private HttpResponse deployApplication(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    controller.auditLogger().log(request);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    StringBuilder response = new StringBuilder();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        Change change = Change.of(application.get().latestVersion()
                                             .orElseThrow(() -> new IllegalArgumentException(
                                                     "No application package has been submitted for " + id)));
        controller.applications().deploymentTrigger().forceChange(id, change);
        response.append("Triggered ").append(change).append(" for ").append(id);
    });
    return new MessageResponse(response.toString());
}
/** Cancel ongoing change for given application, e.g., everything with {"cancel":"all"} */
private HttpResponse cancelDeploy(String tenantName, String applicationName, String instanceName, String choice) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    StringBuilder response = new StringBuilder();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        Change current = application.get().require(id.instance()).change();
        if (current.isEmpty()) {
            response.append("No deployment in progress for ").append(id).append(" at this time");
            return;
        }
        controller.applications().deploymentTrigger().cancelChange(id, ChangesToCancel.valueOf(choice.toUpperCase()));
        response.append("Changed deployment from '").append(current)
                .append("' to '").append(controller.applications().requireInstance(id).change())
                .append("' for ").append(id);
    });
    return new MessageResponse(response.toString());
}
/** Schedule reindexing of an application, or a subset of clusters, possibly on a subset of documents. */
private HttpResponse reindex(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    // Absent properties and blank entries both yield empty lists.
    List<String> clusterNames = Stream.of(Optional.ofNullable(request.getProperty("clusterId")).orElse("").split(","))
                                      .filter(cluster -> ! cluster.isBlank())
                                      .collect(toUnmodifiableList());
    List<String> documentTypes = Stream.of(Optional.ofNullable(request.getProperty("documentType")).orElse("").split(","))
                                       .filter(type -> ! type.isBlank())
                                       .collect(toUnmodifiableList());

    controller.applications().reindex(id, zone, clusterNames, documentTypes, request.getBooleanProperty("indexedOnly"));
    return new MessageResponse("Requested reindexing of " + id + " in " + zone +
                               (clusterNames.isEmpty() ? "" : ", on clusters " + String.join(", ", clusterNames) +
                               (documentTypes.isEmpty() ? "" : ", for types " + String.join(", ", documentTypes))));
}
/** Gets reindexing status of an application in a zone, as JSON. */
private HttpResponse getReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    ApplicationReindexing reindexing = controller.applications().applicationReindexing(id, zone);
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    root.setBool("enabled", reindexing.enabled());
    // Clusters, and the document types within each, are sorted by name for stable output.
    Cursor clustersArray = root.setArray("clusters");
    reindexing.clusters().entrySet().stream().sorted(comparingByKey())
              .forEach(cluster -> {
                  Cursor clusterObject = clustersArray.addObject();
                  clusterObject.setString("name", cluster.getKey());
                  // Document types with reindexing still pending, and the config generation each requires.
                  Cursor pendingArray = clusterObject.setArray("pending");
                  cluster.getValue().pending().entrySet().stream().sorted(comparingByKey())
                         .forEach(pending -> {
                             Cursor pendingObject = pendingArray.addObject();
                             pendingObject.setString("type", pending.getKey());
                             pendingObject.setLong("requiredGeneration", pending.getValue());
                         });
                  // Document types with a ready status; detailed fields are filled in by setStatus.
                  Cursor readyArray = clusterObject.setArray("ready");
                  cluster.getValue().ready().entrySet().stream().sorted(comparingByKey())
                         .forEach(ready -> {
                             Cursor readyObject = readyArray.addObject();
                             readyObject.setString("type", ready.getKey());
                             setStatus(readyObject, ready.getValue());
                         });
              });
    return new SlimeJsonResponse(slime);
}
/** Fills the given object with the fields of the given reindexing status which are present. */
void setStatus(Cursor statusObject, ApplicationReindexing.Status status) {
    status.readyAt().ifPresent(at -> statusObject.setLong("readyAtMillis", at.toEpochMilli()));
    status.startedAt().ifPresent(at -> statusObject.setLong("startedAtMillis", at.toEpochMilli()));
    status.endedAt().ifPresent(at -> statusObject.setLong("endedAtMillis", at.toEpochMilli()));
    status.state().map(ApplicationApiHandler::toString)
          .ifPresent(stateName -> statusObject.setString("state", stateName));
    status.message().ifPresent(text -> statusObject.setString("message", text));
    status.progress().ifPresent(fraction -> statusObject.setDouble("progress", fraction));
}
/** Returns the wire name of the given reindexing state, or null for states not exposed in the API. */
private static String toString(ApplicationReindexing.State state) {
    switch (state) {
        case PENDING:    return "pending";
        case RUNNING:    return "running";
        case FAILED:     return "failed";
        case SUCCESSFUL: return "successful";
    }
    return null; // Unknown states are simply left out of the response.
}
/** Enables reindexing of an application in a zone. */
private HttpResponse enableReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ZoneId zone = requireZone(environment, region);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    controller.applications().enableReindexing(id, zone);
    return new MessageResponse("Enabled reindexing of " + id + " in " + zone);
}
/** Disables reindexing of an application in a zone. */
private HttpResponse disableReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ZoneId zone = requireZone(environment, region);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    controller.applications().disableReindexing(id, zone);
    return new MessageResponse("Disabled reindexing of " + id + " in " + zone);
}
/** Schedules a restart of a deployment, optionally narrowed to a host, cluster type, or cluster id. */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    DeploymentId deploymentId = new DeploymentId(id, requireZone(environment, region));
    Optional<HostName> hostName = Optional.ofNullable(request.getProperty("hostname")).map(HostName::from);
    Optional<ClusterSpec.Type> clusterType = Optional.ofNullable(request.getProperty("clusterType")).map(ClusterSpec.Type::from);
    Optional<ClusterSpec.Id> clusterId = Optional.ofNullable(request.getProperty("clusterId")).map(ClusterSpec.Id::from);
    controller.applications().restart(deploymentId, new RestartFilter().withHostName(hostName)
                                                                       .withClusterType(clusterType)
                                                                       .withClusterId(clusterId));
    return new MessageResponse("Requested restart of " + deploymentId);
}
/** Sets the suspension status of the given deployment. */
private HttpResponse suspend(String tenantName, String applicationName, String instanceName, String environment, String region, boolean suspend) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    controller.applications().setSuspension(deploymentId, suspend);
    String verb = suspend ? "Suspended" : "Resumed";
    return new MessageResponse(verb + " orchestration of " + deploymentId);
}
/**
 * Deploys an application package directly to the zone of the given job type.
 * Only manually deployed environments may be targeted, unless the caller is an operator.
 */
private HttpResponse jobDeploy(ApplicationId id, JobType type, HttpRequest request) {
    if ( ! type.environment().isManuallyDeployed() && ! isOperator(request))
        throw new IllegalArgumentException("Direct deployments are only allowed to manually deployed environments.");

    Map<String, byte[]> dataParts = parseDataParts(request);
    // Consistency fix: check presence under the same key constant used for retrieval below,
    // so the presence check and the lookup cannot drift apart.
    if ( ! dataParts.containsKey(EnvironmentResource.APPLICATION_ZIP))
        throw new IllegalArgumentException("Missing required form part 'applicationZip'");

    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
    controller.applications().verifyApplicationIdentityConfiguration(id.tenant(),
                                                                     Optional.of(id.instance()),
                                                                     Optional.of(type.zone(controller.system())),
                                                                     applicationPackage,
                                                                     Optional.of(requireUserPrincipal(request)));

    // An explicit platform version may be given in the optional "deployOptions" part.
    Optional<Version> version = Optional.ofNullable(dataParts.get("deployOptions"))
                                        .map(json -> SlimeUtils.jsonToSlime(json).get())
                                        .flatMap(options -> optional("vespaVersion", options))
                                        .map(Version::fromString);

    controller.jobController().deploy(id, type, version, applicationPackage);
    RunId runId = controller.jobController().last(id, type).get().id();
    Slime slime = new Slime();
    Cursor rootObject = slime.setObject();
    rootObject.setString("message", "Deployment started in " + runId +
                                    ". This may take about 15 minutes the first time.");
    rootObject.setLong("run", runId.number());
    return new SlimeJsonResponse(slime);
}
/**
 * Deploys an application package, a known application version, or the currently deployed
 * version (redeployment), to the given zone.
 *
 * The request must contain a "deployOptions" JSON form part; "applicationZip" is optional
 * when a known application version, or a redeployment, is requested instead.
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    Map<String, byte[]> dataParts = parseDataParts(request);
    if ( ! dataParts.containsKey("deployOptions"))
        return ErrorResponse.badRequest("Missing required form part 'deployOptions'");

    Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();

    /*
     * Special handling of the proxy application (the only system application with an application package)
     * Setting any other deployOptions here is not supported for now (e.g. specifying version), but
     * this might be handy later to handle emergency downgrades.
     */
    boolean isZoneApplication = SystemApplication.proxy.id().equals(applicationId);
    if (isZoneApplication) {
        // An explicit version may not be pinned for system applications.
        String versionStr = deployOptions.field("vespaVersion").asString();
        boolean versionPresent = !versionStr.isEmpty() && !versionStr.equals("null");
        if (versionPresent) {
            throw new RuntimeException("Version not supported for system applications");
        }
        // System applications may only be deployed on a settled, fully determined system version.
        VersionStatus versionStatus = controller.readVersionStatus();
        if (versionStatus.isUpgrading()) {
            throw new IllegalArgumentException("Deployment of system applications during a system upgrade is not allowed");
        }
        Optional<VespaVersion> systemVersion = versionStatus.systemVersion();
        if (systemVersion.isEmpty()) {
            throw new IllegalArgumentException("Deployment of system applications is not permitted until system version is determined");
        }
        ActivateResult result = controller.applications()
                                          .deploySystemApplicationPackage(SystemApplication.proxy, zone, systemVersion.get().versionNumber());
        return new SlimeJsonResponse(toSlime(result));
    }

    /*
     * Normal applications from here
     */
    Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
                                                              .map(ApplicationPackage::new);

    // NOTE(review): this lookup result is unused below — possibly intended as an existence check; confirm.
    Optional<com.yahoo.vespa.hosted.controller.Application> application = controller.applications().getApplication(TenantAndApplicationId.from(applicationId));

    // A known application version: source revision and build number must be given together,
    // and are mutually exclusive with an uploaded application package.
    Inspector sourceRevision = deployOptions.field("sourceRevision");
    Inspector buildNumber = deployOptions.field("buildNumber");
    if (sourceRevision.valid() != buildNumber.valid())
        throw new IllegalArgumentException("Source revision and build number must both be provided, or not");

    Optional<ApplicationVersion> applicationVersion = Optional.empty();
    if (sourceRevision.valid()) {
        if (applicationPackage.isPresent())
            throw new IllegalArgumentException("Application version and application package can't both be provided.");
        applicationVersion = Optional.of(ApplicationVersion.from(toSourceRevision(sourceRevision),
                                                                 buildNumber.asLong()));
        applicationPackage = Optional.of(controller.applications().getApplicationPackage(applicationId,
                                                                                         applicationVersion.get()));
    }

    boolean deployDirectly = deployOptions.field("deployDirectly").asBool();
    Optional<Version> vespaVersion = optional("vespaVersion", deployOptions).map(Version::new);

    // Redeployment: deployDirectly with no package, version or platform given re-deploys
    // the existing deployment in the zone with its current versions.
    if (deployDirectly && applicationPackage.isEmpty() && applicationVersion.isEmpty() && vespaVersion.isEmpty()) {
        Optional<Deployment> deployment = controller.applications().getInstance(applicationId)
                                                    .map(Instance::deployments)
                                                    .flatMap(deployments -> Optional.ofNullable(deployments.get(zone)));

        if(deployment.isEmpty())
            throw new IllegalArgumentException("Can't redeploy application, no deployment currently exist");

        ApplicationVersion version = deployment.get().applicationVersion();
        if(version.isUnknown())
            throw new IllegalArgumentException("Can't redeploy application, application version is unknown");

        applicationVersion = Optional.of(version);
        vespaVersion = Optional.of(deployment.get().version());
        applicationPackage = Optional.of(controller.applications().getApplicationPackage(applicationId,
                                                                                         applicationVersion.get()));
    }

    DeployOptions deployOptionsJsonClass = new DeployOptions(deployDirectly,
                                                             vespaVersion,
                                                             deployOptions.field("ignoreValidationErrors").asBool(),
                                                             deployOptions.field("deployCurrentVersion").asBool());

    applicationPackage.ifPresent(aPackage -> controller.applications().verifyApplicationIdentityConfiguration(applicationId.tenant(),
                                                                                                              Optional.of(applicationId.instance()),
                                                                                                              Optional.of(zone),
                                                                                                              aPackage,
                                                                                                              Optional.of(requireUserPrincipal(request))));

    ActivateResult result = controller.applications().deploy(applicationId,
                                                             zone,
                                                             applicationPackage,
                                                             applicationVersion,
                                                             deployOptionsJsonClass);

    return new SlimeJsonResponse(toSlime(result));
}
/** Deletes the given tenant, if it exists, using credentials from the request body. */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().get(tenantName);
    if (tenant.isEmpty())
        return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");

    TenantName name = tenant.get().name();
    Credentials credentials = accessControlRequests.credentials(name,
                                                                toSlime(request.getData()).get(),
                                                                request.getJDiscRequest());
    controller.tenants().delete(name, credentials);
    return tenant(tenant.get(), request);
}
/** Deletes the given application, using credentials from the request body. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    controller.applications().deleteApplication(id,
                                                accessControlRequests.credentials(id.tenant(),
                                                                                  toSlime(request.getData()).get(),
                                                                                  request.getJDiscRequest()));
    return new MessageResponse("Deleted application " + id);
}
/** Deletes the given instance, and the application itself when this was its last instance. */
private HttpResponse deleteInstance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    ApplicationId instanceId = id.instance(instanceName);
    controller.applications().deleteInstance(instanceId);
    boolean lastInstance = controller.applications().requireApplication(id).instances().isEmpty();
    if (lastInstance) {
        Credentials credentials = accessControlRequests.credentials(id.tenant(), toSlime(request.getData()).get(), request.getJDiscRequest());
        controller.applications().deleteApplication(id, credentials);
    }
    return new MessageResponse("Deleted instance " + instanceId.toFullString());
}
/** Deactivates the deployment of the given instance in the given zone. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId instanceId = ApplicationId.from(tenantName, applicationName, instanceName);
    DeploymentId id = new DeploymentId(instanceId, requireZone(environment, region));
    controller.applications().deactivate(id.applicationId(), id.zoneId());
    return new MessageResponse("Deactivated " + id);
}
/** Returns test config for the indicated job, with production deployments of the default instance. */
private HttpResponse testConfig(ApplicationId id, JobType type) {
    ApplicationId defaultInstanceId = TenantAndApplicationId.from(id).defaultInstance();
    HashSet<DeploymentId> deployments = new HashSet<>();
    controller.applications().getInstance(defaultInstanceId)
              .ifPresent(instance -> instance.productionDeployments().keySet()
                                             .forEach(zone -> deployments.add(new DeploymentId(defaultInstanceId, zone))));
    var testedZone = type.zone(controller.system());
    // Non-production jobs also test the (possibly different) instance deployed in the tested zone.
    if ( ! type.isProduction())
        deployments.add(new DeploymentId(id, testedZone));
    return new SlimeJsonResponse(testConfigSerializer.configSlime(id,
                                                                  type,
                                                                  false,
                                                                  controller.routing().zoneEndpointsOf(deployments),
                                                                  controller.applications().reachableContentClustersByZone(deployments)));
}
/** Reads a source revision from its slime representation; repository, branch and commit are all required. */
private static SourceRevision toSourceRevision(Inspector object) {
    boolean complete = object.field("repository").valid()
                    && object.field("branch").valid()
                    && object.field("commit").valid();
    if ( ! complete) {
        throw new IllegalArgumentException("Must specify \"repository\", \"branch\", and \"commit\".");
    }
    return new SourceRevision(object.field("repository").asString(),
                              object.field("branch").asString(),
                              object.field("commit").asString());
}
/** Returns the tenant with the given name, or throws NotExistsException when it does not exist. */
private Tenant getTenantOrThrow(String tenantName) {
    Optional<Tenant> tenant = controller.tenants().get(tenantName);
    return tenant.orElseThrow(() -> new NotExistsException(new TenantId(tenantName)));
}
/**
 * Serializes the given tenant, with type-specific metadata and all (or only production)
 * instances of all its applications, to the given object.
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request) {
    object.setString("tenant", tenant.name().value());
    object.setString("type", tenantType(tenant));
    List<com.yahoo.vespa.hosted.controller.Application> applications = controller.applications().asList(tenant.name());
    switch (tenant.type()) {
        case athenz:
            // Athenz tenants carry domain, property and (optional) contact information.
            AthenzTenant athenzTenant = (AthenzTenant) tenant;
            object.setString("athensDomain", athenzTenant.domain().getName());
            object.setString("property", athenzTenant.property().id());
            athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
            athenzTenant.contact().ifPresent(c -> {
                object.setString("propertyUrl", c.propertyUrl().toString());
                object.setString("contactsUrl", c.url().toString());
                object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
                Cursor contactsArray = object.setArray("contacts");
                c.persons().forEach(persons -> {
                    Cursor personArray = contactsArray.addArray();
                    persons.forEach(personArray::addString);
                });
            });
            break;
        case cloud: {
            // Cloud tenants carry creator, developer keys, secret stores and quota information.
            CloudTenant cloudTenant = (CloudTenant) tenant;
            cloudTenant.creator().ifPresent(creator -> object.setString("creator", creator.getName()));
            Cursor pemDeveloperKeysArray = object.setArray("pemDeveloperKeys");
            cloudTenant.developerKeys().forEach((key, user) -> {
                Cursor keyObject = pemDeveloperKeysArray.addObject();
                keyObject.setString("key", KeyUtils.toPem(key));
                keyObject.setString("user", user.getName());
            });
            Cursor secretStore = object.setArray("secretStores");
            cloudTenant.tenantSecretStores().forEach(store -> {
                Cursor storeObject = secretStore.addObject();
                storeObject.setString("name", store.getName());
                storeObject.setString("awsId", store.getAwsId());
                storeObject.setString("role", store.getRole());
            });
            // Quota: the tenant's allowance, and the sum used by all its applications.
            var tenantQuota = controller.serviceRegistry().billingController().getQuota(tenant.name());
            var usedQuota = applications.stream()
                                        .map(com.yahoo.vespa.hosted.controller.Application::quotaUsage)
                                        .reduce(QuotaUsage.none, QuotaUsage::add);
            toSlime(tenantQuota, usedQuota, object.setObject("quota"));
            break;
        }
        default: throw new IllegalArgumentException("Unexpected tenant type '" + tenant.type() + "'.");
    }
    // Full instance objects when recursion is requested, otherwise just id references with URLs.
    Cursor applicationArray = object.setArray("applications");
    for (com.yahoo.vespa.hosted.controller.Application application : applications) {
        DeploymentStatus status = controller.jobController().deploymentStatus(application);
        for (Instance instance : showOnlyProductionInstances(request) ? application.productionInstances().values()
                                                                      : application.instances().values())
            if (recurseOverApplications(request))
                toSlime(applicationArray.addObject(), instance, status, request);
            else
                toSlime(instance.id(), applicationArray.addObject(), request);
    }
    tenantMetaDataToSlime(tenant, object.setObject("metaData"));
}
/** Serializes the given quota and its current usage; an absent budget is written as nix. */
private void toSlime(Quota quota, QuotaUsage usage, Cursor object) {
    if (quota.budget().isPresent())
        object.setDouble("budget", quota.budget().get().doubleValue());
    else
        object.setNix("budget");
    object.setDouble("budgetUsed", usage.rate());
    quota.maxClusterSize().ifPresent(size -> object.setLong("clusterSize", size));
}
/** Serializes the given cluster resources, with total cluster cost rounded to two decimals. */
private void toSlime(ClusterResources resources, Cursor object) {
    object.setLong("nodes", resources.nodes());
    object.setLong("groups", resources.groups());
    toSlime(resources.nodeResources(), object.setObject("nodeResources"));

    // NOTE(review): non-public systems divide cost by 3 — presumably internal pricing;
    // confirm the rationale and consider naming this constant.
    double costDivisor = controller.zoneRegistry().system().isPublic() ? 1.0 : 3.0;
    object.setDouble("cost", Math.round(resources.nodes() * resources.nodeResources().cost() * 100.0 / costDivisor) / 100.0);
}
/** Writes the cpu, memory and disk fields of the given utilization to the given object. */
private void utilizationToSlime(Cluster.Utilization utilization, Cursor utilizationObject) {
    utilizationObject.setDouble("cpu", utilization.cpu());
    utilizationObject.setDouble("memory", utilization.memory());
    utilizationObject.setDouble("disk", utilization.disk());
}
/** Serializes each scaling event with its from/to resources and timestamp. */
private void scalingEventsToSlime(List<Cluster.ScalingEvent> scalingEvents, Cursor scalingEventsArray) {
    scalingEvents.forEach(event -> {
        Cursor eventObject = scalingEventsArray.addObject();
        toSlime(event.from(), eventObject.setObject("from"));
        toSlime(event.to(), eventObject.setObject("to"));
        eventObject.setLong("at", event.at().toEpochMilli());
    });
}
/** Serializes the given node resources: vcpu, memory, disk, bandwidth, disk speed and storage type. */
private void toSlime(NodeResources resources, Cursor object) {
    object.setDouble("vcpu", resources.vcpu());
    object.setDouble("memoryGb", resources.memoryGb());
    object.setDouble("diskGb", resources.diskGb());
    object.setDouble("bandwidthGbps", resources.bandwidthGbps());
    object.setString("diskSpeed", valueOf(resources.diskSpeed()));
    object.setString("storageType", valueOf(resources.storageType()));
}
/** Writes a brief entry for the given tenant, with type metadata and a link to its full resource. */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
    object.setString("tenant", tenant.name().value());
    Cursor metaData = object.setObject("metaData");
    metaData.setString("type", tenantType(tenant));
    switch (tenant.type()) {
        case athenz:
            AthenzTenant athenzTenant = (AthenzTenant) tenant;
            metaData.setString("athensDomain", athenzTenant.domain().getName());
            metaData.setString("property", athenzTenant.property().id());
            break;
        case cloud: break; // No additional list-level metadata for cloud tenants.
        default: throw new IllegalArgumentException("Unexpected tenant type '" + tenant.type() + "'.");
    }
    object.setString("url", withPath("/application/v4/tenant/" + tenant.name().value(), requestURI).toString());
}
/** Serializes creation, last dev deployment, last submission and last login timestamps for the tenant. */
private void tenantMetaDataToSlime(Tenant tenant, Cursor object) {
    List<com.yahoo.vespa.hosted.controller.Application> applications = controller.applications().asList(tenant.name());
    // Latest start of any dev-environment job run, across all instances of all applications.
    Optional<Instant> lastDev = applications.stream()
            .flatMap(application -> application.instances().values().stream())
            .flatMap(instance -> controller.jobController().jobs(instance.id()).stream()
                    .filter(jobType -> jobType.environment() == Environment.dev)
                    .flatMap(jobType -> controller.jobController().last(instance.id(), jobType).stream()))
            .map(Run::start)
            .max(Comparator.naturalOrder());
    // Latest build time of any application's latest submitted version.
    Optional<Instant> lastSubmission = applications.stream()
            .flatMap(app -> app.latestVersion().flatMap(ApplicationVersion::buildTime).stream())
            .max(Comparator.naturalOrder());
    object.setLong("createdAtMillis", tenant.createdAt().toEpochMilli());
    lastDev.ifPresent(instant -> object.setLong("lastDeploymentToDevMillis", instant.toEpochMilli()));
    lastSubmission.ifPresent(instant -> object.setLong("lastSubmissionToProdMillis", instant.toEpochMilli()));
    // Last login per user level; the field is omitted when that level has never logged in.
    tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.user)
            .ifPresent(instant -> object.setLong("lastLoginByUserMillis", instant.toEpochMilli()));
    tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.developer)
            .ifPresent(instant -> object.setLong("lastLoginByDeveloperMillis", instant.toEpochMilli()));
    tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.administrator)
            .ifPresent(instant -> object.setLong("lastLoginByAdministratorMillis", instant.toEpochMilli()));
}
/** Returns a copy of the given URI with the host and port from the given URI, the path set to the given path and the query set to given query*/
private URI withPathAndQuery(String newPath, String newQuery, URI uri) {
    try {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, newQuery, null);
    }
    catch (URISyntaxException e) {
        // Components originate from an already-parsed URI, so reconstruction should not fail.
        throw new RuntimeException("Will not happen", e);
    }
}
/** Returns a copy of the given URI with the host and port from the given URI and the path set to the given path; the query is dropped. */
private URI withPath(String newPath, URI uri) {
    return withPathAndQuery(newPath, null, uri);
}
/** Returns the application/v4 API path for the given deployment. */
private String toPath(DeploymentId id) {
    ApplicationId instance = id.applicationId();
    ZoneId zone = id.zoneId();
    return path("/application", "v4",
                "tenant", instance.tenant(),
                "application", instance.application(),
                "instance", instance.instance(),
                "environment", zone.environment(),
                "region", zone.region());
}
/**
 * Parses the given string as a long.
 *
 * @param valueOrNull the value to parse, possibly null
 * @param defaultWhenNull the value to return when the input is null
 * @throws IllegalArgumentException when the value is non-null but not a valid long
 */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null) return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    }
    catch (NumberFormatException e) {
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'");
    }
}
/** Serializes key facts about a job run: id, versions, reason, and end (or, if still running, start) time. */
private void toSlime(Run run, Cursor object) {
    object.setLong("id", run.id().number());
    object.setString("version", run.versions().targetPlatform().toFullString());
    ApplicationVersion target = run.versions().targetApplication();
    if ( ! target.isUnknown())
        toSlime(target, object.setObject("revision"));
    object.setString("reason", "unknown reason");
    object.setLong("at", run.end().orElse(run.start()).toEpochMilli());
}
/**
 * Reads the given stream (at most 1 MB) and parses it as JSON.
 *
 * @throws RuntimeException wrapping the underlying IOException if reading fails
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        // Bug fix: the original threw a bare RuntimeException, discarding the cause entirely.
        throw new RuntimeException(e);
    }
}
/** Returns the authenticated user principal of the request, or throws when none is present. */
private static Principal requireUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal != null)
        return principal;
    throw new InternalServerErrorException("Expected a user principal");
}
/** Returns the given field of the object, throwing when it is not valid (i.e. missing). */
private Inspector mandatory(String key, Inspector object) {
    Inspector field = object.field(key);
    if ( ! field.valid())
        throw new IllegalArgumentException("'" + key + "' is missing");
    return field;
}
/** Returns the string value of the given field, or empty when the field is missing. */
private Optional<String> optional(String key, Inspector object) {
    return SlimeUtils.optionalString(object.field(key));
}
/** Joins the given path elements with '/'; elements are rendered with toString. */
private static String path(Object... elements) {
    return Joiner.on("/").join(elements);
}
/** Writes tenant, application, and the application's API URL to the given object. */
private void toSlime(TenantAndApplicationId id, Cursor object, HttpRequest request) {
    object.setString("tenant", id.tenant().value());
    object.setString("application", id.application().value());
    String apiPath = "/application/v4" +
                     "/tenant/" + id.tenant().value() +
                     "/application/" + id.application().value();
    object.setString("url", withPath(apiPath, request.getUri()).toString());
}
/** Writes tenant, application, instance, and the instance's API URL to the given object. */
private void toSlime(ApplicationId id, Cursor object, HttpRequest request) {
    object.setString("tenant", id.tenant().value());
    object.setString("application", id.application().value());
    object.setString("instance", id.instance().value());
    String apiPath = "/application/v4" +
                     "/tenant/" + id.tenant().value() +
                     "/application/" + id.application().value() +
                     "/instance/" + id.instance().value();
    object.setString("url", withPath(apiPath, request.getUri()).toString());
}
/** Serializes a deployment activation result: revision, package size, prepare log, and config change actions. */
private Slime toSlime(ActivateResult result) {
    Slime slime = new Slime();
    Cursor object = slime.setObject();
    object.setString("revisionId", result.revisionId().id());
    object.setLong("applicationZipSize", result.applicationZipSizeBytes());
    // Log messages from the prepare phase, when any were returned.
    Cursor logArray = object.setArray("prepareMessages");
    if (result.prepareResponse().log != null) {
        for (Log logMessage : result.prepareResponse().log) {
            Cursor logObject = logArray.addObject();
            logObject.setLong("time", logMessage.time);
            logObject.setString("level", logMessage.level);
            logObject.setString("message", logMessage.message);
        }
    }
    Cursor changeObject = object.setObject("configChangeActions");
    // Restart actions reported for the config change, per cluster and service type.
    Cursor restartActionsArray = changeObject.setArray("restart");
    for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
        Cursor restartActionObject = restartActionsArray.addObject();
        restartActionObject.setString("clusterName", restartAction.clusterName);
        restartActionObject.setString("clusterType", restartAction.clusterType);
        restartActionObject.setString("serviceType", restartAction.serviceType);
        serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
        stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
    }
    // Refeed actions reported for the config change, per document type and cluster.
    Cursor refeedActionsArray = changeObject.setArray("refeed");
    for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
        Cursor refeedActionObject = refeedActionsArray.addObject();
        refeedActionObject.setString("name", refeedAction.name);
        refeedActionObject.setString("documentType", refeedAction.documentType);
        refeedActionObject.setString("clusterName", refeedAction.clusterName);
        serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
        stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
    }
    return slime;
}
/** Serializes each service info with its name, type, config id and host. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    serviceInfoList.forEach(service -> {
        Cursor serviceObject = array.addObject();
        serviceObject.setString("serviceName", service.serviceName);
        serviceObject.setString("serviceType", service.serviceType);
        serviceObject.setString("configId", service.configId);
        serviceObject.setString("hostName", service.hostName);
    });
}
/** Adds each of the given strings to the given array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/** Reads the entire stream into a string, or returns null when the stream is empty. Does not close the stream. */
private String readToString(InputStream stream) {
    Scanner scanner = new Scanner(stream).useDelimiter("\\A");
    return scanner.hasNext() ? scanner.next() : null;
}
/** Returns whether the response should recurse into tenants, i.e. ?recursive=tenant or deeper. */
private static boolean recurseOverTenants(HttpRequest request) {
    if (recurseOverApplications(request)) return true;
    return "tenant".equals(request.getProperty("recursive"));
}
/** Returns whether the response should recurse into applications, i.e. ?recursive=application or deeper. */
private static boolean recurseOverApplications(HttpRequest request) {
    if (recurseOverDeployments(request)) return true;
    return "application".equals(request.getProperty("recursive"));
}
/** Returns whether the response should recurse into deployments: ?recursive=all, true, or deployment. */
private static boolean recurseOverDeployments(HttpRequest request) {
    String recursive = request.getProperty("recursive");
    return "all".equals(recursive) || "true".equals(recursive) || "deployment".equals(recursive);
}
/** Returns whether only production instances were requested, i.e. ?production=true. */
private static boolean showOnlyProductionInstances(HttpRequest request) {
    String production = request.getProperty("production");
    return "true".equals(production);
}
/** Returns the API name of the given tenant's type. */
private static String tenantType(Tenant tenant) {
    switch (tenant.type()) {
        case athenz: return "ATHENS";
        case cloud: return "CLOUD";
        // Consistency fix: report the type value actually switched on, like the other
        // tenant-type switches in this class, rather than the tenant's class name.
        default: throw new IllegalArgumentException("Unknown tenant type: " + tenant.type());
    }
}
/** Extracts the application id from the tenant, application and instance path segments. */
private static ApplicationId appIdFromPath(Path path) {
    String tenant = path.get("tenant");
    String application = path.get("application");
    String instance = path.get("instance");
    return ApplicationId.from(tenant, application, instance);
}
/** Extracts the job type from the jobtype path segment. */
private static JobType jobTypeFromPath(Path path) {
    String jobName = path.get("jobtype");
    return JobType.fromJobName(jobName);
}
/** Extracts the run id from the application, jobtype and number path segments. */
private static RunId runIdFromPath(Path path) {
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), Long.parseLong(path.get("number")));
}
/**
 * Registers a new application package build (submission) for the given application.
 *
 * Expects multipart form data with submit options JSON, an application package zip,
 * and an application test zip.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
    Map<String, byte[]> dataParts = parseDataParts(request);
    Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
    // Project id of 0 (or less) is normalized to 1.
    long projectId = Math.max(1, submitOptions.field("projectId").asLong());
    // A source revision is only constructed when all three of repository, branch and commit are given.
    Optional<String> repository = optional("repository", submitOptions);
    Optional<String> branch = optional("branch", submitOptions);
    Optional<String> commit = optional("commit", submitOptions);
    Optional<SourceRevision> sourceRevision = repository.isPresent() && branch.isPresent() && commit.isPresent()
            ? Optional.of(new SourceRevision(repository.get(), branch.get(), commit.get()))
            : Optional.empty();
    Optional<String> sourceUrl = optional("sourceUrl", submitOptions);
    Optional<String> authorEmail = optional("authorEmail", submitOptions);
    // A given source URL must at least be absolute, with scheme and host.
    sourceUrl.map(URI::create).ifPresent(url -> {
        if (url.getHost() == null || url.getScheme() == null)
            throw new IllegalArgumentException("Source URL must include scheme and host");
    });
    ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP), true);
    controller.applications().verifyApplicationIdentityConfiguration(TenantName.from(tenant),
                                                                     Optional.empty(),
                                                                     Optional.empty(),
                                                                     applicationPackage,
                                                                     Optional.of(requireUserPrincipal(request)));
    return JobControllerApiHandlerHelper.submitResponse(controller.jobController(),
                                                        tenant,
                                                        application,
                                                        sourceRevision,
                                                        authorEmail,
                                                        sourceUrl,
                                                        projectId,
                                                        applicationPackage,
                                                        dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
/** Removes all production deployments of the given application by submitting a deployment-removal package. */
private HttpResponse removeAllProdDeployments(String tenant, String application) {
    // The submission response itself is discarded; a simple confirmation message is returned instead.
    JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
                                                 Optional.empty(), Optional.empty(), Optional.empty(), 1,
                                                 ApplicationPackage.deploymentRemoval(), new byte[0]);
    return new MessageResponse("All deployments removed");
}
/** Parses and validates the given zone; the synthetic prod "controller" zone is always accepted. */
private ZoneId requireZone(String environment, String region) {
    ZoneId zone = ZoneId.from(environment, region);
    // The special "controller" prod zone is not present in the registry, but is always valid.
    boolean isControllerZone = zone.environment() == Environment.prod && zone.region().value().equals("controller");
    if ( ! isControllerZone && ! controller.zoneRegistry().hasZone(zone))
        throw new IllegalArgumentException("Zone " + zone + " does not exist in this system");
    return zone;
}
/**
 * Parses the multipart data parts of the given request, keyed by part name.
 *
 * When an X-Content-Hash header is present, the request body is digested while parsing,
 * and the resulting SHA-256 digest must match the base64-decoded header value.
 */
private static Map<String, byte[]> parseDataParts(HttpRequest request) {
    String contentHash = request.getHeader("X-Content-Hash");
    if (contentHash == null)
        return new MultipartParser().parse(request);
    DigestInputStream digester = Signatures.sha256Digester(request.getData());
    var dataParts = new MultipartParser().parse(request.getHeader("Content-Type"), digester, request.getUri());
    // NOTE(review): Arrays.equals is not a constant-time comparison; if this hash serves as an
    // integrity/authentication check, consider MessageDigest.isEqual — confirm the threat model.
    if ( ! Arrays.equals(digester.getMessageDigest().digest(), Base64.getDecoder().decode(contentHash)))
        throw new IllegalArgumentException("Value of X-Content-Hash header does not match computed content hash");
    return dataParts;
}
/**
 * Finds the rotation of the given instance matching the given endpoint id, if any;
 * otherwise the single rotation of the instance.
 *
 * @throws NotExistsException if the instance has no rotations, or none match the endpoint id
 * @throws IllegalArgumentException if no endpoint id is given and the instance has several rotations
 */
private static RotationId findRotationId(Instance instance, Optional<String> endpointId) {
    var rotations = instance.rotations();
    if (rotations.isEmpty())
        throw new NotExistsException("global rotation does not exist for " + instance);

    if (endpointId.isPresent()) {
        for (var rotation : rotations)
            if (rotation.endpointId().id().equals(endpointId.get()))
                return rotation.rotationId();
        throw new NotExistsException("endpoint " + endpointId.get() + " does not exist for " + instance);
    }
    if (rotations.size() > 1)
        throw new IllegalArgumentException(instance + " has multiple rotations. Query parameter 'endpointId' must be given");
    return rotations.get(0).rotationId();
}
/** Renders a rotation state as the upper-case string used in responses; "UNKNOWN" for unmapped states. */
private static String rotationStateString(RotationState state) {
    if (state == RotationState.in) return "IN";
    if (state == RotationState.out) return "OUT";
    return "UNKNOWN";
}
/** Renders an endpoint scope as its response string; throws for unmapped scopes. */
private static String endpointScopeString(Endpoint.Scope scope) {
    if (scope == Endpoint.Scope.region) return "region";
    if (scope == Endpoint.Scope.global) return "global";
    if (scope == Endpoint.Scope.zone) return "zone";
    throw new IllegalArgumentException("Unknown endpoint scope " + scope);
}
/** Renders a routing method as its response string; throws for unmapped methods. */
private static String routingMethodString(RoutingMethod method) {
    if (method == RoutingMethod.exclusive) return "exclusive";
    if (method == RoutingMethod.shared) return "shared";
    if (method == RoutingMethod.sharedLayer4) return "sharedLayer4";
    throw new IllegalArgumentException("Unknown routing method " + method);
}
/**
 * Returns the request-context attribute with the given name, cast to the given type.
 *
 * @throws IllegalArgumentException if the attribute is absent or not of the expected type
 */
private static <T> T getAttribute(HttpRequest request, String attributeName, Class<T> cls) {
    Object value = request.getJDiscRequest().context().get(attributeName);
    if (cls.isInstance(value))
        return cls.cast(value);
    // Covers both a missing attribute (null) and one of the wrong type.
    throw new IllegalArgumentException("Attribute '" + attributeName + "' was not set on request");
}
/** Returns whether the given request was made by a hosted operator. */
private static boolean isOperator(HttpRequest request) {
    SecurityContext securityContext = getAttribute(request, SecurityContext.ATTRIBUTE_NAME, SecurityContext.class);
    for (Role role : securityContext.roles())
        if (role.definition() == RoleDefinition.hostedOperator)
            return true;
    return false;
}
} | class ApplicationApiHandler extends LoggingRequestHandler {
// Jackson mapper used for JSON handling in this handler.
private static final ObjectMapper jsonMapper = new ObjectMapper();
// Path prefix which may precede "/application/v4/..."; passed to Path in handle() so routes match with or without it.
private static final String OPTIONAL_PREFIX = "/api";
// Entry point to all controller state and operations.
private final Controller controller;
private final AccessControlRequests accessControlRequests;
private final TestConfigSerializer testConfigSerializer;
/** Creates this handler with its collaborators; invoked by the dependency injection framework. */
@Inject
public ApplicationApiHandler(LoggingRequestHandler.Context parentCtx,
Controller controller,
AccessControlRequests accessControlRequests) {
super(parentCtx);
this.controller = controller;
this.accessControlRequests = accessControlRequests;
// The serializer needs the system (e.g. for zone-dependent config); derived from the controller.
this.testConfigSerializer = new TestConfigSerializer(controller.system());
}
@Override
public Duration getTimeout() {
// Requests handled here (e.g. deployments) can be long-running, so use a generous timeout.
return Duration.ofMinutes(20);
}
/** Dispatches by HTTP method and maps thrown exceptions to appropriate HTTP error responses. */
@Override
public HttpResponse handle(HttpRequest request) {
try {
Path path = new Path(request.getUri(), OPTIONAL_PREFIX);
switch (request.getMethod()) {
case GET: return handleGET(path, request);
case PUT: return handlePUT(path, request);
case POST: return handlePOST(path, request);
case PATCH: return handlePATCH(path, request);
case DELETE: return handleDELETE(path, request);
case OPTIONS: return handleOPTIONS();
default: return ErrorResponse.methodNotAllowed("Method '" + request.getMethod() + "' is not supported");
}
}
// NOTE: catch order matters — more specific exception types must stay before RuntimeException.
catch (ForbiddenException e) {
return ErrorResponse.forbidden(Exceptions.toMessageString(e));
}
catch (NotAuthorizedException e) {
return ErrorResponse.unauthorized(Exceptions.toMessageString(e));
}
catch (NotExistsException e) {
return ErrorResponse.notFoundError(Exceptions.toMessageString(e));
}
catch (IllegalArgumentException e) {
return ErrorResponse.badRequest(Exceptions.toMessageString(e));
}
// Errors from the config server are translated to a status matching their error code.
catch (ConfigServerException e) {
switch (e.getErrorCode()) {
case NOT_FOUND:
return new ErrorResponse(NOT_FOUND, e.getErrorCode().name(), Exceptions.toMessageString(e));
case ACTIVATION_CONFLICT:
return new ErrorResponse(CONFLICT, e.getErrorCode().name(), Exceptions.toMessageString(e));
case INTERNAL_SERVER_ERROR:
return new ErrorResponse(INTERNAL_SERVER_ERROR, e.getErrorCode().name(), Exceptions.toMessageString(e));
default:
return new ErrorResponse(BAD_REQUEST, e.getErrorCode().name(), Exceptions.toMessageString(e));
}
}
// Last resort: log unexpected failures with the request URI and return 500.
catch (RuntimeException e) {
log.log(Level.WARNING, "Unexpected error handling '" + request.getUri() + "'", e);
return ErrorResponse.internalServerError(Exceptions.toMessageString(e));
}
}
/**
 * Routes GET requests to the matching endpoint handler.
 * First matching pattern wins; unmatched paths yield 404.
 * Note: a duplicate of the ".../environment/{environment}/region/{region}/instance/{instance}" route
 * (formerly listed twice back to back, the second occurrence unreachable) has been removed.
 */
private HttpResponse handleGET(Path path, HttpRequest request) {
    if (path.matches("/application/v4/")) return root(request);
    if (path.matches("/application/v4/tenant")) return tenants(request);
    if (path.matches("/application/v4/tenant/{tenant}")) return tenant(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/info")) return tenantInfo(path.get("tenant"), request);
    if (path.matches("/application/v4/tenant/{tenant}/secret-store/{name}/validate")) return validateSecretStore(path.get("tenant"), path.get("name"));
    if (path.matches("/application/v4/tenant/{tenant}/application")) return applications(path.get("tenant"), Optional.empty(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return application(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/compile-version")) return compileVersion(path.get("tenant"), path.get("application"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deployment")) return JobControllerApiHandlerHelper.overviewResponse(controller, TenantAndApplicationId.from(path.get("tenant"), path.get("application")), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/package")) return applicationPackage(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return deploying(path.get("tenant"), path.get("application"), "default", request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/pin")) return deploying(path.get("tenant"), path.get("application"), "default", request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/metering")) return metering(path.get("tenant"), path.get("application"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance")) return applications(path.get("tenant"), Optional.of(path.get("application")), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return instance(path.get("tenant"), path.get("application"), path.get("instance"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying")) return deploying(path.get("tenant"), path.get("application"), path.get("instance"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/pin")) return deploying(path.get("tenant"), path.get("application"), path.get("instance"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job")) return JobControllerApiHandlerHelper.jobTypeResponse(controller, appIdFromPath(path), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.runResponse(controller.jobController().runs(appIdFromPath(path), jobTypeFromPath(path)), request.getUri());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/package")) return devApplicationPackage(appIdFromPath(path), jobTypeFromPath(path));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/test-config")) return testConfig(appIdFromPath(path), jobTypeFromPath(path));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/run/{number}")) return JobControllerApiHandlerHelper.runDetailsResponse(controller.jobController(), runIdFromPath(path), request.getProperty("after"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return getReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspended")) return suspended(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/nodes")) return nodes(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/clusters")) return clusters(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/content/{*}")) return content(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.propertyMap());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/metrics")) return metrics(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), Optional.ofNullable(request.getProperty("endpointId")));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deployment(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/suspended")) return suspended(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service")) return services(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/service/{service}/{*}")) return service(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), path.get("service"), path.getRest(), request);
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/nodes")) return nodes(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/clusters")) return clusters(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/logs")) return logs(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request.propertyMap());
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation")) return rotationStatus(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), Optional.ofNullable(request.getProperty("endpointId")));
    if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return getGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"));
    return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes PUT requests (tenant/info updates, secret stores, rotation override set) to their handlers. */
private HttpResponse handlePUT(Path path, HttpRequest request) {
if (path.matches("/application/v4/tenant/{tenant}")) return updateTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/info")) return updateTenantInfo(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/secret-store/{name}")) return addSecretStore(path.get("tenant"), path.get("name"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false, request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes PATCH requests; both application- and instance-level paths patch the application. */
private HttpResponse handlePATCH(Path path, HttpRequest request) {
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return patchApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return patchApplication(path.get("tenant"), path.get("application"), request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Routes DELETE requests (removal of tenants, applications, instances, keys, deployments, overrides). */
private HttpResponse handleDELETE(Path path, HttpRequest request) {
if (path.matches("/application/v4/tenant/{tenant}")) return deleteTenant(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/key")) return removeDeveloperKey(path.get("tenant"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}")) return deleteApplication(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deployment")) return removeAllProdDeployments(path.get("tenant"), path.get("application"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"), "default", "all");
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/deploying/{choice}")) return cancelDeploy(path.get("tenant"), path.get("application"), "default", path.get("choice"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/key")) return removeDeployKey(path.get("tenant"), path.get("application"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}")) return deleteInstance(path.get("tenant"), path.get("application"), path.get("instance"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying")) return cancelDeploy(path.get("tenant"), path.get("application"), path.get("instance"), "all");
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/deploying/{choice}")) return cancelDeploy(path.get("tenant"), path.get("application"), path.get("instance"), path.get("choice"));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}")) return JobControllerApiHandlerHelper.abortJobResponse(controller.jobController(), appIdFromPath(path), jobTypeFromPath(path));
// DELETE on .../pause removes the pause, i.e. resumes the job.
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/job/{jobtype}/pause")) return resume(appIdFromPath(path), jobTypeFromPath(path));
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/reindexing")) return disableReindexing(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/suspend")) return suspend(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), false);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/instance/{instance}/environment/{environment}/region/{region}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}")) return deactivate(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), request);
if (path.matches("/application/v4/tenant/{tenant}/application/{application}/environment/{environment}/region/{region}/instance/{instance}/global-rotation/override")) return setGlobalRotationOverride(path.get("tenant"), path.get("application"), path.get("instance"), path.get("environment"), path.get("region"), true, request);
return ErrorResponse.notFoundError("Nothing at " + path);
}
/** Answers OPTIONS requests with an empty response advertising the supported methods. */
private HttpResponse handleOPTIONS() {
    EmptyResponse allowedMethods = new EmptyResponse();
    allowedMethods.headers().put("Allow", "GET,PUT,POST,PATCH,DELETE,OPTIONS");
    return allowedMethods;
}
/** Returns all tenants, fully rendered, as a JSON array. */
private HttpResponse recursiveRoot(HttpRequest request) {
    Slime result = new Slime();
    Cursor tenantArray = result.setArray();
    for (Tenant tenant : controller.tenants().asList())
        toSlime(tenantArray.addObject(), tenant, request);
    return new SlimeJsonResponse(result);
}
/** Returns the API root: a recursive tenant listing if requested, otherwise just the resource links. */
private HttpResponse root(HttpRequest request) {
    if (recurseOverTenants(request))
        return recursiveRoot(request);
    return new ResourceResponse(request, "tenant");
}
/** Lists all tenants in compact form as a JSON array. */
private HttpResponse tenants(HttpRequest request) {
    Slime result = new Slime();
    Cursor tenantArray = result.setArray();
    for (Tenant tenant : controller.tenants().asList())
        tenantInTenantsListToSlime(tenant, request.getUri(), tenantArray.addObject());
    return new SlimeJsonResponse(result);
}
/** Returns the named tenant, or 404 if it does not exist. */
private HttpResponse tenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().get(TenantName.from(tenantName));
    if (tenant.isEmpty())
        return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
    return tenant(tenant.get(), request);
}
/** Renders the given tenant as a JSON object response. */
private HttpResponse tenant(Tenant tenant, HttpRequest request) {
    Slime result = new Slime();
    toSlime(result.setObject(), tenant, request);
    return new SlimeJsonResponse(result);
}
/** Returns the extended info of the named tenant; 404 unless it exists and is a cloud tenant. */
private HttpResponse tenantInfo(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().get(TenantName.from(tenantName));
    if (tenant.isPresent() && tenant.get().type() == Tenant.Type.cloud)
        return tenantInfo(((CloudTenant) tenant.get()).info(), request);
    return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist or does not support this");
}
/** Renders tenant info as JSON; an empty info yields an empty object. */
private SlimeJsonResponse tenantInfo(TenantInfo info, HttpRequest request) {
    Slime result = new Slime();
    Cursor root = result.setObject();
    if ( ! info.isEmpty()) {
        root.setString("name", info.name());
        root.setString("email", info.email());
        root.setString("website", info.website());
        root.setString("invoiceEmail", info.invoiceEmail());
        root.setString("contactName", info.contactName());
        root.setString("contactEmail", info.contactEmail());
        toSlime(info.address(), root);
        toSlime(info.billingContact(), root);
    }
    return new SlimeJsonResponse(result);
}
/** Adds an "address" object under the given cursor, unless the address is empty. */
private void toSlime(TenantInfoAddress address, Cursor parentCursor) {
    if (address.isEmpty()) return;
    Cursor cursor = parentCursor.setObject("address");
    cursor.setString("addressLines", address.addressLines());
    cursor.setString("postalCodeOrZip", address.postalCodeOrZip());
    cursor.setString("city", address.city());
    cursor.setString("stateRegionProvince", address.stateRegionProvince());
    cursor.setString("country", address.country());
}
/** Adds a "billingContact" object under the given cursor, unless the contact is empty. */
private void toSlime(TenantInfoBillingContact billingContact, Cursor parentCursor) {
    if (billingContact.isEmpty()) return;
    Cursor billingCursor = parentCursor.setObject("billingContact");
    billingCursor.setString("name", billingContact.name());
    billingCursor.setString("email", billingContact.email());
    billingCursor.setString("phone", billingContact.phone());
    toSlime(billingContact.address(), billingCursor);
}
/** Updates the extended info of the named tenant; 404 unless it exists and is a cloud tenant. */
private HttpResponse updateTenantInfo(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().get(TenantName.from(tenantName));
    if (tenant.isPresent() && tenant.get().type() == Tenant.Type.cloud)
        return updateTenantInfo((CloudTenant) tenant.get(), request);
    return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist or does not support this");
}
/** Returns the string value of the given field, or the given default if the field is not valid. */
private String getString(Inspector field, String defaultValue) { // parameter renamed from misspelled "defaultVale"
    return field.valid() ? field.asString() : defaultValue;
}
/**
 * Merges the tenant info in the request body over the tenant's existing info and stores the result.
 * Fields absent from the request keep their current value.
 *
 * Bug fixes: the fallback for "website" previously used oldInfo.email(), and the fallback for
 * "contactEmail" previously used oldInfo.contactName() — both now fall back to their own old value.
 */
private SlimeJsonResponse updateTenantInfo(CloudTenant tenant, HttpRequest request) {
    TenantInfo oldInfo = tenant.info();
    Inspector insp = toSlime(request.getData()).get();
    TenantInfo mergedInfo = TenantInfo.EMPTY
            .withName(getString(insp.field("name"), oldInfo.name()))
            .withEmail(getString(insp.field("email"), oldInfo.email()))
            .withWebsite(getString(insp.field("website"), oldInfo.website()))
            .withInvoiceEmail(getString(insp.field("invoiceEmail"), oldInfo.invoiceEmail()))
            .withContactName(getString(insp.field("contactName"), oldInfo.contactName()))
            .withContactEmail(getString(insp.field("contactEmail"), oldInfo.contactEmail()))
            .withAddress(updateTenantInfoAddress(insp.field("address"), oldInfo.address()))
            .withBillingContact(updateTenantInfoBillingContact(insp.field("billingContact"), oldInfo.billingContact()));
    // Store under lock so concurrent updates do not lose writes.
    controller.tenants().lockOrThrow(tenant.name(), LockedTenant.Cloud.class, lockedTenant -> {
        lockedTenant = lockedTenant.withInfo(mergedInfo);
        controller.tenants().store(lockedTenant);
    });
    return new MessageResponse("Tenant info updated");
}
/** Merges an address object from JSON over the old address; an invalid (absent) inspector keeps the old one. */
private TenantInfoAddress updateTenantInfoAddress(Inspector insp, TenantInfoAddress oldAddress) {
    if ( ! insp.valid()) return oldAddress;
    return TenantInfoAddress.EMPTY
            .withAddressLines(getString(insp.field("addressLines"), oldAddress.addressLines()))
            .withPostalCodeOrZip(getString(insp.field("postalCodeOrZip"), oldAddress.postalCodeOrZip()))
            .withCity(getString(insp.field("city"), oldAddress.city()))
            .withStateRegionProvince(getString(insp.field("stateRegionProvince"), oldAddress.stateRegionProvince()))
            .withCountry(getString(insp.field("country"), oldAddress.country()));
}
/** Merges a billing-contact object from JSON over the old contact; an invalid (absent) inspector keeps the old one. */
private TenantInfoBillingContact updateTenantInfoBillingContact(Inspector insp, TenantInfoBillingContact oldContact) {
    if ( ! insp.valid())
        return oldContact;
    TenantInfoBillingContact merged = TenantInfoBillingContact.EMPTY
            .withName(getString(insp.field("name"), oldContact.name()))
            .withEmail(getString(insp.field("email"), oldContact.email()))
            .withPhone(getString(insp.field("phone"), oldContact.phone()))
            .withAddress(updateTenantInfoAddress(insp.field("address"), oldContact.address()));
    return merged;
}
/**
 * Lists the applications of a tenant as JSON, optionally filtered to a single application name.
 * Each entry carries its URL and its instances (only production instances if the request asks for that).
 */
private HttpResponse applications(String tenantName, Optional<String> applicationName, HttpRequest request) {
TenantName tenant = TenantName.from(tenantName);
if (controller.tenants().get(tenantName).isEmpty())
return ErrorResponse.notFoundError("Tenant '" + tenantName + "' does not exist");
Slime slime = new Slime();
Cursor applicationArray = slime.setArray();
for (com.yahoo.vespa.hosted.controller.Application application : controller.applications().asList(tenant)) {
// An empty filter matches every application.
if (applicationName.map(application.id().application().value()::equals).orElse(true)) {
Cursor applicationObject = applicationArray.addObject();
applicationObject.setString("tenant", application.id().tenant().value());
applicationObject.setString("application", application.id().application().value());
applicationObject.setString("url", withPath("/application/v4" +
"/tenant/" + application.id().tenant().value() +
"/application/" + application.id().application().value(),
request.getUri()).toString());
Cursor instanceArray = applicationObject.setArray("instances");
for (InstanceName instance : showOnlyProductionInstances(request) ? application.productionInstances().keySet()
: application.instances().keySet()) {
Cursor instanceObject = instanceArray.addObject();
instanceObject.setString("instance", instance.value());
instanceObject.setString("url", withPath("/application/v4" +
"/tenant/" + application.id().tenant().value() +
"/application/" + application.id().application().value() +
"/instance/" + instance.value(),
request.getUri()).toString());
}
}
}
return new SlimeJsonResponse(slime);
}
/** Returns the dev application package for the given id and job type, as a zip download. */
private HttpResponse devApplicationPackage(ApplicationId id, JobType type) {
    if ( ! type.environment().isManuallyDeployed())
        throw new IllegalArgumentException("Only manually deployed zones have dev packages");
    ZoneId zone = type.zone(controller.system());
    String filename = id.toFullString() + "." + zone.value() + ".zip";
    byte[] content = controller.applications().applicationStore().getDev(id, zone);
    return new ZipResponse(filename, content);
}
/**
 * Returns an application package as a zip download.
 * Uses the build number given by the "build" query parameter, or the latest submitted build otherwise.
 */
private HttpResponse applicationPackage(String tenantName, String applicationName, HttpRequest request) {
var tenantAndApplication = TenantAndApplicationId.from(tenantName, applicationName);
// NOTE(review): applicationId is constructed but not used below — presumably left over; confirm before removing.
var applicationId = ApplicationId.from(tenantName, applicationName, InstanceName.defaultName().value());
long buildNumber;
var requestedBuild = Optional.ofNullable(request.getProperty("build")).map(build -> {
try {
return Long.parseLong(build);
} catch (NumberFormatException e) {
throw new IllegalArgumentException("Invalid build number", e);
}
});
if (requestedBuild.isEmpty()) {
// No explicit build requested: fall back to the latest submitted version.
var application = controller.applications().requireApplication(tenantAndApplication);
var latestBuild = application.latestVersion().map(ApplicationVersion::buildNumber).orElse(OptionalLong.empty());
if (latestBuild.isEmpty()) {
throw new NotExistsException("No application package has been submitted for '" + tenantAndApplication + "'");
}
buildNumber = latestBuild.getAsLong();
} else {
buildNumber = requestedBuild.get();
}
var applicationPackage = controller.applications().applicationStore().find(tenantAndApplication.tenant(), tenantAndApplication.application(), buildNumber);
var filename = tenantAndApplication + "-build" + buildNumber + ".zip";
if (applicationPackage.isEmpty()) {
throw new NotExistsException("No application package found for '" +
tenantAndApplication +
"' with build number " + buildNumber);
}
return new ZipResponse(filename, applicationPackage.get());
}
/** Renders the named application as a JSON object response. */
private HttpResponse application(String tenantName, String applicationName, HttpRequest request) {
    Slime result = new Slime();
    toSlime(result.setObject(), getApplication(tenantName, applicationName), request);
    return new SlimeJsonResponse(result);
}
/** Returns the compile version for the named application as {"compileVersion": "..."}. */
private HttpResponse compileVersion(String tenantName, String applicationName) {
    String version = compileVersion(TenantAndApplicationId.from(tenantName, applicationName)).toFullString();
    Slime result = new Slime();
    result.setObject().setString("compileVersion", version);
    return new SlimeJsonResponse(result);
}
/** Renders the named instance, with the deployment status of its application, as JSON. */
private HttpResponse instance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    Slime result = new Slime();
    toSlime(result.setObject(), getInstance(tenantName, applicationName, instanceName),
            controller.jobController().deploymentStatus(getApplication(tenantName, applicationName)), request);
    return new SlimeJsonResponse(result);
}
/**
 * Registers the PEM-encoded public key in the request body as a developer key for the requesting user,
 * and returns the tenant's resulting key list. Only supported for cloud tenants.
 */
private HttpResponse addDeveloperKey(String tenantName, HttpRequest request) {
if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");
Principal user = request.getJDiscRequest().getUserPrincipal();
String pemDeveloperKey = toSlime(request.getData()).get().field("key").asString();
PublicKey developerKey = KeyUtils.fromPemEncodedPublicKey(pemDeveloperKey);
Slime root = new Slime();
// Update under lock; the response is built from the stored (post-update) key set.
controller.tenants().lockOrThrow(TenantName.from(tenantName), LockedTenant.Cloud.class, tenant -> {
tenant = tenant.withDeveloperKey(developerKey, user);
toSlime(root.setObject().setArray("keys"), tenant.get().developerKeys());
controller.tenants().store(tenant);
});
return new SlimeJsonResponse(root);
}
/**
 * Validates the named secret store of a cloud tenant by asking the config server of one of the
 * tenant's active deployments; requires at least one active deployment to test against.
 */
private HttpResponse validateSecretStore(String tenantName, String name) {
var tenant = TenantName.from(tenantName);
if (controller.tenants().require(tenant).type() != Tenant.Type.cloud)
return ErrorResponse.badRequest("Tenant '" + tenant + "' is not a cloud tenant");
var cloudTenant = (CloudTenant)controller.tenants().require(tenant);
var tenantSecretStore = cloudTenant.tenantSecretStores()
.stream()
.filter(secretStore -> secretStore.getName().equals(name))
.findFirst();
var deployment = getActiveDeployment(tenant);
if (deployment.isEmpty())
return ErrorResponse.badRequest("Tenant '" + tenantName + "' has no active deployments");
if (tenantSecretStore.isEmpty())
return ErrorResponse.notFoundError("No secret store '" + name + "' configured for tenant '" + tenantName + "'");
var response = controller.serviceRegistry().configServer().validateSecretStore(deployment.get(), tenantSecretStore.get());
return new MessageResponse(response);
}
/** Returns some deployment of the given tenant: the first deployment of the first instance that has any. */
private Optional<DeploymentId> getActiveDeployment(TenantName tenant) {
    for (var application : controller.applications().asList(tenant)) {
        for (var instance : application.instances().values()) {
            var deployedZones = instance.deployments().keySet();
            if ( ! deployedZones.isEmpty())
                return Optional.of(new DeploymentId(instance.id(), deployedZones.iterator().next()));
        }
    }
    return Optional.empty();
}
/** Removes the PEM-encoded developer key in the request body from the given cloud tenant. */
private HttpResponse removeDeveloperKey(String tenantName, HttpRequest request) {
    if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
        throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");
    String pemDeveloperKey = toSlime(request.getData()).get().field("key").asString();
    PublicKey developerKey = KeyUtils.fromPemEncodedPublicKey(pemDeveloperKey);
    // The previous version looked up the key's owning Principal here but never used it;
    // that dead lookup (a side-effect-free map get) has been removed.
    Slime root = new Slime();
    // Remove the key under lock; render the remaining key set into the response while still locked.
    controller.tenants().lockOrThrow(TenantName.from(tenantName), LockedTenant.Cloud.class, tenant -> {
        tenant = tenant.withoutDeveloperKey(developerKey);
        toSlime(root.setObject().setArray("keys"), tenant.get().developerKeys());
        controller.tenants().store(tenant);
    });
    return new SlimeJsonResponse(root);
}
/** Serializes each key/owner pair: the key's PEM encoding under "key" and the owner's name under "user". */
private void toSlime(Cursor keysArray, Map<PublicKey, Principal> keys) {
    for (Map.Entry<PublicKey, Principal> entry : keys.entrySet()) {
        Cursor keyObject = keysArray.addObject();
        keyObject.setString("key", KeyUtils.toPem(entry.getKey()));
        keyObject.setString("user", entry.getValue().getName());
    }
}
/** Adds the PEM-encoded deploy key from the request body to the application, returning all its keys. */
private HttpResponse addDeployKey(String tenantName, String applicationName, HttpRequest request) {
    PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(toSlime(request.getData()).get().field("key").asString());
    Slime response = new Slime();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        application = application.withDeployKey(deployKey);
        Cursor keys = response.setObject().setArray("keys");
        for (PublicKey key : application.get().deployKeys())
            keys.addString(KeyUtils.toPem(key));
        controller.applications().store(application);
    });
    return new SlimeJsonResponse(response);
}
/** Removes the PEM-encoded deploy key in the request body from the application, returning its remaining keys. */
private HttpResponse removeDeployKey(String tenantName, String applicationName, HttpRequest request) {
    PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(toSlime(request.getData()).get().field("key").asString());
    Slime response = new Slime();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        application = application.withoutDeployKey(deployKey);
        Cursor keys = response.setObject().setArray("keys");
        for (PublicKey key : application.get().deployKeys())
            keys.addString(KeyUtils.toPem(key));
        controller.applications().store(application);
    });
    return new SlimeJsonResponse(response);
}
/**
 * Registers a new AWS-backed secret store for a cloud tenant: validates the definition,
 * creates the tenant policy, registers the store with the tenant secret service, and
 * finally persists it on the tenant under lock.
 */
private HttpResponse addSecretStore(String tenantName, String name, HttpRequest request) {
    if (controller.tenants().require(TenantName.from(tenantName)).type() != Tenant.Type.cloud)
        throw new IllegalArgumentException("Tenant '" + tenantName + "' is not a cloud tenant");

    var data = toSlime(request.getData()).get();
    var awsId = mandatory("awsId", data).asString();
    var externalId = mandatory("externalId", data).asString();
    var role = mandatory("role", data).asString();

    var tenant = (CloudTenant) controller.tenants().require(TenantName.from(tenantName));
    var tenantSecretStore = new TenantSecretStore(name, awsId, role);

    // Plain concatenation here: these messages were previously wrapped in String.format with
    // no format specifiers, so a '%' in the store's toString would throw an IllegalFormatException.
    if (!tenantSecretStore.isValid()) {
        return ErrorResponse.badRequest("Secret store " + tenantSecretStore + " is invalid");
    }
    if (tenant.tenantSecretStores().contains(tenantSecretStore)) {
        return ErrorResponse.badRequest("Secret store " + tenantSecretStore + " is already configured");
    }

    controller.serviceRegistry().roleService().createTenantPolicy(TenantName.from(tenantName), name, awsId, role);
    controller.serviceRegistry().tenantSecretService().addSecretStore(tenant.name(), tenantSecretStore, externalId);
    // Only persist the store on the tenant after the external registrations succeed.
    controller.tenants().lockOrThrow(tenant.name(), LockedTenant.Cloud.class, lockedTenant -> {
        lockedTenant = lockedTenant.withSecretStore(tenantSecretStore);
        controller.tenants().store(lockedTenant);
    });
    return new MessageResponse("Configured secret store: " + tenantSecretStore);
}
/**
 * Patches application-level settings from the request body. Supported fields:
 * "majorVersion" (0 clears the pinned major version) and "pemDeployKey" (added to the key set).
 * Returns a message describing each change applied, or "No applicable changes."
 */
private HttpResponse patchApplication(String tenantName, String applicationName, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    StringJoiner messageBuilder = new StringJoiner("\n").setEmptyValue("No applicable changes.");
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(tenantName, applicationName), application -> {
        Inspector majorVersionField = requestObject.field("majorVersion");
        if (majorVersionField.valid()) {
            // A major version of 0 means "unpin": the stored major version is cleared.
            Integer majorVersion = majorVersionField.asLong() == 0 ? null : (int) majorVersionField.asLong();
            application = application.withMajorVersion(majorVersion);
            messageBuilder.add("Set major version to " + (majorVersion == null ? "empty" : majorVersion));
        }
        Inspector pemDeployKeyField = requestObject.field("pemDeployKey");
        if (pemDeployKeyField.valid()) {
            String pemDeployKey = pemDeployKeyField.asString();
            PublicKey deployKey = KeyUtils.fromPemEncodedPublicKey(pemDeployKey);
            application = application.withDeployKey(deployKey);
            messageBuilder.add("Added deploy key " + pemDeployKey);
        }
        controller.applications().store(application);
    });
    return new MessageResponse(messageBuilder.toString());
}
/** Looks up the application, throwing NotExistsException if it does not exist. */
private com.yahoo.vespa.hosted.controller.Application getApplication(String tenantName, String applicationName) {
    TenantAndApplicationId applicationId = TenantAndApplicationId.from(tenantName, applicationName);
    var application = controller.applications().getApplication(applicationId);
    return application.orElseThrow(() -> new NotExistsException(applicationId + " not found"));
}
/** Looks up the instance, throwing NotExistsException if it does not exist. */
private Instance getInstance(String tenantName, String applicationName, String instanceName) {
    ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
    var instance = controller.applications().getInstance(applicationId);
    return instance.orElseThrow(() -> new NotExistsException(applicationId + " not found"));
}
/** Lists the nodes allocated to the given deployment, with state, version and resource information. */
private HttpResponse nodes(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    List<Node> nodes = controller.serviceRegistry().configServer().nodeRepository().list(zone, id);
    Slime slime = new Slime();
    Cursor nodesArray = slime.setObject().setArray("nodes");
    for (Node node : nodes) {
        Cursor nodeObject = nodesArray.addObject();
        nodeObject.setString("hostname", node.hostname().value());
        nodeObject.setString("state", valueOf(node.state()));
        // "reservedTo" is only present for nodes reserved to a tenant.
        node.reservedTo().ifPresent(tenant -> nodeObject.setString("reservedTo", tenant.value()));
        nodeObject.setString("orchestration", valueOf(node.serviceState()));
        nodeObject.setString("version", node.currentVersion().toString());
        nodeObject.setString("flavor", node.flavor());
        toSlime(node.resources(), nodeObject);
        // Boolean summary of disk speed, in addition to the detailed resources above.
        nodeObject.setBool("fastDisk", node.resources().diskSpeed() == NodeResources.DiskSpeed.fast);
        nodeObject.setString("clusterId", node.clusterId());
        nodeObject.setString("clusterType", valueOf(node.clusterType()));
    }
    return new SlimeJsonResponse(slime);
}
/** Renders autoscaling information for each cluster of the given deployment: limits, resources, utilization and scaling events. */
private HttpResponse clusters(String tenantName, String applicationName, String instanceName, String environment, String region) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    Application application = controller.serviceRegistry().configServer().nodeRepository().getApplication(zone, id);
    Slime slime = new Slime();
    Cursor clustersObject = slime.setObject().setObject("clusters");
    for (Cluster cluster : application.clusters().values()) {
        Cursor clusterObject = clustersObject.setObject(cluster.id().value());
        toSlime(cluster.min(), clusterObject.setObject("min"));
        toSlime(cluster.max(), clusterObject.setObject("max"));
        toSlime(cluster.current(), clusterObject.setObject("current"));
        // Only report a target when it differs from current resources in its numeric fields.
        if (cluster.target().isPresent()
            && ! cluster.target().get().justNumbers().equals(cluster.current().justNumbers()))
            toSlime(cluster.target().get(), clusterObject.setObject("target"));
        cluster.suggested().ifPresent(suggested -> toSlime(suggested, clusterObject.setObject("suggested")));
        utilizationToSlime(cluster.utilization(), clusterObject.setObject("utilization"));
        scalingEventsToSlime(cluster.scalingEvents(), clusterObject.setArray("scalingEvents"));
        clusterObject.setString("autoscalingStatus", cluster.autoscalingStatus());
    }
    return new SlimeJsonResponse(slime);
}
/** Returns the serialized name of the given node state; throws for states not part of the wire format. */
private static String valueOf(Node.State state) {
    switch (state) {
        case active:      return "active";
        case dirty:       return "dirty";
        case failed:      return "failed";
        case inactive:    return "inactive";
        case parked:      return "parked";
        case provisioned: return "provisioned";
        case ready:       return "ready";
        case reserved:    return "reserved";
        default: throw new IllegalArgumentException("Unexpected node state '" + state + "'.");
    }
}
/** Returns the serialized name of the given service state; any unlisted state maps to "unknown". */
static String valueOf(Node.ServiceState state) {
    switch (state) {
        case expectedUp:      return "expectedUp";
        case allowedDown:     return "allowedDown";
        case permanentlyDown: return "permanentlyDown";
        case unorchestrated:  return "unorchestrated";
        default:              return "unknown";
    }
}
/** Returns the serialized name of the given cluster type; throws for types not part of the wire format. */
private static String valueOf(Node.ClusterType type) {
    switch (type) {
        case admin:     return "admin";
        case combined:  return "combined";
        case container: return "container";
        case content:   return "content";
        default: throw new IllegalArgumentException("Unexpected node cluster type '" + type + "'.");
    }
}
/** Returns the serialized name of the given disk speed; throws for values not part of the wire format. */
private static String valueOf(NodeResources.DiskSpeed diskSpeed) {
    switch (diskSpeed) {
        case any:  return "any";
        case fast: return "fast";
        case slow: return "slow";
        default: throw new IllegalArgumentException("Unknown disk speed '" + diskSpeed.name() + "'");
    }
}
/** Returns the serialized name of the given storage type; throws for values not part of the wire format. */
private static String valueOf(NodeResources.StorageType storageType) {
    switch (storageType) {
        case any:    return "any";
        case local:  return "local";
        case remote: return "remote";
        default: throw new IllegalArgumentException("Unknown storage type '" + storageType.name() + "'");
    }
}
/** Streams Vespa logs for the given deployment; query parameters are passed through to the config server. */
private HttpResponse logs(String tenantName, String applicationName, String instanceName, String environment, String region, Map<String, String> queryParameters) {
    ApplicationId application = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    DeploymentId deployment = new DeploymentId(application, zone);
    InputStream logStream = controller.serviceRegistry().configServer().getLogs(deployment, queryParameters);
    // Stream the log data directly to the client instead of buffering it in memory.
    return new HttpResponse(200) {
        @Override
        public void render(OutputStream outputStream) throws IOException {
            logStream.transferTo(outputStream);
        }
    };
}
/** Fetches proton metrics for one deployment from the config server and renders them as JSON. */
private HttpResponse metrics(String tenantName, String applicationName, String instanceName, String environment, String region) {
    DeploymentId deployment = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                               requireZone(environment, region));
    List<ProtonMetrics> protonMetrics = controller.serviceRegistry().configServer().getProtonMetrics(deployment);
    return buildResponseFromProtonMetrics(protonMetrics);
}
/** Wraps the given proton metrics in a JSON object under "metrics"; returns a 500 response on serialization failure. */
private JsonResponse buildResponseFromProtonMetrics(List<ProtonMetrics> protonMetrics) {
    try {
        var root = jsonMapper.createObjectNode();
        var metricsArray = jsonMapper.createArrayNode();
        protonMetrics.forEach(metrics -> metricsArray.add(metrics.toJson()));
        root.set("metrics", metricsArray);
        return new JsonResponse(200, jsonMapper.writerWithDefaultPrettyPrinter().writeValueAsString(root));
    } catch (JsonProcessingException e) {
        log.log(Level.SEVERE, "Unable to build JsonResponse with Proton data: " + e.getMessage(), e);
        return new JsonResponse(500, "");
    }
}
/**
 * Triggers the given job for the given instance: a plain re-trigger of the last run when
 * "reTrigger" is set in the request body, otherwise a forced trigger, optionally skipping tests.
 */
private HttpResponse trigger(ApplicationId id, JobType type, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    boolean requireTests = ! requestObject.field("skipTests").asBool();
    boolean reTrigger = requestObject.field("reTrigger").asBool();
    // forceTrigger may trigger zero or several jobs; join their names for the message.
    String triggered = reTrigger
                       ? controller.applications().deploymentTrigger()
                                   .reTrigger(id, type).type().jobName()
                       : controller.applications().deploymentTrigger()
                                   .forceTrigger(id, type, request.getJDiscRequest().getUserPrincipal().getName(), requireTests)
                                   .stream().map(job -> job.type().jobName()).collect(joining(", "));
    return new MessageResponse(triggered.isEmpty() ? "Job " + type.jobName() + " for " + id + " not triggered"
                                                   : "Triggered " + triggered + " for " + id);
}
/** Suspends triggering of the given job for the maximum allowed pause period. */
private HttpResponse pause(ApplicationId id, JobType type) {
    controller.applications().deploymentTrigger()
              .pauseJob(id, type, controller.clock().instant().plus(DeploymentTrigger.maxPause));
    return new MessageResponse(type.jobName() + " for " + id + " paused for " + DeploymentTrigger.maxPause);
}
/** Lifts any pause previously set on the given job. */
private HttpResponse resume(ApplicationId id, JobType type) {
    var trigger = controller.applications().deploymentTrigger();
    trigger.resumeJob(id, type);
    return new MessageResponse(type.jobName() + " for " + id + " resumed");
}
/** Serializes the application-level view: links, versions, changes, instances, keys, metrics and activity. */
private void toSlime(Cursor object, com.yahoo.vespa.hosted.controller.Application application, HttpRequest request) {
    object.setString("tenant", application.id().tenant().value());
    object.setString("application", application.id().application().value());
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + application.id().tenant().value() +
                                             "/application/" + application.id().application().value() +
                                             "/job/",
                                             request.getUri()).toString());
    DeploymentStatus status = controller.jobController().deploymentStatus(application);
    application.latestVersion().ifPresent(version -> toSlime(version, object.setObject("latestVersion")));
    application.projectId().ifPresent(id -> object.setLong("projectId", id));
    // Change information is taken from the first instance only.
    application.instances().values().stream().findFirst().ifPresent(instance -> {
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));
    });
    application.majorVersion().ifPresent(majorVersion -> object.setLong("majorVersion", majorVersion));
    // The instance list may be restricted to production instances via a request parameter.
    Cursor instancesArray = object.setArray("instances");
    for (Instance instance : showOnlyProductionInstances(request) ? application.productionInstances().values()
                                                                  : application.instances().values())
        toSlime(instancesArray.addObject(), status, instance, application.deploymentSpec(), request);
    application.deployKeys().stream().map(KeyUtils::toPem).forEach(object.setArray("pemDeployKeys")::addString);
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.owner().ifPresent(owner -> object.setString("owner", owner.username()));
    application.deploymentIssueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Serializes one instance inside the application-level view: changes, change blockers, endpoints and deployments. */
private void toSlime(Cursor object, DeploymentStatus status, Instance instance, DeploymentSpec deploymentSpec, HttpRequest request) {
    object.setString("instance", instance.name().value());
    if (deploymentSpec.instance(instance.name()).isPresent()) {
        // NOTE(review): jobStatus is computed but never read — looks like dead code; confirm
        // sortedJobs has no side effects before removing.
        List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
                                              .steps(deploymentSpec.requireInstance(instance.name()))
                                              .sortedJobs(status.instanceJobs(instance.name()).values());
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));
        // Serialize the change-blocker windows declared in the deployment spec.
        Cursor changeBlockers = object.setArray("changeBlockers");
        deploymentSpec.instance(instance.name()).ifPresent(spec -> spec.changeBlocker().forEach(changeBlocker -> {
            Cursor changeBlockerObject = changeBlockers.addObject();
            changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
            changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
            changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
            Cursor days = changeBlockerObject.setArray("days");
            changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
            Cursor hours = changeBlockerObject.setArray("hours");
            changeBlocker.window().hours().forEach(hours::addLong);
        }));
    }
    globalEndpointsToSlime(object, instance);
    // Deployments are sorted by deployment-spec order when the spec declares this instance.
    List<Deployment> deployments = deploymentSpec.instance(instance.name())
                                                 .map(spec -> new DeploymentSteps(spec, controller::system))
                                                 .map(steps -> steps.sortedDeployments(instance.deployments().values()))
                                                 .orElse(List.copyOf(instance.deployments().values()));
    Cursor deploymentsArray = object.setArray("deployments");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = deploymentsArray.addObject();
        if (deployment.zone().environment() == Environment.prod && ! instance.rotations().isEmpty())
            toSlime(instance.rotations(), instance.rotationStatus(), deployment, deploymentObject);
        if (recurseOverDeployments(request))
            toSlime(deploymentObject, new DeploymentId(instance.id(), deployment.zone()), deployment, request);
        else {
            // Non-recursive: only identify the deployment and link to its own resource.
            deploymentObject.setString("environment", deployment.zone().environment().value());
            deploymentObject.setString("region", deployment.zone().region().value());
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/instance/" + instance.name().value() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value(),
                                                       request.getUri()).toString());
        }
    }
}
/** Serializes the instance's non-legacy, rotation-backed endpoint URLs, plus the id of its first rotation, if any. */
private void globalEndpointsToSlime(Cursor object, Instance instance) {
    // De-duplicate URLs while keeping insertion order.
    LinkedHashSet<String> endpointUrls = new LinkedHashSet<>();
    for (Endpoint endpoint : controller.routing().endpointsOf(instance.id())
                                       .requiresRotation()
                                       .not().legacy()
                                       .asList())
        endpointUrls.add(endpoint.url().toString());
    Cursor rotationsArray = object.setArray("globalRotations");
    for (String url : endpointUrls)
        rotationsArray.addString(url);
    if ( ! instance.rotations().isEmpty())
        object.setString("rotationId", instance.rotations().get(0).rotationId().asString());
}
/** Serializes the instance-level view: versions, changes, change blockers, endpoints, deployments, keys, metrics and activity. */
private void toSlime(Cursor object, Instance instance, DeploymentStatus status, HttpRequest request) {
    com.yahoo.vespa.hosted.controller.Application application = status.application();
    object.setString("tenant", instance.id().tenant().value());
    object.setString("application", instance.id().application().value());
    object.setString("instance", instance.id().instance().value());
    object.setString("deployments", withPath("/application/v4" +
                                             "/tenant/" + instance.id().tenant().value() +
                                             "/application/" + instance.id().application().value() +
                                             "/instance/" + instance.id().instance().value() + "/job/",
                                             request.getUri()).toString());
    application.latestVersion().ifPresent(version -> {
        sourceRevisionToSlime(version.source(), object.setObject("source"));
        version.sourceUrl().ifPresent(url -> object.setString("sourceUrl", url));
        version.commit().ifPresent(commit -> object.setString("commit", commit));
    });
    application.projectId().ifPresent(id -> object.setLong("projectId", id));
    if (application.deploymentSpec().instance(instance.name()).isPresent()) {
        // NOTE(review): jobStatus is computed but never read — looks like dead code; confirm
        // sortedJobs has no side effects before removing.
        List<JobStatus> jobStatus = controller.applications().deploymentTrigger()
                                              .steps(application.deploymentSpec().requireInstance(instance.name()))
                                              .sortedJobs(status.instanceJobs(instance.name()).values());
        if ( ! instance.change().isEmpty())
            toSlime(object.setObject("deploying"), instance.change());
        if ( ! status.outstandingChange(instance.name()).isEmpty())
            toSlime(object.setObject("outstandingChange"), status.outstandingChange(instance.name()));
        // Serialize the change-blocker windows declared in the deployment spec.
        Cursor changeBlockers = object.setArray("changeBlockers");
        application.deploymentSpec().instance(instance.name()).ifPresent(spec -> spec.changeBlocker().forEach(changeBlocker -> {
            Cursor changeBlockerObject = changeBlockers.addObject();
            changeBlockerObject.setBool("versions", changeBlocker.blocksVersions());
            changeBlockerObject.setBool("revisions", changeBlocker.blocksRevisions());
            changeBlockerObject.setString("timeZone", changeBlocker.window().zone().getId());
            Cursor days = changeBlockerObject.setArray("days");
            changeBlocker.window().days().stream().map(DayOfWeek::getValue).forEach(days::addLong);
            Cursor hours = changeBlockerObject.setArray("hours");
            changeBlocker.window().hours().forEach(hours::addLong);
        }));
    }
    application.majorVersion().ifPresent(majorVersion -> object.setLong("majorVersion", majorVersion));
    globalEndpointsToSlime(object, instance);
    // Deployments are sorted by deployment-spec order when the spec declares this instance.
    List<Deployment> deployments =
            application.deploymentSpec().instance(instance.name())
                       .map(spec -> new DeploymentSteps(spec, controller::system))
                       .map(steps -> steps.sortedDeployments(instance.deployments().values()))
                       .orElse(List.copyOf(instance.deployments().values()));
    Cursor instancesArray = object.setArray("instances");
    for (Deployment deployment : deployments) {
        Cursor deploymentObject = instancesArray.addObject();
        if (deployment.zone().environment() == Environment.prod) {
            if (instance.rotations().size() == 1) {
                toSlime(instance.rotationStatus().of(instance.rotations().get(0).rotationId(), deployment),
                        deploymentObject);
            }
            if ( ! recurseOverDeployments(request) && ! instance.rotations().isEmpty()) {
                toSlime(instance.rotations(), instance.rotationStatus(), deployment, deploymentObject);
            }
        }
        if (recurseOverDeployments(request))
            toSlime(deploymentObject, new DeploymentId(instance.id(), deployment.zone()), deployment, request);
        else {
            // Non-recursive: only identify the deployment and link to its own resource.
            deploymentObject.setString("environment", deployment.zone().environment().value());
            deploymentObject.setString("region", deployment.zone().region().value());
            deploymentObject.setString("instance", instance.id().instance().value());
            deploymentObject.setString("url", withPath(request.getUri().getPath() +
                                                       "/environment/" + deployment.zone().environment().value() +
                                                       "/region/" + deployment.zone().region().value(),
                                                       request.getUri()).toString());
        }
    }
    // Also list declared production deployment zones this instance is not yet deployed in.
    status.jobSteps().keySet().stream()
          .filter(job -> job.application().instance().equals(instance.name()))
          .filter(job -> job.type().isProduction() && job.type().isDeployment())
          .map(job -> job.type().zone(controller.system()))
          .filter(zone -> ! instance.deployments().containsKey(zone))
          .forEach(zone -> {
              Cursor deploymentObject = instancesArray.addObject();
              deploymentObject.setString("environment", zone.environment().value());
              deploymentObject.setString("region", zone.region().value());
          });
    // Both singular ("pemDeployKey") and plural forms are emitted.
    application.deployKeys().stream().findFirst().ifPresent(key -> object.setString("pemDeployKey", KeyUtils.toPem(key)));
    application.deployKeys().stream().map(KeyUtils::toPem).forEach(object.setArray("pemDeployKeys")::addString);
    Cursor metricsObject = object.setObject("metrics");
    metricsObject.setDouble("queryServiceQuality", application.metrics().queryServiceQuality());
    metricsObject.setDouble("writeServiceQuality", application.metrics().writeServiceQuality());
    Cursor activity = object.setObject("activity");
    application.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried", instant.toEpochMilli()));
    application.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten", instant.toEpochMilli()));
    application.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    application.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    application.ownershipIssueId().ifPresent(issueId -> object.setString("ownershipIssueId", issueId.value()));
    application.owner().ifPresent(owner -> object.setString("owner", owner.username()));
    application.deploymentIssueId().ifPresent(issueId -> object.setString("deploymentIssueId", issueId.value()));
}
/** Renders a single deployment of an instance; throws NotExistsException if the instance is not deployed in the zone. */
private HttpResponse deployment(String tenantName, String applicationName, String instanceName, String environment,
                                String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Instance instance = controller.applications().getInstance(id)
                                  .orElseThrow(() -> new NotExistsException(id + " not found"));
    DeploymentId deploymentId = new DeploymentId(instance.id(),
                                                 requireZone(environment, region));
    Deployment deployment = instance.deployments().get(deploymentId.zoneId());
    if (deployment == null)
        throw new NotExistsException(instance + " is not deployed in " + deploymentId.zoneId());
    Slime slime = new Slime();
    toSlime(slime.setObject(), deploymentId, deployment, request);
    return new SlimeJsonResponse(slime);
}
/** Serializes a change: its platform version, and its application revision when that revision is known. */
private void toSlime(Cursor object, Change change) {
    if (change.platform().isPresent())
        object.setString("version", change.platform().get().toString());
    change.application().ifPresent(version -> {
        if ( ! version.isUnknown())
            toSlime(version, object.setObject("revision"));
    });
}
/** Serializes one endpoint: cluster, TLS flag, URL, scope and routing method. */
private void toSlime(Endpoint endpoint, Cursor object) {
    object.setString("cluster", endpoint.cluster().value());
    object.setBool("tls", endpoint.tls());
    object.setString("url", endpoint.url().toString());
    object.setString("scope", endpointScopeString(endpoint.scope()));
    object.setString("routingMethod", routingMethodString(endpoint.routingMethod()));
}
/** Serializes one deployment in full: endpoints, links, versions, job status, rotations, activity and metrics. */
private void toSlime(Cursor response, DeploymentId deploymentId, Deployment deployment, HttpRequest request) {
    response.setString("tenant", deploymentId.applicationId().tenant().value());
    response.setString("application", deploymentId.applicationId().application().value());
    response.setString("instance", deploymentId.applicationId().instance().value());
    response.setString("environment", deploymentId.zoneId().environment().value());
    response.setString("region", deploymentId.zoneId().region().value());
    var application = controller.applications().requireApplication(TenantAndApplicationId.from(deploymentId.applicationId()));
    // Zone endpoints are listed first, then global endpoints targeting this zone.
    var endpointArray = response.setArray("endpoints");
    EndpointList zoneEndpoints = controller.routing().endpointsOf(deploymentId)
                                           .scope(Endpoint.Scope.zone)
                                           .not().legacy();
    for (var endpoint : controller.routing().directEndpoints(zoneEndpoints, deploymentId.applicationId())) {
        toSlime(endpoint, endpointArray.addObject());
    }
    EndpointList globalEndpoints = controller.routing().endpointsOf(application, deploymentId.applicationId().instance())
                                             .not().legacy()
                                             .targets(deploymentId.zoneId());
    for (var endpoint : controller.routing().directEndpoints(globalEndpoints, deploymentId.applicationId())) {
        toSlime(endpoint, endpointArray.addObject());
    }
    response.setString("clusters", withPath(toPath(deploymentId) + "/clusters", request.getUri()).toString());
    response.setString("nodes", withPathAndQuery("/zone/v2/" + deploymentId.zoneId().environment() + "/" + deploymentId.zoneId().region() + "/nodes/v2/node/", "recursive=true&application=" + deploymentId.applicationId().tenant() + "." + deploymentId.applicationId().application() + "." + deploymentId.applicationId().instance(), request.getUri()).toString());
    response.setString("yamasUrl", monitoringSystemUri(deploymentId).toString());
    response.setString("version", deployment.version().toFullString());
    response.setString("revision", deployment.applicationVersion().id());
    response.setLong("deployTimeEpochMs", deployment.at().toEpochMilli());
    // Expiry is reported only when the zone has a deployment time-to-live configured.
    controller.zoneRegistry().getDeploymentTimeToLive(deploymentId.zoneId())
              .ifPresent(deploymentTimeToLive -> response.setLong("expiryTimeEpochMs", deployment.at().plus(deploymentTimeToLive).toEpochMilli()));
    DeploymentStatus status = controller.jobController().deploymentStatus(application);
    application.projectId().ifPresent(i -> response.setString("screwdriverId", String.valueOf(i)));
    sourceRevisionToSlime(deployment.applicationVersion().source(), response);
    var instance = application.instances().get(deploymentId.applicationId().instance());
    if (instance != null) {
        if (!instance.rotations().isEmpty() && deployment.zone().environment() == Environment.prod)
            toSlime(instance.rotations(), instance.rotationStatus(), deployment, response);
        // Derive a coarse job status (complete/pending/running) for the deployment job covering this zone, if any.
        JobType.from(controller.system(), deployment.zone())
               .map(type -> new JobId(instance.id(), type))
               .map(status.jobSteps()::get)
               .ifPresent(stepStatus -> {
                   JobControllerApiHandlerHelper.applicationVersionToSlime(
                           response.setObject("applicationVersion"), deployment.applicationVersion());
                   if (!status.jobsToRun().containsKey(stepStatus.job().get()))
                       response.setString("status", "complete");
                   else if (stepStatus.readyAt(instance.change()).map(controller.clock().instant()::isBefore).orElse(true))
                       response.setString("status", "pending");
                   else response.setString("status", "running");
               });
    }
    Cursor activity = response.setObject("activity");
    deployment.activity().lastQueried().ifPresent(instant -> activity.setLong("lastQueried",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastWritten().ifPresent(instant -> activity.setLong("lastWritten",
                                                                              instant.toEpochMilli()));
    deployment.activity().lastQueriesPerSecond().ifPresent(value -> activity.setDouble("lastQueriesPerSecond", value));
    deployment.activity().lastWritesPerSecond().ifPresent(value -> activity.setDouble("lastWritesPerSecond", value));
    DeploymentMetrics metrics = deployment.metrics();
    Cursor metricsObject = response.setObject("metrics");
    metricsObject.setDouble("queriesPerSecond", metrics.queriesPerSecond());
    metricsObject.setDouble("writesPerSecond", metrics.writesPerSecond());
    metricsObject.setDouble("documentCount", metrics.documentCount());
    metricsObject.setDouble("queryLatencyMillis", metrics.queryLatencyMillis());
    metricsObject.setDouble("writeLatencyMillis", metrics.writeLatencyMillis());
    metrics.instant().ifPresent(instant -> metricsObject.setLong("lastUpdated", instant.toEpochMilli()));
}
/** Serializes an application version; unknown versions are left as an empty object. */
private void toSlime(ApplicationVersion applicationVersion, Cursor object) {
    if (applicationVersion.isUnknown()) return;
    object.setLong("buildNumber", applicationVersion.buildNumber().getAsLong());
    object.setString("hash", applicationVersion.id());
    sourceRevisionToSlime(applicationVersion.source(), object.setObject("source"));
    applicationVersion.sourceUrl().ifPresent(url -> object.setString("sourceUrl", url));
    applicationVersion.commit().ifPresent(commit -> object.setString("commit", commit));
}
/** Serializes a source revision's repository, branch and commit; writes nothing when the revision is absent. */
private void sourceRevisionToSlime(Optional<SourceRevision> revision, Cursor object) {
    revision.ifPresent(source -> {
        object.setString("gitRepository", source.repository());
        object.setString("gitBranch", source.branch());
        object.setString("gitCommit", source.commit());
    });
}
/** Wraps the rotation state in a "bcpStatus" object, as expected by clients of this API. */
private void toSlime(RotationState state, Cursor object) {
    object.setObject("bcpStatus").setString("rotationStatus", rotationStateString(state));
}
/** Serializes one "endpointStatus" entry per assigned rotation, with its status for the given deployment. */
private void toSlime(List<AssignedRotation> rotations, RotationStatus status, Deployment deployment, Cursor object) {
    Cursor statusArray = object.setArray("endpointStatus");
    for (AssignedRotation assignedRotation : rotations) {
        Cursor entry = statusArray.addObject();
        var targets = status.of(assignedRotation.rotationId());
        entry.setString("endpointId", assignedRotation.endpointId().id());
        entry.setString("rotationId", assignedRotation.rotationId().asString());
        entry.setString("clusterId", assignedRotation.clusterId().value());
        entry.setString("status", rotationStateString(status.of(assignedRotation.rotationId(), deployment)));
        entry.setLong("lastUpdated", targets.lastUpdated().toEpochMilli());
    }
}
/** Returns the monitoring dashboard URI for the given deployment, as configured in the zone registry. */
private URI monitoringSystemUri(DeploymentId deploymentId) {
return controller.zoneRegistry().getMonitoringSystemUri(deploymentId);
}
/**
 * Returns a non-broken, released version at least as old as the oldest platform the given application is on.
 *
 * If no known version is applicable, the newest version at least as old as the oldest platform is selected,
 * among all versions released for this system. If no such version exists, throws an IllegalStateException.
 */
private Version compileVersion(TenantAndApplicationId id) {
Version oldestPlatform = controller.applications().oldestInstalledPlatform(id);
VersionStatus versionStatus = controller.readVersionStatus();
// First choice: a released version with at least "low" confidence, no newer than the oldest deployed platform.
return versionStatus.versions().stream()
.filter(version -> version.confidence().equalOrHigherThan(VespaVersion.Confidence.low))
.filter(VespaVersion::isReleased)
.map(VespaVersion::versionNumber)
.filter(version -> ! version.isAfter(oldestPlatform))
.max(Comparator.naturalOrder())
// Fallback: the newest version published to the maven repository which this system's version status
// does not already track, still no newer than the oldest deployed platform.
.orElseGet(() -> controller.mavenRepository().metadata().versions().stream()
.filter(version -> ! version.isAfter(oldestPlatform))
.filter(version -> ! versionStatus.versions().stream()
.map(VespaVersion::versionNumber)
.collect(Collectors.toSet()).contains(version))
.max(Comparator.naturalOrder())
.orElseThrow(() -> new IllegalStateException("No available releases of " +
controller.mavenRepository().artifactId())));
}
/** Takes the deployment in the given zone in or out of service, for both rotation- and policy-backed global endpoints. */
private HttpResponse setGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region, boolean inService, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    Instance instance = controller.applications().requireInstance(id);
    ZoneId zone = requireZone(environment, region);
    if ( ! instance.deployments().containsKey(zone))
        throw new NotExistsException(instance + " has no deployment in " + zone);

    DeploymentId deploymentId = new DeploymentId(instance.id(), zone);
    setGlobalRotationStatus(deploymentId, inService, request);
    setGlobalEndpointStatus(deploymentId, inService, request);
    return new MessageResponse(String.format("Successfully set %s in %s %s service",
                                             instance.id().toShortString(), zone, inService ? "in" : "out of"));
}
/** Sets the global endpoint status for the given deployment. This only applies to global endpoints backed by a cloud service. */
private void setGlobalEndpointStatus(DeploymentId deployment, boolean inService, HttpRequest request) {
    GlobalRouting.Status status = inService ? GlobalRouting.Status.in : GlobalRouting.Status.out;
    GlobalRouting.Agent agent = isOperator(request) ? GlobalRouting.Agent.operator : GlobalRouting.Agent.tenant;
    controller.routing().policies().setGlobalRoutingStatus(deployment, status, agent);
}
/** Sets the global rotation status for the given deployment. This only applies to global endpoints backed by a rotation. */
private void setGlobalRotationStatus(DeploymentId deployment, boolean inService, HttpRequest request) {
    Inspector requestData = toSlime(request.getData()).get();
    String reason = mandatory("reason", requestData).asString();  // A reason is required for auditability.
    String agent = (isOperator(request) ? GlobalRouting.Agent.operator : GlobalRouting.Agent.tenant).name();
    long timestamp = controller.clock().instant().getEpochSecond();
    EndpointStatus.Status status = inService ? EndpointStatus.Status.in : EndpointStatus.Status.out;
    controller.routing().setGlobalRotationStatus(deployment, new EndpointStatus(status, reason, agent, timestamp));
}
/** Returns the rotation override status for each global endpoint of the given deployment. */
private HttpResponse getGlobalRotationOverride(String tenantName, String applicationName, String instanceName, String environment, String region) {
DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
requireZone(environment, region));
Slime slime = new Slime();
Cursor array = slime.setObject().setArray("globalrotationoverride");
controller.routing().globalRotationStatus(deploymentId)
.forEach((endpoint, status) -> {
// NOTE(review): each endpoint deliberately contributes TWO array entries — the upstream id as a
// bare string, immediately followed by a status object. Looks like a legacy wire format clients
// depend on; confirm before normalising.
array.addString(endpoint.upstreamIdOf(deploymentId));
Cursor statusObject = array.addObject();
statusObject.setString("status", status.getStatus().name());
statusObject.setString("reason", status.getReason() == null ? "" : status.getReason());
statusObject.setString("agent", status.getAgent() == null ? "" : status.getAgent());
statusObject.setLong("timestamp", status.getEpoch());
});
return new SlimeJsonResponse(slime);
}
/** Returns the rotation ("bcpStatus") of the given endpoint of a deployment. */
private HttpResponse rotationStatus(String tenantName, String applicationName, String instanceName, String environment, String region, Optional<String> endpointId) {
    Instance instance = controller.applications().requireInstance(ApplicationId.from(tenantName, applicationName, instanceName));
    ZoneId zone = requireZone(environment, region);
    RotationId rotation = findRotationId(instance, endpointId);
    Deployment deployment = instance.deployments().get(zone);
    if (deployment == null)
        throw new NotExistsException(instance + " has no deployment in " + zone);

    Slime slime = new Slime();
    toSlime(instance.rotationStatus().of(rotation, deployment), slime.setObject());
    return new SlimeJsonResponse(slime);
}
/**
 * Returns resource consumption for the given application: the current rate, this and last month's
 * aggregates, and a per-instance history of cpu/mem/disk snapshots.
 *
 * The three aggregate objects and the three per-snapshot data points previously repeated the same
 * serialisation code inline; that duplication is factored into the two private helpers below.
 */
private HttpResponse metering(String tenant, String application, HttpRequest request) {
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    MeteringData meteringData = controller.serviceRegistry()
                                          .meteringService()
                                          .getMeteringData(TenantName.from(tenant), ApplicationName.from(application));

    // The current rate and the monthly aggregates all share the same cpu/mem/disk shape.
    setResources(root.setObject("currentrate"), meteringData.getCurrentSnapshot());
    setResources(root.setObject("thismonth"), meteringData.getThisMonth());
    setResources(root.setObject("lastmonth"), meteringData.getLastMonth());

    Map<ApplicationId, List<ResourceSnapshot>> history = meteringData.getSnapshotHistory();
    Cursor details = root.setObject("details");
    Cursor detailsCpu = details.setObject("cpu");
    Cursor detailsMem = details.setObject("mem");
    Cursor detailsDisk = details.setObject("disk");
    history.forEach((applicationId, resources) -> {
        String instanceName = applicationId.instance().value();
        Cursor detailsCpuData = detailsCpu.setObject(instanceName).setArray("data");
        Cursor detailsMemData = detailsMem.setObject(instanceName).setArray("data");
        Cursor detailsDiskData = detailsDisk.setObject(instanceName).setArray("data");
        resources.forEach(snapshot -> {
            long timestamp = snapshot.getTimestamp().toEpochMilli();
            addResourcePoint(detailsCpuData, timestamp, snapshot.getCpuCores());
            addResourcePoint(detailsMemData, timestamp, snapshot.getMemoryGb());
            addResourcePoint(detailsDiskData, timestamp, snapshot.getDiskGb());
        });
    });
    return new SlimeJsonResponse(slime);
}

/** Writes the cpu/mem/disk fields of the given allocation to the given object. */
private static void setResources(Cursor object, ResourceAllocation allocation) {
    object.setDouble("cpu", allocation.getCpuCores());
    object.setDouble("mem", allocation.getMemoryGb());
    object.setDouble("disk", allocation.getDiskGb());
}

/** Adds a { "unixms": ..., "value": ... } data point to the given array. */
private static void addResourcePoint(Cursor array, long timestampMillis, double value) {
    Cursor point = array.addObject();
    point.setLong("unixms", timestampMillis);
    point.setDouble("value", value);
}
/** Returns the pending change (platform and/or application version, and pin status) for the given instance. */
private HttpResponse deploying(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    Instance instance = controller.applications().requireInstance(ApplicationId.from(tenantName, applicationName, instanceName));
    Slime slime = new Slime();
    Cursor root = slime.setObject();
    Change change = instance.change();
    if ( ! change.isEmpty()) {
        change.platform().ifPresent(platform -> root.setString("platform", platform.toString()));
        change.application().ifPresent(revision -> root.setString("application", revision.id()));
        root.setBool("pinned", change.isPinned());
    }
    return new SlimeJsonResponse(slime);
}
/** Returns whether orchestration of the given deployment is currently suspended. */
private HttpResponse suspended(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    Slime slime = new Slime();
    slime.setObject().setBool("suspended", controller.applications().isSuspended(deploymentId));
    return new SlimeJsonResponse(slime);
}
/** Lists the services of the given deployment, as reported by the config server. */
private HttpResponse services(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationView applicationView = controller.getApplicationView(tenantName, applicationName, instanceName, environment, region);
    ZoneId zone = requireZone(environment, region);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ServiceApiResponse response = new ServiceApiResponse(zone,
                                                         id,
                                                         controller.zoneRegistry().getConfigServerApiUris(zone),
                                                         request.getUri());
    response.setResponse(applicationView);
    return response;
}
/**
 * Proxies a service API request for the given deployment to the config server.
 *
 * Cluster controller status pages are returned as HTML; everything else as JSON.
 */
private HttpResponse service(String tenantName, String applicationName, String instanceName, String environment, String region, String serviceName, String restPath, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName), requireZone(environment, region));
    if ("container-clustercontroller".equals((serviceName)) && restPath.contains("/status/")) {
        String[] parts = restPath.split("/status/");
        // String.split discards trailing empty strings, so a path like "host/status/" yields only one
        // element; guard before indexing to fail with 400 instead of an ArrayIndexOutOfBoundsException.
        if (parts.length != 2)
            throw new IllegalArgumentException("Expected a path of the form <hostname>/status/<path>, got '" + restPath + "'");
        String result = controller.serviceRegistry().configServer().getClusterControllerStatus(deploymentId, parts[0], parts[1]);
        return new HtmlResponse(result);
    }
    Map<?,?> result = controller.serviceRegistry().configServer().getServiceApiResponse(deploymentId, serviceName, restPath);
    ServiceApiResponse response = new ServiceApiResponse(deploymentId.zoneId(),
                                                         deploymentId.applicationId(),
                                                         controller.zoneRegistry().getConfigServerApiUris(deploymentId.zoneId()),
                                                         request.getUri());
    response.setResponse(result, serviceName, restPath);
    return response;
}
/** Proxies an application-package content request for the given deployment to the config server. */
private HttpResponse content(String tenantName, String applicationName, String instanceName, String environment, String region, String restPath, HttpRequest request) {
DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName), requireZone(environment, region));
return controller.serviceRegistry().configServer().getApplicationPackageContent(deploymentId, "/" + restPath, request.getUri());
}
/** Updates an existing tenant from the request body, then returns its new state. */
private HttpResponse updateTenant(String tenantName, HttpRequest request) {
    getTenantOrThrow(tenantName); // 404 if absent, rather than creating it implicitly.
    TenantName name = TenantName.from(tenantName);
    Inspector requestObject = toSlime(request.getData()).get();
    controller.tenants().update(accessControlRequests.specification(name, requestObject),
                                accessControlRequests.credentials(name, requestObject, request.getJDiscRequest()));
    return tenant(controller.tenants().require(name), request);
}
/** Creates a new tenant from the request body, then returns its state. */
private HttpResponse createTenant(String tenantName, HttpRequest request) {
    TenantName name = TenantName.from(tenantName);
    Inspector requestObject = toSlime(request.getData()).get();
    controller.tenants().create(accessControlRequests.specification(name, requestObject),
                                accessControlRequests.credentials(name, requestObject, request.getJDiscRequest()));
    return tenant(controller.tenants().require(name), request);
}
/** Creates a new application under the given tenant, and returns its serialised form. */
private HttpResponse createApplication(String tenantName, String applicationName, HttpRequest request) {
    Inspector requestObject = toSlime(request.getData()).get();
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    Credentials credentials = accessControlRequests.credentials(id.tenant(), requestObject, request.getJDiscRequest());
    // The created Application was previously captured in an unused local; the response only needs the id.
    controller.applications().createApplication(id, credentials);
    Slime slime = new Slime();
    toSlime(id, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Creates a new instance of the given application, creating the application itself first if needed. */
private HttpResponse createInstance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    TenantAndApplicationId applicationId = TenantAndApplicationId.from(tenantName, applicationName);
    if ( ! controller.applications().getApplication(applicationId).isPresent())
        createApplication(tenantName, applicationName, request);

    ApplicationId instanceId = applicationId.instance(instanceName);
    controller.applications().createInstance(instanceId);
    Slime slime = new Slime();
    toSlime(instanceId, slime.setObject(), request);
    return new SlimeJsonResponse(slime);
}
/** Trigger deployment of the given Vespa version if a valid one is given, e.g., "7.8.9". */
private HttpResponse deployPlatform(String tenantName, String applicationName, String instanceName, boolean pin, HttpRequest request) {
request = controller.auditLogger().log(request);
// The request body is the bare version string; the empty version means "current system version".
String versionString = readToString(request.getData());
ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
StringBuilder response = new StringBuilder();
controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
Version version = Version.fromString(versionString);
VersionStatus versionStatus = controller.readVersionStatus();
if (version.equals(Version.emptyVersion))
version = controller.systemVersion(versionStatus);
// Only versions currently active in this system may be targeted.
if (!versionStatus.isActive(version))
throw new IllegalArgumentException("Cannot trigger deployment of version '" + version + "': " +
"Version is not active in this system. " +
"Active versions: " + versionStatus.versions()
.stream()
.map(VespaVersion::versionNumber)
.map(Version::toString)
.collect(joining(", ")));
Change change = Change.of(version);
if (pin)
change = change.withPin(); // Pinning prevents the upgrader from later moving off this version.
controller.applications().deploymentTrigger().forceChange(id, change);
response.append("Triggered ").append(change).append(" for ").append(id);
});
return new MessageResponse(response.toString());
}
/** Trigger deployment to the last known application package for the given application. */
private HttpResponse deployApplication(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    controller.auditLogger().log(request);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    StringBuilder response = new StringBuilder();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        // Previously a bare Optional.get(), which threw an opaque NoSuchElementException when the
        // application had no submitted package; fail with an explanatory 400 instead.
        ApplicationVersion version = application.get().latestVersion()
                .orElseThrow(() -> new IllegalArgumentException("No application package has been submitted for " + id));
        Change change = Change.of(version);
        controller.applications().deploymentTrigger().forceChange(id, change);
        response.append("Triggered ").append(change).append(" for ").append(id);
    });
    return new MessageResponse(response.toString());
}
/** Cancels an ongoing change for the given application, e.g., everything with {"cancel":"all"}. */
private HttpResponse cancelDeploy(String tenantName, String applicationName, String instanceName, String choice) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    StringBuilder response = new StringBuilder();
    controller.applications().lockApplicationOrThrow(TenantAndApplicationId.from(id), application -> {
        Change current = application.get().require(id.instance()).change();
        if (current.isEmpty()) {
            response.append("No deployment in progress for ").append(id).append(" at this time");
            return;
        }
        controller.applications().deploymentTrigger().cancelChange(id, ChangesToCancel.valueOf(choice.toUpperCase()));
        response.append("Changed deployment from '").append(current)
                .append("' to '").append(controller.applications().requireInstance(id).change())
                .append("' for ").append(id);
    });
    return new MessageResponse(response.toString());
}
/** Schedule reindexing of an application, or a subset of clusters, possibly on a subset of documents. */
private HttpResponse reindex(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    // "clusterId" and "documentType" are optional comma-separated filters; blank entries are ignored.
    List<String> clusterNames = Optional.ofNullable(request.getProperty("clusterId")).stream()
                                        .flatMap(clusters -> Stream.of(clusters.split(",")))
                                        .filter(cluster -> ! cluster.isBlank())
                                        .collect(toUnmodifiableList());
    List<String> documentTypes = Optional.ofNullable(request.getProperty("documentType")).stream()
                                         .flatMap(types -> Stream.of(types.split(",")))
                                         .filter(type -> ! type.isBlank())
                                         .collect(toUnmodifiableList());
    controller.applications().reindex(id, zone, clusterNames, documentTypes, request.getBooleanProperty("indexedOnly"));
    // The two suffixes are independent: the document-type clause was previously nested inside the
    // cluster clause, so given types were omitted from the message whenever no clusters were given.
    return new MessageResponse("Requested reindexing of " + id + " in " + zone +
                               (clusterNames.isEmpty() ? "" : ", on clusters " + String.join(", ", clusterNames)) +
                               (documentTypes.isEmpty() ? "" : ", for types " + String.join(", ", documentTypes)));
}
/** Gets reindexing status of an application in a zone. */
private HttpResponse getReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
ZoneId zone = requireZone(environment, region);
ApplicationReindexing reindexing = controller.applications().applicationReindexing(id, zone);
Slime slime = new Slime();
Cursor root = slime.setObject();
root.setBool("enabled", reindexing.enabled());
// Clusters are listed sorted by name, each with its pending and ready document types, also sorted.
Cursor clustersArray = root.setArray("clusters");
reindexing.clusters().entrySet().stream().sorted(comparingByKey())
.forEach(cluster -> {
Cursor clusterObject = clustersArray.addObject();
clusterObject.setString("name", cluster.getKey());
// Pending: document types whose reindexing has not yet started, with the config generation requiring it.
Cursor pendingArray = clusterObject.setArray("pending");
cluster.getValue().pending().entrySet().stream().sorted(comparingByKey())
.forEach(pending -> {
Cursor pendingObject = pendingArray.addObject();
pendingObject.setString("type", pending.getKey());
pendingObject.setLong("requiredGeneration", pending.getValue());
});
// Ready: document types with a reindexing status (timing, state, message, progress); see setStatus.
Cursor readyArray = clusterObject.setArray("ready");
cluster.getValue().ready().entrySet().stream().sorted(comparingByKey())
.forEach(ready -> {
Cursor readyObject = readyArray.addObject();
readyObject.setString("type", ready.getKey());
setStatus(readyObject, ready.getValue());
});
});
return new SlimeJsonResponse(slime);
}
/** Fills the given object with the timing, state, message and progress fields of a reindexing status. */
void setStatus(Cursor statusObject, ApplicationReindexing.Status status) {
    status.readyAt().ifPresent(at -> statusObject.setLong("readyAtMillis", at.toEpochMilli()));
    status.startedAt().ifPresent(at -> statusObject.setLong("startedAtMillis", at.toEpochMilli()));
    status.endedAt().ifPresent(at -> statusObject.setLong("endedAtMillis", at.toEpochMilli()));
    // States without a wire name (toString returns null) are omitted from the response.
    status.state().map(ApplicationApiHandler::toString).ifPresent(name -> statusObject.setString("state", name));
    status.message().ifPresent(text -> statusObject.setString("message", text));
    status.progress().ifPresent(fraction -> statusObject.setDouble("progress", fraction));
}
/** Returns the wire name of the given reindexing state, or null if it has none (callers drop null). */
private static String toString(ApplicationReindexing.State state) {
    if (state == ApplicationReindexing.State.PENDING) return "pending";
    if (state == ApplicationReindexing.State.RUNNING) return "running";
    if (state == ApplicationReindexing.State.FAILED) return "failed";
    if (state == ApplicationReindexing.State.SUCCESSFUL) return "successful";
    return null;
}
/** Enables reindexing of an application in a zone. */
private HttpResponse enableReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ZoneId zone = requireZone(environment, region);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    controller.applications().enableReindexing(id, zone);
    return new MessageResponse("Enabled reindexing of " + id + " in " + zone);
}
/** Disables reindexing of an application in a zone. */
private HttpResponse disableReindexing(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ZoneId zone = requireZone(environment, region);
    ApplicationId id = ApplicationId.from(tenantName, applicationName, instanceName);
    controller.applications().disableReindexing(id, zone);
    return new MessageResponse("Disabled reindexing of " + id + " in " + zone);
}
/** Schedules a restart of (a filtered subset of) the nodes of the given deployment. */
private HttpResponse restart(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    // All three filters are optional query parameters; absent ones match everything.
    Optional<HostName> hostname = Optional.ofNullable(request.getProperty("hostname")).map(HostName::from);
    Optional<ClusterSpec.Type> clusterType = Optional.ofNullable(request.getProperty("clusterType")).map(ClusterSpec.Type::from);
    Optional<ClusterSpec.Id> clusterId = Optional.ofNullable(request.getProperty("clusterId")).map(ClusterSpec.Id::from);
    controller.applications().restart(deploymentId, new RestartFilter().withHostName(hostname)
                                                                       .withClusterType(clusterType)
                                                                       .withClusterId(clusterId));
    return new MessageResponse("Requested restart of " + deploymentId);
}
/** Sets the suspension status of the given deployment. */
private HttpResponse suspend(String tenantName, String applicationName, String instanceName, String environment, String region, boolean suspend) {
    DeploymentId deploymentId = new DeploymentId(ApplicationId.from(tenantName, applicationName, instanceName),
                                                 requireZone(environment, region));
    controller.applications().setSuspension(deploymentId, suspend);
    String verb = suspend ? "Suspended" : "Resumed";
    return new MessageResponse(verb + " orchestration of " + deploymentId);
}
/** Starts a deployment run for the given job, from an uploaded application package; non-operators may only target manually deployed environments. */
private HttpResponse jobDeploy(ApplicationId id, JobType type, HttpRequest request) {
if ( ! type.environment().isManuallyDeployed() && ! isOperator(request))
throw new IllegalArgumentException("Direct deployments are only allowed to manually deployed environments.");
Map<String, byte[]> dataParts = parseDataParts(request);
if ( ! dataParts.containsKey("applicationZip"))
throw new IllegalArgumentException("Missing required form part 'applicationZip'");
// NOTE(review): the presence check uses the literal "applicationZip" while the lookup uses
// EnvironmentResource.APPLICATION_ZIP — presumably the same value; confirm and unify.
ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP));
controller.applications().verifyApplicationIdentityConfiguration(id.tenant(),
Optional.of(id.instance()),
Optional.of(type.zone(controller.system())),
applicationPackage,
Optional.of(requireUserPrincipal(request)));
// An explicit platform version may be given as "vespaVersion" in the optional "deployOptions" part.
Optional<Version> version = Optional.ofNullable(dataParts.get("deployOptions"))
.map(json -> SlimeUtils.jsonToSlime(json).get())
.flatMap(options -> optional("vespaVersion", options))
.map(Version::fromString);
controller.jobController().deploy(id, type, version, applicationPackage);
RunId runId = controller.jobController().last(id, type).get().id();
Slime slime = new Slime();
Cursor rootObject = slime.setObject();
rootObject.setString("message", "Deployment started in " + runId +
". This may take about 15 minutes the first time.");
rootObject.setLong("run", runId.number());
return new SlimeJsonResponse(slime);
}
/**
 * Deploys to the given zone: either an uploaded application package, a previously submitted
 * revision identified by source revision + build number, or — for direct redeploys — whatever
 * is already deployed in the zone.
 */
private HttpResponse deploy(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
ApplicationId applicationId = ApplicationId.from(tenantName, applicationName, instanceName);
ZoneId zone = requireZone(environment, region);
Map<String, byte[]> dataParts = parseDataParts(request);
if ( ! dataParts.containsKey("deployOptions"))
return ErrorResponse.badRequest("Missing required form part 'deployOptions'");
Inspector deployOptions = SlimeUtils.jsonToSlime(dataParts.get("deployOptions")).get();
/*
* Special handling of the proxy application (the only system application with an application package)
* Setting any other deployOptions here is not supported for now (e.g. specifying version), but
* this might be handy later to handle emergency downgrades.
*/
boolean isZoneApplication = SystemApplication.proxy.id().equals(applicationId);
if (isZoneApplication) {
// The proxy application always deploys with the current system version; explicit versions are rejected.
String versionStr = deployOptions.field("vespaVersion").asString();
boolean versionPresent = !versionStr.isEmpty() && !versionStr.equals("null");
if (versionPresent) {
throw new RuntimeException("Version not supported for system applications");
}
VersionStatus versionStatus = controller.readVersionStatus();
if (versionStatus.isUpgrading()) {
throw new IllegalArgumentException("Deployment of system applications during a system upgrade is not allowed");
}
Optional<VespaVersion> systemVersion = versionStatus.systemVersion();
if (systemVersion.isEmpty()) {
throw new IllegalArgumentException("Deployment of system applications is not permitted until system version is determined");
}
ActivateResult result = controller.applications()
.deploySystemApplicationPackage(SystemApplication.proxy, zone, systemVersion.get().versionNumber());
return new SlimeJsonResponse(toSlime(result));
}
/*
* Normal applications from here
*/
Optional<ApplicationPackage> applicationPackage = Optional.ofNullable(dataParts.get("applicationZip"))
.map(ApplicationPackage::new);
// NOTE(review): 'application' appears unused below — candidate for removal.
Optional<com.yahoo.vespa.hosted.controller.Application> application = controller.applications().getApplication(TenantAndApplicationId.from(applicationId));
// A previously submitted revision may be deployed instead of an uploaded package, but not both.
Inspector sourceRevision = deployOptions.field("sourceRevision");
Inspector buildNumber = deployOptions.field("buildNumber");
if (sourceRevision.valid() != buildNumber.valid())
throw new IllegalArgumentException("Source revision and build number must both be provided, or not");
Optional<ApplicationVersion> applicationVersion = Optional.empty();
if (sourceRevision.valid()) {
if (applicationPackage.isPresent())
throw new IllegalArgumentException("Application version and application package can't both be provided.");
applicationVersion = Optional.of(ApplicationVersion.from(toSourceRevision(sourceRevision),
buildNumber.asLong()));
applicationPackage = Optional.of(controller.applications().getApplicationPackage(applicationId,
applicationVersion.get()));
}
// With no package, revision or version given, a direct deploy redeploys what is currently in the zone.
boolean deployDirectly = deployOptions.field("deployDirectly").asBool();
Optional<Version> vespaVersion = optional("vespaVersion", deployOptions).map(Version::new);
if (deployDirectly && applicationPackage.isEmpty() && applicationVersion.isEmpty() && vespaVersion.isEmpty()) {
Optional<Deployment> deployment = controller.applications().getInstance(applicationId)
.map(Instance::deployments)
.flatMap(deployments -> Optional.ofNullable(deployments.get(zone)));
if(deployment.isEmpty())
throw new IllegalArgumentException("Can't redeploy application, no deployment currently exist");
ApplicationVersion version = deployment.get().applicationVersion();
if(version.isUnknown())
throw new IllegalArgumentException("Can't redeploy application, application version is unknown");
applicationVersion = Optional.of(version);
vespaVersion = Optional.of(deployment.get().version());
applicationPackage = Optional.of(controller.applications().getApplicationPackage(applicationId,
applicationVersion.get()));
}
DeployOptions deployOptionsJsonClass = new DeployOptions(deployDirectly,
vespaVersion,
deployOptions.field("ignoreValidationErrors").asBool(),
deployOptions.field("deployCurrentVersion").asBool());
// Verify the deployer may deploy with the identity declared in the package, if any.
applicationPackage.ifPresent(aPackage -> controller.applications().verifyApplicationIdentityConfiguration(applicationId.tenant(),
Optional.of(applicationId.instance()),
Optional.of(zone),
aPackage,
Optional.of(requireUserPrincipal(request))));
ActivateResult result = controller.applications().deploy(applicationId,
zone,
applicationPackage,
applicationVersion,
deployOptionsJsonClass);
return new SlimeJsonResponse(toSlime(result));
}
/** Deletes the given tenant, if it exists; returns 404 otherwise. */
private HttpResponse deleteTenant(String tenantName, HttpRequest request) {
    Optional<Tenant> tenant = controller.tenants().get(tenantName);
    if (tenant.isEmpty())
        return ErrorResponse.notFoundError("Could not delete tenant '" + tenantName + "': Tenant not found");

    TenantName name = tenant.get().name();
    controller.tenants().delete(name,
                                accessControlRequests.credentials(name,
                                                                  toSlime(request.getData()).get(),
                                                                  request.getJDiscRequest()));
    return tenant(tenant.get(), request);
}
/** Deletes the given application; requires all its instances to be gone. */
private HttpResponse deleteApplication(String tenantName, String applicationName, HttpRequest request) {
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    Inspector requestObject = toSlime(request.getData()).get();
    Credentials credentials = accessControlRequests.credentials(id.tenant(), requestObject, request.getJDiscRequest());
    controller.applications().deleteApplication(id, credentials);
    return new MessageResponse("Deleted application " + id);
}
/** Deletes the given instance, and the application itself when this was its last instance. */
private HttpResponse deleteInstance(String tenantName, String applicationName, String instanceName, HttpRequest request) {
    TenantAndApplicationId id = TenantAndApplicationId.from(tenantName, applicationName);
    ApplicationId instanceId = id.instance(instanceName);
    controller.applications().deleteInstance(instanceId);
    if (controller.applications().requireApplication(id).instances().isEmpty()) {
        Credentials credentials = accessControlRequests.credentials(id.tenant(),
                                                                    toSlime(request.getData()).get(),
                                                                    request.getJDiscRequest());
        controller.applications().deleteApplication(id, credentials);
    }
    return new MessageResponse("Deleted instance " + instanceId.toFullString());
}
/** Deactivates the deployment of the given instance in the given zone. */
private HttpResponse deactivate(String tenantName, String applicationName, String instanceName, String environment, String region, HttpRequest request) {
    ApplicationId instanceId = ApplicationId.from(tenantName, applicationName, instanceName);
    ZoneId zone = requireZone(environment, region);
    DeploymentId id = new DeploymentId(instanceId, zone);
    controller.applications().deactivate(id.applicationId(), id.zoneId());
    return new MessageResponse("Deactivated " + id);
}
/** Returns test config for the indicated job, with production deployments of the default instance. */
private HttpResponse testConfig(ApplicationId id, JobType type) {
    ApplicationId defaultInstanceId = TenantAndApplicationId.from(id).defaultInstance();
    // Mutable set: the tested zone may be added below for non-production jobs.
    HashSet<DeploymentId> deployments = controller.applications()
                                                  .getInstance(defaultInstanceId).stream()
                                                  .flatMap(instance -> instance.productionDeployments().keySet().stream())
                                                  .map(zone -> new DeploymentId(defaultInstanceId, zone))
                                                  .collect(Collectors.toCollection(HashSet::new));
    ZoneId testedZone = type.zone(controller.system());
    if ( ! type.isProduction())
        deployments.add(new DeploymentId(id, testedZone));

    return new SlimeJsonResponse(testConfigSerializer.configSlime(id,
                                                                  type,
                                                                  false,
                                                                  controller.routing().zoneEndpointsOf(deployments),
                                                                  controller.applications().reachableContentClustersByZone(deployments)));
}
/** Reads a source revision from the given object, which must hold "repository", "branch" and "commit" fields. */
private static SourceRevision toSourceRevision(Inspector object) {
    boolean complete =    object.field("repository").valid()
                       && object.field("branch").valid()
                       && object.field("commit").valid();
    if ( ! complete)
        throw new IllegalArgumentException("Must specify \"repository\", \"branch\", and \"commit\".");

    return new SourceRevision(object.field("repository").asString(),
                              object.field("branch").asString(),
                              object.field("commit").asString());
}
/** Returns the tenant with the given name, or throws a NotExistsException (404) if it does not exist. */
private Tenant getTenantOrThrow(String tenantName) {
return controller.tenants().get(tenantName)
.orElseThrow(() -> new NotExistsException(new TenantId(tenantName)));
}
/**
 * Serializes the given tenant, its type-specific fields and its applications to the given cursor.
 * Field order is part of the JSON contract consumed by clients; do not reorder the set* calls.
 */
private void toSlime(Cursor object, Tenant tenant, HttpRequest request) {
object.setString("tenant", tenant.name().value());
object.setString("type", tenantType(tenant));
List<com.yahoo.vespa.hosted.controller.Application> applications = controller.applications().asList(tenant.name());
switch (tenant.type()) {
case athenz:
AthenzTenant athenzTenant = (AthenzTenant) tenant;
object.setString("athensDomain", athenzTenant.domain().getName());
object.setString("property", athenzTenant.property().id());
athenzTenant.propertyId().ifPresent(id -> object.setString("propertyId", id.toString()));
// Contact info is optional; when present, each "contact" is a list of person names.
athenzTenant.contact().ifPresent(c -> {
object.setString("propertyUrl", c.propertyUrl().toString());
object.setString("contactsUrl", c.url().toString());
object.setString("issueCreationUrl", c.issueTrackerUrl().toString());
Cursor contactsArray = object.setArray("contacts");
c.persons().forEach(persons -> {
Cursor personArray = contactsArray.addArray();
persons.forEach(personArray::addString);
});
});
break;
case cloud: {
CloudTenant cloudTenant = (CloudTenant) tenant;
cloudTenant.creator().ifPresent(creator -> object.setString("creator", creator.getName()));
Cursor pemDeveloperKeysArray = object.setArray("pemDeveloperKeys");
cloudTenant.developerKeys().forEach((key, user) -> {
Cursor keyObject = pemDeveloperKeysArray.addObject();
keyObject.setString("key", KeyUtils.toPem(key));
keyObject.setString("user", user.getName());
});
Cursor secretStore = object.setArray("secretStores");
cloudTenant.tenantSecretStores().forEach(store -> {
Cursor storeObject = secretStore.addObject();
storeObject.setString("name", store.getName());
storeObject.setString("awsId", store.getAwsId());
storeObject.setString("role", store.getRole());
});
// Quota: report both the tenant's allowance and the usage summed over its applications.
var tenantQuota = controller.serviceRegistry().billingController().getQuota(tenant.name());
var usedQuota = applications.stream()
.map(com.yahoo.vespa.hosted.controller.Application::quotaUsage)
.reduce(QuotaUsage.none, QuotaUsage::add);
toSlime(tenantQuota, usedQuota, object.setObject("quota"));
break;
}
default: throw new IllegalArgumentException("Unexpected tenant type '" + tenant.type() + "'.");
}
Cursor applicationArray = object.setArray("applications");
for (com.yahoo.vespa.hosted.controller.Application application : applications) {
DeploymentStatus status = controller.jobController().deploymentStatus(application);
// Serialize each instance either fully (recursive request) or as an id reference only.
for (Instance instance : showOnlyProductionInstances(request) ? application.productionInstances().values()
: application.instances().values())
if (recurseOverApplications(request))
toSlime(applicationArray.addObject(), instance, status, request);
else
toSlime(instance.id(), applicationArray.addObject(), request);
}
tenantMetaDataToSlime(tenant, object.setObject("metaData"));
}
/** Serializes the given quota and its current usage; "budget" is nix when no budget is set. */
private void toSlime(Quota quota, QuotaUsage usage, Cursor object) {
    var budget = quota.budget();
    budget.ifPresentOrElse(value -> object.setDouble("budget", value.doubleValue()),
                           () -> object.setNix("budget"));
    object.setDouble("budgetUsed", usage.rate());
    quota.maxClusterSize().ifPresent(size -> object.setLong("clusterSize", size));
}
/** Serializes cluster resources, including a cost rounded to two decimals (system-dependent divisor). */
private void toSlime(ClusterResources resources, Cursor object) {
    object.setLong("nodes", resources.nodes());
    object.setLong("groups", resources.groups());
    toSlime(resources.nodeResources(), object.setObject("nodeResources"));
    double divisor = controller.zoneRegistry().system().isPublic() ? 1.0 : 3.0;
    double rawCost = resources.nodes() * resources.nodeResources().cost();
    object.setDouble("cost", Math.round(rawCost * 100.0 / divisor) / 100.0);
}
/** Serializes the cpu, memory and disk utilization of a cluster. */
private void utilizationToSlime(Cluster.Utilization utilization, Cursor object) {
    object.setDouble("cpu", utilization.cpu());
    object.setDouble("memory", utilization.memory());
    object.setDouble("disk", utilization.disk());
}
/** Serializes each scaling event as a {from, to, at} object. */
private void scalingEventsToSlime(List<Cluster.ScalingEvent> scalingEvents, Cursor array) {
    for (Cluster.ScalingEvent event : scalingEvents) {
        Cursor eventObject = array.addObject();
        toSlime(event.from(), eventObject.setObject("from"));
        toSlime(event.to(), eventObject.setObject("to"));
        eventObject.setLong("at", event.at().toEpochMilli());
    }
}
/**
 * Serializes the given node resources. Disk speed and storage type are rendered through
 * the statically imported valueOf — presumably an enum-name serializer; confirm against imports.
 */
private void toSlime(NodeResources resources, Cursor object) {
object.setDouble("vcpu", resources.vcpu());
object.setDouble("memoryGb", resources.memoryGb());
object.setDouble("diskGb", resources.diskGb());
object.setDouble("bandwidthGbps", resources.bandwidthGbps());
object.setString("diskSpeed", valueOf(resources.diskSpeed()));
object.setString("storageType", valueOf(resources.storageType()));
}
/**
 * Serializes the abbreviated tenant entry used in tenant list responses:
 * name, type-specific metadata, and a URL to the full tenant resource.
 */
private void tenantInTenantsListToSlime(Tenant tenant, URI requestURI, Cursor object) {
object.setString("tenant", tenant.name().value());
Cursor metaData = object.setObject("metaData");
metaData.setString("type", tenantType(tenant));
switch (tenant.type()) {
case athenz:
AthenzTenant athenzTenant = (AthenzTenant) tenant;
metaData.setString("athensDomain", athenzTenant.domain().getName());
metaData.setString("property", athenzTenant.property().id());
break;
// Cloud tenants carry no extra list-level metadata.
case cloud: break;
default: throw new IllegalArgumentException("Unexpected tenant type '" + tenant.type() + "'.");
}
object.setString("url", withPath("/application/v4/tenant/" + tenant.name().value(), requestURI).toString());
}
/**
 * Serializes tenant activity metadata: creation time, last dev deployment, last submission,
 * and last login per user level. Absent timestamps are simply omitted from the output.
 */
private void tenantMetaDataToSlime(Tenant tenant, Cursor object) {
List<com.yahoo.vespa.hosted.controller.Application> applications = controller.applications().asList(tenant.name());
// Latest start time of any dev-environment job run, across all instances of all applications.
Optional<Instant> lastDev = applications.stream()
.flatMap(application -> application.instances().values().stream())
.flatMap(instance -> controller.jobController().jobs(instance.id()).stream()
.filter(jobType -> jobType.environment() == Environment.dev)
.flatMap(jobType -> controller.jobController().last(instance.id(), jobType).stream()))
.map(Run::start)
.max(Comparator.naturalOrder());
// Latest build time of any submitted application package.
Optional<Instant> lastSubmission = applications.stream()
.flatMap(app -> app.latestVersion().flatMap(ApplicationVersion::buildTime).stream())
.max(Comparator.naturalOrder());
object.setLong("createdAtMillis", tenant.createdAt().toEpochMilli());
lastDev.ifPresent(instant -> object.setLong("lastDeploymentToDevMillis", instant.toEpochMilli()));
lastSubmission.ifPresent(instant -> object.setLong("lastSubmissionToProdMillis", instant.toEpochMilli()));
tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.user)
.ifPresent(instant -> object.setLong("lastLoginByUserMillis", instant.toEpochMilli()));
tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.developer)
.ifPresent(instant -> object.setLong("lastLoginByDeveloperMillis", instant.toEpochMilli()));
tenant.lastLoginInfo().get(LastLoginInfo.UserLevel.administrator)
.ifPresent(instant -> object.setLong("lastLoginByAdministratorMillis", instant.toEpochMilli()));
}
/** Returns a copy of the given URI with its path and query replaced by the given values. */
private URI withPathAndQuery(String newPath, String newQuery, URI uri) {
    try {
        return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), newPath, newQuery, null);
    }
    catch (URISyntaxException e) {
        // The components come from an already-valid URI, so this cannot trigger in practice.
        throw new RuntimeException("Will not happen", e);
    }
}
/** Returns a copy of the given URI with the path set to the given path; any existing query is dropped. */
private URI withPath(String newPath, URI uri) {
return withPathAndQuery(newPath, null, uri);
}
/** Returns the application/v4 resource path for the given deployment. */
private String toPath(DeploymentId id) {
    ApplicationId application = id.applicationId();
    ZoneId zone = id.zoneId();
    return path("/application", "v4",
                "tenant", application.tenant(),
                "application", application.application(),
                "instance", application.instance(),
                "environment", zone.environment(),
                "region", zone.region());
}
/** Parses the given string as a long, returning the given default when it is null. */
private long asLong(String valueOrNull, long defaultWhenNull) {
    if (valueOrNull == null)
        return defaultWhenNull;
    try {
        return Long.parseLong(valueOrNull);
    }
    catch (NumberFormatException e) {
        throw new IllegalArgumentException("Expected an integer but got '" + valueOrNull + "'");
    }
}
/** Serializes the key facts of the given job run. */
private void toSlime(Run run, Cursor object) {
object.setLong("id", run.id().number());
object.setString("version", run.versions().targetPlatform().toFullString());
if ( ! run.versions().targetApplication().isUnknown())
toSlime(run.versions().targetApplication(), object.setObject("revision"));
// "reason" is a fixed placeholder — presumably to be wired to a real reason later; confirm.
object.setString("reason", "unknown reason");
// Prefer the end time; fall back to the start time for still-running runs.
object.setLong("at", run.end().orElse(run.start()).toEpochMilli());
}
/**
 * Reads up to 1 MB of JSON from the given stream and parses it to a Slime structure.
 *
 * @throws RuntimeException wrapping any IOException from reading the stream
 */
private Slime toSlime(InputStream jsonStream) {
    try {
        byte[] jsonBytes = IOUtils.readBytes(jsonStream, 1000 * 1000);
        return SlimeUtils.jsonToSlime(jsonBytes);
    } catch (IOException e) {
        // Preserve the cause: the original `new RuntimeException()` discarded both the
        // IOException and its message, making read failures impossible to diagnose.
        throw new RuntimeException(e);
    }
}
/** Returns the authenticated user principal of the request, failing with 500 when absent. */
private static Principal requireUserPrincipal(HttpRequest request) {
    Principal principal = request.getJDiscRequest().getUserPrincipal();
    if (principal == null)
        throw new InternalServerErrorException("Expected a user principal");
    return principal;
}
/** Returns the named field of the given object, failing when it is missing or invalid. */
private Inspector mandatory(String key, Inspector object) {
    Inspector field = object.field(key);
    if ( ! field.valid())
        throw new IllegalArgumentException("'" + key + "' is missing");
    return field;
}
/** Returns the string value of the named field, or empty when the field is absent. */
private Optional<String> optional(String key, Inspector object) {
return SlimeUtils.optionalString(object.field(key));
}
/** Joins the given elements' string representations with '/' to form a resource path. */
private static String path(Object... elements) {
return Joiner.on("/").join(elements);
}
/** Serializes an application id reference with a URL to its resource. */
private void toSlime(TenantAndApplicationId id, Cursor object, HttpRequest request) {
    object.setString("tenant", id.tenant().value());
    object.setString("application", id.application().value());
    String resourcePath = "/application/v4" +
                          "/tenant/" + id.tenant().value() +
                          "/application/" + id.application().value();
    object.setString("url", withPath(resourcePath, request.getUri()).toString());
}
/** Serializes an instance id reference with a URL to its resource. */
private void toSlime(ApplicationId id, Cursor object, HttpRequest request) {
    object.setString("tenant", id.tenant().value());
    object.setString("application", id.application().value());
    object.setString("instance", id.instance().value());
    String resourcePath = "/application/v4" +
                          "/tenant/" + id.tenant().value() +
                          "/application/" + id.application().value() +
                          "/instance/" + id.instance().value();
    object.setString("url", withPath(resourcePath, request.getUri()).toString());
}
/**
 * Serializes the result of an application activation: revision, package size, prepare log,
 * and the config change actions (restarts and refeeds) required for the change to take effect.
 */
private Slime toSlime(ActivateResult result) {
Slime slime = new Slime();
Cursor object = slime.setObject();
object.setString("revisionId", result.revisionId().id());
object.setLong("applicationZipSize", result.applicationZipSizeBytes());
Cursor logArray = object.setArray("prepareMessages");
// The prepare log may legitimately be null; in that case the array stays empty.
if (result.prepareResponse().log != null) {
for (Log logMessage : result.prepareResponse().log) {
Cursor logObject = logArray.addObject();
logObject.setLong("time", logMessage.time);
logObject.setString("level", logMessage.level);
logObject.setString("message", logMessage.message);
}
}
// Actions the config server reports as necessary after this deployment:
Cursor changeObject = object.setObject("configChangeActions");
Cursor restartActionsArray = changeObject.setArray("restart");
for (RestartAction restartAction : result.prepareResponse().configChangeActions.restartActions) {
Cursor restartActionObject = restartActionsArray.addObject();
restartActionObject.setString("clusterName", restartAction.clusterName);
restartActionObject.setString("clusterType", restartAction.clusterType);
restartActionObject.setString("serviceType", restartAction.serviceType);
serviceInfosToSlime(restartAction.services, restartActionObject.setArray("services"));
stringsToSlime(restartAction.messages, restartActionObject.setArray("messages"));
}
Cursor refeedActionsArray = changeObject.setArray("refeed");
for (RefeedAction refeedAction : result.prepareResponse().configChangeActions.refeedActions) {
Cursor refeedActionObject = refeedActionsArray.addObject();
refeedActionObject.setString("name", refeedAction.name);
refeedActionObject.setString("documentType", refeedAction.documentType);
refeedActionObject.setString("clusterName", refeedAction.clusterName);
serviceInfosToSlime(refeedAction.services, refeedActionObject.setArray("services"));
stringsToSlime(refeedAction.messages, refeedActionObject.setArray("messages"));
}
return slime;
}
/** Serializes each service info as an object in the given array. */
private void serviceInfosToSlime(List<ServiceInfo> serviceInfoList, Cursor array) {
    for (ServiceInfo info : serviceInfoList) {
        Cursor infoObject = array.addObject();
        infoObject.setString("serviceName", info.serviceName);
        infoObject.setString("serviceType", info.serviceType);
        infoObject.setString("configId", info.configId);
        infoObject.setString("hostName", info.hostName);
    }
}
/** Adds each of the given strings to the given array. */
private void stringsToSlime(List<String> strings, Cursor array) {
    strings.forEach(array::addString);
}
/**
 * Reads the entire stream as a UTF-8 string, or returns null when the stream is empty.
 * The stream is intentionally left open — it is owned by the caller.
 */
private String readToString(InputStream stream) {
    // Explicit charset: the no-arg Scanner constructor decodes with the platform default,
    // which makes the result machine-dependent for non-ASCII payloads.
    Scanner scanner = new Scanner(stream, "UTF-8").useDelimiter("\\A");
    if ( ! scanner.hasNext()) return null;
    return scanner.next();
}
/** Returns whether the request asks for recursion at tenant level (implied by deeper recursion). */
private static boolean recurseOverTenants(HttpRequest request) {
return recurseOverApplications(request) || "tenant".equals(request.getProperty("recursive"));
}
/** Returns whether the request asks for recursion at application level (implied by deployment recursion). */
private static boolean recurseOverApplications(HttpRequest request) {
return recurseOverDeployments(request) || "application".equals(request.getProperty("recursive"));
}
/** Returns whether the request asks for recursion down to deployment level. */
private static boolean recurseOverDeployments(HttpRequest request) {
return ImmutableSet.of("all", "true", "deployment").contains(request.getProperty("recursive"));
}
/** Returns whether the request restricts output to instances with production deployments. */
private static boolean showOnlyProductionInstances(HttpRequest request) {
return "true".equals(request.getProperty("production"));
}
/** Returns the serialized name of the given tenant's type. */
private static String tenantType(Tenant tenant) {
    switch (tenant.type()) {
        case athenz: return "ATHENS";
        case cloud: return "CLOUD";
        // Report the actual type value, consistent with the other tenant-type switches in this
        // class; the previous message printed the class simple name, which is misleading here.
        default: throw new IllegalArgumentException("Unknown tenant type: " + tenant.type());
    }
}
/** Builds the application id from the tenant, application and instance path segments. */
private static ApplicationId appIdFromPath(Path path) {
return ApplicationId.from(path.get("tenant"), path.get("application"), path.get("instance"));
}
/** Resolves the job type from the "jobtype" path segment. */
private static JobType jobTypeFromPath(Path path) {
return JobType.fromJobName(path.get("jobtype"));
}
/** Builds the run id from the application, job type and run number path segments. */
private static RunId runIdFromPath(Path path) {
    return new RunId(appIdFromPath(path), jobTypeFromPath(path), Long.parseLong(path.get("number")));
}
/**
 * Handles a multipart application submission: parses the submit options and the application
 * and test packages, verifies the package's identity configuration against the tenant, and
 * registers the new build with the job controller.
 */
private HttpResponse submit(String tenant, String application, HttpRequest request) {
Map<String, byte[]> dataParts = parseDataParts(request);
Inspector submitOptions = SlimeUtils.jsonToSlime(dataParts.get(EnvironmentResource.SUBMIT_OPTIONS)).get();
// Missing or non-positive project ids are normalized to 1.
long projectId = Math.max(1, submitOptions.field("projectId").asLong());
Optional<String> repository = optional("repository", submitOptions);
Optional<String> branch = optional("branch", submitOptions);
Optional<String> commit = optional("commit", submitOptions);
// A source revision is only formed when all three of repository, branch and commit are given.
Optional<SourceRevision> sourceRevision = repository.isPresent() && branch.isPresent() && commit.isPresent()
? Optional.of(new SourceRevision(repository.get(), branch.get(), commit.get()))
: Optional.empty();
Optional<String> sourceUrl = optional("sourceUrl", submitOptions);
Optional<String> authorEmail = optional("authorEmail", submitOptions);
// Reject relative or scheme-less source URLs early.
sourceUrl.map(URI::create).ifPresent(url -> {
if (url.getHost() == null || url.getScheme() == null)
throw new IllegalArgumentException("Source URL must include scheme and host");
});
ApplicationPackage applicationPackage = new ApplicationPackage(dataParts.get(EnvironmentResource.APPLICATION_ZIP), true);
controller.applications().verifyApplicationIdentityConfiguration(TenantName.from(tenant),
Optional.empty(),
Optional.empty(),
applicationPackage,
Optional.of(requireUserPrincipal(request)));
return JobControllerApiHandlerHelper.submitResponse(controller.jobController(),
tenant,
application,
sourceRevision,
authorEmail,
sourceUrl,
projectId,
applicationPackage,
dataParts.get(EnvironmentResource.APPLICATION_TEST_ZIP));
}
/**
 * Submits a synthetic "deployment removal" package for the given application, removing all
 * production deployments. The submit response itself is intentionally discarded.
 */
private HttpResponse removeAllProdDeployments(String tenant, String application) {
JobControllerApiHandlerHelper.submitResponse(controller.jobController(), tenant, application,
Optional.empty(), Optional.empty(), Optional.empty(), 1,
ApplicationPackage.deploymentRemoval(), new byte[0]);
return new MessageResponse("All deployments removed");
}
/** Parses and validates the given zone, which must exist in this system. */
private ZoneId requireZone(String environment, String region) {
    ZoneId zone = ZoneId.from(environment, region);
    // The "controller" pseudo-region in prod refers to the controller itself and is
    // always accepted, even though it is not a registered zone.
    boolean isControllerZone = zone.environment() == Environment.prod
                               && zone.region().value().equals("controller");
    if ( ! isControllerZone && ! controller.zoneRegistry().hasZone(zone))
        throw new IllegalArgumentException("Zone " + zone + " does not exist in this system");
    return zone;
}
/**
 * Parses the multipart request into named byte parts. When an X-Content-Hash header is
 * present, the body is digested while parsing and the SHA-256 digest is verified against
 * the (base64-encoded) header value.
 */
private static Map<String, byte[]> parseDataParts(HttpRequest request) {
String contentHash = request.getHeader("X-Content-Hash");
if (contentHash == null)
return new MultipartParser().parse(request);
DigestInputStream digester = Signatures.sha256Digester(request.getData());
var dataParts = new MultipartParser().parse(request.getHeader("Content-Type"), digester, request.getUri());
// NOTE(review): Arrays.equals is not a constant-time comparison; MessageDigest.isEqual
// would avoid a timing side channel — confirm whether that matters for this check.
if ( ! Arrays.equals(digester.getMessageDigest().digest(), Base64.getDecoder().decode(contentHash)))
throw new IllegalArgumentException("Value of X-Content-Hash header does not match computed content hash");
return dataParts;
}
/**
 * Resolves which global rotation of the given instance a request refers to: by endpoint id
 * when given, otherwise the single rotation — ambiguous without an endpoint id if several exist.
 */
private static RotationId findRotationId(Instance instance, Optional<String> endpointId) {
    if (instance.rotations().isEmpty())
        throw new NotExistsException("global rotation does not exist for " + instance);
    if (endpointId.isPresent())
        return instance.rotations().stream()
                       .filter(rotation -> rotation.endpointId().id().equals(endpointId.get()))
                       .map(AssignedRotation::rotationId)
                       .findFirst()
                       .orElseThrow(() -> new NotExistsException("endpoint " + endpointId.get() +
                                                                 " does not exist for " + instance));
    if (instance.rotations().size() > 1)
        throw new IllegalArgumentException(instance + " has multiple rotations. Query parameter 'endpointId' must be given");
    return instance.rotations().get(0).rotationId();
}
/** Returns the serialized name of the given rotation state; unrecognized states map to "UNKNOWN". */
private static String rotationStateString(RotationState state) {
    switch (state) {
        case in:  return "IN";
        case out: return "OUT";
        default:  return "UNKNOWN";
    }
}
/** Returns the serialized name of the given endpoint scope. */
private static String endpointScopeString(Endpoint.Scope scope) {
    switch (scope) {
        case region: return "region";
        case global: return "global";
        case zone:   return "zone";
        default: throw new IllegalArgumentException("Unknown endpoint scope " + scope);
    }
}
/** Returns the serialized name of the given routing method. */
private static String routingMethodString(RoutingMethod method) {
    switch (method) {
        case exclusive:    return "exclusive";
        case shared:       return "shared";
        case sharedLayer4: return "sharedLayer4";
        default: throw new IllegalArgumentException("Unknown routing method " + method);
    }
}
/** Returns the request context attribute with the given name and type, which must be set. */
private static <T> T getAttribute(HttpRequest request, String attributeName, Class<T> cls) {
    Object value = request.getJDiscRequest().context().get(attributeName);
    if (cls.isInstance(value))
        return cls.cast(value);
    // Covers both a missing attribute (null) and one of an unexpected type.
    throw new IllegalArgumentException("Attribute '" + attributeName + "' was not set on request");
}
/** Returns whether given request is by an operator */
private static boolean isOperator(HttpRequest request) {
    SecurityContext securityContext = getAttribute(request, SecurityContext.ATTRIBUTE_NAME, SecurityContext.class);
    for (Role role : securityContext.roles())
        if (role.definition() == RoleDefinition.hostedOperator)
            return true;
    return false;
}
} |
I think this TODO for a stream factory should be commented on the `streamFactory`. | public Optional<MaterializationRunnable> initMaterialization() throws Exception {
SequenceNumber upTo = getLastAppendedTo();
SequenceNumber lastMaterializedTo = changelogSnapshotState.lastMaterializedTo();
LOG.info(
"Initialize Materialization. Current changelog writers last append to sequence number {}",
upTo);
if (upTo.compareTo(lastMaterializedTo) > 0) {
LOG.info("Starting materialization from {} : {}", lastMaterializedTo, upTo);
long materializationID = materializedId++;
MaterializationRunnable materializationRunnable =
new MaterializationRunnable(
keyedStateBackend.snapshot(
materializationID,
System.currentTimeMillis(),
streamFactory,
CHECKPOINT_OPTIONS),
materializationID,
upTo);
for (ChangelogState changelogState : changelogStates.values()) {
changelogState.resetWritingMetaFlag();
}
for (ChangelogKeyGroupedPriorityQueue<?> priorityQueueState :
priorityQueueStatesByName.values()) {
priorityQueueState.resetWritingMetaFlag();
}
return Optional.of(materializationRunnable);
} else {
LOG.debug(
"Skip materialization, last materialized to {} : last log to {}",
lastMaterializedTo,
upTo);
return Optional.empty();
}
} | public Optional<MaterializationRunnable> initMaterialization() throws Exception {
SequenceNumber upTo = getLastAppendedTo();
SequenceNumber lastMaterializedTo = changelogSnapshotState.lastMaterializedTo();
LOG.info(
"Initialize Materialization. Current changelog writers last append to sequence number {}",
upTo);
if (upTo.compareTo(lastMaterializedTo) > 0) {
LOG.info("Starting materialization from {} : {}", lastMaterializedTo, upTo);
long materializationID = materializedId++;
MaterializationRunnable materializationRunnable =
new MaterializationRunnable(
keyedStateBackend.snapshot(
materializationID,
System.currentTimeMillis(),
streamFactory,
CHECKPOINT_OPTIONS),
materializationID,
upTo);
for (ChangelogState changelogState : changelogStates.values()) {
changelogState.resetWritingMetaFlag();
}
for (ChangelogKeyGroupedPriorityQueue<?> priorityQueueState :
priorityQueueStatesByName.values()) {
priorityQueueState.resetWritingMetaFlag();
}
return Optional.of(materializationRunnable);
} else {
LOG.debug(
"Skip materialization, last materialized to {} : last log to {}",
lastMaterializedTo,
upTo);
return Optional.empty();
}
} | class ChangelogKeyedStateBackend<K>
implements CheckpointableKeyedStateBackend<K>,
CheckpointListener,
TestableKeyedStateBackend<K> {
private static final Logger LOG = LoggerFactory.getLogger(ChangelogKeyedStateBackend.class);
/**
 * ChangelogStateBackend only supports CheckpointType.CHECKPOINT; the rest of the information
 * in CheckpointOptions is not used by Snapshotable.
 */
private static final CheckpointOptions CHECKPOINT_OPTIONS =
new CheckpointOptions(
CheckpointType.CHECKPOINT, CheckpointStorageLocationReference.getDefault());
private static final Map<StateDescriptor.Type, StateFactory> STATE_FACTORIES =
Stream.of(
Tuple2.of(
StateDescriptor.Type.VALUE,
(StateFactory) ChangelogValueState::create),
Tuple2.of(
StateDescriptor.Type.LIST,
(StateFactory) ChangelogListState::create),
Tuple2.of(
StateDescriptor.Type.REDUCING,
(StateFactory) ChangelogReducingState::create),
Tuple2.of(
StateDescriptor.Type.AGGREGATING,
(StateFactory) ChangelogAggregatingState::create),
Tuple2.of(
StateDescriptor.Type.MAP,
(StateFactory) ChangelogMapState::create))
.collect(Collectors.toMap(t -> t.f0, t -> t.f1));
/** delegated keyedStateBackend. */
private final AbstractKeyedStateBackend<K> keyedStateBackend;
/**
* This is the cache maintained by the DelegateKeyedStateBackend itself. It is not the same as
* the underlying delegated keyedStateBackend. InternalKvState is a delegated state.
*/
private final Map<String, InternalKvState<K, ?, ?>> keyValueStatesByName;
/**
* Unwrapped changelog states used for recovery (not wrapped into e.g. TTL, latency tracking).
*/
private final Map<String, ChangelogState> changelogStates;
private final Map<String, ChangelogKeyGroupedPriorityQueue<?>> priorityQueueStatesByName;
private final ExecutionConfig executionConfig;
private final TtlTimeProvider ttlTimeProvider;
private final StateChangelogWriter<? extends ChangelogStateHandle> stateChangelogWriter;
private final Closer closer = Closer.create();
private final CheckpointStreamFactory streamFactory;
private ChangelogSnapshotState changelogSnapshotState;
private long lastCheckpointId = -1L;
private long materializedId = 0;
/** last accessed partitioned state. */
@SuppressWarnings("rawtypes")
private InternalKvState lastState;
/** For caching the last accessed partitioned state. */
private String lastName;
private final FunctionDelegationHelper functionDelegationHelper =
new FunctionDelegationHelper();
/**
 * {@link SequenceNumber} denoting last upload range <b>start</b>, inclusive. Updated when a
 * snapshot is started; used to notify the state changelog writer once the checkpoint is
 * confirmed or aborted by the JM.
 */
@Nullable private SequenceNumber lastUploadedFrom;
/**
 * {@link SequenceNumber} denoting last upload range <b>end</b>, exclusive. Updated via the
 * {@link org.apache.flink.runtime.state.changelog.StateChangelogWriter} when a snapshot is
 * started; used to notify the writer once the checkpoint is confirmed or aborted by the JM.
 */
@Nullable private SequenceNumber lastUploadedTo;
private final String subtaskName;
/**
* Provides a unique ID for each state created by this backend instance. A mapping from this ID
* to state name is written once along with metadata; afterwards, only ID is written with each
* state change for efficiency.
*/
private short lastCreatedStateId = -1;
private final NavigableMap<Long, Long> materializationIdByCheckpointId = new TreeMap<>();
/**
 * Creates a changelog-wrapping keyed backend that delegates state storage to the given
 * backend and records every state change through the given changelog writer.
 * Restore ({@code completeRestore}) runs before the stream factory is assigned.
 */
public ChangelogKeyedStateBackend(
AbstractKeyedStateBackend<K> keyedStateBackend,
String subtaskName,
ExecutionConfig executionConfig,
TtlTimeProvider ttlTimeProvider,
StateChangelogWriter<? extends ChangelogStateHandle> stateChangelogWriter,
Collection<ChangelogStateBackendHandle> initialState,
CheckpointStorageWorkerView checkpointStorageWorkerView) {
this.keyedStateBackend = keyedStateBackend;
this.subtaskName = subtaskName;
this.executionConfig = executionConfig;
this.ttlTimeProvider = ttlTimeProvider;
this.keyValueStatesByName = new HashMap<>();
this.priorityQueueStatesByName = new HashMap<>();
this.stateChangelogWriter = stateChangelogWriter;
this.changelogStates = new HashMap<>();
this.changelogSnapshotState = completeRestore(initialState);
// Materialized snapshots are written as task-owned state streams — presumably this should
// eventually honor the checkpoint's target location; confirm.
this.streamFactory = shared -> checkpointStorageWorkerView.createTaskOwnedStateStream();
this.closer.register(keyedStateBackend);
}
/** Delegates to the wrapped backend; key-group assignment is unchanged by changelogging. */
@Override
public KeyGroupRange getKeyGroupRange() {
return keyedStateBackend.getKeyGroupRange();
}
/** Closes all resources registered with the closer, including the wrapped backend. */
@Override
public void close() throws IOException {
closer.close();
}
/** Delegates the current-key switch to the wrapped backend. */
@Override
public void setCurrentKey(K newKey) {
keyedStateBackend.setCurrentKey(newKey);
}
/** Returns the current key from the wrapped backend. */
@Override
public K getCurrentKey() {
return keyedStateBackend.getCurrentKey();
}
/** Returns the key serializer of the wrapped backend. */
@Override
public TypeSerializer<K> getKeySerializer() {
return keyedStateBackend.getKeySerializer();
}
/** Streams all keys of the given state and namespace from the wrapped backend. */
@Override
public <N> Stream<K> getKeys(String state, N namespace) {
return keyedStateBackend.getKeys(state, namespace);
}
/** Streams all key/namespace pairs of the given state from the wrapped backend. */
@Override
public <N> Stream<Tuple2<K, N>> getKeysAndNamespaces(String state) {
return keyedStateBackend.getKeysAndNamespaces(state);
}
/** Disposes the wrapped backend and drops all locally cached state references. */
@Override
public void dispose() {
keyedStateBackend.dispose();
// Invalidate the single-entry access cache before clearing the name-keyed caches.
lastName = null;
lastState = null;
keyValueStatesByName.clear();
changelogStates.clear();
priorityQueueStatesByName.clear();
}
/** Delegates listener registration to the wrapped backend. */
@Override
public void registerKeySelectionListener(KeySelectionListener<K> listener) {
keyedStateBackend.registerKeySelectionListener(listener);
}
/** Delegates listener removal to the wrapped backend. */
@Override
public boolean deregisterKeySelectionListener(KeySelectionListener<K> listener) {
return keyedStateBackend.deregisterKeySelectionListener(listener);
}
/**
 * Applies the given function to every key of the namespace. States are resolved through
 * {@code this::getPartitionedState} so the function sees the changelog-wrapped states,
 * ensuring its mutations are logged.
 */
@Override
public <N, S extends State, T> void applyToAllKeys(
N namespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<S, T> stateDescriptor,
KeyedStateFunction<K, S> function)
throws Exception {
keyedStateBackend.applyToAllKeys(
namespace,
namespaceSerializer,
stateDescriptor,
function,
this::getPartitionedState);
}
/**
 * Returns the state identified by the descriptor, positioned on the given namespace.
 * Uses a single-entry cache ({@code lastName}/{@code lastState}) for repeated access to
 * the same state, then the name-keyed cache, and only creates the state as a last resort.
 */
@Override
@SuppressWarnings("unchecked")
public <N, S extends State> S getPartitionedState(
N namespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<S, ?> stateDescriptor)
throws Exception {
checkNotNull(namespace, "Namespace");
// Fast path: same state as the previous call.
if (lastName != null && lastName.equals(stateDescriptor.getName())) {
lastState.setCurrentNamespace(namespace);
return (S) lastState;
}
// Already created earlier: refresh the single-entry cache and reuse it.
final InternalKvState<K, ?, ?> previous =
keyValueStatesByName.get(stateDescriptor.getName());
if (previous != null) {
lastState = previous;
lastState.setCurrentNamespace(namespace);
lastName = stateDescriptor.getName();
functionDelegationHelper.addOrUpdate(stateDescriptor);
return (S) previous;
}
// Slow path: create (and cache) the state.
final S state = getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
final InternalKvState<K, N, ?> kvState = (InternalKvState<K, N, ?>) state;
lastName = stateDescriptor.getName();
lastState = kvState;
kvState.setCurrentNamespace(namespace);
return state;
}
/**
 * Snapshots by persisting the changelog from the last materialization point up to the last
 * appended change. The resulting handle combines the materialized snapshot with the
 * non-materialized deltas; the upload range is remembered for confirm/abort notifications.
 */
@Nonnull
@Override
public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot(
long checkpointId,
long timestamp,
@Nonnull CheckpointStreamFactory streamFactory,
@Nonnull CheckpointOptions checkpointOptions)
throws Exception {
lastCheckpointId = checkpointId;
lastUploadedFrom = changelogSnapshotState.lastMaterializedTo();
lastUploadedTo = getLastAppendedTo();
LOG.info(
"snapshot of {} for checkpoint {}, change range: {}..{}",
subtaskName,
checkpointId,
lastUploadedFrom,
lastUploadedTo);
// Capture the current materialization state: it may advance concurrently with the upload.
ChangelogSnapshotState changelogStateBackendStateCopy = changelogSnapshotState;
materializationIdByCheckpointId.put(
checkpointId, changelogStateBackendStateCopy.materializationID);
return toRunnableFuture(
stateChangelogWriter
.persist(lastUploadedFrom)
.thenApply(
delta ->
buildSnapshotResult(
delta, changelogStateBackendStateCopy)));
}
/**
 * Combines the restored non-materialized deltas, the newly persisted delta (when non-empty)
 * and the materialized snapshot into a single handle — or an empty result when there is
 * nothing to report.
 */
private SnapshotResult<KeyedStateHandle> buildSnapshotResult(
        ChangelogStateHandle delta, ChangelogSnapshotState changelogStateBackendStateCopy) {
    List<ChangelogStateHandle> nonMaterialized =
            new ArrayList<>(changelogStateBackendStateCopy.getRestoredNonMaterialized());
    if (delta != null && delta.getStateSize() > 0) {
        nonMaterialized.add(delta);
    }
    boolean nothingToReport =
            nonMaterialized.isEmpty()
                    && changelogStateBackendStateCopy.getMaterializedSnapshot().isEmpty();
    if (nothingToReport) {
        return SnapshotResult.empty();
    }
    return SnapshotResult.of(
            new ChangelogStateBackendHandleImpl(
                    changelogStateBackendStateCopy.getMaterializedSnapshot(),
                    nonMaterialized,
                    getKeyGroupRange(),
                    changelogStateBackendStateCopy.materializationID));
}
/**
 * Returns (creating lazily) the changelog-wrapped priority queue for the given state name.
 * A new queue gets a change logger with a fresh state id ({@code ++lastCreatedStateId}),
 * which is registered with the closer for cleanup.
 */
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
ChangelogKeyGroupedPriorityQueue<T> queue =
(ChangelogKeyGroupedPriorityQueue<T>) priorityQueueStatesByName.get(stateName);
if (queue == null) {
PriorityQueueStateChangeLoggerImpl<K, T> priorityQueueStateChangeLogger =
new PriorityQueueStateChangeLoggerImpl<>(
byteOrderedElementSerializer,
keyedStateBackend.getKeyContext(),
stateChangelogWriter,
new RegisteredPriorityQueueStateBackendMetaInfo<>(
stateName, byteOrderedElementSerializer),
++lastCreatedStateId);
closer.register(priorityQueueStateChangeLogger);
queue =
new ChangelogKeyGroupedPriorityQueue<>(
keyedStateBackend.create(stateName, byteOrderedElementSerializer),
priorityQueueStateChangeLogger,
byteOrderedElementSerializer);
priorityQueueStatesByName.put(stateName, queue);
}
return queue;
}
/** Test-only: returns the number of key/value state entries in the wrapped backend. */
@VisibleForTesting
@Override
public int numKeyValueStateEntries() {
return keyedStateBackend.numKeyValueStateEntries();
}
/** Delegates the state-reuse safety decision to the wrapped backend. */
@Override
public boolean isSafeToReuseKVState() {
return keyedStateBackend.isSafeToReuseKVState();
}
/** Delegates savepoint resource creation to the wrapped backend. */
@Nonnull
@Override
public SavepointResources<K> savepoint() throws Exception {
return keyedStateBackend.savepoint();
}
/**
 * Confirms the uploaded changelog range (only tracked for the most recent checkpoint) and
 * forwards completion of the corresponding materialization to the wrapped backend.
 */
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
if (lastCheckpointId == checkpointId) {
stateChangelogWriter.confirm(lastUploadedFrom, lastUploadedTo);
}
Long materializationID = materializationIdByCheckpointId.remove(checkpointId);
if (materializationID != null) {
keyedStateBackend.notifyCheckpointComplete(materializationID);
}
// Drop tracking for this and all earlier checkpoints (headMap is inclusive here),
// as they are subsumed by the completed one.
materializationIdByCheckpointId.headMap(checkpointId, true).clear();
}
/**
 * Resets the uploaded changelog range for the aborted checkpoint (only tracked for the most
 * recent one) and forwards the abort of the corresponding materialization to the wrapped
 * backend. Note: unlike completion, earlier tracking entries are kept — they may still complete.
 */
@Override
public void notifyCheckpointAborted(long checkpointId) throws Exception {
if (lastCheckpointId == checkpointId) {
stateChangelogWriter.reset(lastUploadedFrom, lastUploadedTo);
}
Long materializationID = materializationIdByCheckpointId.remove(checkpointId);
if (materializationID != null) {
keyedStateBackend.notifyCheckpointAborted(materializationID);
}
}
/**
 * Returns (creating and caching on first access) the state for the given descriptor,
 * wrapped with TTL and latency tracking as configured. Newly created states are also
 * published for queryable state when enabled.
 */
@Override
@SuppressWarnings("unchecked")
public <N, S extends State, T> S getOrCreateKeyedState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, T> stateDescriptor)
throws Exception {
checkNotNull(namespaceSerializer, "Namespace serializer");
checkNotNull(
getKeySerializer(),
"State key serializer has not been configured in the config. "
+ "This operation cannot use partitioned state.");
InternalKvState<K, ?, ?> kvState = keyValueStatesByName.get(stateDescriptor.getName());
if (kvState == null) {
if (!stateDescriptor.isSerializerInitialized()) {
stateDescriptor.initializeSerializerUnlessSet(executionConfig);
}
// TTL wrapping happens inside latency-tracking wrapping, so latencies include TTL cost.
kvState =
LatencyTrackingStateFactory.createStateAndWrapWithLatencyTrackingIfEnabled(
TtlStateFactory.createStateAndWrapWithTtlIfEnabled(
namespaceSerializer, stateDescriptor, this, ttlTimeProvider),
stateDescriptor,
keyedStateBackend.getLatencyTrackingStateConfig());
keyValueStatesByName.put(stateDescriptor.getName(), kvState);
keyedStateBackend.publishQueryableStateIfEnabled(stateDescriptor, kvState);
}
functionDelegationHelper.addOrUpdate(stateDescriptor);
return (S) kvState;
}
/**
 * Creates the changelog-aware internal state: the delegated backend's state is wrapped together
 * with a {@code KvStateChangeLoggerImpl} so that every modification is appended to the
 * changelog. The unwrapped result is also registered in {@code changelogStates} for recovery.
 *
 * @throws FlinkRuntimeException if the descriptor's state type has no registered factory
 */
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <N, SV, SEV, S extends State, IS extends S> IS createInternalState(
        @Nonnull TypeSerializer<N> namespaceSerializer,
        @Nonnull StateDescriptor<S, SV> stateDesc,
        @Nonnull
                StateSnapshotTransformer.StateSnapshotTransformFactory<SEV>
                        snapshotTransformFactory)
        throws Exception {
    StateFactory stateFactory = STATE_FACTORIES.get(stateDesc.getType());
    if (stateFactory == null) {
        String message =
                String.format(
                        "State %s is not supported by %s",
                        stateDesc.getClass(), this.getClass());
        throw new FlinkRuntimeException(message);
    }
    // Meta info is written once per state with the short state id (see lastCreatedStateId).
    RegisteredKeyValueStateBackendMetaInfo<N, SV> meta =
            new RegisteredKeyValueStateBackendMetaInfo<>(
                    stateDesc.getType(),
                    stateDesc.getName(),
                    namespaceSerializer,
                    stateDesc.getSerializer(),
                    (StateSnapshotTransformer.StateSnapshotTransformFactory<SV>)
                            snapshotTransformFactory);
    InternalKvState<K, N, SV> state =
            keyedStateBackend.createInternalState(
                    namespaceSerializer, stateDesc, snapshotTransformFactory);
    KvStateChangeLoggerImpl<K, SV, N> kvStateChangeLogger =
            new KvStateChangeLoggerImpl<>(
                    state.getKeySerializer(),
                    state.getNamespaceSerializer(),
                    state.getValueSerializer(),
                    keyedStateBackend.getKeyContext(),
                    stateChangelogWriter,
                    meta,
                    stateDesc.getTtlConfig(),
                    stateDesc.getDefaultValue(),
                    ++lastCreatedStateId);
    // The logger owns resources; tie its lifetime to this backend.
    closer.register(kvStateChangeLogger);
    IS is =
            stateFactory.create(
                    state,
                    kvStateChangeLogger,
                    keyedStateBackend /* pass the nested backend as key context so that it gets key updates on recovery */);
    changelogStates.put(stateDesc.getName(), (ChangelogState) is);
    return is;
}
/** Registers a closeable to be closed together with this backend; a null argument is ignored. */
public void registerCloseable(@Nullable Closeable closeable) {
    closer.register(closeable);
}
/**
 * Builds the initial {@link ChangelogSnapshotState} from the restored handles: materialized and
 * non-materialized handles are accumulated separately, and the largest restored materialization
 * ID is carried over (the next materialization will use materializationId + 1).
 */
private ChangelogSnapshotState completeRestore(
        Collection<ChangelogStateBackendHandle> stateHandles) {
    long materializationId = 0L;
    List<KeyedStateHandle> materialized = new ArrayList<>();
    List<ChangelogStateHandle> restoredNonMaterialized = new ArrayList<>();
    for (ChangelogStateBackendHandle h : stateHandles) {
        if (h != null) { // restored collections may contain null placeholders
            materialized.addAll(h.getMaterializedStateHandles());
            restoredNonMaterialized.addAll(h.getNonMaterializedStateHandles());
            // Handles may originate from several old subtasks (rescaling); the next
            // materialization must be newer than all of them.
            materializationId = Math.max(materializationId, h.getMaterializationID());
        }
    }
    this.materializedId = materializationId + 1;
    return new ChangelogSnapshotState(
            materialized,
            restoredNonMaterialized,
            stateChangelogWriter.initialSequenceNumber(),
            materializationId);
}
/**
 * Returns the {@link SequenceNumber} of the latest change appended to the changelog, i.e. the
 * exclusive upper bound of the changes to be persisted by the next snapshot.
 *
 * <p>This method is not thread safe. It should be called either under a lock or through task
 * mailbox executor.
 *
 * <p>Note: the previous version of this method also carried a Javadoc describing state
 * materialization initialization, which belongs to a different method; the original code also
 * invoked {@code lastAppendedSequenceNumber()} twice, discarding the first result. The call is
 * a plain getter, so a single invocation is sufficient.
 */
private SequenceNumber getLastAppendedTo() {
    return stateChangelogWriter.lastAppendedSequenceNumber();
}
/**
 * Replaces the current {@link ChangelogSnapshotState} with the result of a completed
 * materialization and truncates the changelog up to the given sequence number, since those
 * changes are now covered by the materialized snapshot.
 *
 * <p>This method is not thread safe. It should be called either under a lock or through task
 * mailbox executor.
 */
public void updateChangelogSnapshotState(
        SnapshotResult<KeyedStateHandle> materializedSnapshot,
        long materializationID,
        SequenceNumber upTo)
        throws Exception {
    LOG.info(
            "Task {} finishes materialization, updates the snapshotState upTo {} : {}",
            subtaskName,
            upTo,
            materializedSnapshot);
    // The restored-non-materialized part is cleared: it is subsumed by the new snapshot.
    changelogSnapshotState =
            new ChangelogSnapshotState(
                    getMaterializedResult(materializedSnapshot),
                    Collections.emptyList(),
                    upTo,
                    materializationID);
    // Changes before 'upTo' are superseded by the materialized snapshot and can be dropped.
    stateChangelogWriter.truncate(upTo);
}
/**
 * Extracts the job-manager-owned handle from a materialization snapshot result, returned as a
 * list that is empty when no such handle exists.
 */
private List<KeyedStateHandle> getMaterializedResult(
        @Nonnull SnapshotResult<KeyedStateHandle> materializedSnapshot) {
    KeyedStateHandle jobManagerOwned = materializedSnapshot.getJobManagerOwnedSnapshot();
    if (jobManagerOwned == null) {
        return emptyList();
    }
    return singletonList(jobManagerOwned);
}
/** Exposes the nested backend (possibly unwrapping recursively) for testing/introspection. */
@Override
public KeyedStateBackend<K> getDelegatedKeyedStateBackend(boolean recursive) {
    final KeyedStateBackend<K> delegate =
            keyedStateBackend.getDelegatedKeyedStateBackend(recursive);
    return delegate;
}
/**
 * Internal factory wrapping a delegated {@link InternalKvState} into its changelog-aware
 * counterpart, wiring in the change logger and the key context (the nested backend, so it
 * receives key updates during recovery).
 */
private interface StateFactory {
    <K, N, SV, S extends State, IS extends S> IS create(
            InternalKvState<K, N, SV> kvState,
            KvStateChangeLogger<SV, N> changeLogger,
            InternalKeyContext<K> keyContext)
            throws Exception;
}
/**
 * Looks up an already-created state by name for recovery purposes.
 *
 * <p>The returned state will not apply TTL to the passed values, regardless of the TTL
 * settings. This prevents double applying of TTL (recovered values are TTL values if TTL was
 * enabled). The state will, however, use a TTL serializer if TTL is enabled. WARN: only valid
 * during the recovery.
 *
 * @param name state name
 * @param type state type (key/value or priority queue)
 * @return the existing state, i.e. the one that was already created
 * @throws NoSuchElementException if the state wasn't created
 * @throws UnsupportedOperationException if the state type is not supported
 */
public ChangelogState getExistingStateForRecovery(String name, BackendStateType type)
        throws NoSuchElementException, UnsupportedOperationException {
    final ChangelogState result;
    switch (type) {
        case KEY_VALUE:
            result = changelogStates.get(name);
            break;
        case PRIORITY_QUEUE:
            result = priorityQueueStatesByName.get(name);
            break;
        default:
            throw new UnsupportedOperationException(
                    String.format("Unknown state type %s (%s)", type, name));
    }
    if (result == null) {
        throw new NoSuchElementException(String.format("%s state %s not found", type, name));
    }
    return result;
}
/**
 * Adapts a {@link CompletableFuture} to the {@link RunnableFuture} interface expected by the
 * snapshot machinery; {@code run()} simply waits for the future to finish.
 */
private static <T> RunnableFuture<T> toRunnableFuture(CompletableFuture<T> future) {
    return new RunnableFuture<T>() {
        @Override
        public void run() {
            future.join();
        }

        @Override
        public T get() throws InterruptedException, ExecutionException {
            return future.get();
        }

        @Override
        public T get(long timeout, TimeUnit unit)
                throws InterruptedException, ExecutionException, TimeoutException {
            return future.get(timeout, unit);
        }

        @Override
        public boolean cancel(boolean mayInterruptIfRunning) {
            return future.cancel(mayInterruptIfRunning);
        }

        @Override
        public boolean isCancelled() {
            return future.isCancelled();
        }

        @Override
        public boolean isDone() {
            return future.isDone();
        }
    };
}
/**
 * Snapshot state of the changelog keyed backend. It combines three parts: the materialized
 * snapshot produced by the delegated backend, the non-materialized part of the current
 * changelog, and non-materialized changelog handles restored from previous runs (before
 * failover or rescaling).
 */
private static class ChangelogSnapshotState {
    /** ID of this materialization corresponding to the nested backend checkpoint ID. */
    private final long materializationID;
    /**
     * Materialized snapshot from the underlying delegated state backend; set on restore and
     * updated upon each materialization.
     */
    private final List<KeyedStateHandle> materializedSnapshot;
    /**
     * The {@link SequenceNumber} up to which the state is materialized (exclusive); everything
     * after it is the non-materialized part of the current changelog.
     */
    private final SequenceNumber materializedTo;
    /**
     * Non-materialized changelog handles from previous runs; set on restore, cleared once a
     * materialization succeeds.
     */
    private final List<ChangelogStateHandle> restoredNonMaterialized;

    public ChangelogSnapshotState(
            List<KeyedStateHandle> materializedSnapshot,
            List<ChangelogStateHandle> restoredNonMaterialized,
            SequenceNumber materializedTo,
            long materializationID) {
        this.materializationID = materializationID;
        this.materializedTo = materializedTo;
        this.materializedSnapshot = unmodifiableList(materializedSnapshot);
        this.restoredNonMaterialized = unmodifiableList(restoredNonMaterialized);
    }

    public long getMaterializationID() {
        return materializationID;
    }

    public SequenceNumber lastMaterializedTo() {
        return materializedTo;
    }

    public List<KeyedStateHandle> getMaterializedSnapshot() {
        return materializedSnapshot;
    }

    public List<ChangelogStateHandle> getRestoredNonMaterialized() {
        return restoredNonMaterialized;
    }
}
/** Test-only accessor for the changelog writer used by this backend. */
@VisibleForTesting
StateChangelogWriter<? extends ChangelogStateHandle> getChangelogWriter() {
    return stateChangelogWriter;
}
} | class ChangelogKeyedStateBackend<K>
implements CheckpointableKeyedStateBackend<K>,
CheckpointListener,
TestableKeyedStateBackend<K> {
private static final Logger LOG = LoggerFactory.getLogger(ChangelogKeyedStateBackend.class);
/**
* ChangelogStateBackend only supports CheckpointType.CHECKPOINT; The rest of information in
* CheckpointOptions is not used in Snapshotable
*/
private static final CheckpointOptions CHECKPOINT_OPTIONS =
new CheckpointOptions(
CheckpointType.CHECKPOINT, CheckpointStorageLocationReference.getDefault());
private static final Map<StateDescriptor.Type, StateFactory> STATE_FACTORIES =
Stream.of(
Tuple2.of(
StateDescriptor.Type.VALUE,
(StateFactory) ChangelogValueState::create),
Tuple2.of(
StateDescriptor.Type.LIST,
(StateFactory) ChangelogListState::create),
Tuple2.of(
StateDescriptor.Type.REDUCING,
(StateFactory) ChangelogReducingState::create),
Tuple2.of(
StateDescriptor.Type.AGGREGATING,
(StateFactory) ChangelogAggregatingState::create),
Tuple2.of(
StateDescriptor.Type.MAP,
(StateFactory) ChangelogMapState::create))
.collect(Collectors.toMap(t -> t.f0, t -> t.f1));
/** delegated keyedStateBackend. */
private final AbstractKeyedStateBackend<K> keyedStateBackend;
/**
* This is the cache maintained by the DelegateKeyedStateBackend itself. It is not the same as
* the underlying delegated keyedStateBackend. InternalKvState is a delegated state.
*/
private final Map<String, InternalKvState<K, ?, ?>> keyValueStatesByName;
/**
* Unwrapped changelog states used for recovery (not wrapped into e.g. TTL, latency tracking).
*/
private final Map<String, ChangelogState> changelogStates;
private final Map<String, ChangelogKeyGroupedPriorityQueue<?>> priorityQueueStatesByName;
private final ExecutionConfig executionConfig;
private final TtlTimeProvider ttlTimeProvider;
private final StateChangelogWriter<? extends ChangelogStateHandle> stateChangelogWriter;
private final Closer closer = Closer.create();
private final CheckpointStreamFactory streamFactory;
private ChangelogSnapshotState changelogSnapshotState;
private long lastCheckpointId = -1L;
private long materializedId = 0;
/** last accessed partitioned state. */
@SuppressWarnings("rawtypes")
private InternalKvState lastState;
/** For caching the last accessed partitioned state. */
private String lastName;
private final FunctionDelegationHelper functionDelegationHelper =
new FunctionDelegationHelper();
/**
* {@link SequenceNumber} denoting last upload range <b>start</b>, inclusive. Updated to {@link
* ChangelogSnapshotState
* CheckpointStreamFactory, CheckpointOptions) starting snapshot}. Used to notify {@link
*
*/
@Nullable private SequenceNumber lastUploadedFrom;
/**
* {@link SequenceNumber} denoting last upload range <b>end</b>, exclusive. Updated to {@link
* org.apache.flink.runtime.state.changelog.StateChangelogWriter
* when {@link
* snapshot}. Used to notify {@link
* confirmed or aborted by JM.
*/
@Nullable private SequenceNumber lastUploadedTo;
private final String subtaskName;
/**
* Provides a unique ID for each state created by this backend instance. A mapping from this ID
* to state name is written once along with metadata; afterwards, only ID is written with each
* state change for efficiency.
*/
private short lastCreatedStateId = -1;
/** Checkpoint ID mapped to Materialization ID - used to notify nested backend of completion. */
private final NavigableMap<Long, Long> materializationIdByCheckpointId = new TreeMap<>();
/**
* Materialization ID mapped to Checkpoint IDs - used to notify nested backend of abortion.
* Entry is removed when:
*
* <ol>
* <li>some checkpoint of a Set completes (in which case {@link
* CheckpointListener
* <li>a newer checkpoint completes
* <li>all checkpoints of a Set are aborted (in which case {@link
* {@link CheckpointListener
* </ol>
*/
private final Map<Long, Set<Long>> pendingMaterializationConfirmations = new HashMap<>();
private long lastConfirmedMaterializationId = -1L;
/**
 * Creates a changelog keyed backend wrapping the given nested (delegated) backend.
 *
 * @param keyedStateBackend the nested backend that holds the actual state
 * @param subtaskName used for logging
 * @param stateChangelogWriter destination for state change events (the changelog)
 * @param initialState handles to restore from; elements may be null
 * @param checkpointStorageWorkerView used to create task-owned checkpoint output streams
 */
public ChangelogKeyedStateBackend(
        AbstractKeyedStateBackend<K> keyedStateBackend,
        String subtaskName,
        ExecutionConfig executionConfig,
        TtlTimeProvider ttlTimeProvider,
        StateChangelogWriter<? extends ChangelogStateHandle> stateChangelogWriter,
        Collection<ChangelogStateBackendHandle> initialState,
        CheckpointStorageWorkerView checkpointStorageWorkerView) {
    this.keyedStateBackend = keyedStateBackend;
    this.subtaskName = subtaskName;
    this.executionConfig = executionConfig;
    this.ttlTimeProvider = ttlTimeProvider;
    this.keyValueStatesByName = new HashMap<>();
    this.priorityQueueStatesByName = new HashMap<>();
    this.stateChangelogWriter = stateChangelogWriter;
    this.changelogStates = new HashMap<>();
    // Restore happens eagerly, before any state registration.
    this.changelogSnapshotState = completeRestore(initialState);
    this.streamFactory =
            new CheckpointStreamFactory() {
                @Override
                public CheckpointStateOutputStream createCheckpointStateOutputStream(
                        CheckpointedStateScope scope) throws IOException {
                    // Streams are always task-owned here, regardless of the requested scope.
                    return checkpointStorageWorkerView.createTaskOwnedStateStream();
                }

                @Override
                public boolean canFastDuplicate(
                        StreamStateHandle stateHandle, CheckpointedStateScope scope)
                        throws IOException {
                    return false;
                }

                @Override
                public List<StreamStateHandle> duplicate(
                        List<StreamStateHandle> stateHandles, CheckpointedStateScope scope)
                        throws IOException {
                    // NOTE(review): returns null rather than throwing; presumably callers must
                    // check canFastDuplicate() first — confirm this contract.
                    return null;
                }
            };
    // Close the nested backend together with this backend.
    this.closer.register(keyedStateBackend);
}
@Override
public KeyGroupRange getKeyGroupRange() {
return keyedStateBackend.getKeyGroupRange();
}
@Override
public void close() throws IOException {
closer.close();
}
@Override
public void setCurrentKey(K newKey) {
keyedStateBackend.setCurrentKey(newKey);
}
@Override
public K getCurrentKey() {
return keyedStateBackend.getCurrentKey();
}
@Override
public TypeSerializer<K> getKeySerializer() {
return keyedStateBackend.getKeySerializer();
}
@Override
public <N> Stream<K> getKeys(String state, N namespace) {
return keyedStateBackend.getKeys(state, namespace);
}
@Override
public <N> Stream<Tuple2<K, N>> getKeysAndNamespaces(String state) {
return keyedStateBackend.getKeysAndNamespaces(state);
}
@Override
public void dispose() {
keyedStateBackend.dispose();
lastName = null;
lastState = null;
keyValueStatesByName.clear();
changelogStates.clear();
priorityQueueStatesByName.clear();
}
@Override
public void registerKeySelectionListener(KeySelectionListener<K> listener) {
keyedStateBackend.registerKeySelectionListener(listener);
}
@Override
public boolean deregisterKeySelectionListener(KeySelectionListener<K> listener) {
return keyedStateBackend.deregisterKeySelectionListener(listener);
}
@Override
public <N, S extends State, T> void applyToAllKeys(
N namespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<S, T> stateDescriptor,
KeyedStateFunction<K, S> function)
throws Exception {
keyedStateBackend.applyToAllKeys(
namespace,
namespaceSerializer,
stateDescriptor,
function,
this::getPartitionedState);
}
@Override
@SuppressWarnings("unchecked")
public <N, S extends State> S getPartitionedState(
N namespace,
TypeSerializer<N> namespaceSerializer,
StateDescriptor<S, ?> stateDescriptor)
throws Exception {
checkNotNull(namespace, "Namespace");
if (lastName != null && lastName.equals(stateDescriptor.getName())) {
lastState.setCurrentNamespace(namespace);
return (S) lastState;
}
final InternalKvState<K, ?, ?> previous =
keyValueStatesByName.get(stateDescriptor.getName());
if (previous != null) {
lastState = previous;
lastState.setCurrentNamespace(namespace);
lastName = stateDescriptor.getName();
functionDelegationHelper.addOrUpdate(stateDescriptor);
return (S) previous;
}
final S state = getOrCreateKeyedState(namespaceSerializer, stateDescriptor);
final InternalKvState<K, N, ?> kvState = (InternalKvState<K, N, ?>) state;
lastName = stateDescriptor.getName();
lastState = kvState;
kvState.setCurrentNamespace(namespace);
return state;
}
@Nonnull
@Override
public RunnableFuture<SnapshotResult<KeyedStateHandle>> snapshot(
long checkpointId,
long timestamp,
@Nonnull CheckpointStreamFactory streamFactory,
@Nonnull CheckpointOptions checkpointOptions)
throws Exception {
lastCheckpointId = checkpointId;
lastUploadedFrom = changelogSnapshotState.lastMaterializedTo();
lastUploadedTo = getLastAppendedTo();
LOG.info(
"snapshot of {} for checkpoint {}, change range: {}..{}",
subtaskName,
checkpointId,
lastUploadedFrom,
lastUploadedTo);
ChangelogSnapshotState changelogStateBackendStateCopy = changelogSnapshotState;
if (changelogStateBackendStateCopy.materializationID > lastConfirmedMaterializationId) {
materializationIdByCheckpointId.put(
checkpointId, changelogStateBackendStateCopy.materializationID);
pendingMaterializationConfirmations
.computeIfAbsent(
changelogStateBackendStateCopy.materializationID,
ign -> new HashSet<>())
.add(checkpointId);
}
return toRunnableFuture(
stateChangelogWriter
.persist(lastUploadedFrom)
.thenApply(
delta ->
buildSnapshotResult(
delta, changelogStateBackendStateCopy)));
}
private SnapshotResult<KeyedStateHandle> buildSnapshotResult(
ChangelogStateHandle delta, ChangelogSnapshotState changelogStateBackendStateCopy) {
List<ChangelogStateHandle> prevDeltaCopy =
new ArrayList<>(changelogStateBackendStateCopy.getRestoredNonMaterialized());
if (delta != null && delta.getStateSize() > 0) {
prevDeltaCopy.add(delta);
}
if (prevDeltaCopy.isEmpty()
&& changelogStateBackendStateCopy.getMaterializedSnapshot().isEmpty()) {
return SnapshotResult.empty();
} else {
return SnapshotResult.of(
new ChangelogStateBackendHandleImpl(
changelogStateBackendStateCopy.getMaterializedSnapshot(),
prevDeltaCopy,
getKeyGroupRange(),
changelogStateBackendStateCopy.materializationID));
}
}
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <T extends HeapPriorityQueueElement & PriorityComparable<? super T> & Keyed<?>>
KeyGroupedInternalPriorityQueue<T> create(
@Nonnull String stateName,
@Nonnull TypeSerializer<T> byteOrderedElementSerializer) {
ChangelogKeyGroupedPriorityQueue<T> queue =
(ChangelogKeyGroupedPriorityQueue<T>) priorityQueueStatesByName.get(stateName);
if (queue == null) {
PriorityQueueStateChangeLoggerImpl<K, T> priorityQueueStateChangeLogger =
new PriorityQueueStateChangeLoggerImpl<>(
byteOrderedElementSerializer,
keyedStateBackend.getKeyContext(),
stateChangelogWriter,
new RegisteredPriorityQueueStateBackendMetaInfo<>(
stateName, byteOrderedElementSerializer),
++lastCreatedStateId);
closer.register(priorityQueueStateChangeLogger);
queue =
new ChangelogKeyGroupedPriorityQueue<>(
keyedStateBackend.create(stateName, byteOrderedElementSerializer),
priorityQueueStateChangeLogger,
byteOrderedElementSerializer);
priorityQueueStatesByName.put(stateName, queue);
}
return queue;
}
@VisibleForTesting
@Override
public int numKeyValueStateEntries() {
return keyedStateBackend.numKeyValueStateEntries();
}
@Override
public boolean isSafeToReuseKVState() {
return keyedStateBackend.isSafeToReuseKVState();
}
@Nonnull
@Override
public SavepointResources<K> savepoint() throws Exception {
return keyedStateBackend.savepoint();
}
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
    // Confirm the uploaded change range only for the most recent snapshot; lastUploadedFrom/To
    // always describe the range of the last snapshot() call.
    if (lastCheckpointId == checkpointId) {
        stateChangelogWriter.confirm(lastUploadedFrom, lastUploadedTo);
    }
    Long materializationID = materializationIdByCheckpointId.remove(checkpointId);
    if (materializationID != null) {
        // Forward completion to the nested backend at most once per materialization.
        if (materializationID > lastConfirmedMaterializationId) {
            keyedStateBackend.notifyCheckpointComplete(materializationID);
            lastConfirmedMaterializationId = materializationID;
        }
        pendingMaterializationConfirmations.remove(materializationID);
    }
    // Older checkpoints are subsumed by this one: drop their id mappings and any pending
    // confirmations tied to their materializations. headMap is a live view, so clear()
    // removes the entries from materializationIdByCheckpointId itself.
    Map<Long, Long> olderCheckpoints =
            materializationIdByCheckpointId.headMap(checkpointId, true);
    olderCheckpoints.values().forEach(pendingMaterializationConfirmations::remove);
    olderCheckpoints.clear();
}
/**
 * Handles checkpoint abortion: rolls back the writer's uploaded range if the aborted checkpoint
 * is the latest one, and forwards the abort to the nested backend once no other pending
 * checkpoint still references the corresponding materialization.
 */
@Override
public void notifyCheckpointAborted(long checkpointId) throws Exception {
    if (lastCheckpointId == checkpointId) {
        stateChangelogWriter.reset(lastUploadedFrom, lastUploadedTo);
    }
    Long materializationID = materializationIdByCheckpointId.remove(checkpointId);
    if (materializationID != null) {
        Set<Long> checkpoints = pendingMaterializationConfirmations.get(materializationID);
        // Guard against NPE: notifyCheckpointComplete may have already removed this
        // materialization from the pending map while a newer checkpoint id (not covered by
        // the cleared headMap) still referenced it in materializationIdByCheckpointId.
        if (checkpoints != null) {
            checkpoints.remove(checkpointId);
            if (checkpoints.isEmpty()) {
                // Only abort materializations that were never promoted to the current
                // snapshot state.
                if (materializationID < changelogSnapshotState.materializationID) {
                    keyedStateBackend.notifyCheckpointAborted(materializationID);
                }
                pendingMaterializationConfirmations.remove(materializationID);
            }
        }
    }
}
@Override
@SuppressWarnings("unchecked")
public <N, S extends State, T> S getOrCreateKeyedState(
TypeSerializer<N> namespaceSerializer, StateDescriptor<S, T> stateDescriptor)
throws Exception {
checkNotNull(namespaceSerializer, "Namespace serializer");
checkNotNull(
getKeySerializer(),
"State key serializer has not been configured in the config. "
+ "This operation cannot use partitioned state.");
InternalKvState<K, ?, ?> kvState = keyValueStatesByName.get(stateDescriptor.getName());
if (kvState == null) {
if (!stateDescriptor.isSerializerInitialized()) {
stateDescriptor.initializeSerializerUnlessSet(executionConfig);
}
kvState =
LatencyTrackingStateFactory.createStateAndWrapWithLatencyTrackingIfEnabled(
TtlStateFactory.createStateAndWrapWithTtlIfEnabled(
namespaceSerializer, stateDescriptor, this, ttlTimeProvider),
stateDescriptor,
keyedStateBackend.getLatencyTrackingStateConfig());
keyValueStatesByName.put(stateDescriptor.getName(), kvState);
keyedStateBackend.publishQueryableStateIfEnabled(stateDescriptor, kvState);
}
functionDelegationHelper.addOrUpdate(stateDescriptor);
return (S) kvState;
}
@Nonnull
@Override
@SuppressWarnings("unchecked")
public <N, SV, SEV, S extends State, IS extends S> IS createInternalState(
@Nonnull TypeSerializer<N> namespaceSerializer,
@Nonnull StateDescriptor<S, SV> stateDesc,
@Nonnull
StateSnapshotTransformer.StateSnapshotTransformFactory<SEV>
snapshotTransformFactory)
throws Exception {
StateFactory stateFactory = STATE_FACTORIES.get(stateDesc.getType());
if (stateFactory == null) {
String message =
String.format(
"State %s is not supported by %s",
stateDesc.getClass(), this.getClass());
throw new FlinkRuntimeException(message);
}
RegisteredKeyValueStateBackendMetaInfo<N, SV> meta =
new RegisteredKeyValueStateBackendMetaInfo<>(
stateDesc.getType(),
stateDesc.getName(),
namespaceSerializer,
stateDesc.getSerializer(),
(StateSnapshotTransformer.StateSnapshotTransformFactory<SV>)
snapshotTransformFactory);
InternalKvState<K, N, SV> state =
keyedStateBackend.createInternalState(
namespaceSerializer, stateDesc, snapshotTransformFactory);
KvStateChangeLoggerImpl<K, SV, N> kvStateChangeLogger =
new KvStateChangeLoggerImpl<>(
state.getKeySerializer(),
state.getNamespaceSerializer(),
state.getValueSerializer(),
keyedStateBackend.getKeyContext(),
stateChangelogWriter,
meta,
stateDesc.getTtlConfig(),
stateDesc.getDefaultValue(),
++lastCreatedStateId);
closer.register(kvStateChangeLogger);
IS is =
stateFactory.create(
state,
kvStateChangeLogger,
keyedStateBackend /* pass the nested backend as key context so that it get key updates on recovery*/);
changelogStates.put(stateDesc.getName(), (ChangelogState) is);
return is;
}
public void registerCloseable(@Nullable Closeable closeable) {
closer.register(closeable);
}
private ChangelogSnapshotState completeRestore(
Collection<ChangelogStateBackendHandle> stateHandles) {
long materializationId = 0L;
List<KeyedStateHandle> materialized = new ArrayList<>();
List<ChangelogStateHandle> restoredNonMaterialized = new ArrayList<>();
for (ChangelogStateBackendHandle h : stateHandles) {
if (h != null) {
materialized.addAll(h.getMaterializedStateHandles());
restoredNonMaterialized.addAll(h.getNonMaterializedStateHandles());
materializationId = Math.max(materializationId, h.getMaterializationID());
}
}
this.materializedId = materializationId + 1;
return new ChangelogSnapshotState(
materialized,
restoredNonMaterialized,
stateChangelogWriter.initialSequenceNumber(),
materializationId);
}
/**
 * Returns the {@link SequenceNumber} of the latest change appended to the changelog, i.e. the
 * exclusive upper bound of the changes to be persisted by the next snapshot.
 *
 * <p>This method is not thread safe. It should be called either under a lock or through task
 * mailbox executor.
 *
 * <p>Note: the previous version of this method also carried a Javadoc describing state
 * materialization initialization, which belongs to a different method; the original code also
 * invoked {@code lastAppendedSequenceNumber()} twice, discarding the first result. The call is
 * a plain getter, so a single invocation is sufficient.
 */
private SequenceNumber getLastAppendedTo() {
    return stateChangelogWriter.lastAppendedSequenceNumber();
}
/**
* This method is not thread safe. It should be called either under a lock or through task
* mailbox executor.
*/
public void updateChangelogSnapshotState(
SnapshotResult<KeyedStateHandle> materializedSnapshot,
long materializationID,
SequenceNumber upTo)
throws Exception {
LOG.info(
"Task {} finishes materialization, updates the snapshotState upTo {} : {}",
subtaskName,
upTo,
materializedSnapshot);
changelogSnapshotState =
new ChangelogSnapshotState(
getMaterializedResult(materializedSnapshot),
Collections.emptyList(),
upTo,
materializationID);
stateChangelogWriter.truncate(upTo);
}
private List<KeyedStateHandle> getMaterializedResult(
@Nonnull SnapshotResult<KeyedStateHandle> materializedSnapshot) {
KeyedStateHandle jobManagerOwned = materializedSnapshot.getJobManagerOwnedSnapshot();
return jobManagerOwned == null ? emptyList() : singletonList(jobManagerOwned);
}
@Override
public KeyedStateBackend<K> getDelegatedKeyedStateBackend(boolean recursive) {
return keyedStateBackend.getDelegatedKeyedStateBackend(recursive);
}
private interface StateFactory {
<K, N, SV, S extends State, IS extends S> IS create(
InternalKvState<K, N, SV> kvState,
KvStateChangeLogger<SV, N> changeLogger,
InternalKeyContext<K> keyContext)
throws Exception;
}
/**
* @param name state name
* @param type state type (the only supported type currently are: {@link
* BackendStateType
* queue})
* @return an existing state, i.e. the one that was already created. The returned state will not
* apply TTL to the passed values, regardless of the TTL settings. This prevents double
* applying of TTL (recovered values are TTL values if TTL was enabled). The state will,
* however, use TTL serializer if TTL is enabled. WARN: only valid during the recovery.
* @throws NoSuchElementException if the state wasn't created
* @throws UnsupportedOperationException if state type is not supported
*/
public ChangelogState getExistingStateForRecovery(String name, BackendStateType type)
throws NoSuchElementException, UnsupportedOperationException {
ChangelogState state;
switch (type) {
case KEY_VALUE:
state = changelogStates.get(name);
break;
case PRIORITY_QUEUE:
state = priorityQueueStatesByName.get(name);
break;
default:
throw new UnsupportedOperationException(
String.format("Unknown state type %s (%s)", type, name));
}
if (state == null) {
throw new NoSuchElementException(String.format("%s state %s not found", type, name));
}
return state;
}
private static <T> RunnableFuture<T> toRunnableFuture(CompletableFuture<T> f) {
return new RunnableFuture<T>() {
@Override
public void run() {
f.join();
}
@Override
public boolean cancel(boolean mayInterruptIfRunning) {
return f.cancel(mayInterruptIfRunning);
}
@Override
public boolean isCancelled() {
return f.isCancelled();
}
@Override
public boolean isDone() {
return f.isDone();
}
@Override
public T get() throws InterruptedException, ExecutionException {
return f.get();
}
@Override
public T get(long timeout, TimeUnit unit)
throws InterruptedException, ExecutionException, TimeoutException {
return f.get(timeout, unit);
}
};
}
/**
* Snapshot State for ChangelogKeyedStatebackend.
*
* <p>It includes three parts: - materialized snapshot from the underlying delegated state
* backend - non-materialized part in the current changelog - non-materialized changelog, from
* previous logs (before failover or rescaling)
*/
private static class ChangelogSnapshotState {
/**
* Materialized snapshot from the underlying delegated state backend. Set initially on
* restore and later upon materialization.
*/
private final List<KeyedStateHandle> materializedSnapshot;
/**
* The {@link SequenceNumber} up to which the state is materialized, exclusive. This
* indicates the non-materialized part of the current changelog.
*/
private final SequenceNumber materializedTo;
/**
* Non-materialized changelog, from previous logs. Set initially on restore and later
* cleared upon materialization.
*/
private final List<ChangelogStateHandle> restoredNonMaterialized;
/** ID of this materialization corresponding to the nested backend checkpoint ID. */
private final long materializationID;
public ChangelogSnapshotState(
List<KeyedStateHandle> materializedSnapshot,
List<ChangelogStateHandle> restoredNonMaterialized,
SequenceNumber materializedTo,
long materializationID) {
this.materializedSnapshot = unmodifiableList((materializedSnapshot));
this.restoredNonMaterialized = unmodifiableList(restoredNonMaterialized);
this.materializedTo = materializedTo;
this.materializationID = materializationID;
}
public List<KeyedStateHandle> getMaterializedSnapshot() {
return materializedSnapshot;
}
public SequenceNumber lastMaterializedTo() {
return materializedTo;
}
public List<ChangelogStateHandle> getRestoredNonMaterialized() {
return restoredNonMaterialized;
}
public long getMaterializationID() {
return materializationID;
}
}
@VisibleForTesting
StateChangelogWriter<? extends ChangelogStateHandle> getChangelogWriter() {
return stateChangelogWriter;
}
} | |
Removed the modified error and provided the error generated by tree parser as it is. [`40558c7`](https://github.com/ballerina-platform/ballerina-lang/pull/37273/commits/40558c760c4979669408d0beed2760ea15600a44) | public BalShellGetResultResponse getResult(String source) {
BalShellGetResultResponse output = new BalShellGetResultResponse();
PrintStream originalOut = System.out;
PrintStream originalErr = System.err;
ConsoleOutCollector consoleOutCollector = new ConsoleOutCollector();
PrintStream printStreamCollector = new PrintStream(consoleOutCollector, false, StandardCharsets.UTF_8);
System.setOut(printStreamCollector);
System.setErr(printStreamCollector);
try {
ShellCompilation shellCompilation = evaluator.getCompilation(source);
if (shellCompilation.getExceptionStatus() == ExceptionStatus.SUCCESS) {
Optional<PackageCompilation> compilation = shellCompilation.getPackageCompilation();
Optional<NotebookReturnValue> notebookReturnValue = evaluator.getValueAsObject(compilation);
if (notebookReturnValue.isPresent() &&
notebookReturnValue.get().getExceptionStatus() == ExceptionStatus.SUCCESS) {
Object out = notebookReturnValue.get().getResult();
output.setValue(out);
} else if (notebookReturnValue.isPresent() &&
notebookReturnValue.get().getExceptionStatus() == ExceptionStatus.INVOKER_FAILED) {
throw new InvokerException();
}
} else if (shellCompilation.getExceptionStatus() == ExceptionStatus.SNIPPET_FAILED) {
throw new SnippetException();
} else if (shellCompilation.getExceptionStatus() == ExceptionStatus.TREE_PARSER_FAILED) {
if (source.startsWith(COMMAND_PREFIX)) {
List<Diagnostic> diagnostics = Collections.singletonList(Diagnostic.error(
"Please note that Ballerina shell commands are not supported in here."));
output.addOutputDiagnostics(diagnostics);
evaluator.resetDiagnostics();
}
throw new TreeParserException();
} else {
throw new InvokerException();
}
} catch (InvokerPanicException ignored) {
} catch (Exception error) {
output.addError(error.getMessage());
} finally {
output.setConsoleOut(consoleOutCollector.getLines());
output.addOutputDiagnostics(evaluator.diagnostics());
output.setMetaInfo(
evaluator.newVariableNames(),
evaluator.newModuleDeclarations()
);
evaluator.resetDiagnostics();
evaluator.clearPreviousVariablesAndModuleDclnsNames();
printStreamCollector.close();
System.setOut(originalOut);
System.setErr(originalErr);
}
return output;
} | "Please note that Ballerina shell commands are not supported in here.")); | public BalShellGetResultResponse getResult(String source) {
BalShellGetResultResponse output = new BalShellGetResultResponse();
PrintStream originalOut = System.out;
PrintStream originalErr = System.err;
ConsoleOutCollector consoleOutCollector = new ConsoleOutCollector();
PrintStream printStreamCollector = new PrintStream(consoleOutCollector, false, StandardCharsets.UTF_8);
System.setOut(printStreamCollector);
System.setErr(printStreamCollector);
try {
ShellCompilation shellCompilation = evaluator.getCompilation(source);
if (shellCompilation.getExceptionStatus() == ExceptionStatus.SUCCESS) {
Optional<PackageCompilation> compilation = shellCompilation.getPackageCompilation();
Optional<NotebookReturnValue> notebookReturnValue = evaluator.getValueAsObject(compilation);
if (notebookReturnValue.isPresent() &&
notebookReturnValue.get().getExceptionStatus() == ExceptionStatus.SUCCESS) {
Object out = notebookReturnValue.get().getResult();
output.setValue(out);
} else if (notebookReturnValue.isPresent() &&
notebookReturnValue.get().getExceptionStatus() == ExceptionStatus.INVOKER_FAILED) {
throw new InvokerException();
}
} else if (shellCompilation.getExceptionStatus() == ExceptionStatus.SNIPPET_FAILED) {
throw new SnippetException();
} else if (shellCompilation.getExceptionStatus() == ExceptionStatus.TREE_PARSER_FAILED) {
if (source.startsWith(COMMAND_PREFIX)) {
evaluator.resetDiagnostics();
}
throw new TreeParserException();
} else {
throw new InvokerException();
}
} catch (InvokerPanicException ignored) {
} catch (Exception error) {
output.addError(error.getMessage());
} finally {
output.setConsoleOut(consoleOutCollector.getLines());
output.addOutputDiagnostics(evaluator.diagnostics());
output.setMetaInfo(
evaluator.newVariableNames(),
evaluator.newModuleDeclarations()
);
evaluator.resetDiagnostics();
evaluator.clearPreviousVariablesAndModuleDclnsNames();
printStreamCollector.close();
System.setOut(originalOut);
System.setErr(originalErr);
}
return output;
} | class InstanceHolder {
private static final ShellWrapper instance = new ShellWrapper();
} | class InstanceHolder {
private static final ShellWrapper instance = new ShellWrapper();
} |
Maybe we could further provide an utility for creating Execution like below: ``` private CompletableFuture<Execution> createExecution( TaskManagerGateway taskManagerGateway, JobVertex... vertices) throws Exception { SimpleSlot slot = new SimpleSlot( new SingleSlotTestingSlotOwner(), new LocalTaskManagerLocation(), 0, taskManagerGateway); ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1); slotProvider.addSlot(vertices[0].getID(), 0, CompletableFuture.completedFuture(slot)); ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph( new JobID(), slotProvider, new NoRestartStrategy(), vertices); executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread()); ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(vertices[0].getID()); ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0]; Execution execution = executionVertex.getCurrentExecutionAttempt(); CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution( slotProvider, false, LocationPreferenceConstraint.ALL, Collections.emptySet(), TestingUtils.infiniteTime()); return allocationFuture; } ``` Then we could get Execution from future, and further get ExecutionVertex and ExecutionGraph from Execution. Then this helper could be reused for many existing tests. | private void testPartitionReleaseAfterFinished(Consumer<Execution> postFinishedExecutionAction) throws Exception {
final Tuple2<JobID, Collection<ResultPartitionID>> releasedPartitions = Tuple2.of(null, null);
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
taskManagerGateway.setReleasePartitionsConsumer(releasedPartitions::setFields);
final SimpleSlot slot = new SimpleSlot(
new SingleSlotTestingSlotOwner(),
new LocalTaskManagerLocation(),
0,
taskManagerGateway);
final JobVertex producerVertex = createNoOpJobVertex();
final JobVertex consumerVertex = createNoOpJobVertex();
consumerVertex.connectNewDataSetAsInput(producerVertex, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(producerVertex.getID(), 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
producerVertex,
consumerVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(producerVertex.getID());
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
execution.deploy();
execution.switchToRunning();
execution.markFinished();
postFinishedExecutionAction.accept(execution);
assertEquals(executionGraph.getJobID(), releasedPartitions.f0);
assertEquals(executionVertex.getProducedPartitions().size(), releasedPartitions.f1.size());
for (ResultPartitionID partitionId : releasedPartitions.f1) {
IntermediateResultPartition intermediateResultPartition = executionVertex
.getProducedPartitions()
.get(partitionId.getPartitionId());
assertNotNull(intermediateResultPartition);
assertEquals(execution.getAttemptId(), partitionId.getProducerId());
}
} | execution.deploy(); | private void testPartitionReleaseAfterFinished(Consumer<Execution> postFinishedExecutionAction) throws Exception {
final Tuple2<JobID, Collection<ResultPartitionID>> releasedPartitions = Tuple2.of(null, null);
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
taskManagerGateway.setReleasePartitionsConsumer(releasedPartitions::setFields);
final JobVertex producerVertex = createNoOpJobVertex();
final JobVertex consumerVertex = createNoOpJobVertex();
consumerVertex.connectNewDataSetAsInput(producerVertex, DistributionPattern.ALL_TO_ALL, ResultPartitionType.BLOCKING);
final SimpleSlot slot = new SimpleSlot(
new SingleSlotTestingSlotOwner(),
new LocalTaskManagerLocation(),
0,
taskManagerGateway);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(producerVertex.getID(), 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
producerVertex,
consumerVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(producerVertex.getID());
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
execution.deploy();
execution.switchToRunning();
execution.markFinished();
postFinishedExecutionAction.accept(execution);
assertEquals(executionGraph.getJobID(), releasedPartitions.f0);
assertEquals(executionVertex.getProducedPartitions().size(), releasedPartitions.f1.size());
for (ResultPartitionID partitionId : releasedPartitions.f1) {
IntermediateResultPartition intermediateResultPartition = executionVertex
.getProducedPartitions()
.get(partitionId.getPartitionId());
assertNotNull(intermediateResultPartition);
assertEquals(execution.getAttemptId(), partitionId.getProducerId());
}
} | class ExecutionTest extends TestLogger {
@ClassRule
public static final TestingComponentMainThreadExecutor.Resource EXECUTOR_RESOURCE =
new TestingComponentMainThreadExecutor.Resource();
private final TestingComponentMainThreadExecutor testMainThreadUtil =
EXECUTOR_RESOURCE.getComponentMainThreadTestExecutor();
/**
* Tests that slots are released if we cannot assign the allocated resource to the
* Execution.
*/
@Test
public void testSlotReleaseOnFailedResourceAssignment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, slotFuture);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final LogicalSlot otherSlot = new TestingLogicalSlot();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertFalse(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertTrue(execution.tryAssignResource(otherSlot));
slotFuture.complete(slot);
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of a execution cancellation when having
* a slot assigned and being in state SCHEDULED.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInScheduled() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.cancel();
assertEquals(ExecutionState.CANCELED, execution.getState());
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of a execution cancellation when being in state
* RUNNING.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInRunning() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.deploy();
execution.switchToRunning();
execution.cancel();
assertEquals(ExecutionState.CANCELING, execution.getState());
execution.completeCancelling();
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that a slot allocation from a {@link SlotProvider} is cancelled if the
* {@link Execution} is cancelled.
*/
@Test
public void testSlotAllocationCancellationWhenExecutionCancelled() throws Exception {
final JobVertexID jobVertexId = new JobVertexID();
final JobVertex jobVertex = new JobVertex("test vertex", jobVertexId);
jobVertex.setInvokableClass(NoOpInvokable.class);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
slotProvider.addSlot(jobVertexId, 0, slotFuture);
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
final ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution currentExecutionAttempt = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final CompletableFuture<Execution> allocationFuture = currentExecutionAttempt.allocateAndAssignSlotForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertThat(allocationFuture.isDone(), is(false));
assertThat(slotProvider.getSlotRequestedFuture(jobVertexId, 0).get(), is(true));
final Set<SlotRequestId> slotRequests = slotProvider.getSlotRequests();
assertThat(slotRequests, hasSize(1));
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.SCHEDULED));
currentExecutionAttempt.cancel();
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.CANCELED));
assertThat(allocationFuture.isCompletedExceptionally(), is(true));
final Set<SlotRequestId> canceledSlotRequests = slotProvider.getCanceledSlotRequests();
assertThat(canceledSlotRequests, equalTo(slotRequests));
}
/**
* Tests that the partitions are released in case of a execution cancellation after the execution is already finished.
*/
@Test
public void testPartitionReleaseOnCancelAfterFinished() throws Exception {
testPartitionReleaseAfterFinished(Execution::cancel);
}
/**
* Tests that the partitions are released in case of a execution suspension after the execution is already finished.
*/
@Test
public void testPartitionReleaseOnSuspendAfterFinished() throws Exception {
testPartitionReleaseAfterFinished(Execution::suspend);
}
/**
* Tests that all preferred locations are calculated.
*/
@Test
public void testAllPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation2 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = new CompletableFuture<>();
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ALL);
assertFalse(preferredLocationsFuture.isDone());
locationFuture3.complete(taskManagerLocation3);
assertFalse(preferredLocationsFuture.isDone());
locationFuture2.complete(taskManagerLocation2);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation2, taskManagerLocation3));
}
/**
* Tests that any preferred locations are calculated.
*/
@Test
public void testAnyPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = CompletableFuture.completedFuture(taskManagerLocation3);
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ANY);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation3));
}
/**
* Checks that the {@link Execution} termination future is only completed after the
* assigned slot has been released.
*
* <p>NOTE: This test only fails spuriously without the fix of this commit. Thus, one has
* to execute this test multiple times to see the failure.
*/
@Test
public void testTerminationFutureIsCompletedAfterSlotRelease() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
Execution currentExecutionAttempt = executionVertex.getCurrentExecutionAttempt();
CompletableFuture<LogicalSlot> returnedSlotFuture = slotOwner.getReturnedSlotFuture();
CompletableFuture<?> terminationFuture = executionVertex.cancel();
currentExecutionAttempt.completeCancelling();
CompletableFuture<Boolean> restartFuture = terminationFuture.thenApply(
ignored -> {
assertTrue(returnedSlotFuture.isDone());
return true;
});
restartFuture.get();
}
/**
* Tests that the task restore state is nulled after the {@link Execution} has been
* deployed. See FLINK-9693.
*/
@Test
public void testTaskRestoreStateIsNulledAfterDeployment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
final JobManagerTaskRestore taskRestoreState = new JobManagerTaskRestore(1L, new TaskStateSnapshot());
execution.setInitialState(taskRestoreState);
assertThat(execution.getTaskRestore(), is(notNullValue()));
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
assertThat(execution.getTaskRestore(), is(nullValue()));
}
@Test
public void testEagerSchedulingFailureReturnsSlot() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final CompletableFuture<SlotRequestId> slotRequestIdFuture = new CompletableFuture<>();
final CompletableFuture<SlotRequestId> returnedSlotFuture = new CompletableFuture<>();
final TestingSlotProvider slotProvider = new TestingSlotProvider(
(SlotRequestId slotRequestId) -> {
slotRequestIdFuture.complete(slotRequestId);
return new CompletableFuture<>();
});
slotProvider.setSlotCanceller(returnedSlotFuture::complete);
slotOwner.getReturnedSlotFuture().thenAccept(
(LogicalSlot logicalSlot) -> returnedSlotFuture.complete(logicalSlot.getSlotRequestId()));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
taskManagerGateway.setCancelConsumer(
executionAttemptID -> {
if (execution.getAttemptId().equals(executionAttemptID)) {
execution.completeCancelling();
}
}
);
slotRequestIdFuture.thenAcceptAsync(
(SlotRequestId slotRequestId) -> {
final SingleLogicalSlot singleLogicalSlot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
taskManagerGateway,
slotRequestId);
slotProvider.complete(slotRequestId, singleLogicalSlot);
},
testMainThreadUtil.getMainThreadExecutor());
final CompletableFuture<Void> schedulingFuture = testMainThreadUtil.execute(
() -> execution.scheduleForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ANY,
Collections.emptySet()));
try {
schedulingFuture.get();
testMainThreadUtil.execute(execution::cancel);
} catch (ExecutionException ignored) {
}
assertThat(returnedSlotFuture.get(), is(equalTo(slotRequestIdFuture.get())));
}
/**
* Tests that a slot release will atomically release the assigned {@link Execution}.
*/
@Test
public void testSlotReleaseAtomicallyReleasesExecution() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SingleLogicalSlot slot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
new SimpleAckingTaskManagerGateway(),
new SlotRequestId());
final CompletableFuture<LogicalSlot> slotFuture = CompletableFuture.completedFuture(slot);
final CountDownLatch slotRequestLatch = new CountDownLatch(1);
final TestingSlotProvider slotProvider = new TestingSlotProvider(slotRequestId -> {
slotRequestLatch.countDown();
return slotFuture;
});
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
final Execution execution = executionGraph.getJobVertex(jobVertex.getID()).getTaskVertices()[0].getCurrentExecutionAttempt();
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
testMainThreadUtil.execute(executionGraph::scheduleForExecution);
slotRequestLatch.await();
testMainThreadUtil.execute(() -> {
assertThat(execution.getAssignedResource(), is(sameInstance(slot)));
slot.release(new FlinkException("Test exception"));
assertThat(execution.getReleaseFuture().isDone(), is(true));
});
}
@Nonnull
private JobVertex createNoOpJobVertex() {
final JobVertex jobVertex = new JobVertex("Test vertex", new JobVertexID());
jobVertex.setInvokableClass(NoOpInvokable.class);
return jobVertex;
}
@Nonnull
private ProgrammedSlotProvider createProgrammedSlotProvider(
int parallelism,
Collection<JobVertexID> jobVertexIds,
SlotOwner slotOwner) {
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(parallelism);
for (JobVertexID jobVertexId : jobVertexIds) {
for (int i = 0; i < parallelism; i++) {
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway(),
null,
null);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
}
}
return slotProvider;
}
/**
* Slot owner which records the first returned slot.
*/
private static final class SingleSlotTestingSlotOwner implements SlotOwner {
final CompletableFuture<LogicalSlot> returnedSlot = new CompletableFuture<>();
public CompletableFuture<LogicalSlot> getReturnedSlotFuture() {
return returnedSlot;
}
@Override
public void returnLogicalSlot(LogicalSlot logicalSlot) {
returnedSlot.complete(logicalSlot);
}
}
} | class ExecutionTest extends TestLogger {
@ClassRule
public static final TestingComponentMainThreadExecutor.Resource EXECUTOR_RESOURCE =
new TestingComponentMainThreadExecutor.Resource();
private final TestingComponentMainThreadExecutor testMainThreadUtil =
EXECUTOR_RESOURCE.getComponentMainThreadTestExecutor();
/**
* Tests that slots are released if we cannot assign the allocated resource to the
* Execution.
*/
@Test
public void testSlotReleaseOnFailedResourceAssignment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, slotFuture);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final LogicalSlot otherSlot = new TestingLogicalSlot();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertFalse(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertTrue(execution.tryAssignResource(otherSlot));
slotFuture.complete(slot);
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of a execution cancellation when having
* a slot assigned and being in state SCHEDULED.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInScheduled() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.cancel();
assertEquals(ExecutionState.CANCELED, execution.getState());
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that the slot is released in case of a execution cancellation when being in state
* RUNNING.
*/
@Test
public void testSlotReleaseOnExecutionCancellationInRunning() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway());
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution execution = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
CompletableFuture<Execution> allocationFuture = execution.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertTrue(allocationFuture.isDone());
assertEquals(ExecutionState.SCHEDULED, execution.getState());
assertEquals(slot, execution.getAssignedResource());
execution.deploy();
execution.switchToRunning();
execution.cancel();
assertEquals(ExecutionState.CANCELING, execution.getState());
execution.completeCancelling();
assertEquals(slot, slotOwner.getReturnedSlotFuture().get());
}
/**
* Tests that a slot allocation from a {@link SlotProvider} is cancelled if the
* {@link Execution} is cancelled.
*/
@Test
public void testSlotAllocationCancellationWhenExecutionCancelled() throws Exception {
final JobVertexID jobVertexId = new JobVertexID();
final JobVertex jobVertex = new JobVertex("test vertex", jobVertexId);
jobVertex.setInvokableClass(NoOpInvokable.class);
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(1);
final CompletableFuture<LogicalSlot> slotFuture = new CompletableFuture<>();
slotProvider.addSlot(jobVertexId, 0, slotFuture);
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
final ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
final Execution currentExecutionAttempt = executionJobVertex.getTaskVertices()[0].getCurrentExecutionAttempt();
final CompletableFuture<Execution> allocationFuture = currentExecutionAttempt.allocateResourcesForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ALL,
Collections.emptySet(),
TestingUtils.infiniteTime());
assertThat(allocationFuture.isDone(), is(false));
assertThat(slotProvider.getSlotRequestedFuture(jobVertexId, 0).get(), is(true));
final Set<SlotRequestId> slotRequests = slotProvider.getSlotRequests();
assertThat(slotRequests, hasSize(1));
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.SCHEDULED));
currentExecutionAttempt.cancel();
assertThat(currentExecutionAttempt.getState(), is(ExecutionState.CANCELED));
assertThat(allocationFuture.isCompletedExceptionally(), is(true));
final Set<SlotRequestId> canceledSlotRequests = slotProvider.getCanceledSlotRequests();
assertThat(canceledSlotRequests, equalTo(slotRequests));
}
/**
* Tests that the partitions are released in case of an execution cancellation after the execution is already finished.
*/
@Test
public void testPartitionReleaseOnCancelingAfterBeingFinished() throws Exception {
testPartitionReleaseAfterFinished(Execution::cancel);
}
/**
* Tests that the partitions are released in case of an execution suspension after the execution is already finished.
*/
@Test
public void testPartitionReleaseOnSuspendingAfterBeingFinished() throws Exception {
testPartitionReleaseAfterFinished(Execution::suspend);
}
/**
* Tests that all preferred locations are calculated.
*/
@Test
public void testAllPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation2 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = new CompletableFuture<>();
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ALL);
assertFalse(preferredLocationsFuture.isDone());
locationFuture3.complete(taskManagerLocation3);
assertFalse(preferredLocationsFuture.isDone());
locationFuture2.complete(taskManagerLocation2);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation2, taskManagerLocation3));
}
/**
* Tests that any preferred locations are calculated.
*/
@Test
public void testAnyPreferredLocationCalculation() throws ExecutionException, InterruptedException {
final TaskManagerLocation taskManagerLocation1 = new LocalTaskManagerLocation();
final TaskManagerLocation taskManagerLocation3 = new LocalTaskManagerLocation();
final CompletableFuture<TaskManagerLocation> locationFuture1 = CompletableFuture.completedFuture(taskManagerLocation1);
final CompletableFuture<TaskManagerLocation> locationFuture2 = new CompletableFuture<>();
final CompletableFuture<TaskManagerLocation> locationFuture3 = CompletableFuture.completedFuture(taskManagerLocation3);
final Execution execution = SchedulerTestUtils.getTestVertex(Arrays.asList(locationFuture1, locationFuture2, locationFuture3));
CompletableFuture<Collection<TaskManagerLocation>> preferredLocationsFuture = execution.calculatePreferredLocations(LocationPreferenceConstraint.ANY);
assertTrue(preferredLocationsFuture.isDone());
final Collection<TaskManagerLocation> preferredLocations = preferredLocationsFuture.get();
assertThat(preferredLocations, containsInAnyOrder(taskManagerLocation1, taskManagerLocation3));
}
/**
* Checks that the {@link Execution} termination future is only completed after the
* assigned slot has been released.
*
* <p>NOTE: This test only fails spuriously without the fix of this commit. Thus, one has
* to execute this test multiple times to see the failure.
*/
@Test
public void testTerminationFutureIsCompletedAfterSlotRelease() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(TestingComponentMainThreadExecutorServiceAdapter.forMainThread());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
Execution currentExecutionAttempt = executionVertex.getCurrentExecutionAttempt();
CompletableFuture<LogicalSlot> returnedSlotFuture = slotOwner.getReturnedSlotFuture();
CompletableFuture<?> terminationFuture = executionVertex.cancel();
currentExecutionAttempt.completeCancelling();
CompletableFuture<Boolean> restartFuture = terminationFuture.thenApply(
ignored -> {
assertTrue(returnedSlotFuture.isDone());
return true;
});
restartFuture.get();
}
/**
* Tests that the task restore state is nulled after the {@link Execution} has been
* deployed. See FLINK-9693.
*/
@Test
public void testTaskRestoreStateIsNulledAfterDeployment() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final ProgrammedSlotProvider slotProvider = createProgrammedSlotProvider(
1,
Collections.singleton(jobVertexId),
slotOwner);
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
final JobManagerTaskRestore taskRestoreState = new JobManagerTaskRestore(1L, new TaskStateSnapshot());
execution.setInitialState(taskRestoreState);
assertThat(execution.getTaskRestore(), is(notNullValue()));
executionVertex.scheduleForExecution(slotProvider, false, LocationPreferenceConstraint.ANY, Collections.emptySet()).get();
assertThat(execution.getTaskRestore(), is(nullValue()));
}
@Test
public void testEagerSchedulingFailureReturnsSlot() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final JobVertexID jobVertexId = jobVertex.getID();
final SimpleAckingTaskManagerGateway taskManagerGateway = new SimpleAckingTaskManagerGateway();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final CompletableFuture<SlotRequestId> slotRequestIdFuture = new CompletableFuture<>();
final CompletableFuture<SlotRequestId> returnedSlotFuture = new CompletableFuture<>();
final TestingSlotProvider slotProvider = new TestingSlotProvider(
(SlotRequestId slotRequestId) -> {
slotRequestIdFuture.complete(slotRequestId);
return new CompletableFuture<>();
});
slotProvider.setSlotCanceller(returnedSlotFuture::complete);
slotOwner.getReturnedSlotFuture().thenAccept(
(LogicalSlot logicalSlot) -> returnedSlotFuture.complete(logicalSlot.getSlotRequestId()));
ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
ExecutionJobVertex executionJobVertex = executionGraph.getJobVertex(jobVertexId);
ExecutionVertex executionVertex = executionJobVertex.getTaskVertices()[0];
final Execution execution = executionVertex.getCurrentExecutionAttempt();
taskManagerGateway.setCancelConsumer(
executionAttemptID -> {
if (execution.getAttemptId().equals(executionAttemptID)) {
execution.completeCancelling();
}
}
);
slotRequestIdFuture.thenAcceptAsync(
(SlotRequestId slotRequestId) -> {
final SingleLogicalSlot singleLogicalSlot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
taskManagerGateway,
slotRequestId);
slotProvider.complete(slotRequestId, singleLogicalSlot);
},
testMainThreadUtil.getMainThreadExecutor());
final CompletableFuture<Void> schedulingFuture = testMainThreadUtil.execute(
() -> execution.scheduleForExecution(
slotProvider,
false,
LocationPreferenceConstraint.ANY,
Collections.emptySet()));
try {
schedulingFuture.get();
testMainThreadUtil.execute(execution::cancel);
} catch (ExecutionException ignored) {
}
assertThat(returnedSlotFuture.get(), is(equalTo(slotRequestIdFuture.get())));
}
/**
* Tests that a slot release will atomically release the assigned {@link Execution}.
*/
@Test
public void testSlotReleaseAtomicallyReleasesExecution() throws Exception {
final JobVertex jobVertex = createNoOpJobVertex();
final SingleSlotTestingSlotOwner slotOwner = new SingleSlotTestingSlotOwner();
final SingleLogicalSlot slot = ExecutionGraphSchedulingTest.createSingleLogicalSlot(
slotOwner,
new SimpleAckingTaskManagerGateway(),
new SlotRequestId());
final CompletableFuture<LogicalSlot> slotFuture = CompletableFuture.completedFuture(slot);
final CountDownLatch slotRequestLatch = new CountDownLatch(1);
final TestingSlotProvider slotProvider = new TestingSlotProvider(slotRequestId -> {
slotRequestLatch.countDown();
return slotFuture;
});
final ExecutionGraph executionGraph = ExecutionGraphTestUtils.createSimpleTestGraph(
new JobID(),
slotProvider,
new NoRestartStrategy(),
jobVertex);
final Execution execution = executionGraph.getJobVertex(jobVertex.getID()).getTaskVertices()[0].getCurrentExecutionAttempt();
executionGraph.start(testMainThreadUtil.getMainThreadExecutor());
testMainThreadUtil.execute(executionGraph::scheduleForExecution);
slotRequestLatch.await();
testMainThreadUtil.execute(() -> {
assertThat(execution.getAssignedResource(), is(sameInstance(slot)));
slot.release(new FlinkException("Test exception"));
assertThat(execution.getReleaseFuture().isDone(), is(true));
});
}
@Nonnull
private JobVertex createNoOpJobVertex() {
final JobVertex jobVertex = new JobVertex("Test vertex", new JobVertexID());
jobVertex.setInvokableClass(NoOpInvokable.class);
return jobVertex;
}
@Nonnull
private ProgrammedSlotProvider createProgrammedSlotProvider(
int parallelism,
Collection<JobVertexID> jobVertexIds,
SlotOwner slotOwner) {
final ProgrammedSlotProvider slotProvider = new ProgrammedSlotProvider(parallelism);
for (JobVertexID jobVertexId : jobVertexIds) {
for (int i = 0; i < parallelism; i++) {
final SimpleSlot slot = new SimpleSlot(
slotOwner,
new LocalTaskManagerLocation(),
0,
new SimpleAckingTaskManagerGateway(),
null,
null);
slotProvider.addSlot(jobVertexId, 0, CompletableFuture.completedFuture(slot));
}
}
return slotProvider;
}
/**
* Slot owner which records the first returned slot.
*/
private static final class SingleSlotTestingSlotOwner implements SlotOwner {
final CompletableFuture<LogicalSlot> returnedSlot = new CompletableFuture<>();
public CompletableFuture<LogicalSlot> getReturnedSlotFuture() {
return returnedSlot;
}
@Override
public void returnLogicalSlot(LogicalSlot logicalSlot) {
returnedSlot.complete(logicalSlot);
}
}
} |
Shall we extract `matchExpr.type` and `errorMatchPattern.type` out to variables? | public BType resolvePatternTypeFromMatchExpr(BLangErrorMatchPattern errorMatchPattern, BLangExpression matchExpr) {
if (matchExpr == null) {
return errorMatchPattern.type;
}
if (isAssignable(matchExpr.type, errorMatchPattern.type)) {
return matchExpr.type;
}
if (isAssignable(errorMatchPattern.type, matchExpr.type)) {
return errorMatchPattern.type;
}
return symTable.noType;
} | if (isAssignable(matchExpr.type, errorMatchPattern.type)) { | public BType resolvePatternTypeFromMatchExpr(BLangErrorMatchPattern errorMatchPattern, BLangExpression matchExpr) {
if (matchExpr == null) {
return errorMatchPattern.type;
}
BType matchExprType = matchExpr.type;
BType patternType = errorMatchPattern.type;
if (isAssignable(matchExprType, patternType)) {
return matchExprType;
}
if (isAssignable(patternType, matchExprType)) {
return patternType;
}
return symTable.noType;
} | class Types {
private static final CompilerContext.Key<Types> TYPES_KEY =
new CompilerContext.Key<>();
private final ResolvedTypeBuilder typeBuilder;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
private int finiteTypeCount = 0;
private BUnionType expandedXMLBuiltinSubtypes;
private final BLangAnonymousModelHelper anonymousModelHelper;
public static Types getInstance(CompilerContext context) {
Types types = context.get(TYPES_KEY);
if (types == null) {
types = new Types(context);
}
return types;
}
public Types(CompilerContext context) {
context.put(TYPES_KEY, this);
this.symTable = SymbolTable.getInstance(context);
this.symResolver = SymbolResolver.getInstance(context);
this.dlog = BLangDiagnosticLog.getInstance(context);
this.names = Names.getInstance(context);
this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
symTable.xmlElementType, symTable.xmlCommentType,
symTable.xmlPIType, symTable.xmlTextType);
this.typeBuilder = new ResolvedTypeBuilder();
this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
public List<BType> checkTypes(BLangExpression node,
List<BType> actualTypes,
List<BType> expTypes) {
List<BType> resTypes = new ArrayList<>();
for (int i = 0; i < actualTypes.size(); i++) {
resTypes.add(checkType(node, actualTypes.get(i), expTypes.size() > i ? expTypes.get(i) : symTable.noType));
}
return resTypes;
}
public BType checkType(BLangExpression node,
BType actualType,
BType expType) {
return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
public BType checkType(BLangExpression expr,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
expr.type = checkType(expr.pos, actualType, expType, diagCode);
if (expr.type.tag == TypeTags.SEMANTIC_ERROR) {
return expr.type;
}
setImplicitCastExpr(expr, actualType, expType);
return expr.type;
}
public BType checkType(Location pos,
BType actualType,
BType expType,
DiagnosticCode diagCode) {
if (expType.tag == TypeTags.SEMANTIC_ERROR) {
return expType;
} else if (expType.tag == TypeTags.NONE) {
return actualType;
} else if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
return actualType;
} else if (isAssignable(actualType, expType)) {
return actualType;
}
dlog.error(pos, diagCode, expType, actualType);
return symTable.semanticError;
}
public boolean isJSONContext(BType type) {
if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream().anyMatch(memType -> memType.tag == TypeTags.JSON);
}
return type.tag == TypeTags.JSON;
}
public boolean isJSONUnionType(BUnionType type) {
if (type.name != null && (type.name.getValue().equals(Names.JSON.getValue()))) {
return true;
}
return isSameType(type, symTable.jsonType);
}
public boolean isLax(BType type) {
Set<BType> visited = new HashSet<>();
int result = isLaxType(type, visited);
if (result == 1) {
return true;
}
return false;
}
public int isLaxType(BType type, Set<BType> visited) {
if (!visited.add(type)) {
return -1;
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
return 1;
case TypeTags.MAP:
return isLaxType(((BMapType) type).constraint, visited);
case TypeTags.UNION:
if (isSameType(type, symTable.jsonType)) {
visited.add(type);
return 1;
}
boolean atleastOneLaxType = false;
for (BType member : ((BUnionType) type).getMemberTypes()) {
int result = isLaxType(member, visited);
if (result == -1) {
continue;
}
if (result == 0) {
return 0;
}
atleastOneLaxType = true;
}
return atleastOneLaxType ? 1 : 0;
}
return 0;
}
public boolean isLaxType(BType type, Map<BType, Boolean> visited) {
if (visited.containsKey(type)) {
return visited.get(type);
}
switch (type.tag) {
case TypeTags.JSON:
case TypeTags.XML:
case TypeTags.XML_ELEMENT:
visited.put(type, true);
return true;
case TypeTags.MAP:
boolean result = isLaxType(((BMapType) type).constraint, visited);
visited.put(type, result);
return result;
case TypeTags.UNION:
if (type == symTable.jsonType || isSameType(type, symTable.jsonType)) {
visited.put(type, true);
return true;
}
for (BType member : ((BUnionType) type).getMemberTypes()) {
if (!isLaxType(member, visited)) {
visited.put(type, false);
return false;
}
}
visited.put(type, true);
return true;
}
visited.put(type, false);
return false;
}
public boolean isSameType(BType source, BType target) {
return isSameType(source, target, new HashSet<>());
}
public boolean isPureType(BType type) {
IsPureTypeUniqueVisitor visitor = new IsPureTypeUniqueVisitor();
return visitor.visit(type);
}
public boolean isAnydata(BType type) {
IsAnydataUniqueVisitor visitor = new IsAnydataUniqueVisitor();
return visitor.visit(type);
}
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
return true;
}
unresolvedTypes.add(pair);
BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
return target.accept(sameTypeVisitor, source);
}
public boolean isValueType(BType type) {
switch (type.tag) {
case TypeTags.BOOLEAN:
case TypeTags.BYTE:
case TypeTags.DECIMAL:
case TypeTags.FLOAT:
case TypeTags.INT:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
return true;
default:
return false;
}
}
boolean isBasicNumericType(BType type) {
return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
return finiteType.getValueSpace().stream().anyMatch(valueExpr -> isBasicNumericType(valueExpr.type));
}
public boolean containsErrorType(BType type) {
if (type.tag == TypeTags.UNION) {
return ((BUnionType) type).getMemberTypes().stream()
.anyMatch(this::containsErrorType);
}
return type.tag == TypeTags.ERROR;
}
public boolean isSubTypeOfList(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfList);
}
BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BTupleType listMatchPatternType,
SymbolEnv env) {
if (matchExpr == null) {
return listMatchPatternType;
}
BType matchExprType = matchExpr.type;
BType intersectionType = getTypeIntersection(matchExprType, listMatchPatternType, env);
if (intersectionType != symTable.semanticError) {
return intersectionType;
}
if (matchExprType.tag == TypeTags.ANYDATA) {
Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
if (listMatchPatternType.restType != null) {
listMatchPatternType.restType = symTable.anydataType;
}
return listMatchPatternType;
}
return symTable.noType;
}
public BType resolvePatternTypeFromMatchExpr(BLangConstPattern constPattern, BLangExpression constPatternExpr) {
if (constPattern.matchExpr == null) {
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
} else {
return constPatternExpr.type;
}
}
BType matchExprType = constPattern.matchExpr.type;
BType constMatchPatternExprType = constPatternExpr.type;
if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
if (constVarRef.symbol == null) {
return symTable.noType;
}
BType constVarRefSymbolType = constVarRef.symbol.type;
if (isAssignable(constVarRefSymbolType, matchExprType)) {
return constVarRefSymbolType;
}
return symTable.noType;
}
BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
if (containsAnyType(constMatchPatternExprType)) {
return matchExprType;
} else if (containsAnyType(matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
return matchExprType;
}
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
if (matchExprType.tag == TypeTags.UNION) {
for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
if (memberType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
return memberType;
}
} else {
if (isAssignable(constMatchPatternExprType, matchExprType)) {
return constMatchPatternExprType;
}
}
}
} else if (matchExprType.tag == TypeTags.FINITE) {
if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
return matchExprType;
}
}
return symTable.noType;
}
BType resolvePatternTypeFromMatchExpr(BLangMappingMatchPattern mappingMatchPattern, BType patternType,
SymbolEnv env) {
if (mappingMatchPattern.matchExpr == null) {
return patternType;
}
BType intersectionType = getTypeIntersection(mappingMatchPattern.matchExpr.type, patternType, env);
if (intersectionType == symTable.semanticError) {
return symTable.noType;
}
return intersectionType;
}
private boolean containsAnyType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANY;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANY) {
return true;
}
}
return false;
}
private boolean containsAnyDataType(BType type) {
if (type.tag != TypeTags.UNION) {
return type.tag == TypeTags.ANYDATA;
}
for (BType memberTypes : ((BUnionType) type).getMemberTypes()) {
if (memberTypes.tag == TypeTags.ANYDATA) {
return true;
}
}
return false;
}
BType mergeTypes(BType typeFirst, BType typeSecond) {
if (containsAnyType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (containsAnyDataType(typeFirst) && !containsErrorType(typeSecond)) {
return typeSecond;
}
if (containsAnyDataType(typeSecond) && !containsErrorType(typeFirst)) {
return typeFirst;
}
if (isSameBasicType(typeFirst, typeSecond)) {
return typeFirst;
}
return BUnionType.create(null, typeFirst, typeSecond);
}
public boolean isSubTypeOfMapping(BType type) {
if (type.tag != TypeTags.UNION) {
return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(this::isSubTypeOfMapping);
}
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
if (type.tag != TypeTags.UNION) {
return type.tag == baseTypeTag;
}
if (TypeTags.isXMLTypeTag(baseTypeTag)) {
return true;
}
return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);
}
/**
* Checks whether source type is assignable to the target type.
* <p>
* Source type is assignable to the target type if,
* 1) the target type is any and the source type is not a value type.
* 2) there exists an implicit cast symbol from source to target.
* 3) both types are JSON and the target constraint is no type.
* 4) both types are array type and both array types are assignable.
* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
*
* @param source type.
* @param target type.
* @return true if source type is assignable to the target type.
*/
public boolean isAssignable(BType source, BType target) {
return isAssignable(source, target, new HashSet<>());
}
boolean isStampingAllowed(BType source, BType target) {
return (isAssignable(source, target) || isAssignable(target, source) ||
checkTypeEquivalencyForStamping(source, target) || checkTypeEquivalencyForStamping(target, source));
}
private boolean checkTypeEquivalencyForStamping(BType source, BType target) {
if (target.tag == TypeTags.RECORD) {
if (source.tag == TypeTags.RECORD) {
TypePair pair = new TypePair(source, target);
Set<TypePair> unresolvedTypes = new HashSet<>();
unresolvedTypes.add(pair);
return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);
} else if (source.tag == TypeTags.MAP) {
int mapConstraintTypeTag = ((BMapType) source).constraint.tag;
if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&
((BRecordType) target).sealed) {
for (BField field : ((BStructureType) target).getFields().values()) {
if (field.getType().tag != mapConstraintTypeTag) {
return false;
}
}
}
return true;
}
} else if (target.tag == TypeTags.JSON) {
return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;
} else if (target.tag == TypeTags.MAP) {
if (source.tag == TypeTags.MAP) {
return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());
} else if (source.tag == TypeTags.UNION) {
return checkUnionEquivalencyForStamping(source, target);
}
} else if (target.tag == TypeTags.ARRAY) {
if (source.tag == TypeTags.JSON) {
return true;
} else if (source.tag == TypeTags.TUPLE) {
BType arrayElementType = ((BArrayType) target).eType;
for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {
if (!isStampingAllowed(tupleMemberType, arrayElementType)) {
return false;
}
}
return true;
} else if (source.tag == TypeTags.ARRAY) {
return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);
}
} else if (target.tag == TypeTags.UNION) {
return checkUnionEquivalencyForStamping(source, target);
} else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {
return checkTupleEquivalencyForStamping(source, target);
}
return false;
}
private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,
Set<TypePair> unresolvedTypes) {
if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {
return false;
}
if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {
return false;
}
if (lhsType.fields.size() > rhsType.fields.size()) {
return false;
}
if (lhsType.sealed && !rhsType.sealed) {
return false;
}
return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);
}
private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,
Set<TypePair> unresolvedTypes) {
for (BField lhsField : lhsType.fields.values()) {
BField rhsField = rhsType.fields.get(lhsField.name.value);
if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {
return false;
}
}
for (BField rhsField : rhsType.fields.values()) {
BField lhsField = lhsType.fields.get(rhsField.name.value);
if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {
return false;
}
}
return true;
}
private boolean checkUnionEquivalencyForStamping(BType source, BType target) {
Set<BType> sourceTypes = new LinkedHashSet<>();
Set<BType> targetTypes = new LinkedHashSet<>();
if (source.tag == TypeTags.UNION) {
BUnionType sourceUnionType = (BUnionType) source;
sourceTypes.addAll(sourceUnionType.getMemberTypes());
} else {
sourceTypes.add(source);
}
if (target.tag == TypeTags.UNION) {
BUnionType targetUnionType = (BUnionType) target;
targetTypes.addAll(targetUnionType.getMemberTypes());
} else {
targetTypes.add(target);
}
boolean notAssignable = sourceTypes
.stream()
.map(s -> targetTypes
.stream()
.anyMatch(t -> isStampingAllowed(s, t)))
.anyMatch(assignable -> !assignable);
return !notAssignable;
}
private boolean checkTupleEquivalencyForStamping(BType source, BType target) {
if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
return false;
}
BTupleType lhsTupleType = (BTupleType) target;
BTupleType rhsTupleType = (BTupleType) source;
if (lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
return false;
}
for (int i = 0; i < lhsTupleType.tupleTypes.size(); i++) {
if (!isStampingAllowed(rhsTupleType.tupleTypes.get(i), lhsTupleType.tupleTypes.get(i))) {
return false;
}
}
return true;
}
/**
 * Core assignability check: returns true when a value of {@code source} type
 * can be used where {@code target} type is expected. {@code unresolvedTypes}
 * records type pairs already under analysis so recursive types terminate.
 *
 * The branches below are order-sensitive: identity, readonly compatibility,
 * intersection unwrapping, built-in widenings, then structured-type rules.
 */
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (isSameType(source, target)) {
        return true;
    }

    int sourceTag = source.tag;
    int targetTag = target.tag;

    // A readonly, not-inherently-immutable target cannot accept a mutable
    // source (parameterized sources are exempt from this check).
    if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
            !isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
            !isInherentlyImmutableType(source) && !Symbols.isFlagOn(source.flags, Flags.READONLY)) {
        return false;
    }

    // Intersection types are compared via their effective types.
    if (sourceTag == TypeTags.INTERSECTION) {
        return isAssignable(((BIntersectionType) source).effectiveType,
                targetTag != TypeTags.INTERSECTION ? target :
                        ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }

    if (targetTag == TypeTags.INTERSECTION) {
        return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }

    if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
        return isParameterizedTypeAssignable(source, target, unresolvedTypes);
    }

    // byte is a subtype of int.
    if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
        return true;
    }

    // xml subtyping, including xml -> string for text-only sequences.
    if (TypeTags.isXMLTypeTag(sourceTag) && (TypeTags.isXMLTypeTag(targetTag) || targetTag == TypeTags.STRING)) {
        return isXMLTypeAssignable(source, target, unresolvedTypes);
    }

    // string:Char / string / xml:Text widenings.
    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
        return true;
    }

    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) {
        return true;
    }

    if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) {
        return true;
    }

    // error has its own subtyping rules; error -> any is explicitly rejected.
    if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
        return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
    } else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
        return false;
    }

    // nil flows into any nullable type and into json.
    if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
        return true;
    }

    // any accepts any error-free, non-value source.
    if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
        return true;
    }

    // anydata accepts any error-free anydata-compatible source.
    if (targetTag == TypeTags.ANYDATA && !containsErrorType(source)) {
        if (isAnydata(source)) {
            return true;
        }
    }

    if (targetTag == TypeTags.READONLY &&
            (isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
        return true;
    }

    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) source;
        return isAssignableRecordType(recordType, target, unresolvedTypes);
    }

    if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
        return isAssignableMapType((BMapType) source, (BRecordType) target);
    }

    // typedesc/table/stream are covariant in their constraints.
    if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
        return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
                unresolvedTypes);
    }

    if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
        return isAssignableTableType((BTableType) source, (BTableType) target, unresolvedTypes);
    }

    if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
        return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);
    }

    // Built-in widenings such as int -> float.
    if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
        return true;
    }

    // A finite type is assignable when its value space fits the target.
    if (sourceTag == TypeTags.FINITE) {
        return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
    }

    if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
            isAssignableToUnionType(source, target, unresolvedTypes)) {
        return true;
    }

    // json accepts json, plus arrays/maps/records with json-compatible members.
    if (targetTag == TypeTags.JSON) {
        if (sourceTag == TypeTags.JSON) {
            return true;
        }

        if (sourceTag == TypeTags.ARRAY) {
            return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
        }

        if (sourceTag == TypeTags.MAP) {
            return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
        }

        if (sourceTag == TypeTags.RECORD) {
            return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
        }
    }

    // An unconstrained future target accepts any future.
    if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
        if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
            return true;
        }
        return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
    }

    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
        // map<any> accepts any map whose constraint is not a union.
        if (((BMapType) target).constraint.tag == TypeTags.ANY &&
                ((BMapType) source).constraint.tag != TypeTags.UNION) {
            return true;
        }
        return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
    }

    // Structural subtyping between objects/records.
    if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
            && (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
        return checkStructEquivalency(source, target, unresolvedTypes);
    }

    // tuple <-> array and tuple <-> tuple compatibility.
    if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
        return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
    }

    if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
        return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
    }

    if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
        return isTupleTypeAssignable(source, target, unresolvedTypes);
    }

    if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
        return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
    }

    return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
            isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
/**
 * Assignability for a parameterized source type: the source is resolved to its
 * concrete type first; a parameterized target must additionally refer to the
 * same parameter index.
 */
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    BType resolvedSourceType = typeBuilder.build(source);

    if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
        return isAssignable(resolvedSourceType, target, unresolvedTypes);
    }

    BParameterizedType sourceParamType = (BParameterizedType) source;
    BParameterizedType targetParamType = (BParameterizedType) target;
    if (sourceParamType.paramIndex != targetParamType.paramIndex) {
        return false;
    }

    return isAssignable(resolvedSourceType, typeBuilder.build(target), unresolvedTypes);
}
/**
 * Checks whether a record is assignable to a map or json target by comparing
 * each field against the target's member type.
 *
 * @throws IllegalArgumentException for any other target type
 */
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
    // Break cycles: a pair already under analysis is assumed assignable.
    TypePair pair = new TypePair(recordType, type);
    if (!unresolvedTypes.add(pair)) {
        return true;
    }

    final BType targetType;
    if (type.tag == TypeTags.MAP) {
        targetType = ((BMapType) type).constraint;
    } else if (type.tag == TypeTags.JSON) {
        targetType = type;
    } else {
        throw new IllegalArgumentException("Incompatible target type: " + type.toString());
    }
    return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
/**
 * Checks that every declared field of the record — and, for an open record,
 * its rest field type — is assignable to {@code targetType}.
 */
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
                                             Set<TypePair> unresolvedTypes) {
    boolean allFieldsAssignable = recordType.fields.values().stream()
            .allMatch(field -> isAssignable(field.type, targetType, unresolvedTypes));
    if (!allFieldsAssignable) {
        return false;
    }

    // Sealed records have no rest field to check.
    return recordType.sealed || isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
/**
 * Table-to-table assignability: the row (constraint) types must be assignable
 * and the target's key specification (key type constraint or key field name
 * list), if any, must be satisfied by the source.
 */
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType,
                                      Set<TypePair> unresolvedTypes) {
    if (!isAssignable(sourceTableType.constraint, targetTableType.constraint, unresolvedTypes)) {
        return false;
    }

    // Target imposes no key requirements.
    if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
        return true;
    }

    if (targetTableType.keyTypeConstraint != null) {
        // Either the source's own key constraint is assignable...
        if (sourceTableType.keyTypeConstraint != null &&
                (isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint,
                        unresolvedTypes))) {
            return true;
        }

        if (sourceTableType.fieldNameList == null) {
            return false;
        }

        // ...or the types of the source's key fields (as a single type, or a
        // tuple when there are several) satisfy the target key constraint.
        List<BType> fieldTypes = new ArrayList<>();
        sourceTableType.fieldNameList.forEach(field -> fieldTypes
                .add(getTableConstraintField(sourceTableType.constraint, field).type));
        if (fieldTypes.size() == 1) {
            return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint, unresolvedTypes);
        }

        BTupleType tupleType = new BTupleType(fieldTypes);
        return isAssignable(tupleType, targetTableType.keyTypeConstraint, unresolvedTypes);
    }

    // Otherwise both tables must declare exactly the same key field names.
    return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
/**
 * Resolves the field named {@code fieldName} on a table's row (constraint)
 * type. For a union constraint the field must exist on every member with
 * mutually assignable types; for an intersection the effective type is used.
 *
 * @return the resolved field, or null when it cannot be uniquely resolved
 */
BField getTableConstraintField(BType constraintType, String fieldName) {
    switch (constraintType.tag) {
        case TypeTags.RECORD:
            Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
            return fieldList.get(fieldName);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) constraintType;
            Set<BType> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());

            // The field must be present on every union member...
            if (fields.size() != memTypes.size()) {
                return null;
            }

            // ...and all occurrences must be mutually assignable (same type).
            if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
                    isAssignable(fields.get(0).type, field.type))) {
                return fields.get(0);
            }
            break;
        case TypeTags.INTERSECTION:
            return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
    }
    return null;
}
/**
 * Checks whether a map type is assignable to a record target: the record must
 * be open, every field optional with compatible readonly-ness, and the map
 * constraint assignable to each field type and the rest field type.
 */
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
    // A map can only flow into an open record.
    if (targetRecType.sealed) {
        return false;
    }

    for (BField field : targetRecType.fields.values()) {
        long fieldFlags = field.symbol.flags;
        // Every record field must be optional, since the map may omit any key.
        if (!Symbols.isFlagOn(fieldFlags, Flags.OPTIONAL)) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(fieldFlags, sourceMapType.flags)) {
            return false;
        }
        if (!isAssignable(sourceMapType.constraint, field.type)) {
            return false;
        }
    }

    return isAssignable(sourceMapType.constraint, targetRecType.restFieldType);
}
/**
 * Incompatible when the target demands readonly but the source does not
 * provide it.
 */
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
    boolean targetReadOnly = Symbols.isFlagOn(targetFlags, Flags.READONLY);
    boolean sourceReadOnly = Symbols.isFlagOn(sourceFlags, Flags.READONLY);
    return targetReadOnly && !sourceReadOnly;
}
/**
 * error-to-error assignability: the top error type accepts anything;
 * otherwise the detail types must be assignable and the target's type-id set
 * must accept the source's.
 */
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
    if (target == symTable.errorType) {
        return true;
    }

    // Break cycles: an in-progress pair is assumed assignable. add() returns
    // false when the pair is already being analyzed.
    TypePair pair = new TypePair(source, target);
    if (!unresolvedTypes.add(pair)) {
        return true;
    }

    return isAssignable(source.detailType, target.detailType, unresolvedTypes)
            && target.typeIdSet.isAssignableFrom(source.typeIdSet);
}
/**
 * xml-specific assignability, covering constrained xml sequences (xml<T>),
 * the singleton xml kinds (element/comment/pi/text), and the xml -> string
 * case for empty/text-only sequences.
 */
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.tag;
    int targetTag = targetType.tag;

    if (targetTag == TypeTags.XML) {
        BXMLType target = (BXMLType) targetType;
        if (target.constraint != null) {
            // A singleton xml kind is checked directly against the constraint.
            if (TypeTags.isXMLNonSequenceType(sourceTag)) {
                return isAssignable(sourceType, target.constraint, unresolvedTypes);
            }
            BXMLType source = (BXMLType) sourceType;
            // xml<never> (empty sequence) matches a same-tag target, otherwise
            // is compared against the target constraint as a whole.
            if (source.constraint.tag == TypeTags.NEVER) {
                if (sourceTag == targetTag) {
                    return true;
                }
                return isAssignable(source, target.constraint, unresolvedTypes);
            }
            return isAssignable(source.constraint, target.constraint, unresolvedTypes);
        }
        // Unconstrained xml target accepts any xml source.
        return true;
    }

    if (sourceTag == TypeTags.XML) {
        BXMLType source = (BXMLType) sourceType;
        if (targetTag == TypeTags.XML_TEXT) {
            // Only the empty sequence (xml<never>) is a subtype of xml:Text.
            if (source.constraint != null) {
                return source.constraint.tag == TypeTags.NEVER;
            }
            return false;
        }
        if (targetTag == TypeTags.STRING) {
            // assumes source.constraint is non-null on this path — TODO confirm
            if (source.constraint.tag == TypeTags.NEVER) {
                return true;
            }
            return isAssignable(source.constraint, targetType, unresolvedTypes);
        }
    } else if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) {
        return true;
    }

    // Remaining singleton xml kinds match only identical tags.
    return sourceTag == targetTag;
}
/**
 * Checks whether an xml expression of {@code source} type can be cast to a
 * string-compatible {@code target} (string itself, or a union/finite type
 * assignable to string).
 */
public boolean isXMLExprCastableToString(BType source, BType target) {
    // Direct string target: depends only on the xml source shape.
    if (target.tag == TypeTags.STRING) {
        return isXMLSourceCastableToString(source);
    }
    // A union/finite target works when it is itself string-assignable.
    if (target.tag == TypeTags.UNION || target.tag == TypeTags.FINITE) {
        return isAssignable(target, symTable.stringType) && isXMLSourceCastableToString(source);
    }
    return false;
}
/**
 * Checks whether a value of the given xml-related source type can be cast to
 * string: xml:Text always; xml<...> when its innermost constraint is never or
 * text; a union when every member is an xml subtype or string and the union
 * itself is string-assignable.
 */
public boolean isXMLSourceCastableToString(BType source) {
    switch (source.tag) {
        case TypeTags.XML_TEXT:
            return true;
        case TypeTags.XML:
            // Unwrap nested xml<...> constraints to the innermost constraint.
            BXMLType xmlType = (BXMLType) source;
            while (xmlType.constraint.tag == TypeTags.XML) {
                xmlType = (BXMLType) xmlType.constraint;
            }
            int innerTag = xmlType.constraint.tag;
            return innerTag == TypeTags.NEVER || innerTag == TypeTags.XML_TEXT;
        case TypeTags.UNION:
            for (BType member : ((BUnionType) source).getMemberTypes()) {
                if (!TypeTags.isXMLTypeTag(member.tag) && member.tag != TypeTags.STRING) {
                    return false;
                }
            }
            return isAssignable(source, symTable.stringType);
        default:
            return false;
    }
}
/**
 * Tuple-to-tuple assignability: arity and rest-type compatibility first, then
 * each source member against the matching target member (or the target rest
 * type when the source is longer).
 */
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }

    BTupleType targetTuple = (BTupleType) target;
    BTupleType sourceTuple = (BTupleType) source;

    // A source rest element needs a target rest element to land in.
    if (targetTuple.restType == null && sourceTuple.restType != null) {
        return false;
    }

    // Without a target rest type, arities must match exactly.
    if (targetTuple.restType == null && targetTuple.tupleTypes.size() != sourceTuple.tupleTypes.size()) {
        return false;
    }

    // When both sides have rest types, they must themselves be assignable.
    if (targetTuple.restType != null && sourceTuple.restType != null
            && !isAssignable(sourceTuple.restType, targetTuple.restType, unresolvedTypes)) {
        return false;
    }

    if (targetTuple.tupleTypes.size() > sourceTuple.tupleTypes.size()) {
        return false;
    }

    for (int i = 0; i < sourceTuple.tupleTypes.size(); i++) {
        BType targetMember = (targetTuple.tupleTypes.size() > i)
                ? targetTuple.tupleTypes.get(i) : targetTuple.restType;
        if (!isAssignable(sourceTuple.tupleTypes.get(i), targetMember, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Tuple-to-array assignability: a fixed-length target needs an exact member
 * count and no rest element; every member (and the rest type, if any) must be
 * assignable to the array's element type.
 */
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (target.state != BArrayState.OPEN
            && (source.restType != null || source.tupleTypes.size() != target.size)) {
        return false;
    }

    for (BType memberType : source.tupleTypes) {
        if (!isAssignable(memberType, target.eType, unresolvedTypes)) {
            return false;
        }
    }
    return source.restType == null || isAssignable(source.restType, target.eType, unresolvedTypes);
}
/**
 * Array-to-tuple assignability: when the target has fixed members the source
 * must be sealed with a compatible length; the array's element type must then
 * fit every target member and the target rest type (if any).
 */
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (!target.tupleTypes.isEmpty()) {
        // Length checks only apply when the target has fixed members.
        if (source.state == BArrayState.OPEN) {
            return false;
        }
        if (target.restType != null && target.tupleTypes.size() > source.size) {
            return false;
        }
        if (target.restType == null && target.tupleTypes.size() != source.size) {
            return false;
        }
    }

    for (BType memberType : target.tupleTypes) {
        if (!isAssignable(source.eType, memberType, unresolvedTypes)) {
            return false;
        }
    }
    return target.restType == null || isAssignable(source.eType, target.restType, unresolvedTypes);
}
/**
 * Array assignability against an array, json, or anydata target.
 *
 * Array targets require matching sealed lengths (open targets accept any
 * length) plus assignable element types; json/anydata accept the array iff
 * its element type is assignable to them.
 */
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
    BType sourceElementType = source.getElementType();
    if (target.tag == TypeTags.ARRAY) {
        BArrayType targetArrayType = (BArrayType) target;
        // Sealed targets must additionally agree on length.
        if (targetArrayType.state != BArrayState.OPEN && targetArrayType.size != source.size) {
            return false;
        }
        return isAssignable(sourceElementType, targetArrayType.getElementType(), unresolvedTypes);
    }
    // json and anydata share the same rule; the previously duplicated
    // branches are merged here.
    if (target.tag == TypeTags.JSON || target.tag == TypeTags.ANYDATA) {
        return isAssignable(sourceElementType, target, unresolvedTypes);
    }
    return false;
}
/**
 * Function-type assignability. Targets containing type params (lang-lib style
 * functions) get special parameter matching; otherwise parameter
 * contravariance / return covariance is delegated to checkFunctionTypeEquality
 * with a flipped predicate.
 */
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
                                         Set<TypePair> unresolvedTypes) {
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }

    if (containsTypeParams(target)) {
        if (source.paramTypes.size() != target.paramTypes.size()) {
            return false;
        }

        for (int i = 0; i < source.paramTypes.size(); i++) {
            BType sourceParam = source.paramTypes.get(i);
            BType targetParam = target.paramTypes.get(i);
            boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);

            if (isTypeParam) {
                // Type-param positions are checked covariantly (source -> target)...
                if (!isAssignable(sourceParam, targetParam)) {
                    return false;
                }
            } else {
                // ...ordinary parameters contravariantly (target -> source).
                if (!isAssignable(targetParam, sourceParam)) {
                    return false;
                }
            }
        }

        if (source.retType == null && target.retType == null) {
            return true;
        } else if (source.retType == null || target.retType == null) {
            return false;
        }

        // Return types are covariant.
        return isAssignable(source.retType, target.retType, unresolvedTypes);
    }

    return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
/**
 * Returns true for types whose values are always immutable: all value types,
 * a fixed set of reference types, and xml<never> (the empty xml sequence).
 */
public boolean isInherentlyImmutableType(BType type) {
    if (isValueType(type)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.XML_TEXT:
        case TypeTags.FINITE:
        case TypeTags.READONLY:
        case TypeTags.NIL:
        case TypeTags.ERROR:
        case TypeTags.INVOKABLE:
        case TypeTags.TYPEDESC:
        case TypeTags.HANDLE:
            return true;
        case TypeTags.XML:
            // xml<never> can never contain members, so it is immutable.
            return ((BXMLType) type).constraint.tag == TypeTags.NEVER;
    }
    return false;
}
/** Checks selective immutability with a fresh visited-type set and no forced re-check. */
boolean isSelectivelyImmutableType(BType type) {
    return isSelectivelyImmutableType(type, new HashSet<>(), false);
}
/** Checks selective immutability with a fresh visited-type set; {@code forceCheck} bypasses the cached immutable type. */
boolean isSelectivelyImmutableType(BType type, boolean forceCheck) {
    return isSelectivelyImmutableType(type, new HashSet<>(), forceCheck);
}
/** Checks selective immutability with a caller-supplied visited-type set (no forced re-check). */
public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
    return isSelectivelyImmutableType(type, unresolvedTypes, false);
}
/** Delegates to the full overload with readonly objects allowed. */
private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
    return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
/**
 * Checks whether {@code type} is selectively immutable, i.e. has a readonly
 * (immutable) counterpart. {@code unresolvedTypes} breaks cycles through
 * recursive types; {@code forceCheck} recomputes even when an immutable
 * counterpart is already cached on the type.
 *
 * NOTE(review): {@code disallowReadOnlyObjects} is not read anywhere in this
 * body — confirm whether it is still needed.
 */
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
                                           boolean forceCheck) {
    // Inherently immutable types are not *selectively* immutable, and only
    // SelectivelyImmutableReferenceType implementors can qualify at all.
    if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
        return false;
    }

    // Already under analysis: assume true so recursive types terminate.
    if (!unresolvedTypes.add(type)) {
        return true;
    }

    // A cached immutable counterpart answers the question unless forced.
    if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
        return true;
    }

    switch (type.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_PI:
            return true;
        case TypeTags.ARRAY:
            // An array qualifies when its element type has an immutable form.
            BType elementType = ((BArrayType) type).eType;
            return isInherentlyImmutableType(elementType) ||
                    isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
        case TypeTags.TUPLE:
            // All members (and the rest type, if any) must have immutable forms.
            BTupleType tupleType = (BTupleType) type;
            for (BType tupMemType : tupleType.tupleTypes) {
                if (!isInherentlyImmutableType(tupMemType) &&
                        !isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }

            BType tupRestType = tupleType.restType;
            if (tupRestType == null) {
                return true;
            }

            return isInherentlyImmutableType(tupRestType) ||
                    isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
        case TypeTags.RECORD:
            // All fields and the rest field type must have immutable forms.
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }

            BType recordRestType = recordType.restFieldType;
            if (recordRestType == null || recordRestType == symTable.noType) {
                return true;
            }

            return isInherentlyImmutableType(recordRestType) ||
                    isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
        case TypeTags.MAP:
            BType constraintType = ((BMapType) type).constraint;
            return isInherentlyImmutableType(constraintType) ||
                    isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
        case TypeTags.OBJECT:
            // Every object field must have an immutable form.
            BObjectType objectType = (BObjectType) type;
            for (BField field : objectType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            return true;
        case TypeTags.TABLE:
            BType tableConstraintType = ((BTableType) type).constraint;
            return isInherentlyImmutableType(tableConstraintType) ||
                    isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
        case TypeTags.UNION:
            // A union qualifies when at least one member has an immutable form.
            boolean readonlyIntersectionExists = false;
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (isInherentlyImmutableType(memberType) ||
                        isSelectivelyImmutableType(memberType, unresolvedTypes, forceCheck)) {
                    readonlyIntersectionExists = true;
                }
            }
            return readonlyIntersectionExists;
        case TypeTags.INTERSECTION:
            return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, unresolvedTypes,
                    forceCheck);
    }
    return false;
}
/**
 * Returns true when the function type references a type parameter anywhere in
 * its parameters or return type, descending into function-typed members.
 */
private boolean containsTypeParams(BInvokableType type) {
    // Parameters first, recursing into function-typed parameters.
    for (BType paramType : type.paramTypes) {
        if (paramType.tag == TypeTags.FUNCTION_POINTER) {
            if (containsTypeParams((BInvokableType) paramType)) {
                return true;
            }
        } else if (TypeParamAnalyzer.isTypeParam(paramType)) {
            return true;
        }
    }

    // Then the return type, again recursing into nested function types.
    if (type.retType.tag == TypeTags.FUNCTION_POINTER) {
        return containsTypeParams((BInvokableType) type.retType);
    }
    return TypeParamAnalyzer.isTypeParam(type.retType);
}
/** Checks whether two function types are the same, comparing members with isSameType. */
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
    return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}
/**
 * Compares two function types member-wise using the supplied {@code equality}
 * predicate for parameter and rest types.
 *
 * NOTE(review): return types are compared with isAssignable (covariantly)
 * rather than with the supplied predicate — confirm this asymmetry is
 * intentional for the isSameFunctionType caller as well.
 */
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
                                          Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }

    if (source.paramTypes.size() != target.paramTypes.size()) {
        return false;
    }

    for (int i = 0; i < source.paramTypes.size(); i++) {
        if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
            return false;
        }
    }

    // Rest types must both be present or both absent, and must match.
    if ((source.restType != null && target.restType == null) ||
            target.restType != null && source.restType == null) {
        return false;
    } else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
        return false;
    }

    if (source.retType == null && target.retType == null) {
        return true;
    } else if (source.retType == null || target.retType == null) {
        return false;
    }

    return isAssignable(source.retType, target.retType, unresolvedTypes);
}
/** A target that demands isolation cannot accept a non-isolated source. */
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
    boolean targetIsolated = Symbols.isFlagOn(target.flags, Flags.ISOLATED);
    boolean sourceIsolated = Symbols.isFlagOn(source.flags, Flags.ISOLATED);
    return targetIsolated && !sourceIsolated;
}
/** A transactional source cannot flow into a non-transactional target. */
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
    boolean sourceTransactional = Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL);
    boolean targetTransactional = Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
    return sourceTransactional && !targetTransactional;
}
/**
 * Array sameness: element types must be the same; an open array matches only
 * another open array, while sealed arrays must additionally agree on length.
 */
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.ARRAY || target.tag != TypeTags.ARRAY) {
        return false;
    }

    BArrayType targetArray = (BArrayType) target;
    BArrayType sourceArray = (BArrayType) source;
    boolean sameElementTypes = isSameType(targetArray.eType, sourceArray.eType, unresolvedTypes);

    if (targetArray.state == BArrayState.OPEN) {
        return sourceArray.state == BArrayState.OPEN && sameElementTypes;
    }
    return sameElementTypes && checkSealedArraySizeEquality(sourceArray, targetArray);
}
/** Two sealed arrays agree exactly when their declared lengths are equal. */
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
    return rhsArrayType.size == lhsArrayType.size;
}
/** Entry point for structural (object/record) equivalence with a fresh cycle-tracking set. */
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
    return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}
/**
 * Dispatches structural equivalence to the object or record check; only
 * like-kinded pairs can be equivalent.
 */
private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
    // Assume equivalence for a pair already under analysis to terminate
    // cycles; add() returns false for an already-present pair.
    TypePair pair = new TypePair(rhsType, lhsType);
    if (!unresolvedTypes.add(pair)) {
        return true;
    }

    if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
        return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
    }

    if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
        return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
    }

    return false;
}
/**
 * Structural subtyping between object types: rhs must expose at least the
 * fields/methods of lhs, lhs may have no private members, visibility regions
 * must match, field/method types must be assignable, and the lhs type-id set
 * must accept the rhs's.
 */
public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
    // An isolated target cannot be satisfied by a non-isolated source.
    if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
        return false;
    }

    BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
    BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
    List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
    List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
    int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
    int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);

    // A service target requires a service source.
    boolean isLhsAService = Symbols.isService(lhsStructSymbol);
    if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
        return false;
    }

    // The source must be at least as wide as the target.
    if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
        return false;
    }

    // Private members make an object type non-substitutable.
    for (BField bField : lhsType.fields.values()) {
        if (Symbols.isPrivate(bField.symbol)) {
            return false;
        }
    }

    for (BAttachedFunction func : lhsFuncs) {
        if (Symbols.isPrivate(func.symbol)) {
            return false;
        }
    }

    // Each target field must exist on the source with matching visibility and
    // an assignable type.
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = rhsType.fields.get(lhsField.name.value);
        if (rhsField == null ||
                !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
                !isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }

    for (BAttachedFunction lhsFunc : lhsFuncs) {
        // The initializer is not part of the substitutability contract.
        if (lhsFunc == lhsStructSymbol.initializerFunc) {
            continue;
        }

        // Resource methods of a service target are not matched here.
        if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
            continue;
        }

        BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
        if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
            return false;
        }
        // remote-ness must agree between matched methods.
        if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
            return false;
        }
    }

    return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
}
/**
 * Counts an object's attached functions, excluding the initializer when it is
 * present in the attached-function list.
 */
private int getObjectFuncCount(BObjectTypeSymbol sym) {
    int count = sym.attachedFuncs.size();
    // The initializer is not part of the object's public method surface.
    if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
        count--;
    }
    return count;
}
/**
 * Record-to-record structural equivalence: a sealed source only needs matching
 * fields; an open source additionally requires an open target with an
 * assignable rest field type.
 */
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
    if (rhsType.sealed) {
        return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
    }

    // An open source can never satisfy a sealed target, and its rest field
    // type must be assignable to the target's rest field type.
    return !lhsType.sealed
            && isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)
            && checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
/**
 * Infers and sets the typed binding pattern types (varType, resultType,
 * nillableResultType) on a foreach node from the type of the iterated
 * collection. Unsupported collection types produce a diagnostic and
 * semantic-error types.
 */
public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
    BType collectionType = foreachNode.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) collectionType;
            varType = arrayType.eType;
            break;
        case TypeTags.TUPLE:
            // Tuple iteration yields the union of member types (plus rest type).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            // Unwrap nested xml<...> constraints to the innermost constraint.
            BType constraint = ((BXMLType) collectionType).constraint;
            while (constraint.tag == TypeTags.XML) {
                collectionType = constraint;
                constraint = ((BXMLType) collectionType).constraint;
            }
            switch (constraint.tag) {
                case TypeTags.XML_ELEMENT:
                    varType = symTable.xmlElementType;
                    break;
                case TypeTags.XML_COMMENT:
                    varType = symTable.xmlCommentType;
                    break;
                case TypeTags.XML_TEXT:
                    varType = symTable.xmlTextType;
                    break;
                case TypeTags.XML_PI:
                    varType = symTable.xmlPIType;
                    break;
                default:
                    // Union constraint: map the members to their symbol-table
                    // types; a full 4-member union collapses to plain xml.
                    Set<BType> collectionTypes = getEffectiveMemberTypes((BUnionType) constraint);
                    Set<BType> builtinXMLConstraintTypes = getEffectiveMemberTypes
                            ((BUnionType) ((BXMLType) symTable.xmlType).constraint);
                    if (collectionTypes.size() == 4 && builtinXMLConstraintTypes.equals(collectionTypes)) {
                        varType = symTable.xmlType;
                    } else {
                        LinkedHashSet<BType> collectionTypesInSymTable = new LinkedHashSet<>();
                        for (BType subType : collectionTypes) {
                            switch (subType.tag) {
                                case TypeTags.XML_ELEMENT:
                                    collectionTypesInSymTable.add(symTable.xmlElementType);
                                    break;
                                case TypeTags.XML_COMMENT:
                                    collectionTypesInSymTable.add(symTable.xmlCommentType);
                                    break;
                                case TypeTags.XML_TEXT:
                                    collectionTypesInSymTable.add(symTable.xmlTextType);
                                    break;
                                case TypeTags.XML_PI:
                                    collectionTypesInSymTable.add(symTable.xmlPIType);
                                    break;
                            }
                        }
                        varType = BUnionType.create(null, collectionTypesInSymTable);
                    }
            }
            break;
        case TypeTags.XML_TEXT:
            varType = symTable.xmlTextType;
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            // A stream carrying an error component cannot be iterated safely.
            if (streamType.error != null) {
                BType actualType = BUnionType.create(null, varType, streamType.error);
                dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        varType, actualType);
            }
            break;
        case TypeTags.OBJECT:
            // An iterable object supplies its own next() return shape.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                foreachNode.resultType = getRecordType(nextMethodReturnType);
                BType valueType = (foreachNode.resultType != null)
                        ? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
                BType errorType = getErrorType(nextMethodReturnType);
                if (errorType != null) {
                    BType actualType = BUnionType.create(null, valueType, errorType);
                    dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                            valueType, actualType);
                }
                foreachNode.nillableResultType = nextMethodReturnType;
                foreachNode.varType = valueType;
                return;
            }
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // fall through: a non-iterable object is treated as a semantic error
        case TypeTags.SEMANTIC_ERROR:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            return;
        default:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }

    // For built-in iterables, resolve the lang-lib iterator() and derive the
    // record/nillable result types from its next() return type.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    foreachNode.varType = varType;
    foreachNode.resultType = getRecordType(nextMethodReturnType);
    foreachNode.nillableResultType = nextMethodReturnType;
}
/**
 * Infers and sets the typed binding pattern types (varType, resultType,
 * nillableResultType) on a query input clause (from clause) based on the
 * iterated collection's type. Mirrors setForeachTypedBindingPatternType.
 */
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
    if (bLangInputClause.collection == null) {
        // A parsing error may leave the collection absent; nothing to infer.
        return;
    }
    BType collectionType = bLangInputClause.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) collectionType;
            varType = arrayType.eType;
            break;
        case TypeTags.TUPLE:
            // Tuple iteration yields the union of member types (plus rest type).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            break;
        case TypeTags.OBJECT:
            // An iterable object supplies its own next() return shape.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                bLangInputClause.resultType = getRecordType(nextMethodReturnType);
                bLangInputClause.nillableResultType = nextMethodReturnType;
                bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
                return;
            }
            dlog.error(bLangInputClause.collection.pos,
                    DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // fall through: a non-iterable object is treated as a semantic error
        case TypeTags.SEMANTIC_ERROR:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            return;
        default:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }

    // For built-in iterables, resolve the lang-lib iterator() and derive the
    // record/nillable result types from its next() return type.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    bLangInputClause.varType = varType;
    bLangInputClause.resultType = getRecordType(nextMethodReturnType);
    bLangInputClause.nillableResultType = nextMethodReturnType;
}
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
    // Locate the object's user-defined `iterator()` method and derive the
    // iteration member type from its signature; null when no such method exists.
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC.equals(attachedFunc.funcName.value)) {
            return getVarTypeFromIteratorFunc(attachedFunc);
        }
    }
    return null;
}
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
    // A valid `iterator()` method takes no parameters; anything else cannot
    // satisfy the iterable-object protocol.
    if (candidateIteratorFunc.type.paramTypes.isEmpty()) {
        return getVarTypeFromIteratorFuncReturnType(candidateIteratorFunc.type.retType);
    }
    return null;
}
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
    // The iterator function must return an object; that object's `next`
    // method determines the member type produced by iteration.
    if (returnType.tag != TypeTags.OBJECT) {
        return null;
    }
    BObjectTypeSymbol iteratorObjSymbol = (BObjectTypeSymbol) returnType.tsymbol;
    for (BAttachedFunction attachedFunc : iteratorObjSymbol.attachedFuncs) {
        if (BLangCompilerConstants.NEXT_FUNC.equals(attachedFunc.funcName.value)) {
            return getVarTypeFromNextFunc(attachedFunc);
        }
    }
    return null;
}
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
    // `next()` must be parameterless and must return a union of the expected
    // shape: record {| T value; |} combined with nil and/or error.
    if (!nextFunc.type.paramTypes.isEmpty()) {
        return null;
    }
    BType nextRetType = nextFunc.type.retType;
    return checkNextFuncReturnType(nextRetType) ? (BUnionType) nextRetType : null;
}
private boolean checkNextFuncReturnType(BType returnType) {
    // Accepts only a union of the shape `record {| T value; |}` plus at least
    // one completion member (nil and/or error) and nothing else.
    if (returnType.tag != TypeTags.UNION) {
        return false;
    }
    List<BType> memberTypes = new ArrayList<>(((BUnionType) returnType).getMemberTypes());
    boolean hasCompletionType = memberTypes.removeIf(memberType -> memberType.tag == TypeTags.NIL);
    hasCompletionType = memberTypes.removeIf(memberType -> memberType.tag == TypeTags.ERROR) || hasCompletionType;
    if (!hasCompletionType || memberTypes.size() != 1 || memberTypes.get(0).tag != TypeTags.RECORD) {
        return false;
    }
    return checkRecordTypeInNextFuncReturnType((BRecordType) memberTypes.get(0));
}
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
    // Only a closed record with exactly one field, named `value`, qualifies.
    return recordType.sealed
            && recordType.fields.size() == 1
            && recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
private BRecordType getRecordType(BUnionType type) {
    // Picks the first record member out of a union (e.g. a `next()` return
    // union), or null when the union carries no record member.
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.RECORD) {
            return (BRecordType) memberType;
        }
    }
    return null;
}
public BErrorType getErrorType(BUnionType type) {
    // Depth-first search for an error member, descending into nested unions.
    for (BType memberType : type.getMemberTypes()) {
        if (memberType.tag == TypeTags.ERROR) {
            return (BErrorType) memberType;
        }
        if (memberType.tag == TypeTags.UNION) {
            BErrorType nested = getErrorType((BUnionType) memberType);
            if (nested != null) {
                return nested;
            }
        }
    }
    return null;
}
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
    // Return type of the iterator object's `next` method. The method is
    // required to exist; requireNonNull preserves the fail-fast contract.
    return Objects.requireNonNull(
            getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC)).type.retType;
}
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
    // Linear scan over the object's attached functions by simple name;
    // null when no function with the given name is attached.
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (funcName.equals(attachedFunc.funcName.value)) {
            return attachedFunc;
        }
    }
    return null;
}
// Infers the common member type for a record's fields (used when typing record
// iteration — see the RECORD case in the input-clause switch above): the result
// is the union of all field types (plus the rest-field type for open records),
// collapsed to a single type when one member subsumes the rest.
public BType inferRecordFieldType(BRecordType recordType) {
Map<String, BField> fields = recordType.fields;
BUnionType unionType = BUnionType.create(null);
if (!recordType.sealed) {
// Open record: values read via unknown fields carry the rest type too.
unionType.add(recordType.restFieldType);
}
for (BField field : fields.values()) {
if (isAssignable(field.type, unionType)) {
// Already covered by the accumulated union; nothing to add.
continue;
}
if (isAssignable(unionType, field.type)) {
// This field type subsumes everything collected so far; restart the
// union with just this type.
unionType = BUnionType.create(null);
}
unionType.add(field.type);
}
if (unionType.getMemberTypes().size() > 1) {
// More than one distinct member: materialise a proper union type symbol.
unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
Names.EMPTY, recordType.tsymbol.pkgID, null,
recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
return unionType;
}
// Exactly one member: return it directly instead of a one-member union.
return unionType.getMemberTypes().iterator().next();
}
/**
 * Enum to represent the outcome of a compile-time type test.
 *
 * @since 1.2.0
 */
enum TypeTestResult {
NOT_FOUND, // no static verdict; the caller must fall back to general rules
TRUE, // the test is statically known to succeed
FALSE // the test is statically known to fail
}
// Determines, at compile time, whether a value of `actualType` can be widened
// to `targetType` through a built-in (compiler-known) conversion. Returns
// TRUE/FALSE when the answer is statically known and NOT_FOUND when no
// built-in rule applies, in which case the caller falls back to general
// assignability rules.
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
int targetTag = targetType.tag;
int actualTag = actualType.tag;
// NOTE: relies on basic-type tags being ordered before TypeTags.JSON.
if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
// Both are basic types: certain cross-kind widenings are known to fail.
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
// Numeric values never widen to boolean or string.
if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.BOOLEAN:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
return TypeTestResult.FALSE;
}
break;
case TypeTags.STRING:
if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
|| targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
return TypeTestResult.FALSE;
}
break;
}
}
switch (actualTag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.BOOLEAN:
case TypeTags.STRING:
case TypeTags.SIGNED32_INT:
case TypeTags.SIGNED16_INT:
case TypeTags.SIGNED8_INT:
case TypeTags.UNSIGNED32_INT:
case TypeTags.UNSIGNED16_INT:
case TypeTags.UNSIGNED8_INT:
case TypeTags.CHAR_STRING:
// Any simple value widens into the inclusive built-in supertypes.
if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
targetTag == TypeTags.READONLY) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.ANYDATA:
case TypeTags.TYPEDESC:
if (targetTag == TypeTags.ANY) {
return TypeTestResult.TRUE;
}
break;
default:
}
// A subtype tested against itself yields no widening (exact match handled
// elsewhere); report FALSE for same integer-subtype tags.
if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
return TypeTestResult.FALSE;
}
// Integer subtype lattice (byte, Signed/Unsigned N) has its own rules.
if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
&& (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
}
// string:Char is a subtype of string.
if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
return TypeTestResult.TRUE;
}
return TypeTestResult.NOT_FOUND;
}
// Widening rules within the built-in integer subtype lattice
// (int, Signed32/16/8, Unsigned32/16/8, byte). Returns TRUE when `actualType`
// is a subtype of `targetType` per the lattice, NOT_FOUND otherwise (never
// FALSE — callers treat NOT_FOUND as "apply general rules").
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
int actualTag = actualType.tag;
switch (targetType.tag) {
case TypeTags.INT:
// Every integer subtype (and byte) widens to int.
if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED32_INT:
if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.SIGNED16_INT:
if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED32_INT:
if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED16_INT:
if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.BYTE:
// byte and Unsigned8 share the same value space.
if (actualTag == TypeTags.UNSIGNED8_INT) {
return TypeTestResult.TRUE;
}
break;
case TypeTags.UNSIGNED8_INT:
if (actualTag == TypeTags.BYTE) {
return TypeTestResult.TRUE;
}
break;
}
return TypeTestResult.NOT_FOUND;
}
// Whether `actualType` may be implicitly cast (widened) to `targetType`
// without an explicit conversion expression.
public boolean isImplicityCastable(BType actualType, BType targetType) {
/* The word Builtin refers to compiler-known types. */
BType newTargetType = targetType;
if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
// A value type going into a union/finite target is first boxed to any.
newTargetType = symTable.anyType;
} else if (targetType.tag == TypeTags.INTERSECTION) {
newTargetType = ((BIntersectionType) targetType).effectiveType;
}
TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
if (result != TypeTestResult.NOT_FOUND) {
// Built-in widening rule gave a definitive answer.
return result == TypeTestResult.TRUE;
}
if (isValueType(targetType) &&
(actualType.tag == TypeTags.FINITE ||
(actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
.anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
// A finite source (or a union containing an assignable finite member)
// may implicitly become one of the simple basic target types.
return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
|| targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
} else if (targetType.tag == TypeTags.ERROR
&& (actualType.tag == TypeTags.UNION
&& isAllErrorMembers((BUnionType) actualType))) {
// A union made up entirely of errors narrows to error implicitly.
return true;
} else if (targetType.tag == TypeTags.STRING) {
if (actualType.tag == TypeTags.XML) {
return isXMLTypeAssignable(actualType, targetType, new HashSet<>());
}
if (actualType.tag == TypeTags.UNION) {
return isAssignable(actualType, symTable.stringType);
}
// xml:Text converts to string implicitly.
return actualType.tag == TypeTags.XML_TEXT;
}
return false;
}
// Whether an explicit cast `<targetType> expr` from `sourceType` is legal.
// A cast is allowed when the types are assignable in either direction, when a
// numeric conversion applies, or when some member of a union/finite operand
// is assignable across the cast.
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType, SymbolEnv env) {
// A source that can be an error must not be cast to a type that can never
// hold an error.
if (getTypeIntersection(sourceType, symTable.errorType, env) != symTable.semanticError
&& getTypeIntersection(targetType, symTable.errorType, env) == symTable.semanticError) {
return false;
}
if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
sourceType == targetType) {
// Already-failed types pass through to avoid cascading diagnostics.
return true;
}
if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
// Up-cast or down-cast along the subtype relation.
return true;
}
if (isNumericConversionPossible(expr, sourceType, targetType)) {
return true;
}
boolean validTypeCast = false;
// A cast is also valid if any union member / finite value bridges the
// two types in either direction.
if (sourceType.tag == TypeTags.UNION) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType, null)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.UNION) {
if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType, null)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (sourceType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (targetType.tag == TypeTags.FINITE) {
if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
!= symTable.semanticError) {
validTypeCast = true;
}
}
if (validTypeCast) {
if (isValueType(sourceType)) {
// Box the simple value so the runtime cast sees a reference value.
setImplicitCastExpr(expr, sourceType, symTable.anyType);
}
return true;
}
return false;
}
// Whether a numeric conversion (int/float/decimal and their subtypes) can
// bridge `sourceType` to `targetType` in a cast.
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
BType targetType) {
final boolean isSourceNumericType = isBasicNumericType(sourceType);
final boolean isTargetNumericType = isBasicNumericType(targetType);
if (isSourceNumericType && isTargetNumericType) {
// Numeric-to-numeric conversion is always possible.
return true;
}
if (targetType.tag == TypeTags.UNION) {
// A union target with more than one distinct basic numeric kind is
// ambiguous — the conversion cannot pick a result type.
HashSet<Integer> typeTags = new HashSet<>();
for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
if (isBasicNumericType(bType)) {
typeTags.add(bType.tag);
if (typeTags.size() > 1) {
return false;
}
}
}
}
if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
return false;
}
if (isSourceNumericType) {
// Box the numeric source; the concrete conversion is resolved later.
setImplicitCastExpr(expr, sourceType, symTable.anyType);
return true;
}
// Non-numeric source: allowed when it could still hold a numeric value.
switch (sourceType.tag) {
case TypeTags.ANY:
case TypeTags.ANYDATA:
case TypeTags.JSON:
return true;
case TypeTags.UNION:
for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
if (isBasicNumericType(memType) ||
(memType.tag == TypeTags.FINITE &&
finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
return true;
}
}
break;
case TypeTags.FINITE:
if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
return true;
}
break;
}
return false;
}
private boolean isAllErrorMembers(BUnionType actualType) {
    // True only when every member of the union is assignable to `error`.
    for (BType memberType : actualType.getMemberTypes()) {
        if (!isAssignable(memberType, symTable.errorType)) {
            return false;
        }
    }
    return true;
}
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
    // Attach an implicit conversion node to `expr`, but only when the
    // widening from actualType to expType is actually legal.
    if (!isImplicityCastable(actualType, expType)) {
        return;
    }
    BLangTypeConversionExpr conversionExpr =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    // Chain onto an existing implicit conversion rather than replacing it.
    conversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
    conversionExpr.type = expType;
    conversionExpr.targetType = expType;
    conversionExpr.internal = true;
    expr.impConversionExpr = conversionExpr;
}
public BType getElementType(BType type) {
    // Unwrap nested array types down to the ultimate element type;
    // non-array inputs are returned unchanged.
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
public boolean checkListenerCompatibilityAtServiceDecl(BType type) {
    if (type.tag == TypeTags.UNION) {
        // Error members are tolerated in the union, but at least one
        // non-error member must exist and every non-error member must be
        // listener-compatible.
        int compatibleMemberCount = 0;
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.ERROR) {
                continue;
            }
            if (!checkListenerCompatibility(memberType)) {
                return false;
            }
            compatibleMemberCount++;
        }
        return compatibleMemberCount > 0;
    }
    return checkListenerCompatibility(type);
}
public boolean checkListenerCompatibility(BType type) {
    // For unions, every member must itself be listener-compatible.
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (!checkListenerCompatibility(memberType)) {
                return false;
            }
        }
        return true;
    }
    // Only object types can act as listeners; validate their attached
    // methods against the listener contract.
    if (type.tag != TypeTags.OBJECT) {
        return false;
    }
    BObjectType objectType = (BObjectType) type;
    List<BAttachedFunction> attachedFuncs = ((BStructureTypeSymbol) objectType.tsymbol).attachedFuncs;
    return new ListenerValidationModel(this, symTable).checkMethods(attachedFuncs);
}
public boolean isValidErrorDetailType(BType detailType) {
    // An error detail must be a mapping assignable to the built-in detail
    // type; closed (sealed) records are rejected outright.
    switch (detailType.tag) {
        case TypeTags.MAP:
            return isAssignable(detailType, symTable.detailType);
        case TypeTags.RECORD:
            if (isSealedRecord((BRecordType) detailType)) {
                return false;
            }
            return isAssignable(detailType, symTable.detailType);
        default:
            return false;
    }
}
private boolean isSealedRecord(BType recordType) {
    // A record is "sealed" (closed) when it disallows fields beyond those declared.
    if (recordType.getKind() != TypeKind.RECORD) {
        return false;
    }
    return ((BRecordType) recordType).sealed;
}
// Thin wrapper over BType.isNullable(): true when the type admits nil.
private boolean isNullable(BType fieldType) {
return fieldType.isNullable();
}
/**
 * Type visitor that decides whether two types are the "same" type (a stricter
 * relation than mutual assignability). The visited type is the target `t`;
 * the second argument `s` is the source being compared against it. The shared
 * `unresolvedTypes` set breaks cycles for recursive type definitions.
 */
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {
Set<TypePair> unresolvedTypes;
BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
this.unresolvedTypes = unresolvedTypes;
}
@Override
public Boolean visit(BType t, BType s) {
// Generic fallback: identity, or equal tags where at least one side is
// a type parameter (type-param instances are distinct objects).
if (t == s) {
return true;
}
switch (t.tag) {
case TypeTags.INT:
case TypeTags.BYTE:
case TypeTags.FLOAT:
case TypeTags.DECIMAL:
case TypeTags.STRING:
case TypeTags.BOOLEAN:
return t.tag == s.tag
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
case TypeTags.ANY:
case TypeTags.ANYDATA:
return t.tag == s.tag && hasSameReadonlyFlag(s, t)
&& (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
default:
break;
}
return false;
}
@Override
public Boolean visit(BBuiltInRefType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnyType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BAnydataType t, BType s) {
if (t == s) {
return true;
}
return t.tag == s.tag;
}
@Override
public Boolean visit(BMapType t, BType s) {
// Maps are the same when readonly-ness matches and constraints are the same.
if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
return false;
}
BMapType sType = ((BMapType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFutureType t, BType s) {
// NOTE(review): only constraint *tags* are compared here, not the full
// constraint types — confirm this looseness is intentional.
return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;
}
@Override
public Boolean visit(BXMLType t, BType s) {
return visit((BBuiltInRefType) t, s);
}
@Override
public Boolean visit(BJSONType t, BType s) {
return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
}
@Override
public Boolean visit(BArrayType t, BType s) {
return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BObjectType t, BType s) {
// Objects are nominal: same package and same name means same type.
if (t == s) {
return true;
}
if (s.tag != TypeTags.OBJECT) {
return false;
}
return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
}
@Override
public Boolean visit(BRecordType t, BType s) {
// Records are structural: same field set with same types, optionality
// and compatible readonly-ness, plus the same rest-field type.
if (t == s) {
return true;
}
if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
return false;
}
BRecordType source = (BRecordType) s;
if (source.fields.size() != t.fields.size()) {
return false;
}
for (BField sourceField : source.fields.values()) {
if (t.fields.containsKey(sourceField.name.value)) {
BField targetField = t.fields.get(sourceField.name.value);
if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
(!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
continue;
}
}
return false;
}
return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
}
private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
// XOR of the OPTIONAL bits: true when both or neither are optional.
return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
}
private boolean hasSameReadonlyFlag(BType source, BType target) {
return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
}
public Boolean visit(BTupleType t, BType s) {
if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
return false;
}
BTupleType source = (BTupleType) s;
if (source.tupleTypes.size() != t.tupleTypes.size()) {
return false;
}
for (int i = 0; i < source.tupleTypes.size(); i++) {
if (t.getTupleTypes().get(i) == symTable.noType) {
// noType members act as wildcards and match anything.
continue;
}
if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BStreamType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BTableType t, BType s) {
return t == s;
}
@Override
public Boolean visit(BInvokableType t, BType s) {
return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
}
@Override
public Boolean visit(BUnionType tUnionType, BType s) {
// Unions are the same when they have equal member counts and every
// source member matches some target member (the union itself is added
// to each set so self-recursive unions can match).
if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
return false;
}
BUnionType sUnionType = (BUnionType) s;
if (sUnionType.getMemberTypes().size()
!= tUnionType.getMemberTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
sourceTypes.add(sUnionType);
sourceTypes.addAll(sUnionType.getMemberTypes());
targetTypes.add(tUnionType);
targetTypes.addAll(tUnionType.getMemberTypes());
boolean notSameType = sourceTypes
.stream()
.map(sT -> targetTypes
.stream()
.anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
.anyMatch(foundSameType -> !foundSameType);
return !notSameType;
}
@Override
public Boolean visit(BIntersectionType tIntersectionType, BType s) {
// Same constituent count and every source constituent matches some
// target constituent.
if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
return false;
}
BIntersectionType sIntersectionType = (BIntersectionType) s;
if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
return false;
}
Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
for (BType sourceType : sourceTypes) {
boolean foundSameType = false;
for (BType targetType : targetTypes) {
if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
foundSameType = true;
break;
}
}
if (!foundSameType) {
return false;
}
}
return true;
}
@Override
public Boolean visit(BErrorType t, BType s) {
// Errors must have equal type-id sets and the same detail type.
if (s.tag != TypeTags.ERROR) {
return false;
}
BErrorType source = (BErrorType) s;
if (!source.typeIdSet.equals(t.typeIdSet)) {
return false;
}
if (source.detailType == t.detailType) {
return true;
}
return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
}
@Override
public Boolean visit(BTypedescType t, BType s) {
if (s.tag != TypeTags.TYPEDESC) {
return false;
}
BTypedescType sType = ((BTypedescType) s);
return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
}
@Override
public Boolean visit(BFiniteType t, BType s) {
// Finite types compare by identity only.
return s == t;
}
@Override
public Boolean visit(BParameterizedType t, BType s) {
if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
return false;
}
BParameterizedType sType = (BParameterizedType) s;
return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
}
};
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
    // A record `rhs` is field-equivalent to `lhs` when every lhs field has a
    // compatible rhs counterpart (readonly-compatible, not optional where lhs
    // is required, assignable type) and every extra rhs field fits the lhs
    // rest-field type.
    Map<String, BField> remainingRhsFields = new LinkedHashMap<>(rhsType.fields);
    for (BField lhsField : lhsType.fields.values()) {
        BField counterpart = remainingRhsFields.remove(lhsField.name.value);
        if (counterpart == null
                || hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, counterpart.symbol.flags)
                || (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(counterpart.symbol))
                || !isAssignable(counterpart.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }
    for (BField extraField : remainingRhsFields.values()) {
        if (!isAssignable(extraField.type, lhsType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
                                                   Set<TypePair> unresolvedTypes) {
    // First rhs function with the same name whose function type is assignable
    // to the lhs function's type; null when none matches.
    for (BAttachedFunction rhsFunc : rhsFuncList) {
        if (lhsFunc.funcName.equals(rhsFunc.funcName)
                && isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes)) {
            return rhsFunc;
        }
    }
    return null;
}
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
    // private : counterpart must be private, in the same package and owner.
    // public  : counterpart must be public.
    // module  : counterpart neither private nor public, same package.
    if (Symbols.isPrivate(lhsSym)) {
        return Symbols.isPrivate(rhsSym)
                && lhsSym.pkgID.equals(rhsSym.pkgID)
                && lhsSym.owner.name.equals(rhsSym.owner.name);
    }
    if (Symbols.isPublic(lhsSym)) {
        return Symbols.isPublic(rhsSym);
    }
    return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);
}
// Assignability check where the target (and possibly the source) is a union.
// Works in two passes over the flattened member sets: a first pass handles
// finite/XML/value-type members (which may be covered by the target as a
// whole), and a second pass requires every remaining source member to be
// assignable to some target member, with special handling for mutually
// self-referential structured types.
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
TypePair pair = new TypePair(source, target);
if (unresolvedTypes.contains(pair)) {
// Already being resolved higher up the stack: assume assignable to
// break the recursion (coinductive check).
return true;
}
Set<BType> sourceTypes = new LinkedHashSet<>();
Set<BType> targetTypes = new LinkedHashSet<>();
if (source.tag == TypeTags.UNION || source.tag == TypeTags.JSON || source.tag == TypeTags.ANYDATA) {
sourceTypes.addAll(getEffectiveMemberTypes((BUnionType) source));
} else {
sourceTypes.add(source);
}
if (target.tag == TypeTags.UNION) {
targetTypes.addAll(getEffectiveMemberTypes((BUnionType) target));
} else {
targetTypes.add(target);
}
var sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType s = sourceIterator.next();
if (s.tag == TypeTags.NEVER) {
// `never` has no values, so it never blocks assignability.
sourceIterator.remove();
continue;
}
if (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes)) {
sourceIterator.remove();
continue;
}
if (s.tag == TypeTags.XML && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
// `xml` is treated as the union of its built-in subtypes.
sourceIterator.remove();
continue;
}
if (!isValueType(s)) {
// Non-value members are deferred to the second pass.
continue;
}
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
while (targetIterator.hasNext()) {
BType t = targetIterator.next();
if (isAssignable(s, t, unresolvedTypes)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
sourceIterator = sourceTypes.iterator();
while (sourceIterator.hasNext()) {
BType s = sourceIterator.next();
boolean sourceTypeIsNotAssignableToAnyTargetType = true;
var targetIterator = targetTypes.iterator();
boolean selfReferencedSource = (s != source) && isSelfReferencedStructuredType(source, s);
while (targetIterator.hasNext()) {
BType t = targetIterator.next();
boolean selfReferencedTarget = isSelfReferencedStructuredType(target, t);
if (selfReferencedTarget) {
if (selfReferencedSource) {
// Both members recursively reference their own unions:
// matching tags are accepted to terminate the recursion.
if (s.tag == t.tag) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
}
if (isAssignable(s, t, unresolvedTypes)) {
sourceTypeIsNotAssignableToAnyTargetType = false;
break;
}
}
if (sourceTypeIsNotAssignableToAnyTargetType) {
return false;
}
}
// Record the proven pair so cyclic re-checks short-circuit to true.
unresolvedTypes.add(pair);
return true;
}
public boolean isSelfReferencedStructuredType(BType source, BType s) {
    // True when `s` — or the element/constraint type it wraps — is the very
    // same type instance as `source`, i.e. a recursive self-reference.
    if (source == s) {
        return true;
    }
    switch (s.tag) {
        case TypeTags.ARRAY:
            return isSelfReferencedStructuredType(source, ((BArrayType) s).eType);
        case TypeTags.MAP:
            return isSelfReferencedStructuredType(source, ((BMapType) s).constraint);
        case TypeTags.TABLE:
            return isSelfReferencedStructuredType(source, ((BTableType) s).constraint);
        default:
            return false;
    }
}
public BType updateSelfReferencedWithNewType(BType source, BType s, BType target) {
    // Rebuild `s` with `target` substituted where `s` directly references
    // `source`; returns `s` unchanged when there is no direct reference.
    switch (s.tag) {
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) s;
            if (arrayType.eType == source) {
                return new BArrayType(target, arrayType.tsymbol, arrayType.size,
                        arrayType.state, arrayType.flags);
            }
            break;
        case TypeTags.MAP:
            BMapType mapType = (BMapType) s;
            if (mapType.constraint == source) {
                return new BMapType(mapType.tag, target, mapType.tsymbol, mapType.flags);
            }
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) s;
            if (tableType.constraint == source) {
                return new BTableType(tableType.tag, target, tableType.tsymbol,
                        tableType.flags);
            } else if (tableType.constraint instanceof BMapType) {
                // A table over a map constraint may reference `source` one
                // level deeper; recurse on the map constraint.
                return updateSelfReferencedWithNewType(source, (BMapType) tableType.constraint, target);
            }
            break;
    }
    return s;
}
// When building the immutable (readonly) counterpart of a union, a member that
// recursively references the original union must be re-pointed at the new
// immutable union. `sameMember` distinguishes members that were reused as-is
// (a fresh wrapper must be created) from members that were already cloned
// (the clone can be mutated in place). The fixed member is added to
// `readOnlyMemTypes`.
public static void fixSelfReferencingSameUnion(BType originalMemberType, BUnionType origUnionType,
BType immutableMemberType, BUnionType newImmutableUnion,
LinkedHashSet<BType> readOnlyMemTypes) {
boolean sameMember = originalMemberType == immutableMemberType;
if (originalMemberType.tag == TypeTags.ARRAY) {
var arrayType = (BArrayType) originalMemberType;
if (origUnionType == arrayType.eType) {
if (sameMember) {
BArrayType newArrayType = new BArrayType(newImmutableUnion, arrayType.tsymbol, arrayType.size,
arrayType.state, arrayType.flags);
readOnlyMemTypes.add(newArrayType);
} else {
((BArrayType) immutableMemberType).eType = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.MAP) {
var mapType = (BMapType) originalMemberType;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol, mapType.flags);
readOnlyMemTypes.add(newMapType);
} else {
((BMapType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
}
} else if (originalMemberType.tag == TypeTags.TABLE) {
var tableType = (BTableType) originalMemberType;
if (origUnionType == tableType.constraint) {
if (sameMember) {
BTableType newTableType = new BTableType(tableType.tag, newImmutableUnion, tableType.tsymbol,
tableType.flags);
readOnlyMemTypes.add(newTableType);
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
readOnlyMemTypes.add(immutableMemberType);
}
return;
}
// The union may also be referenced through the table's map constraint.
var immutableConstraint = ((BTableType) immutableMemberType).constraint;
if (tableType.constraint.tag == TypeTags.MAP) {
// Recompute `sameMember` for the nested constraint level.
sameMember = tableType.constraint == immutableConstraint;
var mapType = (BMapType) tableType.constraint;
if (origUnionType == mapType.constraint) {
if (sameMember) {
BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol,
mapType.flags);
((BTableType) immutableMemberType).constraint = newMapType;
} else {
((BTableType) immutableMemberType).constraint = newImmutableUnion;
}
readOnlyMemTypes.add(immutableMemberType);
}
}
} else {
// Non-structured members cannot self-reference; keep them as-is.
readOnlyMemTypes.add(immutableMemberType);
}
}
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
    // Flatten intersection members to their effective types, recursively
    // expanding any effective type that is itself a union.
    Set<BType> effectiveTypes = new LinkedHashSet<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.INTERSECTION) {
            effectiveTypes.add(memberType);
            continue;
        }
        BType effectiveType = ((BIntersectionType) memberType).effectiveType;
        if (effectiveType.tag == TypeTags.UNION) {
            effectiveTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
        } else {
            effectiveTypes.add(effectiveType);
        }
    }
    return effectiveTypes;
}
// A finite type is assignable to `targetType` when every value in its value
// space individually belongs to the target.
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
if (targetType.tag == TypeTags.FINITE) {
// Finite-to-finite: every source value must be in the target value space.
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));
}
if (targetType.tag == TypeTags.UNION) {
List<BType> unionMemberTypes = getAllTypes(targetType);
// NOTE(review): for non-finite members the check below tests against the
// whole `targetType`, not the individual member — apparently redundant
// but harmless inside anyMatch; confirm before simplifying.
return finiteType.getValueSpace().stream()
.allMatch(valueExpr -> unionMemberTypes.stream()
.anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?
isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :
isAssignable(valueExpr.type, targetType, unresolvedTypes)));
}
// Non-union target: each value's own type must be assignable to it.
return finiteType.getValueSpace().stream()
.allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));
}
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
    // A literal belongs to a finite type when some member of its value space
    // matches: nil matches nil; otherwise typed value equivalence applies.
    if (type.tag != TypeTags.FINITE) {
        return false;
    }
    for (BLangExpression memberExpr : ((BFiniteType) type).getValueSpace()) {
        BLangLiteral memberLiteral = (BLangLiteral) memberExpr;
        if (memberLiteral.value == null) {
            if (literalExpr.value == null) {
                return true;
            }
        } else if (checkLiteralAssignabilityBasedOnType(memberLiteral, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Method to check literal assignability based on the types of the literals. For numeric literals the
 * assignability depends on the equivalency of the literals. The candidate literal may be either a simple
 * literal or a constant. In case of a constant, it is assignable to the base literal if and only if both
 * literals have the same type and equivalent values.
 *
 * @param baseLiteral Literal based on which we check the assignability.
 * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
 * @return true if assignable; false otherwise.
 */
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
// Different literal kinds (e.g. numeric vs string) never match.
if (baseLiteral.getKind() != candidateLiteral.getKind()) {
return false;
}
Object baseValue = baseLiteral.value;
Object candidateValue = candidateLiteral.value;
int candidateTypeTag = candidateLiteral.type.tag;
// Dispatch on the base literal's type; each numeric branch additionally
// admits a non-constant int candidate whose value fits the base subtype.
switch (baseLiteral.type.tag) {
case TypeTags.BYTE:
if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
!candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.INT:
if (candidateTypeTag == TypeTags.INT) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.SIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED32_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED16_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.UNSIGNED8_INT:
if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
}
break;
case TypeTags.FLOAT:
String baseValueStr = String.valueOf(baseValue);
String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
// A `d`-suffixed literal is decimal, never float.
return false;
}
double baseDoubleVal = Double.parseDouble(baseValueStr);
double candidateDoubleVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
// A plain int literal may be used where a float is expected.
candidateDoubleVal = ((Long) candidateValue).doubleValue();
return baseDoubleVal == candidateDoubleVal;
} else if (candidateTypeTag == TypeTags.FLOAT) {
candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
return baseDoubleVal == candidateDoubleVal;
}
break;
case TypeTags.DECIMAL:
BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
BigDecimal candidateDecimalVal;
if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
} else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
candidateTypeTag == TypeTags.DECIMAL) {
if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
// An `f`-suffixed literal is float, never decimal.
return false;
}
candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
// compareTo ignores scale differences (e.g. 1.0 vs 1.00).
return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
}
break;
default:
// Non-numeric literals (string, boolean, nil): plain value equality.
return baseValue.equals(candidateValue);
}
return false;
}
/**
 * Returns true if the given value lies within the value space of the `byte` type.
 * Compares the full long value directly: narrowing via {@code intValue()} would wrap values
 * outside the int range (e.g. 2^32) back into the byte range and accept them incorrectly.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid byte literal value
 */
boolean isByteLiteralValue(Long longObject) {
    return (longObject >= BBYTE_MIN_VALUE && longObject <= BBYTE_MAX_VALUE);
}
/**
 * Returns true if the given value fits in the value space of the signed32 integer subtype.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid signed32 literal value
 */
boolean isSigned32LiteralValue(Long longObject) {
    long value = longObject;
    return SIGNED32_MIN_VALUE <= value && value <= SIGNED32_MAX_VALUE;
}
/**
 * Returns true if the given value fits in the value space of the signed16 integer subtype.
 * Compares the long value directly; narrowing via {@code intValue()} would wrap
 * out-of-int-range values into the signed16 range and accept them incorrectly.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid signed16 literal value
 */
boolean isSigned16LiteralValue(Long longObject) {
    return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}
/**
 * Returns true if the given value fits in the value space of the signed8 integer subtype.
 * Compares the long value directly; narrowing via {@code intValue()} would wrap
 * out-of-int-range values into the signed8 range and accept them incorrectly.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid signed8 literal value
 */
boolean isSigned8LiteralValue(Long longObject) {
    return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}
/**
 * Returns true if the given value fits in the value space of the unsigned32 integer subtype.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid unsigned32 literal value
 */
boolean isUnsigned32LiteralValue(Long longObject) {
    long value = longObject;
    return 0 <= value && value <= UNSIGNED32_MAX_VALUE;
}
/**
 * Returns true if the given value fits in the value space of the unsigned16 integer subtype.
 * Compares the long value directly; narrowing via {@code intValue()} would wrap
 * out-of-int-range values into the unsigned16 range and accept them incorrectly.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid unsigned16 literal value
 */
boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}
/**
 * Returns true if the given value fits in the value space of the unsigned8 integer subtype.
 * Compares the long value directly; narrowing via {@code intValue()} would wrap
 * out-of-int-range values (e.g. 2^32, whose intValue() is 0) into range and accept them.
 *
 * @param longObject the literal value to check
 * @return true if the value is a valid unsigned8 literal value
 */
boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}
/**
 * Returns true when the given string literal consists of exactly one Unicode code point,
 * i.e. it is a valid value for the character (single-char string) subtype.
 *
 * @param literal the string literal value
 * @return true if the literal is a single code point
 */
boolean isCharLiteralValue(String literal) {
    // limit(2) short-circuits: we only need to know whether the count equals one.
    return literal.codePoints().limit(2).count() == 1;
}
/**
 * Method to retrieve a type representing all the values in the value space of a finite type that are assignable to
 * the target type.
 *
 * @param finiteType the finite type
 * @param targetType the target type
 * @return a new finite type if at least one value in the value space of the specified finiteType is
 * assignable to targetType (the same if all are assignable), else semanticError
 */
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
    // If the whole finite type fits, no narrowing is needed.
    if (isAssignable(finiteType, targetType)) {
        return finiteType;
    }

    // Collect every member value that is assignable to the target: either directly by its type,
    // as a value of the target finite type, or as a value of a finite member of a target union.
    Set<BLangExpression> matchingValues = new HashSet<>();
    for (BLangExpression expr : finiteType.getValueSpace()) {
        if (isAssignable(expr.type, targetType) || isAssignableToFiniteType(targetType, (BLangLiteral) expr)) {
            matchingValues.add(expr);
            continue;
        }
        if (targetType.tag != TypeTags.UNION) {
            continue;
        }
        for (BType memType : ((BUnionType) targetType).getMemberTypes()) {
            if (memType.tag == TypeTags.FINITE && isAssignableToFiniteType(memType, (BLangLiteral) expr)) {
                matchingValues.add(expr);
                break;
            }
        }
    }

    if (matchingValues.isEmpty()) {
        return symTable.semanticError;
    }

    // Wrap the surviving values in a fresh anonymous finite type.
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
                                                            names.fromString("$anonType$" + UNDERSCORE
                                                                                     + finiteTypeCount++),
                                                            finiteType.tsymbol.pkgID, null,
                                                            finiteType.tsymbol.owner, finiteType.tsymbol.pos,
                                                            VIRTUAL);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
    finiteTypeSymbol.type = intersectingFiniteType;
    return intersectingFiniteType;
}
/**
 * Method to retrieve a type representing all the member types of a union type that are assignable to
 * the target type.
 *
 * @param unionType the union type
 * @param targetType the target type
 * @return a single type or a new union type if at least one member type of the union type is
 * assignable to targetType, else semanticError
 */
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType, SymbolEnv env) {
    List<BType> intersection = new LinkedList<>();

    // Keep the (possibly narrowed) intersection of each member with the target.
    for (BType memType : unionType.getMemberTypes()) {
        BType memberIntersectionType = getTypeIntersection(memType, targetType, env);
        if (memberIntersectionType != symTable.semanticError) {
            intersection.add(memberIntersectionType);
        }
    }

    if (intersection.isEmpty()) {
        return symTable.semanticError;
    }

    return intersection.size() == 1 ? intersection.get(0)
            : BUnionType.create(null, new LinkedHashSet<>(intersection));
}
/**
 * Checks whether an equality check is meaningful for the given operand types, i.e. whether at
 * least one value could belong to both types' value spaces.
 *
 * @param lhsType the left-hand operand type
 * @param rhsType the right-hand operand type
 * @return true if the two (pure) types share at least one potential value
 */
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
    // Equality is only defined over pure types.
    if (!isPureType(lhsType) || !isPureType(rhsType)) {
        return false;
    }

    // Assignability in either direction trivially implies an intersection.
    if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
        return true;
    }

    return equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsType),
                                      expandAndGetMemberTypesRecursive(rhsType));
}
/**
 * Checks whether any member of one expanded type set shares values with a member of the other.
 */
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    // anydata on either side overlaps with any non-error member on the other side.
    if ((lhsTypes.contains(symTable.anydataType) &&
                 rhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR)) ||
            (rhsTypes.contains(symTable.anydataType) &&
                 lhsTypes.stream().anyMatch(type -> type.tag != TypeTags.ERROR))) {
        return true;
    }

    // A pair of identical member types is a direct intersection.
    for (BType lhs : lhsTypes) {
        for (BType rhs : rhsTypes) {
            if (isSameType(lhs, rhs)) {
                return true;
            }
        }
    }

    // Fall back to the structured-type comparison.
    return equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}
/**
 * Retrieves member types of the specified type, expanding maps/arrays of/constrained by unions types to individual
 * maps/arrays.
 *
 * e.g., (string|int)[] would cause three entries as string[], int[], (string|int)[]
 *
 * @param bType the type for which member types needs to be identified
 * @return a set containing all the retrieved member types
 */
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
    Set<BType> memberTypes = new LinkedHashSet<>();
    switch (bType.tag) {
        case TypeTags.BYTE:
        case TypeTags.INT:
            // byte and int are both added, matching the original overlap handling.
            memberTypes.add(symTable.intType);
            memberTypes.add(symTable.byteType);
            break;
        case TypeTags.FINITE:
            // A finite type contributes the types of its member values.
            for (BLangExpression value : ((BFiniteType) bType).getValueSpace()) {
                memberTypes.add(value.type);
            }
            break;
        case TypeTags.UNION:
            for (BType member : ((BUnionType) bType).getMemberTypes()) {
                memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
            }
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) bType;
            BType arrayElementType = arrayType.getElementType();

            // For a sized array, also include the corresponding unsized array type.
            if (arrayType.getSize() != -1) {
                memberTypes.add(new BArrayType(arrayElementType));
            }

            // Expand a union element type into one array per expanded element type.
            if (arrayElementType.tag == TypeTags.UNION) {
                for (BType elementUnionType : expandAndGetMemberTypesRecursive(arrayElementType)) {
                    memberTypes.add(new BArrayType(elementUnionType));
                }
            }
            memberTypes.add(bType);
            break;
        case TypeTags.MAP:
            BType mapConstraintType = ((BMapType) bType).getConstraint();

            // Expand a union constraint into one map per expanded constraint type.
            if (mapConstraintType.tag == TypeTags.UNION) {
                for (BType constraintUnionType : expandAndGetMemberTypesRecursive(mapConstraintType)) {
                    memberTypes.add(new BMapType(TypeTags.MAP, constraintUnionType, symTable.mapType.tsymbol));
                }
            }
            memberTypes.add(bType);
            break;
        default:
            memberTypes.add(bType);
    }
    return memberTypes;
}
/**
 * Checks whether two tuple types can share values: same arity, and each member pair intersects.
 */
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
    List<BType> lhsMemberTypes = lhsType.getTupleTypes();
    List<BType> rhsMemberTypes = rhsType.getTupleTypes();

    // Tuples can only intersect member-wise, so the arities must match.
    if (lhsMemberTypes.size() != rhsMemberTypes.size()) {
        return false;
    }

    for (int i = 0; i < lhsMemberTypes.size(); i++) {
        Set<BType> lhsExpanded = expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i));
        Set<BType> rhsExpanded = expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i));
        if (!equalityIntersectionExists(lhsExpanded, rhsExpanded)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether any lhs member type can share values with some rhs member type via structured
 * (tuple/array/map/record/json) comparison. Called after the direct same-type check has failed.
 *
 * @param lhsTypes expanded member types of the lhs operand
 * @param rhsTypes expanded member types of the rhs operand
 * @return true if at least one structural intersection exists
 */
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    for (BType lhsMemberType : lhsTypes) {
        switch (lhsMemberType.tag) {
            case TypeTags.INT:
            case TypeTags.STRING:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.BOOLEAN:
            case TypeTags.NIL:
                // Simple values intersect with a json member on the rhs.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                break;
            case TypeTags.JSON:
                if (jsonEqualityIntersectionExists(rhsTypes)) {
                    return true;
                }
                break;
            case TypeTags.TUPLE:
                // tuple vs tuple: member-wise intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
                    return true;
                }
                // tuple vs array: each tuple member against the array element type.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
                                                                     (BTupleType) lhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.ARRAY:
                // array vs array: element types must intersect.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
                                        expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
                    return true;
                }
                // array vs tuple: symmetric to the tuple case above.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
                                                                     (BTupleType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.MAP:
                // map vs map: constraint types must intersect.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
                                        expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
                    return true;
                }
                // map vs json: allowed unless the map's constraint is error-only.
                if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
                        rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                // map vs record: every required record field must fit the map constraint.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
                                                                    (BRecordType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.OBJECT:
            case TypeTags.RECORD:
                // structural equivalence in either direction counts as an intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
                                checkStructEquivalency(lhsMemberType, rhsMemberType))) {
                    return true;
                }
                // record vs record: field-wise intersection.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                recordEqualityIntersectionExists((BRecordType) lhsMemberType,
                                                                 (BRecordType) rhsMemberType))) {
                    return true;
                }
                // record vs json.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
                        jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
                    return true;
                }
                // record vs map (mirror of the MAP case above).
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
                                                                    (BRecordType) lhsMemberType))) {
                    return true;
                }
                break;
        }
    }
    return false;
}
/**
 * Checks whether an array and a tuple can share values: every tuple member must intersect
 * with the array's element type.
 */
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
    Set<BType> elementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
    for (BType tupleMemType : tupleType.tupleTypes) {
        if (!equalityIntersectionExists(elementTypes, expandAndGetMemberTypesRecursive(tupleMemType))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether two record types can share values: shared fields must intersect pairwise,
 * and a required field present on only one side must fit the other side's rest field type.
 *
 * @return true if an equality intersection exists between the two record types
 */
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
    Map<String, BField> lhsFields = lhsType.fields;
    Map<String, BField> rhsFields = rhsType.fields;
    // Fields matched in the lhs pass, so the rhs pass only considers rhs-only fields.
    List<Name> matchedFieldNames = new ArrayList<>();
    for (BField lhsField : lhsFields.values()) {
        if (rhsFields.containsKey(lhsField.name.value)) {
            // Shared field: the two field types must intersect.
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(
                                                    rhsFields.get(lhsField.name.value).type))) {
                return false;
            }
            matchedFieldNames.add(lhsField.getName());
        } else {
            // NOTE(review): this `break` aborts the whole lhs pass on the first optional
            // lhs-only field, leaving remaining lhs fields unchecked — it looks like it
            // should be `continue`; confirm intended behavior before changing.
            if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
                break;
            }
            // A required lhs-only field can only match through the rhs rest field.
            if (rhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
                return false;
            }
        }
    }
    for (BField rhsField : rhsFields.values()) {
        if (matchedFieldNames.contains(rhsField.getName())) {
            continue;
        }
        // A required rhs-only field must fit the lhs rest field type.
        if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
            if (lhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
                                            expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether a map and a record can share values: every required record field type must
 * intersect with the map's constraint type.
 */
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
    Set<BType> mapConstrTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());

    for (BField field : recordType.fields.values()) {
        // Optional fields may simply be absent, so they impose no requirement.
        if (Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL)) {
            continue;
        }
        if (!equalityIntersectionExists(mapConstrTypes, expandAndGetMemberTypesRecursive(field.type))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether any type in the given set can share values with json.
 */
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
    for (BType type : typeSet) {
        switch (type.tag) {
            case TypeTags.MAP:
                // A map overlaps with json unless its constraint is error-only.
                if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
                    return true;
                }
                break;
            case TypeTags.RECORD:
                // A record overlaps with json when no required field is error-only.
                boolean jsonCompatible = true;
                for (BField field : ((BRecordType) type).fields.values()) {
                    if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) &&
                            isAssignable(field.type, symTable.errorType)) {
                        jsonCompatible = false;
                        break;
                    }
                }
                if (jsonCompatible) {
                    return true;
                }
                break;
            default:
                if (isAssignable(type, symTable.jsonType)) {
                    return true;
                }
        }
    }
    return false;
}
/**
 * Computes the type remaining after removing {@code typeToRemove} from a match expression's
 * {@code originalType}. Only unions, finite types and tuples can be narrowed; any other type
 * is returned unchanged.
 *
 * @param originalType the original match expression type
 * @param typeToRemove the type matched so far
 * @return the remaining (possibly narrowed) type
 */
public BType getRemainingMatchExprType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.TUPLE) {
        return getRemainingType((BTupleType) originalType, typeToRemove);
    }
    return originalType;
}
/**
 * Narrows a tuple type by removing another type from it, dispatching on the removed type's
 * shape. Shapes other than tuple/array cannot narrow a tuple.
 */
private BType getRemainingType(BTupleType originalType, BType typeToRemove) {
    if (typeToRemove.tag == TypeTags.TUPLE) {
        return getRemainingType(originalType, (BTupleType) typeToRemove);
    }
    if (typeToRemove.tag == TypeTags.ARRAY) {
        return getRemainingType(originalType, (BArrayType) typeToRemove);
    }
    return originalType;
}
/**
 * Narrows a tuple type by removing another tuple type from it, member by member. The removed
 * tuple's rest type (if any) narrows the original's trailing members.
 *
 * @param originalType the original closed tuple type
 * @param typeToRemove the tuple type matched so far
 * @return the remaining tuple type, or the original when no narrowing applies
 */
private BType getRemainingType(BTupleType originalType, BTupleType typeToRemove) {
    // An open tuple (with a rest type) is not narrowed.
    if (originalType.restType != null) {
        return originalType;
    }

    List<BType> originalTupleTypes = new ArrayList<>(originalType.tupleTypes);
    List<BType> typesToRemove = new ArrayList<>(typeToRemove.tupleTypes);
    if (originalTupleTypes.size() < typesToRemove.size()) {
        return originalType;
    }
    List<BType> tupleTypes = new ArrayList<>();
    // Bound by typesToRemove.size(): the original may be longer (handled via the rest type
    // below); iterating to originalTupleTypes.size() would index past typesToRemove.
    for (int i = 0; i < typesToRemove.size(); i++) {
        tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typesToRemove.get(i)));
    }
    if (typeToRemove.restType == null) {
        return new BTupleType(tupleTypes);
    }
    if (originalTupleTypes.size() == typesToRemove.size()) {
        return originalType;
    }
    // Narrow the original's extra trailing members against the removed tuple's rest type.
    for (int i = typesToRemove.size(); i < originalTupleTypes.size(); i++) {
        tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typeToRemove.restType));
    }
    return new BTupleType(tupleTypes);
}
/**
 * Narrows a tuple type by removing an array type: every tuple member (and the rest type, when
 * present) is narrowed against the array's element type.
 */
private BType getRemainingType(BTupleType originalType, BArrayType typeToRemove) {
    BType eType = typeToRemove.eType;

    List<BType> narrowedMembers = originalType.tupleTypes.stream()
            .map(memberType -> getRemainingMatchExprType(memberType, eType))
            .collect(Collectors.toList());

    BTupleType remainingType = new BTupleType(narrowedMembers);
    if (originalType.restType != null) {
        remainingType.restType = getRemainingMatchExprType(originalType.restType, eType);
    }
    return remainingType;
}
/**
 * Computes the type remaining after removing {@code typeToRemove} from {@code originalType}.
 * Only unions and finite types can be narrowed here; other types are returned unchanged.
 *
 * @param originalType the original type
 * @param typeToRemove the type to remove
 * @return the remaining (possibly narrowed) type
 */
public BType getRemainingType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    return originalType;
}
/**
 * Computes the intersection of {@code lhsType} with each member of {@code rhsType} and folds
 * the non-empty results into a single type (or a union of them).
 *
 * @param lhsType the type being narrowed
 * @param rhsType the narrowing type (a union is considered member by member)
 * @param env the current symbol environment (used when new record/error types are created)
 * @return the intersection type, or semanticError when the intersection is empty
 */
BType getTypeIntersection(BType lhsType, BType rhsType, SymbolEnv env) {
    List<BType> narrowingTypes = getAllTypes(rhsType);
    LinkedHashSet<BType> intersection = narrowingTypes.stream().map(type -> {
        // NOTE: the order of these checks matters — assignability first, then
        // finite/union narrowing, then structural (error/record/map) intersections.
        if (isAssignable(type, lhsType)) {
            // Narrowing type is a subtype of lhs: intersection is the narrowing type.
            return type;
        } else if (isAssignable(lhsType, type)) {
            // lhs is a subtype of the narrowing type: intersection is lhs itself.
            return lhsType;
        } else if (lhsType.tag == TypeTags.FINITE) {
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.FINITE) {
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (lhsType.tag == TypeTags.UNION) {
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.UNION) {
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.NULL_SET) {
            // The empty set absorbs everything.
            return type;
        } else if (type.tag == TypeTags.ERROR && lhsType.tag == TypeTags.ERROR) {
            BType intersectionType = getIntersectionForErrorTypes(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
            BType intersectionType = createRecordIntersection(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.RECORD) {
            BType intersectionType = createRecordAndMapIntersection(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.MAP) {
            BType intersectionType = createRecordAndMapIntersection(type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.MAP) {
            BType intersectionType = createRecordAndMapIntersection(type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        }
        // null marks "no intersection with this member"; filtered out below.
        return null;
    }).filter(type -> type != null).collect(Collectors.toCollection(LinkedHashSet::new));

    if (intersection.isEmpty()) {
        if (lhsType.tag == TypeTags.NULL_SET) {
            return lhsType;
        }
        return symTable.semanticError;
    }

    if (intersection.contains(symTable.semanticError)) {
        return symTable.semanticError;
    } else if (intersection.size() == 1) {
        return intersection.toArray(new BType[0])[0];
    } else {
        return BUnionType.create(null, intersection);
    }
}
/**
 * Intersects two error types by intersecting their detail types. Sealed detail records
 * cannot be merged, and an empty detail intersection yields semanticError.
 */
private BType getIntersectionForErrorTypes(BType lhsType, BType rhsType, SymbolEnv env) {
    BType detailTypeOne = ((BErrorType) lhsType).detailType;
    BType detailTypeTwo = ((BErrorType) rhsType).detailType;

    if (isSealedRecord(detailTypeOne) || isSealedRecord(detailTypeTwo)) {
        return symTable.semanticError;
    }

    BType detailIntersectionType = getTypeIntersection(detailTypeOne, detailTypeTwo, env);
    if (detailIntersectionType == symTable.semanticError) {
        return symTable.semanticError;
    }

    return createErrorType(lhsType, rhsType, detailIntersectionType, env);
}
/**
 * Builds an anonymous record representing the intersection of two record types: fields from
 * both are merged, and the rest field types are intersected.
 */
private BType createRecordIntersection(BType recordTypeOne, BType recordTypeTwo, SymbolEnv env) {
    BRecordType recordType = createAnonymousRecord(env);

    // Fold both operands' fields into the new record; short-circuits on the first conflict.
    boolean fieldsCompatible = populateRecordFields(recordType, recordTypeOne, env, null)
            && populateRecordFields(recordType, recordTypeTwo, env, null);
    if (!fieldsCompatible) {
        return symTable.semanticError;
    }

    recordType.restFieldType = getTypeIntersection(((BRecordType) recordTypeOne).restFieldType,
                                                   ((BRecordType) recordTypeTwo).restFieldType, env);
    if (recordType.restFieldType == symTable.semanticError) {
        return symTable.semanticError;
    }
    return recordType;
}
/**
 * Creates a fresh public anonymous record type (with its symbol, scope, and a default
 * nil-returning init function) in the current package.
 */
private BRecordType createAnonymousRecord(SymbolEnv env) {
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(
            Flags.asMask(EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS)), Names.EMPTY,
            env.enclPkg.packageID, null, env.scope.owner, null, VIRTUAL);
    recordSymbol.name = names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));

    // Attach a no-arg, nil-returning init function to the record symbol.
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(
            Flags.PUBLIC, Names.EMPTY, env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false,
            symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                         initFuncType, symTable.builtinPos);
    recordSymbol.scope = new Scope(recordSymbol);

    // Wire type and symbol to each other.
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.tsymbol = recordSymbol;
    recordSymbol.type = recordType;
    return recordType;
}
/**
 * Builds an anonymous record representing the intersection of a record (or map) with a map:
 * each field is constrained by the map's constraint, and the rest field is intersected.
 */
private BType createRecordAndMapIntersection(BType type, BType mapType, SymbolEnv env) {
    BRecordType intersectionRecord = createAnonymousRecord(env);

    if (!populateRecordFields(intersectionRecord, type, env, ((BMapType) mapType).constraint)) {
        return symTable.semanticError;
    }

    BType restType = getRestFieldIntersectionType(type, (BMapType) mapType, env);
    if (restType == symTable.semanticError) {
        return symTable.semanticError;
    }
    intersectionRecord.restFieldType = restType;
    return intersectionRecord;
}
/**
 * Intersects the "rest" part of {@code type} with a map's constraint: a record contributes
 * its rest field type, anything else is treated as a map and contributes its constraint.
 */
private BType getRestFieldIntersectionType(BType type, BMapType mapType, SymbolEnv env) {
    BType restOrConstraint = type.tag == TypeTags.RECORD
            ? ((BRecordType) type).restFieldType
            : ((BMapType) type).constraint;
    return getTypeIntersection(restOrConstraint, mapType.constraint, env);
}
/**
 * Creates an error type with the given detail type, carrying the lhs error type's flags
 * merged with the rhs error type's flags on the resulting symbol.
 */
private BErrorType createErrorType(BType lhsType, BType rhsType, BType detailType, SymbolEnv env) {
    BErrorType errorType = createErrorType(detailType, lhsType.flags, env);
    // Fold the rhs operand's flags into the new symbol as well.
    errorType.tsymbol.flags |= rhsType.flags;
    return errorType;
}
/**
 * Creates a fresh error type with the given detail type and flags, owned by the current scope.
 *
 * @param detailType the detail record/map type of the error
 * @param flags symbol flags for the new error type symbol
 * @param env the current symbol environment
 * @return the newly created error type (with an empty type-id set)
 */
public BErrorType createErrorType(BType detailType, long flags, SymbolEnv env) {
    BErrorTypeSymbol symbol = Symbols.createErrorSymbol(flags, Names.EMPTY, env.enclPkg.symbol.pkgID,
                                                        null, env.scope.owner, null, VIRTUAL);
    BErrorType errorType = new BErrorType(symbol, detailType);
    symbol.type = errorType;
    errorType.typeIdSet = BTypeIdSet.emptySet();
    errorType.flags |= symbol.flags;
    return errorType;
}
/**
 * Copies the fields of {@code originalType} (when it is a record; other kinds contribute no
 * named fields) into {@code recordType}, merging with any overlapping fields already present
 * and, when a map {@code constraint} is given, intersecting each field type with it.
 *
 * @param recordType the intersection record being built
 * @param originalType one operand of the intersection
 * @param env the current symbol environment
 * @param constraint optional map constraint each field must also satisfy; may be null
 * @return false when some field makes the intersection invalid
 */
private boolean populateRecordFields(BRecordType recordType, BType originalType, SymbolEnv env, BType constraint) {
    BTypeSymbol intersectionRecordSymbol = recordType.tsymbol;
    // Non-record operands (e.g. maps) have no named fields to copy.
    if (originalType.getKind() != TypeKind.RECORD) {
        return true;
    }
    BRecordType originalRecordType = (BRecordType) originalType;
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    for (BField origField : originalRecordType.fields.values()) {
        org.wso2.ballerinalang.compiler.util.Name origFieldName = origField.name;
        String nameString = origFieldName.value;
        // Merges with an already-populated overlapping field and applies the constraint;
        // semanticError aborts the whole intersection.
        BType recordFieldType = validateRecordField(recordType, origField, constraint, env);
        if (recordFieldType == symTable.semanticError) {
            return false;
        }
        BVarSymbol recordFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName,
                                                      env.enclPkg.packageID, recordFieldType,
                                                      intersectionRecordSymbol, origField.pos, SOURCE);
        // Function-typed fields need the invokable metadata copied onto the new symbol.
        if (recordFieldType.tag == TypeTags.INVOKABLE && recordFieldType.tsymbol != null) {
            BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) recordFieldType.tsymbol;
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) recordFieldSymbol;
            invokableSymbol.params = tsymbol.params;
            invokableSymbol.restParam = tsymbol.restParam;
            invokableSymbol.retType = tsymbol.returnType;
            invokableSymbol.flags = tsymbol.flags;
        }
        fields.put(nameString, new BField(origFieldName, null, recordFieldSymbol));
    }
    recordType.fields.putAll(fields);
    return true;
}
/**
 * Resolves the type a field should have in the intersection record: merged with any
 * overlapping field and, when given, intersected with the map constraint.
 *
 * @return the resolved field type; null when an optional field is dropped;
 *         semanticError when the intersection is invalid
 */
private BType validateRecordField(BRecordType recordType, BField origField, BType constraint, SymbolEnv env) {
    BType fieldType = validateOverlappingFields(recordType, origField);
    // No constraint to apply, or already invalid — return as-is.
    if (fieldType == symTable.semanticError || constraint == null) {
        return fieldType;
    }

    BType constrainedType = getTypeIntersection(fieldType, constraint, env);
    if (constrainedType != symTable.semanticError) {
        return constrainedType;
    }

    // An optional field that cannot satisfy the constraint is dropped (null); a required
    // one invalidates the whole intersection.
    return Symbols.isOptional(origField.symbol) ? null : symTable.semanticError;
}
/**
 * When the intersection record already has a field with the same name, keeps the narrower
 * of the two field types; otherwise the original field's type is used unchanged.
 *
 * @return the chosen field type, or semanticError when neither type contains the other
 */
private BType validateOverlappingFields(BRecordType recordType, BField origField) {
    BField overlappingField = recordType.fields.get(origField.name.value);
    if (overlappingField == null) {
        return origField.type;
    }

    // Prefer whichever of the two overlapping types is the subtype.
    if (isAssignable(overlappingField.type, origField.type)) {
        return overlappingField.type;
    }
    if (isAssignable(origField.type, overlappingField.type)) {
        return origField.type;
    }
    return symTable.semanticError;
}
/**
 * Narrows a union by removing every member assignable to one of the given types. Finite
 * members are narrowed value-by-value rather than dropped wholesale.
 *
 * @return the remaining type: a single type, a smaller union, or the null set when empty
 */
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
    List<BType> remainingTypes = getAllTypes(originalType);
    for (BType removeType : removeTypes) {
        remainingTypes.removeIf(type -> isAssignable(type, removeType));
    }

    // Surviving finite members are replaced by versions with matched values removed.
    List<BType> finiteTypesToRemove = new ArrayList<>();
    List<BType> finiteTypesToAdd = new ArrayList<>();
    for (BType remainingType : remainingTypes) {
        if (remainingType.tag != TypeTags.FINITE) {
            continue;
        }
        BFiniteType finiteType = (BFiniteType) remainingType;
        finiteTypesToRemove.add(finiteType);
        BType narrowedFiniteType = getRemainingType(finiteType, removeTypes);
        if (narrowedFiniteType != symTable.semanticError) {
            finiteTypesToAdd.add(narrowedFiniteType);
        }
    }
    remainingTypes.removeAll(finiteTypesToRemove);
    remainingTypes.addAll(finiteTypesToAdd);

    if (remainingTypes.isEmpty()) {
        return symTable.nullSet;
    }
    if (remainingTypes.size() == 1) {
        return remainingTypes.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
/**
 * Narrows a finite type by removing every value that matches one of the given types.
 *
 * @return a new anonymous finite type with the surviving values, or semanticError when none
 */
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
    Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();

    // Keep only the values that match none of the types being removed.
    nextValue:
    for (BLangExpression valueExpr : originalType.getValueSpace()) {
        for (BType remType : removeTypes) {
            if (isAssignable(valueExpr.type, remType) ||
                    isAssignableToFiniteType(remType, (BLangLiteral) valueExpr)) {
                continue nextValue;
            }
        }
        remainingValueSpace.add(valueExpr);
    }

    if (remainingValueSpace.isEmpty()) {
        return symTable.semanticError;
    }

    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
                                                            names.fromString("$anonType$" + UNDERSCORE
                                                                                     + finiteTypeCount++),
                                                            originalType.tsymbol.pkgID, null,
                                                            originalType.tsymbol.owner, originalType.tsymbol.pos,
                                                            VIRTUAL);
    BFiniteType narrowedFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
    finiteTypeSymbol.type = narrowedFiniteType;
    return narrowedFiniteType;
}
/**
 * Returns the "safe" variant of a type: implicitly-nullable built-ins get a non-nullable
 * clone, and unions optionally get their nil and/or error members removed.
 *
 * @param type the type to lift
 * @param liftNil whether to remove nil
 * @param liftError whether to remove error
 * @return the lifted type (a single member when only one remains)
 */
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
    // Built-in types with implicit nil-ness get a non-nullable clone.
    switch (type.tag) {
        case TypeTags.JSON:
            return new BJSONType((BJSONType) type, false);
        case TypeTags.ANY:
            return new BAnyType(type.tag, type.tsymbol, false);
        case TypeTags.ANYDATA:
            return new BAnydataType((BAnydataType) type, false);
        case TypeTags.READONLY:
            return new BReadonlyType(type.tag, type.tsymbol, false);
    }

    if (type.tag != TypeTags.UNION) {
        return type;
    }

    // Copy the union's members, then drop nil/error as requested.
    BUnionType liftedType = BUnionType.create(null, new LinkedHashSet<>(((BUnionType) type).getMemberTypes()));
    if (liftNil) {
        liftedType.remove(symTable.nilType);
    }
    if (liftError) {
        liftedType.remove(symTable.errorType);
    }

    Set<BType> remainingMembers = liftedType.getMemberTypes();
    if (remainingMembers.size() == 1) {
        return remainingMembers.iterator().next();
    }
    return liftedType;
}
/**
 * Flattens a (possibly nested) union into a list of its non-union member types; a non-union
 * type yields a singleton list.
 */
public List<BType> getAllTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }

    List<BType> memberTypes = new ArrayList<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        memberTypes.addAll(getAllTypes(memberType));
    }
    return memberTypes;
}
/**
 * Returns true when a constant declaration may use the given type: simple value types,
 * maps of allowed types, and finite types whose value's type is allowed.
 */
public boolean isAllowedConstantType(BType type) {
    switch (type.tag) {
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.NIL:
            return true;
        case TypeTags.MAP:
            // A map constant is allowed when its constraint type is allowed.
            return isAllowedConstantType(((BMapType) type).constraint);
        case TypeTags.FINITE:
            // Check the type of the finite type's first member value.
            BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().iterator().next();
            return isAllowedConstantType(finiteValue.type);
        default:
            return false;
    }
}
/**
 * Returns true when the given literal is a valid value for the target type, including the
 * integer/char subtypes whose value spaces are proper subsets of int/string.
 *
 * @param literal the literal expression
 * @param targetType the type the literal must belong to
 * @return true if the literal's value fits the target type
 */
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
    BType literalType = literal.type;
    if (literalType.tag == targetType.tag) {
        return true;
    }

    int literalTag = literalType.tag;
    switch (targetType.tag) {
        case TypeTags.BYTE:
            return literalTag == TypeTags.INT && isByteLiteralValue((Long) literal.value);
        case TypeTags.DECIMAL:
            // int and float literals are implicitly usable as decimal.
            return literalTag == TypeTags.FLOAT || literalTag == TypeTags.INT;
        case TypeTags.FLOAT:
            return literalTag == TypeTags.INT;
        case TypeTags.SIGNED32_INT:
            return literalTag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);
        case TypeTags.SIGNED16_INT:
            return literalTag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);
        case TypeTags.SIGNED8_INT:
            return literalTag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED32_INT:
            return literalTag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED16_INT:
            return literalTag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);
        case TypeTags.UNSIGNED8_INT:
            return literalTag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);
        case TypeTags.CHAR_STRING:
            return literalTag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
        default:
            return false;
    }
}
/**
 * Validate if the return type of the given function is a subtype of `error?`, containing `()`.
 *
 * @param function The function of which the return type should be validated
 * @param diagnosticCode The code to log if the return type is invalid
 */
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
    BType returnType = function.returnTypeNode.type;

    if (returnType.tag == TypeTags.NIL) {
        return;
    }

    if (returnType.tag == TypeTags.UNION && returnType.isNullable()) {
        // Every member must be nil or error for the union to be a subtype of error?.
        boolean onlyNilAndError = ((BUnionType) returnType).getMemberTypes().stream()
                .allMatch(type -> type.tag == TypeTags.NIL || type.tag == TypeTags.ERROR);
        if (onlyNilAndError) {
            return;
        }
    }

    dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
/**
 * Type vector of size two, to hold the source and the target types.
 * Used as a key in sets of in-progress comparisons, so equals/hashCode are defined over
 * both components.
 *
 * @since 0.982.0
 */
private static class TypePair {
    BType sourceType;
    BType targetType;

    public TypePair(BType sourceType, BType targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypePair)) {
            return false;
        }

        TypePair other = (TypePair) obj;
        return this.sourceType.equals(other.sourceType) && this.targetType.equals(other.targetType);
    }

    @Override
    public int hashCode() {
        // Consistent with equals: hashes both components.
        return Objects.hash(sourceType, targetType);
    }
}
/**
 * A functional interface for parameterizing the type of type checking that needs to be done on the source and
 * target types.
 *
 * @since 0.995.0
 */
private interface TypeEqualityPredicate {
    /**
     * Tests the source type against the target type.
     *
     * @param source          the source type
     * @param target          the target type
     * @param unresolvedTypes type pairs currently under comparison — presumably threaded
     *                        through recursive checks to break cycles; see callers
     * @return true when the pair satisfies this predicate
     */
    boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
/**
 * Returns true when the given type has a filler value, i.e. a value a list member of that
 * type can default to.
 *
 * @param type the type to check
 * @return true if a filler value exists for the type
 */
public boolean hasFillerValue(BType type) {
    switch (type.tag) {
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.BOOLEAN:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.NIL:
        case TypeTags.TABLE:
        case TypeTags.ANYDATA:
        case TypeTags.MAP:
        case TypeTags.ANY:
            return true;
        case TypeTags.ARRAY:
            return checkFillerValue((BArrayType) type);
        case TypeTags.FINITE:
            return checkFillerValue((BFiniteType) type);
        case TypeTags.UNION:
            return checkFillerValue((BUnionType) type);
        case TypeTags.OBJECT:
            return checkFillerValue((BObjectType) type);
        case TypeTags.RECORD:
            return checkFillerValue((BRecordType) type);
        case TypeTags.TUPLE:
            // A tuple has a filler value only when every member type does.
            for (BType memberType : ((BTupleType) type).getTupleTypes()) {
                if (!hasFillerValue(memberType)) {
                    return false;
                }
            }
            return true;
        default:
            // Integer subtypes (signed8/16/32, unsigned8/16/32) also have filler values.
            return TypeTags.isIntegerTypeTag(type.tag);
    }
}
/**
 * An object type has a filler value only when it is a class whose {@code init} method
 * (if any) returns nil and is callable with no arguments.
 */
private boolean checkFillerValue(BObjectType type) {
    // Only class definitions can be instantiated implicitly.
    if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
        return false;
    }
    BAttachedFunction initFunc = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
    if (initFunc == null) {
        // No explicit init: default construction works.
        return true;
    }
    if (initFunc.symbol.getReturnType().getKind() != TypeKind.NIL) {
        return false;
    }
    // `new ()` must be valid, so every init parameter needs a default.
    return initFunc.symbol.getParameters().stream().allMatch(param -> param.defaultableParam);
}
/**
 * Handles two kinds of types. Singleton: a singleton has exactly one value, and that value must
 * itself be a valid filler value. Union: (1) if nil is a member, nil is the filler value;
 * (2) otherwise all values must belong to the same basic type, and the implicit default value of
 * that basic type must be a member of the union. Precondition: the value space contains at least
 * one element.
 *
 * @param type a finite (or singleton) type
 * @return true if the type has a valid filler value
 */
private boolean checkFillerValue(BFiniteType type) {
    // Nil in the value space is itself the filler value.
    if (type.isNullable()) {
        return true;
    }
    // Singleton: the single value fills itself.
    if (type.getValueSpace().size() == 1) {
        return true;
    }
    // Otherwise: all values must share one basic type and the implicit default of
    // that basic type must appear in the value space.
    Iterator iterator = type.getValueSpace().iterator();
    BLangExpression firstElement = (BLangExpression) iterator.next();
    boolean defaultFillValuePresent = isImplicitDefaultValue(firstElement);
    while (iterator.hasNext()) {
        BLangExpression value = (BLangExpression) iterator.next();
        if (!isSameBasicType(value.type, firstElement.type)) {
            return false;
        }
        if (!defaultFillValuePresent && isImplicitDefaultValue(value)) {
            defaultFillValuePresent = true;
        }
    }
    return defaultFillValuePresent;
}
/** True if any value in the given value space is the implicit default of its basic type. */
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
    return valueSpace.stream().anyMatch(this::isImplicitDefaultValue);
}
private boolean checkFillerValue(BUnionType type) {
    // Nil member: () is the filler value.
    if (type.isNullable()) {
        return true;
    }
    // Collect the basic types of all members (expanding finite types into the types
    // of their values) and track three facts along the way.
    Set<BType> memberTypes = new HashSet<>();
    boolean hasFillerValue = false;        // at least one member has a filler value
    boolean defaultValuePresent = false;   // some finite member contains an implicit default value
    boolean finiteTypePresent = false;     // the union contains a finite member
    for (BType member : type.getMemberTypes()) {
        if (member.tag == TypeTags.FINITE) {
            Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
            memberTypes.addAll(uniqueValues);
            if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
                defaultValuePresent = true;
            }
            finiteTypePresent = true;
        } else {
            memberTypes.add(member);
        }
        if (!hasFillerValue && hasFillerValue(member)) {
            hasFillerValue = true;
        }
    }
    if (!hasFillerValue) {
        return false;
    }
    // All collected types must be the same basic type, otherwise there is no
    // unambiguous filler value.
    Iterator<BType> iterator = memberTypes.iterator();
    BType firstMember = iterator.next();
    while (iterator.hasNext()) {
        if (!isSameBasicType(firstMember, iterator.next())) {
            return false;
        }
    }
    // With finite members, the implicit default value must actually be in the union.
    if (finiteTypePresent) {
        return defaultValuePresent;
    }
    return true;
}
/** Same type, or both integer subtypes — which all share the int basic type. */
private boolean isSameBasicType(BType source, BType target) {
    return isSameType(source, target)
            || (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag));
}
/** Collects the distinct static types of the expressions in a finite type's value space. */
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
    Set<BType> distinctTypes = new HashSet<>();
    valueSpace.forEach(valueExpr -> distinctTypes.add(valueExpr.type));
    return distinctTypes;
}
/**
 * True if the given literal expression is the implicit default value of its basic
 * type (0, "", 0.0, false, or nil). Non-literal expressions are never defaults.
 */
private boolean isImplicitDefaultValue(BLangExpression expression) {
    if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
        BLangLiteral literalExpression = (BLangLiteral) expression;
        BType literalExprType = literalExpression.type;
        Object value = literalExpression.getValue();
        switch (literalExprType.getKind()) {
            case INT:
            case BYTE:
                return value.equals(Long.valueOf(0));
            case STRING:
                return value == null || value.equals("");
            case DECIMAL:
            case FLOAT:
                // NOTE(review): compares against the string "0.0" — assumes float/decimal
                // literal values are stored as strings here; verify against the lexer.
                return value.equals(String.valueOf(0.0));
            case BOOLEAN:
                return value.equals(Boolean.valueOf(false));
            case NIL:
                return true;
            default:
                return false;
        }
    }
    return false;
}
/** A record has the filler value {@code {}} iff it has no explicitly required field. */
private boolean checkFillerValue(BRecordType type) {
    for (BField field : type.fields.values()) {
        boolean optional = Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL);
        if (!optional && Symbols.isFlagOn(field.symbol.flags, Flags.REQUIRED)) {
            return false;
        }
    }
    return true;
}
/** Open arrays fill with []; fixed-length arrays need a filler value for their members. */
private boolean checkFillerValue(BArrayType type) {
    return type.size == -1 || hasFillerValue(type.eType);
}
/**
* Get result type of the query output.
*
* @param type type of query expression.
* @return result type.
*/
public BType resolveExprType(BType type) {
    switch (type.tag) {
        case TypeTags.STREAM:
            return ((BStreamType) type).constraint;
        case TypeTags.TABLE:
            return ((BTableType) type).constraint;
        case TypeTags.ARRAY:
            return ((BArrayType) type).eType;
        case TypeTags.UNION:
            // Use the first member that is a recognized collection/output type.
            List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
            for (BType returnType : exprTypes) {
                switch (returnType.tag) {
                    case TypeTags.STREAM:
                        return ((BStreamType) returnType).constraint;
                    case TypeTags.TABLE:
                        return ((BTableType) returnType).constraint;
                    case TypeTags.ARRAY:
                        return ((BArrayType) returnType).eType;
                    case TypeTags.STRING:
                    case TypeTags.XML:
                        return returnType;
                }
            }
            // Deliberate fall-through: a union with no matching member resolves to itself.
        default:
            return type;
    }
}
/** True for the simple basic types: nil, boolean, numerics (incl. int subtypes) and strings. */
private boolean isSimpleBasicType(int tag) {
    switch (tag) {
        case TypeTags.NIL:
        case TypeTags.BOOLEAN:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
            return true;
        default:
            return TypeTags.isIntegerTypeTag(tag) || TypeTags.isStringTypeTag(tag);
    }
}
/**
* Check whether a type is an ordered type.
*
* @param type type.
* @return boolean whether the type is an ordered type or not.
*/
public boolean isOrderedType(BType type) {
    switch (type.tag) {
        case TypeTags.UNION:
            // A union is ordered only as `T?`: at most two members, every member
            // ordered, and nil must be one of them.
            Set<BType> memberTypes = ((BUnionType) type).getMemberTypes();
            for (BType memType : memberTypes) {
                if (!isOrderedType(memType)) {
                    return false;
                }
            }
            return memberTypes.size() <= 2 && memberTypes.contains(symTable.nilType);
        case TypeTags.ARRAY:
            // An array is ordered iff its element type is.
            BType elementType = ((BArrayType) type).eType;
            return isOrderedType(elementType);
        default:
            return isSimpleBasicType(type.tag);
    }
}
/** True if the type is a simple basic type, or a union made up only of simple basic types. */
public boolean isUnionOfSimpleBasicTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return isSimpleBasicType(type.tag);
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSimpleBasicType(memberType.tag)) {
            return false;
        }
    }
    return true;
}
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
    // Immutable types trivially qualify.
    if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.OBJECT:
            // Mutable objects qualify only when isolated.
            return isIsolated(type);
        case TypeTags.UNION:
            // Every member must qualify.
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (!isSubTypeOfReadOnlyOrIsolatedObjectUnion(memberType)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
// True when the type carries the `isolated` flag.
private boolean isIsolated(BType type) {
    return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
/**
 * Validates that an object type conforms to the listener contract: it must declare the
 * five lifecycle methods (attach, detach, start, gracefulStop, immediateStop) with the
 * expected signatures.
 */
private static class ListenerValidationModel {
    private final Types types;
    private final SymbolTable symtable;
    // Type of attach/detach's service-name argument: string|string[]|().
    private final BType serviceNameType;
    boolean attachFound;
    boolean detachFound;
    boolean startFound;
    boolean gracefulStopFound;
    boolean immediateStopFound;

    public ListenerValidationModel(Types types, SymbolTable symTable) {
        this.types = types;
        this.symtable = symTable;
        this.serviceNameType =
                BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
    }

    /** A valid listener has all five lifecycle methods with conforming signatures. */
    boolean isValidListener() {
        return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
    }

    /**
     * Checks each lifecycle method found among the given attached functions.
     * Returns false as soon as any lifecycle method has a non-conforming signature;
     * otherwise returns whether all five were found.
     */
    private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
        for (BAttachedFunction func : rhsFuncs) {
            switch (func.funcName.value) {
                case "attach":
                    if (!checkAttachMethod(func)) {
                        return false;
                    }
                    break;
                case "detach":
                    if (!checkDetachMethod(func)) {
                        return false;
                    }
                    break;
                case "start":
                    // BUG FIX: previously returned true here, declaring the listener valid
                    // when the `start` signature check FAILED and skipping all remaining
                    // methods. Every sibling case returns false on failure.
                    if (!checkStartMethod(func)) {
                        return false;
                    }
                    break;
                case "gracefulStop":
                    if (!checkGracefulStop(func)) {
                        return false;
                    }
                    break;
                case "immediateStop":
                    if (!checkImmediateStop(func)) {
                        return false;
                    }
                    break;
            }
        }
        return isValidListener();
    }

    // NOTE(review): `restType != symtable.noType` reads inverted for "no parameters at
    // all" — presumably an absent rest param is represented by null rather than noType
    // here; confirm against BInvokableType before changing.
    private boolean emptyParamList(BAttachedFunction func) {
        return func.type.paramTypes.isEmpty() && func.type.restType != symtable.noType;
    }

    // Method must be public and return error? (i.e. assignable to error|()).
    private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
        if (!Symbols.isPublic(func.symbol)) {
            return false;
        }
        if (!types.isAssignable(func.type.retType, symtable.errorOrNilType)) {
            return false;
        }
        return true;
    }

    private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (!emptyParamList(func)) {
            return false;
        }
        return true;
    }

    private boolean checkImmediateStop(BAttachedFunction func) {
        return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkGracefulStop(BAttachedFunction func) {
        return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkStartMethod(BAttachedFunction func) {
        return startFound = publicAndReturnsErrorOrNil(func);
    }

    // detach(service) — single parameter that is a service object.
    private boolean checkDetachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 1) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        boolean isMatchingSignature = firstParamType.tag == TypeTags.OBJECT
                && Symbols.isService(firstParamType.tsymbol);
        return detachFound = isMatchingSignature;
    }

    // attach(service, name) — a service object plus a string|string[]|() name.
    private boolean checkAttachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 2) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        if (firstParamType.tag != TypeTags.OBJECT) {
            return false;
        }
        if (!Symbols.isService(firstParamType.tsymbol)) {
            return false;
        }
        BType secondParamType = func.type.paramTypes.get(1);
        boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
        return attachFound = sameType;
    }

    // Currently unused helper; kept for parity with the checks above.
    private boolean isServiceObject(BType type) {
        if (type.tag != TypeTags.OBJECT) {
            return false;
        }
        return Symbols.isService(type.tsymbol);
    }
}
} | class Types {
// Context key under which the singleton Types instance is registered.
private static final CompilerContext.Key<Types> TYPES_KEY =
        new CompilerContext.Key<>();
// Resolves parameterized (dependently typed) types to concrete types.
private final ResolvedTypeBuilder typeBuilder;
private SymbolTable symTable;
private SymbolResolver symResolver;
private BLangDiagnosticLog dlog;
private Names names;
// Counter, presumably for naming compiler-generated finite types — not used in this chunk.
private int finiteTypeCount = 0;
// Union of the built-in xml subtypes: Element|Comment|ProcessingInstruction|Text.
private BUnionType expandedXMLBuiltinSubtypes;
private final BLangAnonymousModelHelper anonymousModelHelper;
/** Returns the per-context singleton, creating (and registering) it on first use. */
public static Types getInstance(CompilerContext context) {
    Types instance = context.get(TYPES_KEY);
    // The constructor registers itself in the context under TYPES_KEY.
    return instance != null ? instance : new Types(context);
}
public Types(CompilerContext context) {
    // Register in the context first so collaborators initialized below can already
    // look this instance up via getInstance().
    context.put(TYPES_KEY, this);
    this.symTable = SymbolTable.getInstance(context);
    this.symResolver = SymbolResolver.getInstance(context);
    this.dlog = BLangDiagnosticLog.getInstance(context);
    this.names = Names.getInstance(context);
    // xml = xml:Element|xml:Comment|xml:ProcessingInstruction|xml:Text expanded form.
    this.expandedXMLBuiltinSubtypes = BUnionType.create(null,
            symTable.xmlElementType, symTable.xmlCommentType,
            symTable.xmlPIType, symTable.xmlTextType);
    this.typeBuilder = new ResolvedTypeBuilder();
    this.anonymousModelHelper = BLangAnonymousModelHelper.getInstance(context);
}
/**
 * Checks each actual type against the expected type at the same index.
 * Missing expected types default to noType (no constraint).
 */
public List<BType> checkTypes(BLangExpression node,
                              List<BType> actualTypes,
                              List<BType> expTypes) {
    List<BType> resultTypes = new ArrayList<>(actualTypes.size());
    for (int idx = 0; idx < actualTypes.size(); idx++) {
        BType expected = idx < expTypes.size() ? expTypes.get(idx) : symTable.noType;
        resultTypes.add(checkType(node, actualTypes.get(idx), expected));
    }
    return resultTypes;
}
/** Convenience overload using the generic incompatible-types diagnostic. */
public BType checkType(BLangExpression node, BType actualType, BType expType) {
    return checkType(node, actualType, expType, DiagnosticErrorCode.INCOMPATIBLE_TYPES);
}
/**
 * Checks and records the expression's type, inserting an implicit cast when needed.
 * On a semantic error the error type is recorded and returned without casting.
 */
public BType checkType(BLangExpression expr,
                       BType actualType,
                       BType expType,
                       DiagnosticCode diagCode) {
    BType resultType = checkType(expr.pos, actualType, expType, diagCode);
    expr.type = resultType;
    if (resultType.tag != TypeTags.SEMANTIC_ERROR) {
        setImplicitCastExpr(expr, actualType, expType);
    }
    return expr.type;
}
/**
 * Core type check: propagates existing errors silently, accepts when no expectation
 * exists or the actual type is assignable, and otherwise logs the diagnostic.
 */
public BType checkType(Location pos,
                       BType actualType,
                       BType expType,
                       DiagnosticCode diagCode) {
    // Don't cascade diagnostics for types that are already erroneous.
    if (expType.tag == TypeTags.SEMANTIC_ERROR) {
        return expType;
    }
    if (expType.tag == TypeTags.NONE) {
        // No expectation: the actual type stands.
        return actualType;
    }
    if (actualType.tag == TypeTags.SEMANTIC_ERROR) {
        return actualType;
    }
    if (isAssignable(actualType, expType)) {
        return actualType;
    }
    dlog.error(pos, diagCode, expType, actualType);
    return symTable.semanticError;
}
/** True if the type is json, or a union with a json member. */
public boolean isJSONContext(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.JSON;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (memberType.tag == TypeTags.JSON) {
            return true;
        }
    }
    return false;
}
/** True if the union is the json type itself, by name or structurally. */
public boolean isJSONUnionType(BUnionType type) {
    if (type.name != null && Names.JSON.getValue().equals(type.name.getValue())) {
        return true;
    }
    return isSameType(type, symTable.jsonType);
}
/** True when the type is lax (tri-state helper result 1 means lax). */
public boolean isLax(BType type) {
    return isLaxType(type, new HashSet<>()) == 1;
}
/**
 * Tri-state lax-type check: 1 = lax, 0 = not lax, -1 = indeterminate because this
 * type is already on the current visit path (cycle).
 */
public int isLaxType(BType type, Set<BType> visited) {
    // Already being visited: report indeterminate so the caller can ignore this member.
    if (!visited.add(type)) {
        return -1;
    }
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            return 1;
        case TypeTags.MAP:
            return isLaxType(((BMapType) type).constraint, visited);
        case TypeTags.UNION:
            // The json type (a union) is lax as a whole.
            if (isSameType(type, symTable.jsonType)) {
                visited.add(type);
                return 1;
            }
            // Lax iff no member is definitely non-lax and at least one is lax;
            // indeterminate (cyclic) members are skipped.
            boolean atleastOneLaxType = false;
            for (BType member : ((BUnionType) type).getMemberTypes()) {
                int result = isLaxType(member, visited);
                if (result == -1) {
                    continue;
                }
                if (result == 0) {
                    return 0;
                }
                atleastOneLaxType = true;
            }
            return atleastOneLaxType ? 1 : 0;
    }
    return 0;
}
/**
 * Memoized lax-type check. Unlike the tri-state Set overload, a union here is lax
 * only when EVERY member is lax, and results are cached per type in {@code visited}.
 */
public boolean isLaxType(BType type, Map<BType, Boolean> visited) {
    if (visited.containsKey(type)) {
        return visited.get(type);
    }
    switch (type.tag) {
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_ELEMENT:
            visited.put(type, true);
            return true;
        case TypeTags.MAP:
            // A map is lax iff its constraint is lax.
            boolean result = isLaxType(((BMapType) type).constraint, visited);
            visited.put(type, result);
            return result;
        case TypeTags.UNION:
            // The json type itself is lax.
            if (type == symTable.jsonType || isSameType(type, symTable.jsonType)) {
                visited.put(type, true);
                return true;
            }
            for (BType member : ((BUnionType) type).getMemberTypes()) {
                if (!isLaxType(member, visited)) {
                    visited.put(type, false);
                    return false;
                }
            }
            visited.put(type, true);
            return true;
    }
    visited.put(type, false);
    return false;
}
// Public entry point: structural same-type check with a fresh cycle-guard set.
public boolean isSameType(BType source, BType target) {
    return isSameType(source, target, new HashSet<>());
}
/** Delegates the purity check to a dedicated visitor (fresh instance per call). */
public boolean isPureType(BType type) {
    IsPureTypeUniqueVisitor pureVisitor = new IsPureTypeUniqueVisitor();
    return pureVisitor.visit(type);
}
/** Delegates the anydata check to a dedicated visitor (fresh instance per call). */
public boolean isAnydata(BType type) {
    IsAnydataUniqueVisitor anydataVisitor = new IsAnydataUniqueVisitor();
    return anydataVisitor.visit(type);
}
private boolean isSameType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    // Coinductive cycle guard: a pair already in progress is assumed same,
    // which makes recursive type definitions terminate.
    TypePair pair = new TypePair(source, target);
    if (unresolvedTypes.contains(pair)) {
        return true;
    }
    unresolvedTypes.add(pair);
    // Double dispatch: the visitor compares `source` against the visited `target`.
    BTypeVisitor<BType, Boolean> sameTypeVisitor = new BSameTypeVisitor(unresolvedTypes);
    return target.accept(sameTypeVisitor, source);
}
/** True for Ballerina value types: booleans, numerics (with subtypes) and strings. */
public boolean isValueType(BType type) {
    switch (type.tag) {
        // Booleans and basic numeric types.
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        // Integer subtypes.
        case TypeTags.SIGNED8_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED32_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED32_INT:
        // Strings and the char subtype.
        case TypeTags.STRING:
        case TypeTags.CHAR_STRING:
            return true;
        default:
            return false;
    }
}
// Relies on TypeTags ordering: all basic numeric tags are declared before STRING,
// so `tag < STRING` covers int/byte/float/decimal; integer subtypes are added explicitly.
boolean isBasicNumericType(BType type) {
    return type.tag < TypeTags.STRING || TypeTags.isIntegerTypeTag(type.tag);
}
/** True if any value in the finite type's value space has a basic numeric type. */
boolean finiteTypeContainsNumericTypeValues(BFiniteType finiteType) {
    for (BLangExpression valueExpr : finiteType.getValueSpace()) {
        if (isBasicNumericType(valueExpr.type)) {
            return true;
        }
    }
    return false;
}
/** True if the type is error, or a union containing error (recursively). */
public boolean containsErrorType(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.ERROR;
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (containsErrorType(memberType)) {
            return true;
        }
    }
    return false;
}
/** True if the type (or every member of a union) is an array or tuple type. */
public boolean isSubTypeOfList(BType type) {
    if (type.tag != TypeTags.UNION) {
        return isSubTypeOfBaseType(type, TypeTags.ARRAY) || isSubTypeOfBaseType(type, TypeTags.TUPLE);
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSubTypeOfList(memberType)) {
            return false;
        }
    }
    return true;
}
/**
 * Narrows a list match pattern's type against the matched expression's type.
 * Returns noType when they cannot intersect.
 */
BType resolvePatternTypeFromMatchExpr(BLangExpression matchExpr, BTupleType listMatchPatternType,
                                      SymbolEnv env) {
    // No match expression (e.g. in an on-fail context): the pattern type stands.
    if (matchExpr == null) {
        return listMatchPatternType;
    }
    BType matchExprType = matchExpr.type;
    BType intersectionType = getTypeIntersection(matchExprType, listMatchPatternType, env);
    if (intersectionType != symTable.semanticError) {
        return intersectionType;
    }
    if (matchExprType.tag == TypeTags.ANYDATA) {
        // Matching against anydata: widen the pattern's members (and rest type)
        // to anydata, mutating the tuple type in place.
        Collections.fill(listMatchPatternType.tupleTypes, symTable.anydataType);
        if (listMatchPatternType.restType != null) {
            listMatchPatternType.restType = symTable.anydataType;
        }
        return listMatchPatternType;
    }
    return symTable.noType;
}
/**
 * Resolves the type a const match pattern narrows to, given the matched expression.
 * Returns noType when the pattern can never match.
 */
public BType resolvePatternTypeFromMatchExpr(BLangConstPattern constPattern, BLangExpression constPatternExpr) {
    // No match expression: the pattern's own (or referenced constant's) type stands.
    if (constPattern.matchExpr == null) {
        if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
            return ((BLangSimpleVarRef) constPatternExpr).symbol.type;
        } else {
            return constPatternExpr.type;
        }
    }
    BType matchExprType = constPattern.matchExpr.type;
    BType constMatchPatternExprType = constPatternExpr.type;
    // Constant reference pattern: narrow to the constant's type when compatible.
    if (constPatternExpr.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        BLangSimpleVarRef constVarRef = (BLangSimpleVarRef) constPatternExpr;
        if (constVarRef.symbol == null) {
            return symTable.noType;
        }
        BType constVarRefSymbolType = constVarRef.symbol.type;
        if (isAssignable(constVarRefSymbolType, matchExprType)) {
            return constVarRefSymbolType;
        }
        return symTable.noType;
    }
    // Literal pattern from here on.
    BLangLiteral constPatternLiteral = (BLangLiteral) constPatternExpr;
    if (containsAnyType(constMatchPatternExprType)) {
        return matchExprType;
    } else if (containsAnyType(matchExprType)) {
        return constMatchPatternExprType;
    }
    // An int literal matched against byte narrows to byte.
    if (matchExprType.tag == TypeTags.BYTE && constMatchPatternExprType.tag == TypeTags.INT) {
        return matchExprType;
    }
    if (isAssignable(constMatchPatternExprType, matchExprType)) {
        return constMatchPatternExprType;
    }
    if (matchExprType.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) matchExprType).getMemberTypes()) {
            if (memberType.tag == TypeTags.FINITE) {
                if (isAssignableToFiniteType(memberType, constPatternLiteral)) {
                    return memberType;
                }
            } else {
                // NOTE(review): re-tests assignability to the WHOLE union (same check as
                // above) rather than to `memberType` — verify this is intentional.
                if (isAssignable(constMatchPatternExprType, matchExprType)) {
                    return constMatchPatternExprType;
                }
            }
        }
    } else if (matchExprType.tag == TypeTags.FINITE) {
        if (isAssignableToFiniteType(matchExprType, constPatternLiteral)) {
            return matchExprType;
        }
    }
    return symTable.noType;
}
/**
 * Narrows a mapping match pattern's type against the matched expression's type.
 * Returns noType when the two types have no intersection.
 */
BType resolvePatternTypeFromMatchExpr(BLangMappingMatchPattern mappingMatchPattern, BType patternType,
                                      SymbolEnv env) {
    if (mappingMatchPattern.matchExpr == null) {
        return patternType;
    }
    BType intersection = getTypeIntersection(mappingMatchPattern.matchExpr.type, patternType, env);
    return intersection == symTable.semanticError ? symTable.noType : intersection;
}
/** True if the type is `any`, or a union with an `any` member (one level deep). */
private boolean containsAnyType(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.ANY;
    }
    return ((BUnionType) type).getMemberTypes().stream()
            .anyMatch(memberType -> memberType.tag == TypeTags.ANY);
}
/** True if the type is `anydata`, or a union with an `anydata` member (one level deep). */
private boolean containsAnyDataType(BType type) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == TypeTags.ANYDATA;
    }
    return ((BUnionType) type).getMemberTypes().stream()
            .anyMatch(memberType -> memberType.tag == TypeTags.ANYDATA);
}
/**
 * Merges two types into one: prefers the narrower side when the other is
 * any/anydata (and no error is involved), collapses same basic types, and
 * otherwise builds a union of the two.
 */
BType mergeTypes(BType typeFirst, BType typeSecond) {
    if (containsAnyType(typeFirst) && !containsErrorType(typeSecond)) {
        return typeSecond;
    }
    if (containsAnyType(typeSecond) && !containsErrorType(typeFirst)) {
        return typeFirst;
    }
    if (containsAnyDataType(typeFirst) && !containsErrorType(typeSecond)) {
        return typeSecond;
    }
    if (containsAnyDataType(typeSecond) && !containsErrorType(typeFirst)) {
        return typeFirst;
    }
    if (isSameBasicType(typeFirst, typeSecond)) {
        return typeFirst;
    }
    return BUnionType.create(null, typeFirst, typeSecond);
}
/** True if the type (or every member of a union) is a map or record type. */
public boolean isSubTypeOfMapping(BType type) {
    if (type.tag != TypeTags.UNION) {
        return isSubTypeOfBaseType(type, TypeTags.MAP) || isSubTypeOfBaseType(type, TypeTags.RECORD);
    }
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (!isSubTypeOfMapping(memberType)) {
            return false;
        }
    }
    return true;
}
public boolean isSubTypeOfBaseType(BType type, int baseTypeTag) {
    if (type.tag != TypeTags.UNION) {
        return type.tag == baseTypeTag;
    }
    // NOTE(review): any union is reported as a subtype of any XML base tag without
    // inspecting the members — verify this shortcut is intentional.
    if (TypeTags.isXMLTypeTag(baseTypeTag)) {
        return true;
    }
    return ((BUnionType) type).getMemberTypes().stream().allMatch(memType -> memType.tag == baseTypeTag);
}
/**
* Checks whether source type is assignable to the target type.
* <p>
* Source type is assignable to the target type if,
* 1) the target type is any and the source type is not a value type.
* 2) there exists an implicit cast symbol from source to target.
* 3) both types are JSON and the target constraint is no type.
* 4) both types are array type and both array types are assignable.
* 5) both types are MAP and the target constraint is any type or constraints are structurally equivalent.
*
* @param source type.
* @param target type.
* @return true if source type is assignable to the target type.
*/
public boolean isAssignable(BType source, BType target) {
    // Fresh cycle-guard set for each top-level assignability query.
    Set<TypePair> unresolvedTypes = new HashSet<>();
    return isAssignable(source, target, unresolvedTypes);
}
/** Stamping is allowed when either direction is assignable or stamp-equivalent. */
boolean isStampingAllowed(BType source, BType target) {
    if (isAssignable(source, target) || isAssignable(target, source)) {
        return true;
    }
    return checkTypeEquivalencyForStamping(source, target)
            || checkTypeEquivalencyForStamping(target, source);
}
/**
 * Structural equivalency check used by stamping, dispatched on the target type's tag.
 * Falls back to false for any unhandled combination.
 */
private boolean checkTypeEquivalencyForStamping(BType source, BType target) {
    if (target.tag == TypeTags.RECORD) {
        if (source.tag == TypeTags.RECORD) {
            // Seed the cycle guard with this pair before recursing.
            TypePair pair = new TypePair(source, target);
            Set<TypePair> unresolvedTypes = new HashSet<>();
            unresolvedTypes.add(pair);
            return checkRecordEquivalencyForStamping((BRecordType) source, (BRecordType) target, unresolvedTypes);
        } else if (source.tag == TypeTags.MAP) {
            // A map stamps to a sealed record only if every record field has the
            // map's constraint type (any/anydata constraints always pass).
            int mapConstraintTypeTag = ((BMapType) source).constraint.tag;
            if ((!(mapConstraintTypeTag == TypeTags.ANY || mapConstraintTypeTag == TypeTags.ANYDATA)) &&
                    ((BRecordType) target).sealed) {
                for (BField field : ((BStructureType) target).getFields().values()) {
                    if (field.getType().tag != mapConstraintTypeTag) {
                        return false;
                    }
                }
            }
            return true;
        }
    } else if (target.tag == TypeTags.JSON) {
        return source.tag == TypeTags.JSON || source.tag == TypeTags.RECORD || source.tag == TypeTags.MAP;
    } else if (target.tag == TypeTags.MAP) {
        if (source.tag == TypeTags.MAP) {
            return isStampingAllowed(((BMapType) source).getConstraint(), ((BMapType) target).getConstraint());
        } else if (source.tag == TypeTags.UNION) {
            return checkUnionEquivalencyForStamping(source, target);
        }
    } else if (target.tag == TypeTags.ARRAY) {
        if (source.tag == TypeTags.JSON) {
            return true;
        } else if (source.tag == TypeTags.TUPLE) {
            // Every tuple member must stamp to the array's element type.
            BType arrayElementType = ((BArrayType) target).eType;
            for (BType tupleMemberType : ((BTupleType) source).getTupleTypes()) {
                if (!isStampingAllowed(tupleMemberType, arrayElementType)) {
                    return false;
                }
            }
            return true;
        } else if (source.tag == TypeTags.ARRAY) {
            return checkTypeEquivalencyForStamping(((BArrayType) source).eType, ((BArrayType) target).eType);
        }
    } else if (target.tag == TypeTags.UNION) {
        return checkUnionEquivalencyForStamping(source, target);
    } else if (target.tag == TypeTags.TUPLE && source.tag == TypeTags.TUPLE) {
        return checkTupleEquivalencyForStamping(source, target);
    }
    return false;
}
private boolean checkRecordEquivalencyForStamping(BRecordType rhsType, BRecordType lhsType,
                                                  Set<TypePair> unresolvedTypes) {
    // XOR of the flag words: fails when exactly one side is public.
    if (Symbols.isFlagOn(lhsType.tsymbol.flags ^ rhsType.tsymbol.flags, Flags.PUBLIC)) {
        return false;
    }
    // A private LHS record is only reachable from its own package.
    // NOTE(review): `!=` compares PackageID references, not values — confirm pkgIDs
    // are interned, otherwise this should be !equals().
    if (Symbols.isPrivate(lhsType.tsymbol) && rhsType.tsymbol.pkgID != lhsType.tsymbol.pkgID) {
        return false;
    }
    // RHS must have at least the LHS's fields.
    if (lhsType.fields.size() > rhsType.fields.size()) {
        return false;
    }
    // A sealed LHS cannot accept an open RHS.
    if (lhsType.sealed && !rhsType.sealed) {
        return false;
    }
    return checkFieldEquivalencyForStamping(lhsType, rhsType, unresolvedTypes);
}
private boolean checkFieldEquivalencyForStamping(BStructureType lhsType, BStructureType rhsType,
                                                 Set<TypePair> unresolvedTypes) {
    // Every LHS field must exist on the RHS with a stampable type.
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = rhsType.fields.get(lhsField.name.value);
        if (rhsField == null || !isStampingAllowed(rhsField.type, lhsField.type)) {
            return false;
        }
    }
    // RHS-only fields must be stampable to the LHS record's rest field type.
    for (BField rhsField : rhsType.fields.values()) {
        BField lhsField = lhsType.fields.get(rhsField.name.value);
        if (lhsField == null && !isStampingAllowed(rhsField.type, ((BRecordType) lhsType).restFieldType)) {
            return false;
        }
    }
    return true;
}
/**
 * Union stamping check: every (expanded) source member must be stampable to at
 * least one (expanded) target member. Non-union inputs are treated as singletons.
 */
private boolean checkUnionEquivalencyForStamping(BType source, BType target) {
    Set<BType> sourceTypes = new LinkedHashSet<>();
    Set<BType> targetTypes = new LinkedHashSet<>();
    if (source.tag == TypeTags.UNION) {
        sourceTypes.addAll(((BUnionType) source).getMemberTypes());
    } else {
        sourceTypes.add(source);
    }
    if (target.tag == TypeTags.UNION) {
        targetTypes.addAll(((BUnionType) target).getMemberTypes());
    } else {
        targetTypes.add(target);
    }
    for (BType sourceMember : sourceTypes) {
        boolean matched = false;
        for (BType targetMember : targetTypes) {
            if (isStampingAllowed(sourceMember, targetMember)) {
                matched = true;
                break;
            }
        }
        if (!matched) {
            return false;
        }
    }
    return true;
}
/** Tuples stamp-equivalent iff same arity and pairwise member stamping is allowed. */
private boolean checkTupleEquivalencyForStamping(BType source, BType target) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    List<BType> targetMembers = ((BTupleType) target).tupleTypes;
    List<BType> sourceMembers = ((BTupleType) source).tupleTypes;
    if (targetMembers.size() != sourceMembers.size()) {
        return false;
    }
    for (int i = 0; i < targetMembers.size(); i++) {
        if (!isStampingAllowed(sourceMembers.get(i), targetMembers.get(i))) {
            return false;
        }
    }
    return true;
}
/**
 * Core assignability check. The rules below form an ordered cascade: earlier,
 * more specific rules must run before the later general ones, so do not reorder.
 * {@code unresolvedTypes} guards against infinite recursion on recursive types.
 */
private boolean isAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (isSameType(source, target)) {
        return true;
    }
    int sourceTag = source.tag;
    int targetTag = target.tag;
    // A readonly target needs a readonly/immutable source (parameterized types exempt).
    if (!Symbols.isFlagOn(source.flags, Flags.PARAMETERIZED) &&
            !isInherentlyImmutableType(target) && Symbols.isFlagOn(target.flags, Flags.READONLY) &&
            !isInherentlyImmutableType(source) && !Symbols.isFlagOn(source.flags, Flags.READONLY)) {
        return false;
    }
    // Intersections are represented by their effective type.
    if (sourceTag == TypeTags.INTERSECTION) {
        return isAssignable(((BIntersectionType) source).effectiveType,
                            targetTag != TypeTags.INTERSECTION ? target :
                                    ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }
    if (targetTag == TypeTags.INTERSECTION) {
        return isAssignable(source, ((BIntersectionType) target).effectiveType, unresolvedTypes);
    }
    if (sourceTag == TypeTags.PARAMETERIZED_TYPE) {
        return isParameterizedTypeAssignable(source, target, unresolvedTypes);
    }
    // byte is a subtype of int.
    if (sourceTag == TypeTags.BYTE && targetTag == TypeTags.INT) {
        return true;
    }
    if (TypeTags.isXMLTypeTag(sourceTag) && (TypeTags.isXMLTypeTag(targetTag) || targetTag == TypeTags.STRING)) {
        return isXMLTypeAssignable(source, target, unresolvedTypes);
    }
    // string:Char is a subtype of string; char and xml:Text are mutually assignable.
    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.STRING) {
        return true;
    }
    if (sourceTag == TypeTags.CHAR_STRING && targetTag == TypeTags.XML_TEXT) {
        return true;
    }
    if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.CHAR_STRING) {
        return true;
    }
    if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ERROR) {
        return isErrorTypeAssignable((BErrorType) source, (BErrorType) target, unresolvedTypes);
    } else if (sourceTag == TypeTags.ERROR && targetTag == TypeTags.ANY) {
        // error is deliberately excluded from any.
        return false;
    }
    if (sourceTag == TypeTags.NIL && (isNullable(target) || targetTag == TypeTags.JSON)) {
        return true;
    }
    // any/anydata/readonly act as top types for their respective subsets.
    if (targetTag == TypeTags.ANY && !containsErrorType(source) && !isValueType(source)) {
        return true;
    }
    if (targetTag == TypeTags.ANYDATA && !containsErrorType(source)) {
        if (isAnydata(source)) {
            return true;
        }
    }
    if (targetTag == TypeTags.READONLY &&
            (isInherentlyImmutableType(source) || Symbols.isFlagOn(source.flags, Flags.READONLY))) {
        return true;
    }
    // Record <-> map coercions.
    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.RECORD) {
        BRecordType recordType = (BRecordType) source;
        return isAssignableRecordType(recordType, target, unresolvedTypes);
    }
    if (targetTag == TypeTags.RECORD && sourceTag == TypeTags.MAP) {
        return isAssignableMapType((BMapType) source, (BRecordType) target);
    }
    // Constrained types compare by their constraints.
    if (targetTag == TypeTags.TYPEDESC && sourceTag == TypeTags.TYPEDESC) {
        return isAssignable(((BTypedescType) source).constraint, (((BTypedescType) target).constraint),
                            unresolvedTypes);
    }
    if (targetTag == TypeTags.TABLE && sourceTag == TypeTags.TABLE) {
        return isAssignableTableType((BTableType) source, (BTableType) target, unresolvedTypes);
    }
    if (targetTag == TypeTags.STREAM && sourceTag == TypeTags.STREAM) {
        return isAssignable(((BStreamType) source).constraint, ((BStreamType) target).constraint, unresolvedTypes);
    }
    if (isBuiltInTypeWidenPossible(source, target) == TypeTestResult.TRUE) {
        return true;
    }
    // Finite and union handling must precede the JSON/structural rules below.
    if (sourceTag == TypeTags.FINITE) {
        return isFiniteTypeAssignable((BFiniteType) source, target, unresolvedTypes);
    }
    if ((targetTag == TypeTags.UNION || sourceTag == TypeTags.UNION) &&
            isAssignableToUnionType(source, target, unresolvedTypes)) {
        return true;
    }
    if (targetTag == TypeTags.JSON) {
        if (sourceTag == TypeTags.JSON) {
            return true;
        }
        if (sourceTag == TypeTags.ARRAY) {
            return isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
        }
        if (sourceTag == TypeTags.MAP) {
            return isAssignable(((BMapType) source).constraint, target, unresolvedTypes);
        }
        if (sourceTag == TypeTags.RECORD) {
            return isAssignableRecordType((BRecordType) source, target, unresolvedTypes);
        }
    }
    if (targetTag == TypeTags.FUTURE && sourceTag == TypeTags.FUTURE) {
        // An unconstrained future accepts any future.
        if (((BFutureType) target).constraint.tag == TypeTags.NONE) {
            return true;
        }
        return isAssignable(((BFutureType) source).constraint, ((BFutureType) target).constraint, unresolvedTypes);
    }
    if (targetTag == TypeTags.MAP && sourceTag == TypeTags.MAP) {
        // map<any> accepts any non-union-constrained map.
        if (((BMapType) target).constraint.tag == TypeTags.ANY &&
                ((BMapType) source).constraint.tag != TypeTags.UNION) {
            return true;
        }
        return isAssignable(((BMapType) source).constraint, ((BMapType) target).constraint, unresolvedTypes);
    }
    // Objects/records: structural equivalency.
    if ((sourceTag == TypeTags.OBJECT || sourceTag == TypeTags.RECORD)
            && (targetTag == TypeTags.OBJECT || targetTag == TypeTags.RECORD)) {
        return checkStructEquivalency(source, target, unresolvedTypes);
    }
    // List types.
    if (sourceTag == TypeTags.TUPLE && targetTag == TypeTags.ARRAY) {
        return isTupleTypeAssignableToArrayType((BTupleType) source, (BArrayType) target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.ARRAY && targetTag == TypeTags.TUPLE) {
        return isArrayTypeAssignableToTupleType((BArrayType) source, (BTupleType) target, unresolvedTypes);
    }
    if (sourceTag == TypeTags.TUPLE || targetTag == TypeTags.TUPLE) {
        return isTupleTypeAssignable(source, target, unresolvedTypes);
    }
    // Function types use a fresh cycle-guard set.
    if (sourceTag == TypeTags.INVOKABLE && targetTag == TypeTags.INVOKABLE) {
        return isFunctionTypeAssignable((BInvokableType) source, (BInvokableType) target, new HashSet<>());
    }
    return sourceTag == TypeTags.ARRAY && targetTag == TypeTags.ARRAY &&
            isArrayTypesAssignable((BArrayType) source, target, unresolvedTypes);
}
private boolean isParameterizedTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    // Resolve the dependent type to its concrete form before comparing.
    BType resolvedSourceType = typeBuilder.build(source);
    if (target.tag != TypeTags.PARAMETERIZED_TYPE) {
        return isAssignable(resolvedSourceType, target, unresolvedTypes);
    }
    // Two parameterized types must refer to the same parameter position.
    if (((BParameterizedType) source).paramIndex != ((BParameterizedType) target).paramIndex) {
        return false;
    }
    return isAssignable(resolvedSourceType, typeBuilder.build(target), unresolvedTypes);
}
/**
 * Checks record-to-map/json assignability by verifying every (rest) field against
 * the target's member type. Only MAP and JSON targets are valid here; anything
 * else is a caller bug, hence the IllegalArgumentException.
 */
private boolean isAssignableRecordType(BRecordType recordType, BType type, Set<TypePair> unresolvedTypes) {
    // Cycle guard: a pair already in progress is assumed assignable.
    TypePair pair = new TypePair(recordType, type);
    if (!unresolvedTypes.add(pair)) {
        return true;
    }
    BType targetType;
    switch (type.tag) {
        case TypeTags.MAP:
            targetType = ((BMapType) type).constraint;
            break;
        case TypeTags.JSON:
            targetType = type;
            break;
        default:
            throw new IllegalArgumentException("Incompatible target type: " + type.toString());
    }
    return recordFieldsAssignableToType(recordType, targetType, unresolvedTypes);
}
/**
 * Checks whether every field of the given record type (and, for open records, its rest
 * field type) is assignable to the given target type.
 *
 * @param recordType      the record whose fields are inspected
 * @param targetType      the type each field must be assignable to
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if all declared fields (and the rest field, when open) are assignable
 */
private boolean recordFieldsAssignableToType(BRecordType recordType, BType targetType,
                                             Set<TypePair> unresolvedTypes) {
    // Every declared field must individually fit the target type.
    for (BField recField : recordType.fields.values()) {
        if (!isAssignable(recField.type, targetType, unresolvedTypes)) {
            return false;
        }
    }
    // A sealed record has no rest field; an open record's rest type must also fit.
    return recordType.sealed || isAssignable(recordType.restFieldType, targetType, unresolvedTypes);
}
/**
 * Checks assignability between two table types: the row (constraint) types must be
 * assignable, and the target's key specification (if any) must be satisfied either by
 * the source's key type constraint or by the key types derived from its field-name list.
 *
 * @param sourceTableType the source table type
 * @param targetTableType the target table type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the source table type is assignable to the target table type
 */
private boolean isAssignableTableType(BTableType sourceTableType, BTableType targetTableType,
                                      Set<TypePair> unresolvedTypes) {
    if (!isAssignable(sourceTableType.constraint, targetTableType.constraint, unresolvedTypes)) {
        return false;
    }
    // Target places no key requirement at all.
    if (targetTableType.keyTypeConstraint == null && targetTableType.fieldNameList == null) {
        return true;
    }
    if (targetTableType.keyTypeConstraint != null) {
        // Direct key-type-constraint to key-type-constraint comparison.
        if (sourceTableType.keyTypeConstraint != null &&
                (isAssignable(sourceTableType.keyTypeConstraint, targetTableType.keyTypeConstraint,
                        unresolvedTypes))) {
            return true;
        }
        // No key information at all on the source: cannot satisfy the target's constraint.
        if (sourceTableType.fieldNameList == null) {
            return false;
        }
        // Derive the effective key type from the source's key field names and compare
        // that (a single type, or a tuple of the field types) against the target constraint.
        List<BType> fieldTypes = new ArrayList<>();
        sourceTableType.fieldNameList.forEach(field -> fieldTypes
                .add(getTableConstraintField(sourceTableType.constraint, field).type));
        if (fieldTypes.size() == 1) {
            return isAssignable(fieldTypes.get(0), targetTableType.keyTypeConstraint, unresolvedTypes);
        }
        BTupleType tupleType = new BTupleType(fieldTypes);
        return isAssignable(tupleType, targetTableType.keyTypeConstraint, unresolvedTypes);
    }
    // Target specified key field names: the source must use the identical name list.
    return targetTableType.fieldNameList.equals(sourceTableType.fieldNameList);
}
/**
 * Looks up the named field in a table's row-type constraint. For a union constraint the
 * field must exist in every member with mutually assignable (i.e. equivalent) types;
 * for an intersection, the effective type is consulted.
 *
 * @param constraintType the table row type (record, union, or intersection)
 * @param fieldName      the key field name to resolve
 * @return the resolved field, or null if it cannot be resolved consistently
 */
BField getTableConstraintField(BType constraintType, String fieldName) {
    switch (constraintType.tag) {
        case TypeTags.RECORD:
            Map<String, BField> fieldList = ((BRecordType) constraintType).getFields();
            return fieldList.get(fieldName);
        case TypeTags.UNION:
            BUnionType unionType = (BUnionType) constraintType;
            Set<BType> memTypes = unionType.getMemberTypes();
            List<BField> fields = memTypes.stream().map(type -> getTableConstraintField(type, fieldName))
                    .filter(Objects::nonNull).collect(Collectors.toList());
            // Field missing in at least one member: no consistent resolution.
            if (fields.size() != memTypes.size()) {
                return null;
            }
            // All member fields must have equivalent types (assignable both ways).
            if (fields.stream().allMatch(field -> isAssignable(field.type, fields.get(0).type) &&
                    isAssignable(fields.get(0).type, field.type))) {
                return fields.get(0);
            }
            break;
        case TypeTags.INTERSECTION:
            return getTableConstraintField(((BIntersectionType) constraintType).effectiveType, fieldName);
    }
    return null;
}
/**
 * Checks whether a map type is assignable to a record type. The record must be open,
 * every declared field must be optional with the map constraint assignable to it
 * (readonly-ness compatible), and the constraint must also fit the rest field type.
 *
 * @param sourceMapType the source map type
 * @param targetRecType the target record type
 * @return true if the map type is assignable to the record type
 */
private boolean isAssignableMapType(BMapType sourceMapType, BRecordType targetRecType) {
    // A sealed record cannot accept arbitrary map entries.
    if (targetRecType.sealed) {
        return false;
    }
    BType srcConstraint = sourceMapType.constraint;
    for (BField targetField : targetRecType.fields.values()) {
        long fieldFlags = targetField.symbol.flags;
        // Each declared field must be optional (a map may omit any key), must not demand
        // readonly the map cannot supply, and must accept the map's constraint type.
        if (!Symbols.isFlagOn(fieldFlags, Flags.OPTIONAL)
                || hasIncompatibleReadOnlyFlags(fieldFlags, sourceMapType.flags)
                || !isAssignable(srcConstraint, targetField.type)) {
            return false;
        }
    }
    // Unknown keys land in the rest field, so the constraint must fit there too.
    return isAssignable(srcConstraint, targetRecType.restFieldType);
}
/**
 * Returns true when the target demands readonly-ness that the source does not provide.
 *
 * @param targetFlags flags of the target symbol/type
 * @param sourceFlags flags of the source symbol/type
 * @return true if the target is readonly while the source is not
 */
private boolean hasIncompatibleReadOnlyFlags(long targetFlags, long sourceFlags) {
    if (!Symbols.isFlagOn(targetFlags, Flags.READONLY)) {
        // Target does not require readonly, so any source flag combination is fine.
        return false;
    }
    return !Symbols.isFlagOn(sourceFlags, Flags.READONLY);
}
/**
 * Checks assignability between two error types: any error is assignable to the
 * top error type; otherwise the detail types must be assignable and the target's
 * distinct type-id set must cover the source's.
 *
 * @param source          the source error type
 * @param target          the target error type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the source error type is assignable to the target
 */
private boolean isErrorTypeAssignable(BErrorType source, BErrorType target, Set<TypePair> unresolvedTypes) {
    // Everything is assignable to the built-in top error type.
    if (target == symTable.errorType) {
        return true;
    }
    TypePair pair = new TypePair(source, target);
    // add() returning false means this pair is already being checked: break the cycle.
    if (!unresolvedTypes.add(pair)) {
        return true;
    }
    return isAssignable(source.detailType, target.detailType, unresolvedTypes)
            && target.typeIdSet.isAssignableFrom(source.typeIdSet);
}
/**
 * Checks assignability between xml-related types, honoring xml sequence constraints:
 * a constrained {@code xml<T>} target requires the source (or its constraint) to be
 * assignable to T, while {@code xml:Text} and {@code string} get special-cased
 * treatment for empty (never-constrained) sequences.
 *
 * @param sourceType      the source type (an xml subtype tag, or xml)
 * @param targetType      the target type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the source is assignable to the target under xml rules
 */
private boolean isXMLTypeAssignable(BType sourceType, BType targetType, Set<TypePair> unresolvedTypes) {
    int sourceTag = sourceType.tag;
    int targetTag = targetType.tag;
    if (targetTag == TypeTags.XML) {
        BXMLType target = (BXMLType) targetType;
        if (target.constraint != null) {
            // Singleton xml items (element/comment/pi/text) check against the constraint directly.
            if (TypeTags.isXMLNonSequenceType(sourceTag)) {
                return isAssignable(sourceType, target.constraint, unresolvedTypes);
            }
            BXMLType source = (BXMLType) sourceType;
            if (source.constraint.tag == TypeTags.NEVER) {
                // xml<never> is the empty sequence: assignable to any xml sequence of the
                // same tag, otherwise it must fit the target's constraint.
                if (sourceTag == targetTag) {
                    return true;
                }
                return isAssignable(source, target.constraint, unresolvedTypes);
            }
            // Sequence-to-sequence: covariant on the constraint.
            return isAssignable(source.constraint, target.constraint, unresolvedTypes);
        }
        // Unconstrained xml target accepts any xml source.
        return true;
    }
    if (sourceTag == TypeTags.XML) {
        BXMLType source = (BXMLType) sourceType;
        if (targetTag == TypeTags.XML_TEXT) {
            // Only the empty xml sequence (xml<never>) is a subtype of xml:Text.
            if (source.constraint != null) {
                return source.constraint.tag == TypeTags.NEVER;
            }
            return false;
        }
        if (targetTag == TypeTags.STRING) {
            if (source.constraint.tag == TypeTags.NEVER) {
                return true;
            }
            return isAssignable(source.constraint, targetType, unresolvedTypes);
        }
    } else if (sourceTag == TypeTags.XML_TEXT && targetTag == TypeTags.STRING) {
        // xml:Text values are also strings.
        return true;
    }
    // Fallback: identical xml subtype tags are mutually assignable.
    return sourceTag == targetTag;
}
/**
 * Checks whether an xml-typed expression can be cast to a string(-like) target:
 * either the target is string itself, or a union/finite type that is assignable
 * to string, provided the xml source is string-castable.
 *
 * @param source the xml-related source type
 * @param target the intended target type
 * @return true if the cast to string is permitted
 */
public boolean isXMLExprCastableToString(BType source, BType target) {
    int targetTag = target.tag;
    if (targetTag == TypeTags.STRING) {
        return isXMLSourceCastableToString(source);
    }
    // A union or finite target qualifies only when it is itself string-compatible.
    if (targetTag == TypeTags.UNION || targetTag == TypeTags.FINITE) {
        return isAssignable(target, symTable.stringType) && isXMLSourceCastableToString(source);
    }
    return false;
}
/**
 * Checks whether an xml-related source type may be cast to string: xml:Text always
 * can; an xml sequence can when its (innermost) constraint is never or xml:Text;
 * a union can when all members are xml/string and the union is string-assignable.
 *
 * @param source the source type of the conversion expression
 * @return true if the source can be cast to string
 */
public boolean isXMLSourceCastableToString(BType source) {
    int exprTag = source.tag;
    if (exprTag == TypeTags.XML_TEXT) {
        return true;
    }
    if (exprTag == TypeTags.XML) {
        BXMLType conversionExpressionType = (BXMLType) source;
        // Unwrap nested xml<xml<...>> constraints down to the innermost item type.
        while (conversionExpressionType.constraint.tag == TypeTags.XML) {
            conversionExpressionType = (BXMLType) conversionExpressionType.constraint;
        }
        // Only empty sequences (never) or text-only sequences are string-castable.
        return conversionExpressionType.constraint.tag == TypeTags.NEVER ||
                conversionExpressionType.constraint.tag == TypeTags.XML_TEXT;
    }
    if (exprTag == TypeTags.UNION) {
        for (BType member : ((BUnionType) source).getMemberTypes()) {
            // Any non-xml, non-string member rules out the cast.
            if (!TypeTags.isXMLTypeTag(member.tag) && !(member.tag == TypeTags.STRING)) {
                return false;
            }
        }
        return isAssignable(source, symTable.stringType);
    }
    return false;
}
/**
 * Checks assignability between two tuple types, comparing member types pairwise and
 * handling rest types: a target without a rest type requires an exact member count,
 * while surplus source members must fit the target's rest type.
 *
 * @param source          the source type; must be a tuple type
 * @param target          the target type; must be a tuple type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the source tuple is assignable to the target tuple
 */
private boolean isTupleTypeAssignable(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.TUPLE || target.tag != TypeTags.TUPLE) {
        return false;
    }
    BTupleType lhsTupleType = (BTupleType) target;
    BTupleType rhsTupleType = (BTupleType) source;
    // A source with a rest type can be arbitrarily long; a closed target can't take that.
    if (lhsTupleType.restType == null && rhsTupleType.restType != null) {
        return false;
    }
    // Closed target: member counts must match exactly.
    if (lhsTupleType.restType == null && lhsTupleType.tupleTypes.size() != rhsTupleType.tupleTypes.size()) {
        return false;
    }
    // Both open: the rest types must be compatible.
    if (lhsTupleType.restType != null && rhsTupleType.restType != null) {
        if (!isAssignable(rhsTupleType.restType, lhsTupleType.restType, unresolvedTypes)) {
            return false;
        }
    }
    // Target must not require more fixed members than the source provides.
    if (lhsTupleType.tupleTypes.size() > rhsTupleType.tupleTypes.size()) {
        return false;
    }
    for (int i = 0; i < rhsTupleType.tupleTypes.size(); i++) {
        // Surplus source members (beyond the target's fixed members) check against the rest type.
        BType lhsType = (lhsTupleType.tupleTypes.size() > i)
                ? lhsTupleType.tupleTypes.get(i) : lhsTupleType.restType;
        if (!isAssignable(rhsTupleType.tupleTypes.get(i), lhsType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a tuple type is assignable to an array type: a fixed-size array
 * requires a closed tuple of the same length, and every tuple member (plus the
 * rest type, if any) must be assignable to the array's element type.
 *
 * @param source          the source tuple type
 * @param target          the target array type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the tuple type is assignable to the array type
 */
private boolean isTupleTypeAssignableToArrayType(BTupleType source, BArrayType target,
                                                 Set<TypePair> unresolvedTypes) {
    // A fixed-size array needs an exact, closed member count from the tuple.
    if (target.state != BArrayState.OPEN
            && (source.restType != null || source.tupleTypes.size() != target.size)) {
        return false;
    }
    List<BType> memberTypes = new ArrayList<>(source.tupleTypes);
    if (source.restType != null) {
        memberTypes.add(source.restType);
    }
    // Every member (including the rest type) must fit the array's element type.
    for (BType memberType : memberTypes) {
        if (!isAssignable(memberType, target.eType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an array type is assignable to a tuple type: when the tuple declares
 * explicit members the array must be fixed-size with a compatible length, and the
 * array's element type must fit every tuple member (and the rest type, if present).
 *
 * @param source          the source array type
 * @param target          the target tuple type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the array type is assignable to the tuple type
 */
private boolean isArrayTypeAssignableToTupleType(BArrayType source, BTupleType target,
                                                 Set<TypePair> unresolvedTypes) {
    if (!target.tupleTypes.isEmpty()) {
        // An open array cannot guarantee the member count a tuple demands.
        if (source.state == BArrayState.OPEN) {
            return false;
        }
        // With a rest type the tuple needs at least its fixed member count;
        // without one, the lengths must match exactly.
        if (target.restType != null && target.tupleTypes.size() > source.size) {
            return false;
        }
        if (target.restType == null && target.tupleTypes.size() != source.size) {
            return false;
        }
    }
    List<BType> memberTargets = new ArrayList<>(target.tupleTypes);
    if (target.restType != null) {
        memberTargets.add(target.restType);
    }
    // Array elements are homogeneous, so the element type must fit every slot.
    for (BType memberTarget : memberTargets) {
        if (!isAssignable(source.eType, memberTarget, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an array type is assignable to the given target: another array
 * (size-compatible, with assignable element types), or json/anydata (when the
 * element type is assignable to the target itself).
 *
 * @param source          the source array type
 * @param target          the target type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the array type is assignable to the target
 */
private boolean isArrayTypesAssignable(BArrayType source, BType target, Set<TypePair> unresolvedTypes) {
    BType sourceElementType = source.getElementType();
    switch (target.tag) {
        case TypeTags.ARRAY:
            BArrayType targetArrayType = (BArrayType) target;
            // Fixed-size targets additionally require matching lengths.
            if (targetArrayType.state != BArrayState.OPEN && targetArrayType.size != source.size) {
                return false;
            }
            return isAssignable(sourceElementType, targetArrayType.getElementType(), unresolvedTypes);
        case TypeTags.JSON:
        case TypeTags.ANYDATA:
            // An array fits json/anydata when each element does.
            return isAssignable(sourceElementType, target, unresolvedTypes);
        default:
            return false;
    }
}
/**
 * Checks assignability between two function types. When the target contains type
 * parameters, parameters holding a type param are checked covariantly and the rest
 * contravariantly; otherwise standard function-type equivalence (with contravariant
 * parameters) applies via {@link #checkFunctionTypeEquality}.
 *
 * @param source          the source function type
 * @param target          the target function type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the source function type is assignable to the target
 */
private boolean isFunctionTypeAssignable(BInvokableType source, BInvokableType target,
                                         Set<TypePair> unresolvedTypes) {
    // isolated-ness and transactional-ness must be compatible regardless of signatures.
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }
    if (containsTypeParams(target)) {
        if (source.paramTypes.size() != target.paramTypes.size()) {
            return false;
        }
        for (int i = 0; i < source.paramTypes.size(); i++) {
            BType sourceParam = source.paramTypes.get(i);
            BType targetParam = target.paramTypes.get(i);
            boolean isTypeParam = TypeParamAnalyzer.isTypeParam(targetParam);
            if (isTypeParam) {
                // Type-param slots are checked covariantly so the param binding works out.
                if (!isAssignable(sourceParam, targetParam)) {
                    return false;
                }
            } else {
                // Ordinary parameters are contravariant (target param must fit source param).
                if (!isAssignable(targetParam, sourceParam)) {
                    return false;
                }
            }
        }
        if (source.retType == null && target.retType == null) {
            return true;
        } else if (source.retType == null || target.retType == null) {
            return false;
        }
        // Return types are covariant.
        return isAssignable(source.retType, target.retType, unresolvedTypes);
    }
    // No type params: delegate with a flipped (contravariant) parameter check.
    return checkFunctionTypeEquality(source, target, unresolvedTypes, (s, t, ut) -> isAssignable(t, s, ut));
}
/**
 * Returns true for types whose values are always immutable: all value types plus the
 * listed reference types (and xml sequences constrained to never, i.e. empty xml).
 *
 * @param type the type to test
 * @return true if every value of the type is inherently immutable
 */
public boolean isInherentlyImmutableType(BType type) {
    if (isValueType(type)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.XML_TEXT:
        case TypeTags.FINITE:
        case TypeTags.READONLY:
        case TypeTags.NIL:
        case TypeTags.ERROR:
        case TypeTags.INVOKABLE:
        case TypeTags.TYPEDESC:
        case TypeTags.HANDLE:
            return true;
        case TypeTags.XML:
            // Only the empty xml sequence (xml<never>) is inherently immutable.
            return ((BXMLType) type).constraint.tag == TypeTags.NEVER;
    }
    return false;
}
// Convenience overloads delegating to the full isSelectivelyImmutableType check below,
// defaulting the unresolved-type set, forceCheck flag, and/or disallowReadOnlyObjects flag.
boolean isSelectivelyImmutableType(BType type) {
    return isSelectivelyImmutableType(type, new HashSet<>(), false);
}
boolean isSelectivelyImmutableType(BType type, boolean forceCheck) {
    return isSelectivelyImmutableType(type, new HashSet<>(), forceCheck);
}
public boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes) {
    return isSelectivelyImmutableType(type, unresolvedTypes, false);
}
private boolean isSelectivelyImmutableType(BType type, Set<BType> unresolvedTypes, boolean forceCheck) {
    return isSelectivelyImmutableType(type, false, unresolvedTypes, forceCheck);
}
/**
 * Checks whether a type is "selectively" immutable, i.e. has a read-only intersection
 * even though its values are not inherently immutable. Structured types qualify when
 * all their member/field/constraint types are inherently or selectively immutable;
 * a union qualifies when at least one member does.
 *
 * NOTE(review): the {@code disallowReadOnlyObjects} parameter is not referenced in
 * this body — confirm whether it is intentionally unused or pending implementation.
 *
 * @param type            the type to test
 * @param unresolvedTypes types already under resolution, to break cycles
 * @param forceCheck      recompute even when an immutable counterpart is already cached
 * @return true if a read-only intersection exists for the type
 */
private boolean isSelectivelyImmutableType(BType type, boolean disallowReadOnlyObjects, Set<BType> unresolvedTypes,
                                           boolean forceCheck) {
    // Inherently immutable types are by definition not *selectively* immutable, and
    // types outside the selectively-immutable reference kinds can never qualify.
    if (isInherentlyImmutableType(type) || !(type instanceof SelectivelyImmutableReferenceType)) {
        return false;
    }
    // Already being checked higher up the recursion: assume true to break the cycle.
    if (!unresolvedTypes.add(type)) {
        return true;
    }
    // A previously computed immutable counterpart settles the question unless forced.
    if (!forceCheck && ((SelectivelyImmutableReferenceType) type).getImmutableType() != null) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.XML_COMMENT:
        case TypeTags.XML_ELEMENT:
        case TypeTags.XML_PI:
            return true;
        case TypeTags.ARRAY:
            BType elementType = ((BArrayType) type).eType;
            return isInherentlyImmutableType(elementType) ||
                    isSelectivelyImmutableType(elementType, unresolvedTypes, forceCheck);
        case TypeTags.TUPLE:
            // Every member (and the rest type, if any) must admit immutability.
            BTupleType tupleType = (BTupleType) type;
            for (BType tupMemType : tupleType.tupleTypes) {
                if (!isInherentlyImmutableType(tupMemType) &&
                        !isSelectivelyImmutableType(tupMemType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            BType tupRestType = tupleType.restType;
            if (tupRestType == null) {
                return true;
            }
            return isInherentlyImmutableType(tupRestType) ||
                    isSelectivelyImmutableType(tupRestType, unresolvedTypes, forceCheck);
        case TypeTags.RECORD:
            // Every field (and the rest field type, if open) must admit immutability.
            BRecordType recordType = (BRecordType) type;
            for (BField field : recordType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            BType recordRestType = recordType.restFieldType;
            if (recordRestType == null || recordRestType == symTable.noType) {
                return true;
            }
            return isInherentlyImmutableType(recordRestType) ||
                    isSelectivelyImmutableType(recordRestType, unresolvedTypes, forceCheck);
        case TypeTags.MAP:
            BType constraintType = ((BMapType) type).constraint;
            return isInherentlyImmutableType(constraintType) ||
                    isSelectivelyImmutableType(constraintType, unresolvedTypes, forceCheck);
        case TypeTags.OBJECT:
            // Objects qualify when every field admits immutability.
            BObjectType objectType = (BObjectType) type;
            for (BField field : objectType.fields.values()) {
                BType fieldType = field.type;
                if (!isInherentlyImmutableType(fieldType) &&
                        !isSelectivelyImmutableType(fieldType, unresolvedTypes, forceCheck)) {
                    return false;
                }
            }
            return true;
        case TypeTags.TABLE:
            BType tableConstraintType = ((BTableType) type).constraint;
            return isInherentlyImmutableType(tableConstraintType) ||
                    isSelectivelyImmutableType(tableConstraintType, unresolvedTypes, forceCheck);
        case TypeTags.UNION:
            // A single immutable-capable member is enough for the union to have a
            // read-only intersection.
            boolean readonlyIntersectionExists = false;
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (isInherentlyImmutableType(memberType) ||
                        isSelectivelyImmutableType(memberType, unresolvedTypes, forceCheck)) {
                    readonlyIntersectionExists = true;
                }
            }
            return readonlyIntersectionExists;
        case TypeTags.INTERSECTION:
            return isSelectivelyImmutableType(((BIntersectionType) type).effectiveType, unresolvedTypes,
                    forceCheck);
    }
    return false;
}
/**
 * Checks whether a function type mentions any type parameter, recursing into
 * function-typed parameters and the return type.
 *
 * @param type the function type to inspect
 * @return true if any parameter or the return type contains a type parameter
 */
private boolean containsTypeParams(BInvokableType type) {
    for (BType paramType : type.paramTypes) {
        if (paramType.tag == TypeTags.FUNCTION_POINTER) {
            // Nested function type: inspect its signature recursively.
            if (containsTypeParams((BInvokableType) paramType)) {
                return true;
            }
        } else if (TypeParamAnalyzer.isTypeParam(paramType)) {
            return true;
        }
    }
    BType retType = type.retType;
    if (retType.tag == TypeTags.FUNCTION_POINTER) {
        return containsTypeParams((BInvokableType) retType);
    }
    return TypeParamAnalyzer.isTypeParam(retType);
}
// Structural sameness of two function types: delegates to the generic signature
// comparison using strict type sameness as the per-type predicate.
private boolean isSameFunctionType(BInvokableType source, BInvokableType target, Set<TypePair> unresolvedTypes) {
    return checkFunctionTypeEquality(source, target, unresolvedTypes, this::isSameType);
}
/**
 * Compares two function types signature-wise using the supplied per-type predicate
 * for parameters and rest types: arities must match, flags must be compatible, and
 * corresponding parameter/rest types must satisfy the predicate.
 *
 * NOTE(review): the return types are compared with {@code isAssignable} rather than
 * the supplied {@code equality} predicate — confirm this asymmetry is intentional
 * (it makes e.g. isSameFunctionType covariant in the return type).
 *
 * @param source          the source function type
 * @param target          the target function type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @param equality        the predicate applied to parameter and rest types
 * @return true if the signatures are equivalent under the predicate
 */
private boolean checkFunctionTypeEquality(BInvokableType source, BInvokableType target,
                                          Set<TypePair> unresolvedTypes, TypeEqualityPredicate equality) {
    if (hasIncompatibleIsolatedFlags(source, target) || hasIncompatibleTransactionalFlags(source, target)) {
        return false;
    }
    if (source.paramTypes.size() != target.paramTypes.size()) {
        return false;
    }
    for (int i = 0; i < source.paramTypes.size(); i++) {
        if (!equality.test(source.paramTypes.get(i), target.paramTypes.get(i), unresolvedTypes)) {
            return false;
        }
    }
    // Both must agree on the presence of a rest parameter, and its type must match.
    if ((source.restType != null && target.restType == null) ||
            target.restType != null && source.restType == null) {
        return false;
    } else if (source.restType != null && !equality.test(source.restType, target.restType, unresolvedTypes)) {
        return false;
    }
    if (source.retType == null && target.retType == null) {
        return true;
    } else if (source.retType == null || target.retType == null) {
        return false;
    }
    return isAssignable(source.retType, target.retType, unresolvedTypes);
}
// True when the target function type requires isolated-ness the source lacks.
private boolean hasIncompatibleIsolatedFlags(BInvokableType source, BInvokableType target) {
    return Symbols.isFlagOn(target.flags, Flags.ISOLATED) && !Symbols.isFlagOn(source.flags, Flags.ISOLATED);
}
// True when the source function type is transactional but the target is not
// (a transactional function can only be used where transactional context is allowed).
private boolean hasIncompatibleTransactionalFlags(BInvokableType source, BInvokableType target) {
    return Symbols.isFlagOn(source.flags, Flags.TRANSACTIONAL) &&
            !Symbols.isFlagOn(target.flags, Flags.TRANSACTIONAL);
}
/**
 * Checks structural sameness of two array types: both open with the same element type,
 * or both fixed with equal sizes and the same element type.
 *
 * @param source          the source type
 * @param target          the target type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if both are arrays of the same shape and element type
 */
public boolean isSameArrayType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    if (source.tag != TypeTags.ARRAY || target.tag != TypeTags.ARRAY) {
        return false;
    }
    BArrayType lhs = (BArrayType) target;
    BArrayType rhs = (BArrayType) source;
    boolean sameElementTypes = isSameType(lhs.eType, rhs.eType, unresolvedTypes);
    // Open arrays only match other open arrays.
    if (lhs.state == BArrayState.OPEN) {
        return (rhs.state == BArrayState.OPEN) && sameElementTypes;
    }
    // Fixed-size arrays must additionally agree on length.
    return checkSealedArraySizeEquality(rhs, lhs) && sameElementTypes;
}
// True when two fixed-size (sealed) array types declare the same length.
public boolean checkSealedArraySizeEquality(BArrayType rhsArrayType, BArrayType lhsArrayType) {
    return lhsArrayType.size == rhsArrayType.size;
}
// Public entry point for structural equivalence of object/record types, starting
// with an empty cycle-tracking set.
public boolean checkStructEquivalency(BType rhsType, BType lhsType) {
    return checkStructEquivalency(rhsType, lhsType, new HashSet<>());
}
/**
 * Checks structural equivalence between two object types or two record types;
 * any other tag combination is not equivalent.
 *
 * @param rhsType         the source (right-hand side) type
 * @param lhsType         the target (left-hand side) type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if the two types are structurally equivalent
 */
private boolean checkStructEquivalency(BType rhsType, BType lhsType, Set<TypePair> unresolvedTypes) {
    TypePair pair = new TypePair(rhsType, lhsType);
    // add() returning false means this pair is already being checked: break the cycle.
    if (!unresolvedTypes.add(pair)) {
        return true;
    }
    if (rhsType.tag == TypeTags.OBJECT && lhsType.tag == TypeTags.OBJECT) {
        return checkObjectEquivalency((BObjectType) rhsType, (BObjectType) lhsType, unresolvedTypes);
    }
    if (rhsType.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
        return checkRecordEquivalency((BRecordType) rhsType, (BRecordType) lhsType, unresolvedTypes);
    }
    return false;
}
/**
 * Checks structural subtyping of object types: the rhs must provide every (non-private)
 * field and method of the lhs with compatible visibility and types, honoring isolated
 * and service qualifiers, and the lhs type-id set must be satisfied by the rhs.
 *
 * @param rhsType         the candidate subtype object
 * @param lhsType         the target object type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if rhsType is structurally assignable to lhsType
 */
public boolean checkObjectEquivalency(BObjectType rhsType, BObjectType lhsType, Set<TypePair> unresolvedTypes) {
    // An isolated target cannot be satisfied by a non-isolated source.
    if (Symbols.isFlagOn(lhsType.flags, Flags.ISOLATED) && !Symbols.isFlagOn(rhsType.flags, Flags.ISOLATED)) {
        return false;
    }
    BObjectTypeSymbol lhsStructSymbol = (BObjectTypeSymbol) lhsType.tsymbol;
    BObjectTypeSymbol rhsStructSymbol = (BObjectTypeSymbol) rhsType.tsymbol;
    List<BAttachedFunction> lhsFuncs = lhsStructSymbol.attachedFuncs;
    List<BAttachedFunction> rhsFuncs = ((BObjectTypeSymbol) rhsType.tsymbol).attachedFuncs;
    int lhsAttachedFuncCount = getObjectFuncCount(lhsStructSymbol);
    int rhsAttachedFuncCount = getObjectFuncCount(rhsStructSymbol);
    // A service target requires a service source.
    boolean isLhsAService = Symbols.isService(lhsStructSymbol);
    if (isLhsAService && !Symbols.isService(rhsStructSymbol)) {
        return false;
    }
    // The source must provide at least as many fields and methods as the target requires.
    if (lhsType.fields.size() > rhsType.fields.size() || lhsAttachedFuncCount > rhsAttachedFuncCount) {
        return false;
    }
    // Private members make an object type non-substitutable structurally.
    for (BField bField : lhsType.fields.values()) {
        if (Symbols.isPrivate(bField.symbol)) {
            return false;
        }
    }
    for (BAttachedFunction func : lhsFuncs) {
        if (Symbols.isPrivate(func.symbol)) {
            return false;
        }
    }
    // Each target field must exist on the source with compatible visibility and type.
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = rhsType.fields.get(lhsField.name.value);
        if (rhsField == null ||
                !isInSameVisibilityRegion(lhsField.symbol, rhsField.symbol) ||
                !isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }
    for (BAttachedFunction lhsFunc : lhsFuncs) {
        // init is not part of the object's structural contract.
        if (lhsFunc == lhsStructSymbol.initializerFunc) {
            continue;
        }
        // Resource methods of a service target are not matched structurally here.
        if (isLhsAService && Symbols.isResource(lhsFunc.symbol)) {
            continue;
        }
        BAttachedFunction rhsFunc = getMatchingInvokableType(rhsFuncs, lhsFunc, unresolvedTypes);
        if (rhsFunc == null || !isInSameVisibilityRegion(lhsFunc.symbol, rhsFunc.symbol)) {
            return false;
        }
        // remote-ness must agree on both sides.
        if (Symbols.isRemote(lhsFunc.symbol) != Symbols.isRemote(rhsFunc.symbol)) {
            return false;
        }
    }
    return lhsType.typeIdSet.isAssignableFrom(rhsType.typeIdSet);
}
/**
 * Counts an object's attached functions, excluding the init function (when present
 * in the attached list) since it is not part of the structural contract.
 *
 * @param sym the object type symbol
 * @return the number of contract-relevant attached functions
 */
private int getObjectFuncCount(BObjectTypeSymbol sym) {
    int count = sym.attachedFuncs.size();
    // Exclude init: it does not participate in structural typing.
    if (sym.initializerFunc != null && sym.attachedFuncs.contains(sym.initializerFunc)) {
        count--;
    }
    return count;
}
/**
 * Checks structural subtyping of record types: a sealed target requires a sealed
 * source, an open source's rest field type must fit the target's, and all declared
 * fields must be equivalent per {@code checkFieldEquivalency}.
 *
 * @param rhsType         the candidate subtype record
 * @param lhsType         the target record type
 * @param unresolvedTypes type pairs already under resolution, to break cycles
 * @return true if rhsType is structurally assignable to lhsType
 */
public boolean checkRecordEquivalency(BRecordType rhsType, BRecordType lhsType, Set<TypePair> unresolvedTypes) {
    // If the target is sealed, the source must be sealed too.
    if (lhsType.sealed && !rhsType.sealed) {
        return false;
    }
    // Open source: its rest field type must be compatible with the target's.
    if (!rhsType.sealed && !isAssignable(rhsType.restFieldType, lhsType.restFieldType, unresolvedTypes)) {
        return false;
    }
    return checkFieldEquivalency(lhsType, rhsType, unresolvedTypes);
}
/**
 * Resolves and sets the binding-pattern types of a {@code foreach} statement from the
 * static type of its collection: {@code varType} (the per-iteration member type),
 * {@code resultType} (the record returned by the iterator's {@code next()}), and
 * {@code nillableResultType} (that record unioned with nil). Logs diagnostics and
 * assigns semantic-error types for non-iterable collection types.
 *
 * @param foreachNode the foreach node whose types are populated
 */
public void setForeachTypedBindingPatternType(BLangForeach foreachNode) {
    BType collectionType = foreachNode.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            varType = arrayType.eType;
            BArrayType arrayType = (BArrayType) collectionType;
            break;
        case TypeTags.TUPLE:
            // Member type is the union of all tuple member types (plus the rest type).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            // Unwrap nested xml<xml<...>> constraints down to the innermost item type.
            BType constraint = ((BXMLType) collectionType).constraint;
            while (constraint.tag == TypeTags.XML) {
                collectionType = constraint;
                constraint = ((BXMLType) collectionType).constraint;
            }
            switch (constraint.tag) {
                case TypeTags.XML_ELEMENT:
                    varType = symTable.xmlElementType;
                    break;
                case TypeTags.XML_COMMENT:
                    varType = symTable.xmlCommentType;
                    break;
                case TypeTags.XML_TEXT:
                    varType = symTable.xmlTextType;
                    break;
                case TypeTags.XML_PI:
                    varType = symTable.xmlPIType;
                    break;
                default:
                    // Union constraint: normalize members to the symbol-table singleton
                    // xml item types; all four members collapses to plain xml.
                    Set<BType> collectionTypes = getEffectiveMemberTypes((BUnionType) constraint);
                    Set<BType> builtinXMLConstraintTypes = getEffectiveMemberTypes
                            ((BUnionType) ((BXMLType) symTable.xmlType).constraint);
                    if (collectionTypes.size() == 4 && builtinXMLConstraintTypes.equals(collectionTypes)) {
                        varType = symTable.xmlType;
                    } else {
                        LinkedHashSet<BType> collectionTypesInSymTable = new LinkedHashSet<>();
                        for (BType subType : collectionTypes) {
                            switch (subType.tag) {
                                case TypeTags.XML_ELEMENT:
                                    collectionTypesInSymTable.add(symTable.xmlElementType);
                                    break;
                                case TypeTags.XML_COMMENT:
                                    collectionTypesInSymTable.add(symTable.xmlCommentType);
                                    break;
                                case TypeTags.XML_TEXT:
                                    collectionTypesInSymTable.add(symTable.xmlTextType);
                                    break;
                                case TypeTags.XML_PI:
                                    collectionTypesInSymTable.add(symTable.xmlPIType);
                                    break;
                            }
                        }
                        varType = BUnionType.create(null, collectionTypesInSymTable);
                    }
            }
            break;
        case TypeTags.XML_TEXT:
            varType = symTable.xmlTextType;
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            // An error-bearing stream cannot be iterated without handling the error.
            if (streamType.error != null) {
                BType actualType = BUnionType.create(null, varType, streamType.error);
                dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                        varType, actualType);
            }
            break;
        case TypeTags.OBJECT:
            // Objects are iterable only if they expose a conforming iterator() method.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                foreachNode.resultType = getRecordType(nextMethodReturnType);
                BType valueType = (foreachNode.resultType != null)
                        ? ((BRecordType) foreachNode.resultType).fields.get("value").type : null;
                BType errorType = getErrorType(nextMethodReturnType);
                if (errorType != null) {
                    BType actualType = BUnionType.create(null, valueType, errorType);
                    dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_TYPES,
                            valueType, actualType);
                }
                foreachNode.nillableResultType = nextMethodReturnType;
                foreachNode.varType = valueType;
                return;
            }
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // deliberate fall-through: a non-iterable object gets semantic-error types.
        case TypeTags.SEMANTIC_ERROR:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            return;
        default:
            foreachNode.varType = symTable.semanticError;
            foreachNode.resultType = symTable.semanticError;
            foreachNode.nillableResultType = symTable.semanticError;
            dlog.error(foreachNode.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }
    // For built-in iterable types, derive the result record from the langlib iterator.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    foreachNode.varType = varType;
    foreachNode.resultType = getRecordType(nextMethodReturnType);
    foreachNode.nillableResultType = nextMethodReturnType;
}
/**
 * Resolves and sets the binding-pattern types of a query input clause ({@code from})
 * from the static type of its collection, mirroring the foreach handling:
 * {@code varType}, {@code resultType}, and {@code nillableResultType}. Logs
 * diagnostics and assigns semantic-error types for non-iterable collection types.
 *
 * @param bLangInputClause the input clause whose types are populated
 */
public void setInputClauseTypedBindingPatternType(BLangInputClause bLangInputClause) {
    // Collection may be absent on an erroneous clause; nothing to resolve then.
    if (bLangInputClause.collection == null) {
        return;
    }
    BType collectionType = bLangInputClause.collection.type;
    BType varType;
    switch (collectionType.tag) {
        case TypeTags.STRING:
            varType = symTable.stringType;
            break;
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) collectionType;
            varType = arrayType.eType;
            break;
        case TypeTags.TUPLE:
            // Member type is the union of all tuple member types (plus the rest type).
            BTupleType tupleType = (BTupleType) collectionType;
            LinkedHashSet<BType> tupleTypes = new LinkedHashSet<>(tupleType.tupleTypes);
            if (tupleType.restType != null) {
                tupleTypes.add(tupleType.restType);
            }
            varType = tupleTypes.size() == 1 ?
                    tupleTypes.iterator().next() : BUnionType.create(null, tupleTypes);
            break;
        case TypeTags.MAP:
            BMapType bMapType = (BMapType) collectionType;
            varType = bMapType.constraint;
            break;
        case TypeTags.RECORD:
            BRecordType recordType = (BRecordType) collectionType;
            varType = inferRecordFieldType(recordType);
            break;
        case TypeTags.XML:
            varType = BUnionType.create(null, symTable.xmlType, symTable.stringType);
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) collectionType;
            varType = tableType.constraint;
            break;
        case TypeTags.STREAM:
            BStreamType streamType = (BStreamType) collectionType;
            if (streamType.constraint.tag == TypeTags.NONE) {
                varType = symTable.anydataType;
                break;
            }
            varType = streamType.constraint;
            break;
        case TypeTags.OBJECT:
            // Objects are iterable only if they expose a conforming iterator() method.
            BUnionType nextMethodReturnType = getVarTypeFromIterableObject((BObjectType) collectionType);
            if (nextMethodReturnType != null) {
                bLangInputClause.resultType = getRecordType(nextMethodReturnType);
                bLangInputClause.nillableResultType = nextMethodReturnType;
                bLangInputClause.varType = ((BRecordType) bLangInputClause.resultType).fields.get("value").type;
                return;
            }
            dlog.error(bLangInputClause.collection.pos,
                    DiagnosticErrorCode.INCOMPATIBLE_ITERATOR_FUNCTION_SIGNATURE);
            // deliberate fall-through: a non-iterable object gets semantic-error types.
        case TypeTags.SEMANTIC_ERROR:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            return;
        default:
            bLangInputClause.varType = symTable.semanticError;
            bLangInputClause.resultType = symTable.semanticError;
            bLangInputClause.nillableResultType = symTable.semanticError;
            dlog.error(bLangInputClause.collection.pos, DiagnosticErrorCode.ITERABLE_NOT_SUPPORTED_COLLECTION,
                    collectionType);
            return;
    }
    // For built-in iterable types, derive the result record from the langlib iterator.
    BInvokableSymbol iteratorSymbol = (BInvokableSymbol) symResolver.lookupLangLibMethod(collectionType,
            names.fromString(BLangCompilerConstants.ITERABLE_COLLECTION_ITERATOR_FUNC));
    BUnionType nextMethodReturnType =
            (BUnionType) getResultTypeOfNextInvocation((BObjectType) iteratorSymbol.retType);
    bLangInputClause.varType = varType;
    bLangInputClause.resultType = getRecordType(nextMethodReturnType);
    bLangInputClause.nillableResultType = nextMethodReturnType;
}
/**
 * Derives the iteration member type from an iterable object by locating its
 * iterator() method and inspecting the next() return type of the iterator it yields.
 *
 * @param collectionType the object type used as an iteration collection
 * @return the union type returned by next(), or null if no conforming iterator exists
 */
public BUnionType getVarTypeFromIterableObject(BObjectType collectionType) {
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) collectionType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        // Look for the well-known iterator() method by name.
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.ITERABLE_OBJECT_ITERATOR_FUNC)) {
            return getVarTypeFromIteratorFunc(attachedFunc);
        }
    }
    return null;
}
// Validates a candidate iterator() method (it must take no parameters) and derives
// the iteration member type from the iterator object it returns.
private BUnionType getVarTypeFromIteratorFunc(BAttachedFunction candidateIteratorFunc) {
    if (!candidateIteratorFunc.type.paramTypes.isEmpty()) {
        return null;
    }
    BType returnType = candidateIteratorFunc.type.retType;
    return getVarTypeFromIteratorFuncReturnType(returnType);
}
/**
 * Given the return type of an iterator() method, finds the next() method on the
 * returned iterator object and extracts its (validated) union return type.
 *
 * @param returnType the iterator() return type; must be an object type to qualify
 * @return the next() return union type, or null if no conforming next() exists
 */
public BUnionType getVarTypeFromIteratorFuncReturnType(BType returnType) {
    // iterator() must return an object to be a valid iterator factory.
    if (returnType.tag != TypeTags.OBJECT) {
        return null;
    }
    BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) returnType.tsymbol;
    for (BAttachedFunction attachedFunc : objectTypeSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals(BLangCompilerConstants.NEXT_FUNC)) {
            return getVarTypeFromNextFunc(attachedFunc);
        }
    }
    return null;
}
/**
 * Validates a candidate next() method: it must take no parameters and return a union
 * of the expected {@code record {| T value; |}|error?} shape.
 *
 * @param nextFunc the candidate next() method
 * @return the validated union return type, or null if the shape does not conform
 */
private BUnionType getVarTypeFromNextFunc(BAttachedFunction nextFunc) {
    // next() must be parameterless.
    if (!nextFunc.type.paramTypes.isEmpty()) {
        return null;
    }
    BType returnType = nextFunc.type.retType;
    return checkNextFuncReturnType(returnType) ? (BUnionType) returnType : null;
}
/**
 * Checks that a next() return type has the iterator shape: a union containing a
 * completion type (nil and/or error) plus exactly one record member of the
 * {@code record {| T value; |}} form.
 *
 * @param returnType the next() return type to validate
 * @return true if the type conforms to the expected iterator-next shape
 */
private boolean checkNextFuncReturnType(BType returnType) {
    if (returnType.tag != TypeTags.UNION) {
        return false;
    }
    List<BType> types = new ArrayList<>(((BUnionType) returnType).getMemberTypes());
    // Strip nil/error completion members; at least one of them must be present.
    boolean containsCompletionType = types.removeIf(type -> type.tag == TypeTags.NIL);
    containsCompletionType = types.removeIf(type -> type.tag == TypeTags.ERROR) || containsCompletionType;
    if (!containsCompletionType) {
        return false;
    }
    // Exactly one non-completion member remains, and it must be a record.
    if (types.size() != 1) {
        return false;
    }
    if (types.get(0).tag != TypeTags.RECORD) {
        return false;
    }
    BRecordType recordType = (BRecordType) types.get(0);
    return checkRecordTypeInNextFuncReturnType(recordType);
}
/**
 * Checks that the record member of a next() return type has exactly the shape
 * {@code record {| T value; |}}: sealed, with a single field named "value".
 *
 * @param recordType the record member to validate
 * @return true if the record conforms to the expected shape
 */
private boolean checkRecordTypeInNextFuncReturnType(BRecordType recordType) {
    // Must be closed, contain exactly one field, and that field must be "value".
    return recordType.sealed
            && recordType.fields.size() == 1
            && recordType.fields.containsKey(BLangCompilerConstants.VALUE_FIELD);
}
// Returns the first record member of the union, or null if none exists
// (used to pick the `record {| T value; |}` member out of a next() return union).
private BRecordType getRecordType(BUnionType type) {
    for (BType member : type.getMemberTypes()) {
        if (member.tag == TypeTags.RECORD) {
            return (BRecordType) member;
        }
    }
    return null;
}
/**
 * Returns the first error member found in the union, searching nested unions
 * recursively; null if the union contains no error type.
 *
 * @param type the union to search
 * @return the first error member encountered, or null
 */
public BErrorType getErrorType(BUnionType type) {
    for (BType member : type.getMemberTypes()) {
        if (member.tag == TypeTags.ERROR) {
            return (BErrorType) member;
        } else if (member.tag == TypeTags.UNION) {
            // Recurse into nested unions before moving to the next member.
            BErrorType e = getErrorType((BUnionType) member);
            if (e != null) {
                return e;
            }
        }
    }
    return null;
}
// Returns the declared return type of the iterator object's next() method.
// requireNonNull: callers only pass iterator types known to define next().
public BType getResultTypeOfNextInvocation(BObjectType iteratorType) {
    BAttachedFunction nextFunc = getAttachedFuncFromObject(iteratorType, BLangCompilerConstants.NEXT_FUNC);
    return Objects.requireNonNull(nextFunc).type.retType;
}
/**
 * Looks up an attached function of an object type by name.
 *
 * @param objectType the object type to search
 * @param funcName   the method name to find
 * @return the matching attached function, or null if the object has no such method
 */
public BAttachedFunction getAttachedFuncFromObject(BObjectType objectType, String funcName) {
    BObjectTypeSymbol iteratorSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    for (BAttachedFunction bAttachedFunction : iteratorSymbol.attachedFuncs) {
        if (funcName.equals(bAttachedFunction.funcName.value)) {
            return bAttachedFunction;
        }
    }
    return null;
}
/**
 * Infers the member type produced when iterating a record's fields: a minimal union
 * of the field types (plus the rest field type for open records), collapsing members
 * already covered by the running union. A multi-member result gets a fresh union
 * type symbol; a single-member result is returned unwrapped.
 *
 * @param recordType the record whose field value type is inferred
 * @return the inferred field value type
 */
public BType inferRecordFieldType(BRecordType recordType) {
    Map<String, BField> fields = recordType.fields;
    BUnionType unionType = BUnionType.create(null);
    // Open records may carry values of the rest field type under unknown keys.
    if (!recordType.sealed) {
        unionType.add(recordType.restFieldType);
    }
    for (BField field : fields.values()) {
        // Field type already covered by the union: nothing to add.
        if (isAssignable(field.type, unionType)) {
            continue;
        }
        // Field type subsumes everything collected so far: restart with it alone.
        if (isAssignable(unionType, field.type)) {
            unionType = BUnionType.create(null);
        }
        unionType.add(field.type);
    }
    if (unionType.getMemberTypes().size() > 1) {
        unionType.tsymbol = Symbols.createTypeSymbol(SymTag.UNION_TYPE, Flags.asMask(EnumSet.of(Flag.PUBLIC)),
                Names.EMPTY, recordType.tsymbol.pkgID, null,
                recordType.tsymbol.owner, symTable.builtinPos, VIRTUAL);
        return unionType;
    }
    // Single-member union: return the member itself rather than a degenerate union.
    return unionType.getMemberTypes().iterator().next();
}
/**
 * Result of a built-in type test: {@link #TRUE} or {@link #FALSE} when the
 * relationship could be decided by a built-in rule, {@link #NOT_FOUND} when no
 * built-in rule applied and further analysis is required.
 *
 * @since 1.2.0
 */
enum TypeTestResult {
    NOT_FOUND,
    TRUE,
    FALSE
}
/**
 * Decides, via built-in rules only, whether a value of {@code actualType} widens to
 * {@code targetType}: basic-type mismatches yield FALSE, widening to json/anydata/
 * any/readonly (and char-string to string) yields TRUE, int subtypes get their own
 * widening table, and NOT_FOUND means no built-in rule applied.
 *
 * @param actualType the static type of the value
 * @param targetType the type being widened to
 * @return TRUE/FALSE when decidable by a built-in rule, otherwise NOT_FOUND
 */
TypeTestResult isBuiltInTypeWidenPossible(BType actualType, BType targetType) {
    int targetTag = targetType.tag;
    int actualTag = actualType.tag;
    // Both sides are basic (pre-json) types: rule out cross-kind widening.
    if (actualTag < TypeTags.JSON && targetTag < TypeTags.JSON) {
        switch (actualTag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
                // Numeric types never widen to boolean or string.
                if (targetTag == TypeTags.BOOLEAN || targetTag == TypeTags.STRING) {
                    return TypeTestResult.FALSE;
                }
                break;
            case TypeTags.BOOLEAN:
                if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                        || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.STRING) {
                    return TypeTestResult.FALSE;
                }
                break;
            case TypeTags.STRING:
                if (targetTag == TypeTags.INT || targetTag == TypeTags.BYTE || targetTag == TypeTags.FLOAT
                        || targetTag == TypeTags.DECIMAL || targetTag == TypeTags.BOOLEAN) {
                    return TypeTestResult.FALSE;
                }
                break;
        }
    }
    switch (actualTag) {
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.BOOLEAN:
        case TypeTags.STRING:
        case TypeTags.SIGNED32_INT:
        case TypeTags.SIGNED16_INT:
        case TypeTags.SIGNED8_INT:
        case TypeTags.UNSIGNED32_INT:
        case TypeTags.UNSIGNED16_INT:
        case TypeTags.UNSIGNED8_INT:
        case TypeTags.CHAR_STRING:
            // Simple values always widen to the inclusive built-in supertypes.
            if (targetTag == TypeTags.JSON || targetTag == TypeTags.ANYDATA || targetTag == TypeTags.ANY ||
                    targetTag == TypeTags.READONLY) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.ANYDATA:
        case TypeTags.TYPEDESC:
            if (targetTag == TypeTags.ANY) {
                return TypeTestResult.TRUE;
            }
            break;
        default:
    }
    // Identical int subtype tags: not a widening (the types are the same set of values).
    if (TypeTags.isIntegerTypeTag(targetTag) && actualTag == targetTag) {
        return TypeTestResult.FALSE;
    }
    // Both sides are integer subtypes (or byte): consult the int-subtype widening table.
    if ((TypeTags.isIntegerTypeTag(actualTag) || actualTag == TypeTags.BYTE)
            && (TypeTags.isIntegerTypeTag(targetTag) || targetTag == TypeTags.BYTE)) {
        return checkBuiltInIntSubtypeWidenPossible(actualType, targetType);
    }
    if (actualTag == TypeTags.CHAR_STRING && TypeTags.STRING == targetTag) {
        return TypeTestResult.TRUE;
    }
    return TypeTestResult.NOT_FOUND;
}
/**
 * Widening rules among the builtin integer subtypes and {@code byte}: answers whether
 * {@code actualType}'s value range is contained in {@code targetType}'s.
 *
 * @param actualType the source integer subtype
 * @param targetType the target integer subtype
 * @return {@code TRUE} when the source range is contained in the target range,
 *         otherwise {@code NOT_FOUND} (never {@code FALSE}; the caller decides)
 */
private TypeTestResult checkBuiltInIntSubtypeWidenPossible(BType actualType, BType targetType) {
    int actualTag = actualType.tag;
    switch (targetType.tag) {
        case TypeTags.INT:
            // Every integer subtype (and byte) fits in int.
            if (actualTag == TypeTags.BYTE || TypeTags.isIntegerTypeTag(actualTag)) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (actualTag == TypeTags.SIGNED16_INT || actualTag == TypeTags.SIGNED8_INT ||
                    actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (actualTag == TypeTags.SIGNED8_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (actualTag == TypeTags.UNSIGNED16_INT || actualTag == TypeTags.UNSIGNED8_INT ||
                    actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (actualTag == TypeTags.UNSIGNED8_INT || actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.BYTE:
            // byte and int:Unsigned8 have the same value space (0..255).
            if (actualTag == TypeTags.UNSIGNED8_INT) {
                return TypeTestResult.TRUE;
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (actualTag == TypeTags.BYTE) {
                return TypeTestResult.TRUE;
            }
            break;
    }
    return TypeTestResult.NOT_FOUND;
}
/**
 * Checks whether a value of {@code actualType} can be implicitly cast (widened without an
 * explicit cast expression) to {@code targetType}.
 *
 * @param actualType the type of the value
 * @param targetType the type the value would be used as
 * @return {@code true} when an implicit cast is permitted
 */
public boolean isImplicityCastable(BType actualType, BType targetType) {
    /* "Builtin" here refers to compiler-known types. */
    BType newTargetType = targetType;
    if ((targetType.tag == TypeTags.UNION || targetType.tag == TypeTags.FINITE) && isValueType(actualType)) {
        // A value type flowing into a union/finite target is treated as widening to any.
        newTargetType = symTable.anyType;
    } else if (targetType.tag == TypeTags.INTERSECTION) {
        newTargetType = ((BIntersectionType) targetType).effectiveType;
    }
    TypeTestResult result = isBuiltInTypeWidenPossible(actualType, newTargetType);
    if (result != TypeTestResult.NOT_FOUND) {
        return result == TypeTestResult.TRUE;
    }
    // A finite source (or a union containing an assignable finite member) may be cast to a
    // simple basic value-type target.
    if (isValueType(targetType) &&
            (actualType.tag == TypeTags.FINITE ||
                    (actualType.tag == TypeTags.UNION && ((BUnionType) actualType).getMemberTypes().stream()
                            .anyMatch(type -> type.tag == TypeTags.FINITE && isAssignable(type, targetType))))) {
        return targetType.tag == TypeTags.INT || targetType.tag == TypeTags.BYTE || targetType.tag == TypeTags.FLOAT
                || targetType.tag == TypeTags.STRING || targetType.tag == TypeTags.BOOLEAN;
    } else if (targetType.tag == TypeTags.ERROR
            && (actualType.tag == TypeTags.UNION
            && isAllErrorMembers((BUnionType) actualType))) {
        // A union consisting solely of errors may be used where error is expected.
        return true;
    } else if (targetType.tag == TypeTags.STRING) {
        if (actualType.tag == TypeTags.XML) {
            return isXMLTypeAssignable(actualType, targetType, new HashSet<>());
        }
        if (actualType.tag == TypeTags.UNION) {
            return isAssignable(actualType, symTable.stringType);
        }
        // xml text sub-type is implicitly convertible to string.
        return actualType.tag == TypeTags.XML_TEXT;
    }
    return false;
}
/**
 * Checks whether an explicit type cast from {@code sourceType} to {@code targetType} is
 * permitted for the given expression. May attach an implicit conversion to {@code expr} as a
 * side effect when the source is a value type (boxing before the runtime cast).
 *
 * @param expr       the expression being cast
 * @param sourceType the static type of the expression
 * @param targetType the type being cast to
 * @param env        the current symbol environment (used for intersection computation)
 * @return {@code true} when the cast is permitted
 */
public boolean isTypeCastable(BLangExpression expr, BType sourceType, BType targetType, SymbolEnv env) {
    // A source that can be an error must not be cast to a target that can never be an error.
    if (getTypeIntersection(sourceType, symTable.errorType, env) != symTable.semanticError
            && getTypeIntersection(targetType, symTable.errorType, env) == symTable.semanticError) {
        return false;
    }
    if (sourceType.tag == TypeTags.SEMANTIC_ERROR || targetType.tag == TypeTags.SEMANTIC_ERROR ||
            sourceType == targetType) {
        return true;
    }
    // Casts are allowed in both the widening and narrowing directions.
    if (isAssignable(sourceType, targetType) || isAssignable(targetType, sourceType)) {
        return true;
    }
    if (isNumericConversionPossible(expr, sourceType, targetType)) {
        return true;
    }
    boolean validTypeCast = false;
    // A union source/target is castable when at least one member intersects the other side.
    if (sourceType.tag == TypeTags.UNION) {
        if (getTypeForUnionTypeMembersAssignableToType((BUnionType) sourceType, targetType, null)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (targetType.tag == TypeTags.UNION) {
        if (getTypeForUnionTypeMembersAssignableToType((BUnionType) targetType, sourceType, null)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    // Likewise for finite types: any value in the value space assignable to the other side
    // makes the cast potentially valid.
    if (sourceType.tag == TypeTags.FINITE) {
        if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) sourceType, targetType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (targetType.tag == TypeTags.FINITE) {
        if (getTypeForFiniteTypeValuesAssignableToType((BFiniteType) targetType, sourceType)
                != symTable.semanticError) {
            validTypeCast = true;
        }
    }
    if (validTypeCast) {
        if (isValueType(sourceType)) {
            // Box the value before the runtime cast.
            setImplicitCastExpr(expr, sourceType, symTable.anyType);
        }
        return true;
    }
    return false;
}
/**
 * Checks whether a numeric conversion from {@code sourceType} to {@code targetType} is
 * possible. May attach an implicit cast to {@code expr} as a side effect when the source is
 * already a basic numeric type.
 *
 * @param expr       the expression being converted
 * @param sourceType the static type of the expression
 * @param targetType the type being converted to
 * @return {@code true} when a numeric conversion can be performed
 */
boolean isNumericConversionPossible(BLangExpression expr, BType sourceType,
                                    BType targetType) {
    final boolean isSourceNumericType = isBasicNumericType(sourceType);
    final boolean isTargetNumericType = isBasicNumericType(targetType);
    if (isSourceNumericType && isTargetNumericType) {
        return true;
    }
    if (targetType.tag == TypeTags.UNION) {
        // A union target is ambiguous when it contains more than one distinct basic numeric
        // member type — reject in that case.
        HashSet<Integer> typeTags = new HashSet<>();
        for (BType bType : ((BUnionType) targetType).getMemberTypes()) {
            if (isBasicNumericType(bType)) {
                typeTags.add(bType.tag);
                if (typeTags.size() > 1) {
                    return false;
                }
            }
        }
    }
    if (!isTargetNumericType && targetType.tag != TypeTags.UNION) {
        return false;
    }
    if (isSourceNumericType) {
        // The source is a basic numeric value flowing into a union target: box it.
        setImplicitCastExpr(expr, sourceType, symTable.anyType);
        return true;
    }
    // Source is not numeric: only types that could hold a numeric value at runtime qualify.
    switch (sourceType.tag) {
        case TypeTags.ANY:
        case TypeTags.ANYDATA:
        case TypeTags.JSON:
            return true;
        case TypeTags.UNION:
            for (BType memType : ((BUnionType) sourceType).getMemberTypes()) {
                if (isBasicNumericType(memType) ||
                        (memType.tag == TypeTags.FINITE &&
                                finiteTypeContainsNumericTypeValues((BFiniteType) memType))) {
                    return true;
                }
            }
            break;
        case TypeTags.FINITE:
            if (finiteTypeContainsNumericTypeValues((BFiniteType) sourceType)) {
                return true;
            }
            break;
    }
    return false;
}
/**
 * Returns {@code true} when every member of the union is assignable to the error type.
 *
 * @param actualType the union to inspect
 * @return whether all members are error-assignable
 */
private boolean isAllErrorMembers(BUnionType actualType) {
    for (BType memberType : actualType.getMemberTypes()) {
        if (!isAssignable(memberType, symTable.errorType)) {
            return false;
        }
    }
    return true;
}
/**
 * Attaches an implicit type-conversion node to {@code expr}, converting from
 * {@code actualType} to {@code expType}, if such an implicit cast is permitted.
 * Chains onto any conversion already attached to the expression.
 *
 * @param expr       the expression to decorate (mutated in place)
 * @param actualType the expression's current type
 * @param expType    the expected type to convert to
 */
public void setImplicitCastExpr(BLangExpression expr, BType actualType, BType expType) {
    if (!isImplicityCastable(actualType, expType)) {
        return;
    }
    BLangTypeConversionExpr conversionExpr =
            (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    // Wrap the existing implicit conversion, if one was already attached.
    conversionExpr.expr = expr.impConversionExpr == null ? expr : expr.impConversionExpr;
    conversionExpr.type = expType;
    conversionExpr.targetType = expType;
    conversionExpr.internal = true;
    expr.impConversionExpr = conversionExpr;
}
/**
 * Returns the ultimate (non-array) element type of the given type, unwrapping nested
 * arrays; a non-array type is returned unchanged.
 *
 * @param type the type to unwrap
 * @return the innermost element type
 */
public BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Checks listener compatibility for a service declaration: a union is acceptable when every
 * non-error member is listener-compatible and at least one such member exists; a non-union
 * type must itself be listener-compatible.
 *
 * @param type the listener expression type
 * @return {@code true} when the type can serve as a listener at a service declaration
 */
public boolean checkListenerCompatibilityAtServiceDecl(BType type) {
    if (type.tag != TypeTags.UNION) {
        return checkListenerCompatibility(type);
    }
    boolean foundCompatibleMember = false;
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        if (memberType.tag == TypeTags.ERROR) {
            // Error members are tolerated (e.g. listener init can fail) but don't count.
            continue;
        }
        if (!checkListenerCompatibility(memberType)) {
            return false;
        }
        foundCompatibleMember = true;
    }
    return foundCompatibleMember;
}
/**
 * Checks whether the given type satisfies the listener contract: an object type whose
 * attached methods pass listener validation, or a union where every member does.
 *
 * @param type the type to check
 * @return {@code true} when the type is listener-compatible
 */
public boolean checkListenerCompatibility(BType type) {
    switch (type.tag) {
        case TypeTags.UNION:
            for (BType memberType : ((BUnionType) type).getMemberTypes()) {
                if (!checkListenerCompatibility(memberType)) {
                    return false;
                }
            }
            return true;
        case TypeTags.OBJECT:
            BObjectType objectType = (BObjectType) type;
            List<BAttachedFunction> attachedFuncs =
                    ((BStructureTypeSymbol) objectType.tsymbol).attachedFuncs;
            // Delegate the method-shape validation to the listener validation model.
            return new ListenerValidationModel(this, symTable).checkMethods(attachedFuncs);
        default:
            return false;
    }
}
/**
 * Checks whether the given type is a valid error-detail type: a map assignable to the
 * built-in detail type, or an open record assignable to it (sealed records are rejected).
 *
 * @param detailType the candidate detail type
 * @return {@code true} when usable as an error detail type
 */
public boolean isValidErrorDetailType(BType detailType) {
    if (detailType.tag == TypeTags.MAP) {
        return isAssignable(detailType, symTable.detailType);
    }
    if (detailType.tag == TypeTags.RECORD) {
        // Sealed (closed) records cannot act as error details.
        if (isSealedRecord(detailType)) {
            return false;
        }
        return isAssignable(detailType, symTable.detailType);
    }
    return false;
}
/**
 * Returns {@code true} when the given type is a sealed (closed) record.
 *
 * @param recordType the type to inspect
 * @return whether it is a record with no rest field
 */
private boolean isSealedRecord(BType recordType) {
    if (recordType.getKind() != TypeKind.RECORD) {
        return false;
    }
    return ((BRecordType) recordType).sealed;
}
// Thin delegate: a field type is nullable iff the type itself reports nullable
// (i.e. its value space includes nil).
private boolean isNullable(BType fieldType) {
    return fieldType.isNullable();
}
/**
 * Visitor implementing the "same type" relation (stricter than assignability).
 * The visitor is applied to the target type {@code t}; the second argument {@code s} is the
 * source type being compared against it. {@code unresolvedTypes} carries the type pairs
 * currently under comparison to break cycles in recursive type definitions.
 */
private class BSameTypeVisitor implements BTypeVisitor<BType, Boolean> {

    // Pairs already being compared further up the call stack (cycle breaker).
    Set<TypePair> unresolvedTypes;

    BSameTypeVisitor(Set<TypePair> unresolvedTypes) {
        this.unresolvedTypes = unresolvedTypes;
    }

    @Override
    public Boolean visit(BType t, BType s) {
        if (t == s) {
            return true;
        }
        switch (t.tag) {
            case TypeTags.INT:
            case TypeTags.BYTE:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.STRING:
            case TypeTags.BOOLEAN:
                // Distinct simple-type instances only match when one side is a type parameter.
                return t.tag == s.tag
                        && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
            case TypeTags.ANY:
            case TypeTags.ANYDATA:
                // Same tag, same readonly-ness, and one side must be a type parameter.
                return t.tag == s.tag && hasSameReadonlyFlag(s, t)
                        && (TypeParamAnalyzer.isTypeParam(t) || TypeParamAnalyzer.isTypeParam(s));
            default:
                break;
        }
        return false;
    }

    @Override
    public Boolean visit(BBuiltInRefType t, BType s) {
        // Built-in reference types are singletons: identity equality suffices.
        return t == s;
    }

    @Override
    public Boolean visit(BAnyType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BAnydataType t, BType s) {
        if (t == s) {
            return true;
        }
        return t.tag == s.tag;
    }

    @Override
    public Boolean visit(BMapType t, BType s) {
        if (s.tag != TypeTags.MAP || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        // Maps are the same when their constraints are the same.
        BMapType sType = ((BMapType) s);
        return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BFutureType t, BType s) {
        // NOTE(review): only the constraint *tags* are compared here, not the constraints
        // themselves — confirm this shallower check is intentional for futures.
        return s.tag == TypeTags.FUTURE && t.constraint.tag == ((BFutureType) s).constraint.tag;
    }

    @Override
    public Boolean visit(BXMLType t, BType s) {
        return visit((BBuiltInRefType) t, s);
    }

    @Override
    public Boolean visit(BJSONType t, BType s) {
        return s.tag == TypeTags.JSON && hasSameReadonlyFlag(s, t);
    }

    @Override
    public Boolean visit(BArrayType t, BType s) {
        return s.tag == TypeTags.ARRAY && hasSameReadonlyFlag(s, t) && isSameArrayType(s, t, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BObjectType t, BType s) {
        if (t == s) {
            return true;
        }
        if (s.tag != TypeTags.OBJECT) {
            return false;
        }
        // Objects are nominal: same package and same name means same type.
        return t.tsymbol.pkgID.equals(s.tsymbol.pkgID) && t.tsymbol.name.equals(s.tsymbol.name);
    }

    @Override
    public Boolean visit(BRecordType t, BType s) {
        if (t == s) {
            return true;
        }
        if (s.tag != TypeTags.RECORD || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        BRecordType source = (BRecordType) s;
        if (source.fields.size() != t.fields.size()) {
            return false;
        }
        // Every source field must exist in the target with the same type, the same
        // optionality, and a compatible readonly flag (target readonly requires source
        // readonly).
        for (BField sourceField : source.fields.values()) {
            if (t.fields.containsKey(sourceField.name.value)) {
                BField targetField = t.fields.get(sourceField.name.value);
                if (isSameType(sourceField.type, targetField.type, this.unresolvedTypes) &&
                        hasSameOptionalFlag(sourceField.symbol, targetField.symbol) &&
                        (!Symbols.isFlagOn(targetField.symbol.flags, Flags.READONLY) ||
                                Symbols.isFlagOn(sourceField.symbol.flags, Flags.READONLY))) {
                    continue;
                }
            }
            return false;
        }
        return isSameType(source.restFieldType, t.restFieldType, this.unresolvedTypes);
    }

    // True when both symbols agree on the OPTIONAL flag (set on both or on neither).
    private boolean hasSameOptionalFlag(BVarSymbol s, BVarSymbol t) {
        return ((s.flags & Flags.OPTIONAL) ^ (t.flags & Flags.OPTIONAL)) != Flags.OPTIONAL;
    }

    // True when both types agree on the READONLY flag.
    private boolean hasSameReadonlyFlag(BType source, BType target) {
        return Symbols.isFlagOn(target.flags, Flags.READONLY) == Symbols.isFlagOn(source.flags, Flags.READONLY);
    }

    // NOTE(review): missing @Override unlike the sibling visit methods — still overrides
    // the interface method; consider adding the annotation for consistency.
    public Boolean visit(BTupleType t, BType s) {
        if (s.tag != TypeTags.TUPLE || !hasSameReadonlyFlag(s, t)) {
            return false;
        }
        BTupleType source = (BTupleType) s;
        if (source.tupleTypes.size() != t.tupleTypes.size()) {
            return false;
        }
        for (int i = 0; i < source.tupleTypes.size(); i++) {
            // noType in the target acts as a wildcard for that tuple member.
            if (t.getTupleTypes().get(i) == symTable.noType) {
                continue;
            }
            if (!isSameType(source.getTupleTypes().get(i), t.tupleTypes.get(i), this.unresolvedTypes)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public Boolean visit(BStreamType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BTableType t, BType s) {
        return t == s;
    }

    @Override
    public Boolean visit(BInvokableType t, BType s) {
        return s.tag == TypeTags.INVOKABLE && isSameFunctionType((BInvokableType) s, t, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BUnionType tUnionType, BType s) {
        if (s.tag != TypeTags.UNION || !hasSameReadonlyFlag(s, tUnionType)) {
            return false;
        }
        BUnionType sUnionType = (BUnionType) s;
        if (sUnionType.getMemberTypes().size()
                != tUnionType.getMemberTypes().size()) {
            return false;
        }
        // The unions themselves are added alongside their members so that directly
        // self-referential unions can match each other.
        Set<BType> sourceTypes = new LinkedHashSet<>(sUnionType.getMemberTypes().size());
        Set<BType> targetTypes = new LinkedHashSet<>(tUnionType.getMemberTypes().size());
        sourceTypes.add(sUnionType);
        sourceTypes.addAll(sUnionType.getMemberTypes());
        targetTypes.add(tUnionType);
        targetTypes.addAll(tUnionType.getMemberTypes());
        // Every source entry must have a same-type counterpart among the target entries.
        boolean notSameType = sourceTypes
                .stream()
                .map(sT -> targetTypes
                        .stream()
                        .anyMatch(it -> isSameType(it, sT, this.unresolvedTypes)))
                .anyMatch(foundSameType -> !foundSameType);
        return !notSameType;
    }

    @Override
    public Boolean visit(BIntersectionType tIntersectionType, BType s) {
        if (s.tag != TypeTags.INTERSECTION || !hasSameReadonlyFlag(s, tIntersectionType)) {
            return false;
        }
        BIntersectionType sIntersectionType = (BIntersectionType) s;
        if (sIntersectionType.getConstituentTypes().size() != tIntersectionType.getConstituentTypes().size()) {
            return false;
        }
        Set<BType> sourceTypes = new LinkedHashSet<>(sIntersectionType.getConstituentTypes());
        Set<BType> targetTypes = new LinkedHashSet<>(tIntersectionType.getConstituentTypes());
        // Every source constituent must match some target constituent.
        for (BType sourceType : sourceTypes) {
            boolean foundSameType = false;
            for (BType targetType : targetTypes) {
                if (isSameType(sourceType, targetType, this.unresolvedTypes)) {
                    foundSameType = true;
                    break;
                }
            }
            if (!foundSameType) {
                return false;
            }
        }
        return true;
    }

    @Override
    public Boolean visit(BErrorType t, BType s) {
        if (s.tag != TypeTags.ERROR) {
            return false;
        }
        BErrorType source = (BErrorType) s;
        // Distinctness: the type-id sets must be identical.
        if (!source.typeIdSet.equals(t.typeIdSet)) {
            return false;
        }
        if (source.detailType == t.detailType) {
            return true;
        }
        return isSameType(source.detailType, t.detailType, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BTypedescType t, BType s) {
        if (s.tag != TypeTags.TYPEDESC) {
            return false;
        }
        BTypedescType sType = ((BTypedescType) s);
        return isSameType(sType.constraint, t.constraint, this.unresolvedTypes);
    }

    @Override
    public Boolean visit(BFiniteType t, BType s) {
        // Finite types are only the same when they are the same instance.
        return s == t;
    }

    @Override
    public Boolean visit(BParameterizedType t, BType s) {
        if (s.tag != TypeTags.PARAMETERIZED_TYPE) {
            return false;
        }
        BParameterizedType sType = (BParameterizedType) s;
        return isSameType(sType.paramValueType, t.paramValueType) && sType.paramSymbol.equals(t.paramSymbol);
    }
};
/**
 * Checks record field equivalency for assignability: every LHS field must have an
 * assignable RHS counterpart with compatible optional/readonly flags, and every RHS field
 * not named on the LHS must be assignable to the LHS rest-field type.
 *
 * @param lhsType         the target record type
 * @param rhsType         the source record type
 * @param unresolvedTypes cycle-breaking set of type pairs under comparison
 * @return {@code true} when the field shapes are compatible
 */
private boolean checkFieldEquivalency(BRecordType lhsType, BRecordType rhsType, Set<TypePair> unresolvedTypes) {
    Map<String, BField> remainingRhsFields = new LinkedHashMap<>(rhsType.fields);
    for (BField lhsField : lhsType.fields.values()) {
        BField rhsField = remainingRhsFields.remove(lhsField.name.value);
        if (rhsField == null) {
            return false;
        }
        if (hasIncompatibleReadOnlyFlags(lhsField.symbol.flags, rhsField.symbol.flags)) {
            return false;
        }
        // A mandatory LHS field cannot be satisfied by an optional RHS field.
        if (!Symbols.isOptional(lhsField.symbol) && Symbols.isOptional(rhsField.symbol)) {
            return false;
        }
        if (!isAssignable(rhsField.type, lhsField.type, unresolvedTypes)) {
            return false;
        }
    }
    // Leftover RHS fields must fit the LHS rest-field type.
    for (BField rhsField : remainingRhsFields.values()) {
        if (!isAssignable(rhsField.type, lhsType.restFieldType, unresolvedTypes)) {
            return false;
        }
    }
    return true;
}
/**
 * Finds, among the RHS attached functions, the first one with the same name as
 * {@code lhsFunc} and an assignable function type.
 *
 * @param rhsFuncList     candidate attached functions
 * @param lhsFunc         the function to match against
 * @param unresolvedTypes cycle-breaking set of type pairs under comparison
 * @return the matching attached function, or {@code null} if none matches
 */
private BAttachedFunction getMatchingInvokableType(List<BAttachedFunction> rhsFuncList, BAttachedFunction lhsFunc,
                                                   Set<TypePair> unresolvedTypes) {
    for (BAttachedFunction rhsFunc : rhsFuncList) {
        if (lhsFunc.funcName.equals(rhsFunc.funcName)
                && isFunctionTypeAssignable(rhsFunc.type, lhsFunc.type, unresolvedTypes)) {
            return rhsFunc;
        }
    }
    return null;
}
/**
 * Checks whether two symbols live in the same visibility region:
 * private symbols must share package and owner, public symbols must both be public, and
 * package-private symbols must both be package-private within the same package.
 *
 * @param lhsSym the reference symbol
 * @param rhsSym the symbol compared against it
 * @return {@code true} when both are visible from the same region
 */
private boolean isInSameVisibilityRegion(BSymbol lhsSym, BSymbol rhsSym) {
    if (Symbols.isPrivate(lhsSym)) {
        return Symbols.isPrivate(rhsSym)
                && lhsSym.pkgID.equals(rhsSym.pkgID)
                && lhsSym.owner.name.equals(rhsSym.owner.name);
    }
    if (Symbols.isPublic(lhsSym)) {
        return Symbols.isPublic(rhsSym);
    }
    // lhs is package-private: rhs must be package-private too, in the same package.
    return !Symbols.isPrivate(rhsSym) && !Symbols.isPublic(rhsSym) && lhsSym.pkgID.equals(rhsSym.pkgID);
}
/**
 * Checks assignability of {@code source} to a (possibly union) {@code target} by expanding
 * both sides to member-type sets and requiring every remaining source member to be
 * assignable to some target member. Performs two passes: value-type members first, then the
 * rest with special handling for self-referential structured types.
 *
 * @param source          the source type (union, json, anydata, or any other type)
 * @param target          the target type (union or single type)
 * @param unresolvedTypes cycle-breaking set; the pair is recorded on success
 * @return {@code true} when assignable
 */
private boolean isAssignableToUnionType(BType source, BType target, Set<TypePair> unresolvedTypes) {
    TypePair pair = new TypePair(source, target);
    if (unresolvedTypes.contains(pair)) {
        // Already being checked further up the stack: assume assignable to break the cycle.
        return true;
    }
    Set<BType> sourceTypes = new LinkedHashSet<>();
    Set<BType> targetTypes = new LinkedHashSet<>();
    // json and anydata are represented as unions of their member types here.
    if (source.tag == TypeTags.UNION || source.tag == TypeTags.JSON || source.tag == TypeTags.ANYDATA) {
        sourceTypes.addAll(getEffectiveMemberTypes((BUnionType) source));
    } else {
        sourceTypes.add(source);
    }
    if (target.tag == TypeTags.UNION) {
        targetTypes.addAll(getEffectiveMemberTypes((BUnionType) target));
    } else {
        targetTypes.add(target);
    }
    // First pass: dispose of never/finite/xml members that are trivially handled, then
    // check only the value-type members against the target set.
    var sourceIterator = sourceTypes.iterator();
    while (sourceIterator.hasNext()) {
        BType s = sourceIterator.next();
        if (s.tag == TypeTags.NEVER) {
            // never has no values; it is vacuously assignable.
            sourceIterator.remove();
            continue;
        }
        if (s.tag == TypeTags.FINITE && isAssignable(s, target, unresolvedTypes)) {
            sourceIterator.remove();
            continue;
        }
        if (s.tag == TypeTags.XML && isAssignableToUnionType(expandedXMLBuiltinSubtypes, target, unresolvedTypes)) {
            sourceIterator.remove();
            continue;
        }
        if (!isValueType(s)) {
            // Non-value types are handled in the second pass.
            continue;
        }
        boolean sourceTypeIsNotAssignableToAnyTargetType = true;
        var targetIterator = targetTypes.iterator();
        while (targetIterator.hasNext()) {
            BType t = targetIterator.next();
            if (isAssignable(s, t, unresolvedTypes)) {
                sourceTypeIsNotAssignableToAnyTargetType = false;
                break;
            }
        }
        if (sourceTypeIsNotAssignableToAnyTargetType) {
            return false;
        }
    }
    // Second pass: check the remaining members, treating mutually self-referential
    // structured types of the same kind as assignable.
    sourceIterator = sourceTypes.iterator();
    while (sourceIterator.hasNext()) {
        BType s = sourceIterator.next();
        boolean sourceTypeIsNotAssignableToAnyTargetType = true;
        var targetIterator = targetTypes.iterator();
        boolean selfReferencedSource = (s != source) && isSelfReferencedStructuredType(source, s);
        while (targetIterator.hasNext()) {
            BType t = targetIterator.next();
            boolean selfReferencedTarget = isSelfReferencedStructuredType(target, t);
            if (selfReferencedTarget) {
                if (selfReferencedSource) {
                    if (s.tag == t.tag) {
                        sourceTypeIsNotAssignableToAnyTargetType = false;
                        break;
                    }
                }
            }
            if (isAssignable(s, t, unresolvedTypes)) {
                sourceTypeIsNotAssignableToAnyTargetType = false;
                break;
            }
        }
        if (sourceTypeIsNotAssignableToAnyTargetType) {
            return false;
        }
    }
    // Record the resolved pair so subsequent (possibly cyclic) checks short-circuit.
    unresolvedTypes.add(pair);
    return true;
}
/**
 * Checks whether {@code s} is (or structurally contains, through array/map/table
 * constraints) a direct reference back to {@code source}.
 *
 * @param source the type being referenced
 * @param s      the type to inspect
 * @return {@code true} when {@code s} refers back to {@code source}
 */
public boolean isSelfReferencedStructuredType(BType source, BType s) {
    if (source == s) {
        return true;
    }
    switch (s.tag) {
        case TypeTags.ARRAY:
            return isSelfReferencedStructuredType(source, ((BArrayType) s).eType);
        case TypeTags.MAP:
            return isSelfReferencedStructuredType(source, ((BMapType) s).constraint);
        case TypeTags.TABLE:
            return isSelfReferencedStructuredType(source, ((BTableType) s).constraint);
        default:
            return false;
    }
}
/**
 * Returns a copy of {@code s} in which a direct self-reference to {@code source} (as an
 * array element type, map constraint, or table constraint — including a table's map
 * constraint) is replaced by {@code target}. Returns {@code s} unchanged when it does not
 * directly reference {@code source}.
 *
 * @param source the referenced type to replace
 * @param s      the structured type to rewrite
 * @param target the replacement type
 * @return a rewritten copy, or {@code s} itself when no replacement applies
 */
public BType updateSelfReferencedWithNewType(BType source, BType s, BType target) {
    switch (s.tag) {
        case TypeTags.ARRAY:
            BArrayType arrayType = (BArrayType) s;
            if (arrayType.eType == source) {
                return new BArrayType(target, arrayType.tsymbol, arrayType.size,
                        arrayType.state, arrayType.flags);
            }
            break;
        case TypeTags.MAP:
            BMapType mapType = (BMapType) s;
            if (mapType.constraint == source) {
                return new BMapType(mapType.tag, target, mapType.tsymbol, mapType.flags);
            }
            break;
        case TypeTags.TABLE:
            BTableType tableType = (BTableType) s;
            if (tableType.constraint == source) {
                return new BTableType(tableType.tag, target, tableType.tsymbol,
                        tableType.flags);
            }
            // A table constrained by a map may reference source through the map constraint.
            if (tableType.constraint instanceof BMapType) {
                return updateSelfReferencedWithNewType(source, (BMapType) tableType.constraint, target);
            }
            break;
    }
    return s;
}
/**
 * While building the immutable (readonly) counterpart of a union, fixes members that
 * reference the union itself (as array element, map constraint, or table constraint,
 * including a table's map constraint) so that they point at the new immutable union, and
 * collects the resulting member into {@code readOnlyMemTypes}.
 *
 * @param originalMemberType  the member of the original union
 * @param origUnionType       the original (self-referenced) union
 * @param immutableMemberType the immutable counterpart of the member (may be the same
 *                            instance as {@code originalMemberType})
 * @param newImmutableUnion   the immutable union under construction
 * @param readOnlyMemTypes    output set of immutable member types (mutated)
 */
public static void fixSelfReferencingSameUnion(BType originalMemberType, BUnionType origUnionType,
                                               BType immutableMemberType, BUnionType newImmutableUnion,
                                               LinkedHashSet<BType> readOnlyMemTypes) {
    // When the immutable member is the very same instance, we must not mutate it in place;
    // a fresh wrapper type is created instead.
    boolean sameMember = originalMemberType == immutableMemberType;
    if (originalMemberType.tag == TypeTags.ARRAY) {
        var arrayType = (BArrayType) originalMemberType;
        if (origUnionType == arrayType.eType) {
            if (sameMember) {
                BArrayType newArrayType = new BArrayType(newImmutableUnion, arrayType.tsymbol, arrayType.size,
                        arrayType.state, arrayType.flags);
                readOnlyMemTypes.add(newArrayType);
            } else {
                ((BArrayType) immutableMemberType).eType = newImmutableUnion;
                readOnlyMemTypes.add(immutableMemberType);
            }
        }
    } else if (originalMemberType.tag == TypeTags.MAP) {
        var mapType = (BMapType) originalMemberType;
        if (origUnionType == mapType.constraint) {
            if (sameMember) {
                BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol, mapType.flags);
                readOnlyMemTypes.add(newMapType);
            } else {
                ((BMapType) immutableMemberType).constraint = newImmutableUnion;
                readOnlyMemTypes.add(immutableMemberType);
            }
        }
    } else if (originalMemberType.tag == TypeTags.TABLE) {
        var tableType = (BTableType) originalMemberType;
        if (origUnionType == tableType.constraint) {
            if (sameMember) {
                BTableType newTableType = new BTableType(tableType.tag, newImmutableUnion, tableType.tsymbol,
                        tableType.flags);
                readOnlyMemTypes.add(newTableType);
            } else {
                ((BTableType) immutableMemberType).constraint = newImmutableUnion;
                readOnlyMemTypes.add(immutableMemberType);
            }
            return;
        }
        // The union may also be referenced one level deeper, via a map-constrained table.
        var immutableConstraint = ((BTableType) immutableMemberType).constraint;
        if (tableType.constraint.tag == TypeTags.MAP) {
            // Re-evaluate identity at the constraint level before deciding how to update.
            sameMember = tableType.constraint == immutableConstraint;
            var mapType = (BMapType) tableType.constraint;
            if (origUnionType == mapType.constraint) {
                if (sameMember) {
                    BMapType newMapType = new BMapType(mapType.tag, newImmutableUnion, mapType.tsymbol,
                            mapType.flags);
                    ((BTableType) immutableMemberType).constraint = newMapType;
                } else {
                    ((BTableType) immutableMemberType).constraint = newImmutableUnion;
                }
                readOnlyMemTypes.add(immutableMemberType);
            }
        }
    } else {
        // Non-structured members cannot self-reference; keep the immutable counterpart as is.
        readOnlyMemTypes.add(immutableMemberType);
    }
}
/**
 * Returns the union's member types with intersections replaced by their effective types;
 * an intersection whose effective type is itself a union is flattened recursively.
 *
 * @param unionType the union to expand
 * @return an ordered set of effective member types
 */
private Set<BType> getEffectiveMemberTypes(BUnionType unionType) {
    Set<BType> effectiveTypes = new LinkedHashSet<>();
    for (BType memberType : unionType.getMemberTypes()) {
        if (memberType.tag != TypeTags.INTERSECTION) {
            effectiveTypes.add(memberType);
            continue;
        }
        BType effectiveType = ((BIntersectionType) memberType).effectiveType;
        if (effectiveType.tag == TypeTags.UNION) {
            // Flatten nested unions produced by intersection resolution.
            effectiveTypes.addAll(getEffectiveMemberTypes((BUnionType) effectiveType));
        } else {
            effectiveTypes.add(effectiveType);
        }
    }
    return effectiveTypes;
}
/**
 * Checks whether every value in the finite type's value space is assignable to
 * {@code targetType}.
 *
 * @param finiteType      the finite (singleton-set) source type
 * @param targetType      the target type (finite, union, or other)
 * @param unresolvedTypes cycle-breaking set of type pairs under comparison
 * @return {@code true} when all values fit the target
 */
private boolean isFiniteTypeAssignable(BFiniteType finiteType, BType targetType, Set<TypePair> unresolvedTypes) {
    if (targetType.tag == TypeTags.FINITE) {
        // Finite-to-finite: every source value must appear in the target's value space.
        return finiteType.getValueSpace().stream()
                .allMatch(expression -> isAssignableToFiniteType(targetType, (BLangLiteral) expression));
    }
    if (targetType.tag == TypeTags.UNION) {
        // Each value must match a finite member's value space or be assignable otherwise.
        // NOTE(review): the non-finite arm checks against the whole union (targetType),
        // not the individual member — presumably intentional; confirm.
        List<BType> unionMemberTypes = getAllTypes(targetType);
        return finiteType.getValueSpace().stream()
                .allMatch(valueExpr -> unionMemberTypes.stream()
                        .anyMatch(targetMemType -> targetMemType.tag == TypeTags.FINITE ?
                                isAssignableToFiniteType(targetMemType, (BLangLiteral) valueExpr) :
                                isAssignable(valueExpr.type, targetType, unresolvedTypes)));
    }
    return finiteType.getValueSpace().stream()
            .allMatch(expression -> isAssignable(expression.type, targetType, unresolvedTypes));
}
/**
 * Checks whether the given literal matches some value in the value space of {@code type}
 * (which must be a finite type; any other type yields {@code false}).
 *
 * @param type        the candidate finite type
 * @param literalExpr the literal to look for
 * @return {@code true} when the literal is a member of the finite type's value space
 */
boolean isAssignableToFiniteType(BType type, BLangLiteral literalExpr) {
    if (type.tag != TypeTags.FINITE) {
        return false;
    }
    for (BLangExpression valueExpr : ((BFiniteType) type).getValueSpace()) {
        BLangLiteral memberLiteral = (BLangLiteral) valueExpr;
        if (memberLiteral.value == null) {
            // nil member: only matches a nil literal.
            if (literalExpr.value == null) {
                return true;
            }
        } else if (checkLiteralAssignabilityBasedOnType(memberLiteral, literalExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Method to check the literal assignability based on the types of the literals. For numeric
 * literals the assignability depends on the equivalency of the literals. The candidate
 * literal could either be a simple literal or a constant. In case of a constant, it is
 * assignable to the base literal if and only if both literals have the same type and
 * equivalent values.
 *
 * @param baseLiteral      Literal based on which we check the assignability.
 * @param candidateLiteral Literal to be tested whether it is assignable to the base literal or not.
 * @return true if assignable; false otherwise.
 */
boolean checkLiteralAssignabilityBasedOnType(BLangLiteral baseLiteral, BLangLiteral candidateLiteral) {
    if (baseLiteral.getKind() != candidateLiteral.getKind()) {
        return false;
    }
    Object baseValue = baseLiteral.value;
    Object candidateValue = candidateLiteral.value;
    int candidateTypeTag = candidateLiteral.type.tag;
    // Dispatch on the base literal's type; each integer subtype additionally requires the
    // candidate int literal's value to lie within the subtype's range.
    switch (baseLiteral.type.tag) {
        case TypeTags.BYTE:
            // A plain (non-constant) int literal in byte range also matches a byte base.
            if (candidateTypeTag == TypeTags.BYTE || (candidateTypeTag == TypeTags.INT &&
                    !candidateLiteral.isConstant && isByteLiteralValue((Long) candidateValue))) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.INT:
            if (candidateTypeTag == TypeTags.INT) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.SIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isSigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED32_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned32LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED16_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned16LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.UNSIGNED8_INT:
            if (candidateTypeTag == TypeTags.INT && isUnsigned8LiteralValue((Long) candidateValue)) {
                return ((Number) baseValue).longValue() == ((Number) candidateValue).longValue();
            }
            break;
        case TypeTags.FLOAT:
            String baseValueStr = String.valueOf(baseValue);
            String originalValue = baseLiteral.originalValue != null ? baseLiteral.originalValue : baseValueStr;
            // A literal explicitly written as decimal (e.g. "1.0d") never matches a float base.
            if (NumericLiteralSupport.isDecimalDiscriminated(originalValue)) {
                return false;
            }
            double baseDoubleVal = Double.parseDouble(baseValueStr);
            double candidateDoubleVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                // Plain int literals may be implicitly widened to float for comparison.
                candidateDoubleVal = ((Long) candidateValue).doubleValue();
                return baseDoubleVal == candidateDoubleVal;
            } else if (candidateTypeTag == TypeTags.FLOAT) {
                candidateDoubleVal = Double.parseDouble(String.valueOf(candidateValue));
                return baseDoubleVal == candidateDoubleVal;
            }
            break;
        case TypeTags.DECIMAL:
            BigDecimal baseDecimalVal = NumericLiteralSupport.parseBigDecimal(baseValue);
            BigDecimal candidateDecimalVal;
            if (candidateTypeTag == TypeTags.INT && !candidateLiteral.isConstant) {
                candidateDecimalVal = new BigDecimal((long) candidateValue, MathContext.DECIMAL128);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            } else if (candidateTypeTag == TypeTags.FLOAT && !candidateLiteral.isConstant ||
                    candidateTypeTag == TypeTags.DECIMAL) {
                // A literal explicitly written as float (e.g. "1.0f") never matches decimal.
                if (NumericLiteralSupport.isFloatDiscriminated(String.valueOf(candidateValue))) {
                    return false;
                }
                candidateDecimalVal = NumericLiteralSupport.parseBigDecimal(candidateValue);
                return baseDecimalVal.compareTo(candidateDecimalVal) == 0;
            }
            break;
        default:
            // Non-numeric literals (string, boolean, nil) compare by plain value equality.
            return baseValue.equals(candidateValue);
    }
    return false;
}
/**
 * Checks whether the given value lies in the byte range
 * ({@code BBYTE_MIN_VALUE}..{@code BBYTE_MAX_VALUE}).
 * Compares the full 64-bit value: the previous {@code intValue()}-based check truncated the
 * long first, wrongly accepting out-of-range values whose low 32 bits happen to fall in range
 * (e.g. 2^32).
 *
 * @param longObject the candidate value
 * @return {@code true} when the value is a valid byte
 */
boolean isByteLiteralValue(Long longObject) {
    return (longObject >= BBYTE_MIN_VALUE && longObject <= BBYTE_MAX_VALUE);
}
/**
 * Checks whether the given value lies in the int:Signed32 range
 * ({@code SIGNED32_MIN_VALUE}..{@code SIGNED32_MAX_VALUE}).
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in a signed 32-bit integer
 */
boolean isSigned32LiteralValue(Long longObject) {
    long value = longObject;
    return SIGNED32_MIN_VALUE <= value && value <= SIGNED32_MAX_VALUE;
}
/**
 * Checks whether the given value lies in the int:Signed16 range
 * ({@code SIGNED16_MIN_VALUE}..{@code SIGNED16_MAX_VALUE}).
 * Compares the full 64-bit value: the previous {@code intValue()}-based check truncated the
 * long first, wrongly accepting out-of-range values whose low 32 bits fall in range.
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in a signed 16-bit integer
 */
boolean isSigned16LiteralValue(Long longObject) {
    return (longObject >= SIGNED16_MIN_VALUE && longObject <= SIGNED16_MAX_VALUE);
}
/**
 * Checks whether the given value lies in the int:Signed8 range
 * ({@code SIGNED8_MIN_VALUE}..{@code SIGNED8_MAX_VALUE}).
 * Compares the full 64-bit value: the previous {@code intValue()}-based check truncated the
 * long first, wrongly accepting out-of-range values whose low 32 bits fall in range.
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in a signed 8-bit integer
 */
boolean isSigned8LiteralValue(Long longObject) {
    return (longObject >= SIGNED8_MIN_VALUE && longObject <= SIGNED8_MAX_VALUE);
}
/**
 * Checks whether the given value lies in the int:Unsigned32 range
 * (0..{@code UNSIGNED32_MAX_VALUE}).
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in an unsigned 32-bit integer
 */
boolean isUnsigned32LiteralValue(Long longObject) {
    long value = longObject;
    return 0 <= value && value <= UNSIGNED32_MAX_VALUE;
}
/**
 * Checks whether the given value lies in the int:Unsigned16 range
 * (0..{@code UNSIGNED16_MAX_VALUE}).
 * Compares the full 64-bit value: the previous {@code intValue()}-based check truncated the
 * long first, wrongly accepting out-of-range values whose low 32 bits fall in range.
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in an unsigned 16-bit integer
 */
boolean isUnsigned16LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED16_MAX_VALUE);
}
/**
 * Checks whether the given value lies in the int:Unsigned8 range
 * (0..{@code UNSIGNED8_MAX_VALUE}).
 * Compares the full 64-bit value: the previous {@code intValue()}-based check truncated the
 * long first, wrongly accepting out-of-range values whose low 32 bits fall in range.
 *
 * @param longObject the candidate value
 * @return {@code true} when the value fits in an unsigned 8-bit integer
 */
boolean isUnsigned8LiteralValue(Long longObject) {
    return (longObject >= 0 && longObject <= UNSIGNED8_MAX_VALUE);
}
/**
 * Checks whether the given string literal is a single character, i.e. consists of exactly
 * one Unicode code point (a surrogate pair counts as one character).
 *
 * @param literal the string literal value
 * @return {@code true} when the string holds exactly one code point
 */
boolean isCharLiteralValue(String literal) {
    return literal.codePointCount(0, literal.length()) == 1;
}
/**
 * Method to retrieve a type representing all the values in the value space of a finite type
 * that are assignable to the target type.
 *
 * @param finiteType the finite type
 * @param targetType the target type
 * @return a new finite type if at least one value in the value space of the specified
 *         finiteType is assignable to targetType (the same instance if all are assignable),
 *         else semanticError
 */
BType getTypeForFiniteTypeValuesAssignableToType(BFiniteType finiteType, BType targetType) {
    // Fast path: the whole finite type fits the target — reuse it unchanged.
    if (isAssignable(finiteType, targetType)) {
        return finiteType;
    }
    // Keep only the values that fit the target directly, match the target's own value
    // space, or match the value space of some finite member of a union target.
    Set<BLangExpression> matchingValues = finiteType.getValueSpace().stream()
            .filter(
                    expr -> isAssignable(expr.type, targetType) ||
                            isAssignableToFiniteType(targetType, (BLangLiteral) expr) ||
                            (targetType.tag == TypeTags.UNION &&
                                    ((BUnionType) targetType).getMemberTypes().stream()
                                            .filter(memType -> memType.tag == TypeTags.FINITE)
                                            .anyMatch(filteredType -> isAssignableToFiniteType(filteredType,
                                                    (BLangLiteral) expr))))
            .collect(Collectors.toSet());
    if (matchingValues.isEmpty()) {
        return symTable.semanticError;
    }
    // Materialize the narrowed value space as a fresh anonymous finite type.
    // Note: increments the shared finiteTypeCount counter to generate a unique name.
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, finiteType.tsymbol.flags,
            names.fromString("$anonType$" + UNDERSCORE + finiteTypeCount++),
            finiteType.tsymbol.pkgID, null,
            finiteType.tsymbol.owner, finiteType.tsymbol.pos,
            VIRTUAL);
    BFiniteType intersectingFiniteType = new BFiniteType(finiteTypeSymbol, matchingValues);
    finiteTypeSymbol.type = intersectingFiniteType;
    return intersectingFiniteType;
}
/**
 * Method to retrieve a type representing all the member types of a union type that are
 * assignable to the target type.
 *
 * @param unionType  the union type
 * @param targetType the target type
 * @param env        the current symbol environment
 * @return a single type or a new union type if at least one member type of the union type
 *         is assignable to targetType, else semanticError
 */
BType getTypeForUnionTypeMembersAssignableToType(BUnionType unionType, BType targetType, SymbolEnv env) {
    // A list (not a set) is used intentionally: duplicate member intersections keep the
    // size above one, so the result stays a union in that case.
    List<BType> intersection = new LinkedList<>();
    for (BType memberType : unionType.getMemberTypes()) {
        BType memberIntersection = getTypeIntersection(memberType, targetType, env);
        if (memberIntersection != symTable.semanticError) {
            intersection.add(memberIntersection);
        }
    }
    if (intersection.isEmpty()) {
        return symTable.semanticError;
    }
    if (intersection.size() == 1) {
        return intersection.get(0);
    }
    return BUnionType.create(null, new LinkedHashSet<>(intersection));
}
/**
 * Checks whether two types may compare equal at runtime: both must be pure (anydata-like)
 * types, and either side assignable to the other, or their expanded member-type sets must
 * intersect.
 *
 * @param lhsType the left-hand operand type
 * @param rhsType the right-hand operand type
 * @return {@code true} when an equality intersection exists
 */
boolean validEqualityIntersectionExists(BType lhsType, BType rhsType) {
    if (!isPureType(lhsType) || !isPureType(rhsType)) {
        return false;
    }
    if (isAssignable(lhsType, rhsType) || isAssignable(rhsType, lhsType)) {
        return true;
    }
    return equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsType),
                                      expandAndGetMemberTypesRecursive(rhsType));
}
/**
 * Checks whether the two expanded member-type sets share at least one type whose values could
 * compare equal: `anydata` intersects any non-error type, identical types intersect, and
 * otherwise structured types are compared member-wise.
 *
 * @param lhsTypes expanded member types of the left operand
 * @param rhsTypes expanded member types of the right operand
 * @return true when an equality intersection exists
 */
private boolean equalityIntersectionExists(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    if (lhsTypes.contains(symTable.anydataType)) {
        for (BType rhsType : rhsTypes) {
            if (rhsType.tag != TypeTags.ERROR) {
                return true;
            }
        }
    }
    if (rhsTypes.contains(symTable.anydataType)) {
        for (BType lhsType : lhsTypes) {
            if (lhsType.tag != TypeTags.ERROR) {
                return true;
            }
        }
    }
    for (BType lhsType : lhsTypes) {
        for (BType rhsType : rhsTypes) {
            if (isSameType(lhsType, rhsType)) {
                return true;
            }
        }
    }
    // No identical pair found; fall back to structural (tuple/array/map/record/json) checks.
    return equalityIntersectionExistsForComplexTypes(lhsTypes, rhsTypes);
}
/**
 * Expands a type into the set of member types it covers, splitting arrays/maps whose
 * element/constraint is a union into per-member arrays/maps.
 *
 * e.g., (string|int)[] yields string[], int[] and (string|int)[] itself.
 *
 * @param bType the type to expand
 * @return the set of expanded member types
 */
public Set<BType> expandAndGetMemberTypesRecursive(BType bType) {
    Set<BType> memberTypes = new LinkedHashSet<>();
    switch (bType.tag) {
        case TypeTags.BYTE:
        case TypeTags.INT:
            // byte and int values can compare equal, so both types are included.
            memberTypes.add(symTable.intType);
            memberTypes.add(symTable.byteType);
            break;
        case TypeTags.FINITE:
            for (BLangExpression value : ((BFiniteType) bType).getValueSpace()) {
                memberTypes.add(value.type);
            }
            break;
        case TypeTags.UNION:
            for (BType member : ((BUnionType) bType).getMemberTypes()) {
                memberTypes.addAll(expandAndGetMemberTypesRecursive(member));
            }
            break;
        case TypeTags.ARRAY:
            BType elementType = ((BArrayType) bType).getElementType();
            // For fixed-length arrays also include the corresponding open array.
            if (((BArrayType) bType).getSize() != -1) {
                memberTypes.add(new BArrayType(elementType));
            }
            if (elementType.tag == TypeTags.UNION) {
                for (BType expandedElementType : expandAndGetMemberTypesRecursive(elementType)) {
                    memberTypes.add(new BArrayType(expandedElementType));
                }
            }
            memberTypes.add(bType);
            break;
        case TypeTags.MAP:
            BType constraintType = ((BMapType) bType).getConstraint();
            if (constraintType.tag == TypeTags.UNION) {
                for (BType expandedConstraint : expandAndGetMemberTypesRecursive(constraintType)) {
                    memberTypes.add(new BMapType(TypeTags.MAP, expandedConstraint, symTable.mapType.tsymbol));
                }
            }
            memberTypes.add(bType);
            break;
        default:
            memberTypes.add(bType);
    }
    return memberTypes;
}
/**
 * Checks whether two tuple types of the same arity have an equality intersection at every
 * member position.
 *
 * @param lhsType left tuple type
 * @param rhsType right tuple type
 * @return true when the tuples have equal arity and every member pair intersects
 */
private boolean tupleIntersectionExists(BTupleType lhsType, BTupleType rhsType) {
    List<BType> lhsMemberTypes = lhsType.getTupleTypes();
    List<BType> rhsMemberTypes = rhsType.getTupleTypes();
    if (lhsMemberTypes.size() != rhsMemberTypes.size()) {
        return false;
    }
    for (int i = 0; i < lhsMemberTypes.size(); i++) {
        if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberTypes.get(i)),
                                        expandAndGetMemberTypesRecursive(rhsMemberTypes.get(i)))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether any structured member of {@code lhsTypes} has an equality intersection with
 * some member of {@code rhsTypes}: simple values vs json, tuples vs tuples/arrays, arrays vs
 * arrays/tuples, maps vs maps/json/records, and records/objects vs records/json/maps.
 *
 * @param lhsTypes expanded member types of the left operand
 * @param rhsTypes expanded member types of the right operand
 * @return true when at least one complex-type intersection exists
 */
private boolean equalityIntersectionExistsForComplexTypes(Set<BType> lhsTypes, Set<BType> rhsTypes) {
    for (BType lhsMemberType : lhsTypes) {
        switch (lhsMemberType.tag) {
            case TypeTags.INT:
            case TypeTags.STRING:
            case TypeTags.FLOAT:
            case TypeTags.DECIMAL:
            case TypeTags.BOOLEAN:
            case TypeTags.NIL:
                // A simple value on the LHS intersects with `json` on the RHS.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                break;
            case TypeTags.JSON:
                if (jsonEqualityIntersectionExists(rhsTypes)) {
                    return true;
                }
                break;
            case TypeTags.TUPLE:
                // Tuple vs tuple: member-wise intersection check.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                tupleIntersectionExists((BTupleType) lhsMemberType, (BTupleType) rhsMemberType))) {
                    return true;
                }
                // Tuple vs array: every tuple member must intersect the array's element type.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                arrayTupleEqualityIntersectionExists((BArrayType) rhsMemberType,
                                                                     (BTupleType) lhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.ARRAY:
                // Array vs array: element types must intersect.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.ARRAY &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BArrayType) lhsMemberType).eType),
                                        expandAndGetMemberTypesRecursive(((BArrayType) rhsMemberType).eType)))) {
                    return true;
                }
                // Array vs tuple: symmetric to the tuple/array case above.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.TUPLE &&
                                arrayTupleEqualityIntersectionExists((BArrayType) lhsMemberType,
                                                                     (BTupleType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.MAP:
                // Map vs map: constraint types must intersect.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                equalityIntersectionExists(
                                        expandAndGetMemberTypesRecursive(((BMapType) lhsMemberType).constraint),
                                        expandAndGetMemberTypesRecursive(((BMapType) rhsMemberType).constraint)))) {
                    return true;
                }
                // Map vs json: valid unless the map's constraint is an error type.
                if (!isAssignable(((BMapType) lhsMemberType).constraint, symTable.errorType) &&
                        rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON)) {
                    return true;
                }
                // Map vs record: every required record field must intersect the map constraint.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                mapRecordEqualityIntersectionExists((BMapType) lhsMemberType,
                                                                    (BRecordType) rhsMemberType))) {
                    return true;
                }
                break;
            case TypeTags.OBJECT:
            case TypeTags.RECORD:
                // Structural equivalence in either direction is sufficient.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> checkStructEquivalency(rhsMemberType, lhsMemberType) ||
                                checkStructEquivalency(lhsMemberType, rhsMemberType))) {
                    return true;
                }
                // Record vs record: field-wise intersection check.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.RECORD &&
                                recordEqualityIntersectionExists((BRecordType) lhsMemberType,
                                                                 (BRecordType) rhsMemberType))) {
                    return true;
                }
                // Record vs json: the record must be json-compatible.
                if (rhsTypes.stream().anyMatch(rhsMemberType -> rhsMemberType.tag == TypeTags.JSON) &&
                        jsonEqualityIntersectionExists(expandAndGetMemberTypesRecursive(lhsMemberType))) {
                    return true;
                }
                // Record vs map: symmetric to the map/record case above.
                if (rhsTypes.stream().anyMatch(
                        rhsMemberType -> rhsMemberType.tag == TypeTags.MAP &&
                                mapRecordEqualityIntersectionExists((BMapType) rhsMemberType,
                                                                    (BRecordType) lhsMemberType))) {
                    return true;
                }
                break;
        }
    }
    return false;
}
/**
 * Checks whether an array and a tuple could hold equal values: every tuple member type must
 * have an equality intersection with the array's element type.
 *
 * @param arrayType the array type
 * @param tupleType the tuple type
 * @return true when every tuple member intersects the array element type
 */
private boolean arrayTupleEqualityIntersectionExists(BArrayType arrayType, BTupleType tupleType) {
    Set<BType> arrayElementTypes = expandAndGetMemberTypesRecursive(arrayType.eType);
    for (BType tupleMemberType : tupleType.tupleTypes) {
        if (!equalityIntersectionExists(arrayElementTypes, expandAndGetMemberTypesRecursive(tupleMemberType))) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether two record types have an equality intersection: every field present in both
 * records must intersect, and a required field present in only one record must intersect the
 * other record's rest field type (impossible when the other record is sealed).
 *
 * @param lhsType the first record type
 * @param rhsType the second record type
 * @return true when some value could belong to both record types
 */
private boolean recordEqualityIntersectionExists(BRecordType lhsType, BRecordType rhsType) {
    Map<String, BField> lhsFields = lhsType.fields;
    Map<String, BField> rhsFields = rhsType.fields;
    List<Name> matchedFieldNames = new ArrayList<>();
    for (BField lhsField : lhsFields.values()) {
        if (rhsFields.containsKey(lhsField.name.value)) {
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(
                                                    rhsFields.get(lhsField.name.value).type))) {
                return false;
            }
            matchedFieldNames.add(lhsField.getName());
        } else {
            if (Symbols.isFlagOn(lhsField.symbol.flags, Flags.OPTIONAL)) {
                // An optional field absent from the RHS imposes no constraint; move on to the
                // next LHS field. (This was previously `break`, which incorrectly skipped
                // validation of all remaining LHS fields once one unmatched optional field
                // was seen.)
                continue;
            }
            if (rhsType.sealed) {
                // A required LHS field can never exist in a sealed RHS record lacking it.
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(lhsField.type),
                                            expandAndGetMemberTypesRecursive(rhsType.restFieldType))) {
                return false;
            }
        }
    }
    for (BField rhsField : rhsFields.values()) {
        if (matchedFieldNames.contains(rhsField.getName())) {
            continue;
        }
        if (!Symbols.isFlagOn(rhsField.symbol.flags, Flags.OPTIONAL)) {
            if (lhsType.sealed) {
                return false;
            }
            if (!equalityIntersectionExists(expandAndGetMemberTypesRecursive(rhsField.type),
                                            expandAndGetMemberTypesRecursive(lhsType.restFieldType))) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Checks whether a map and a record could hold equal values: every required record field type
 * must have an equality intersection with the map's constraint type.
 *
 * @param mapType    the map type
 * @param recordType the record type
 * @return true when every required record field intersects the map constraint
 */
private boolean mapRecordEqualityIntersectionExists(BMapType mapType, BRecordType recordType) {
    Set<BType> constraintTypes = expandAndGetMemberTypesRecursive(mapType.getConstraint());
    return recordType.fields.values().stream()
            .filter(field -> !Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL))
            .allMatch(field -> equalityIntersectionExists(constraintTypes,
                                                          expandAndGetMemberTypesRecursive(field.type)));
}
/**
 * Checks whether any type in the given set has an equality intersection with `json`:
 * maps with non-error constraints, records whose required fields are not error-typed,
 * and any type directly assignable to `json`.
 *
 * @param typeSet the expanded member types checked against `json`
 * @return true when some member intersects with `json`
 */
private boolean jsonEqualityIntersectionExists(Set<BType> typeSet) {
    for (BType type : typeSet) {
        switch (type.tag) {
            case TypeTags.MAP:
                if (!isAssignable(((BMapType) type).constraint, symTable.errorType)) {
                    return true;
                }
                break;
            case TypeTags.RECORD:
                boolean jsonCompatibleRecord = true;
                for (BField field : ((BRecordType) type).fields.values()) {
                    // A required error-typed field makes the record json-incompatible.
                    if (!Symbols.isFlagOn(field.symbol.flags, Flags.OPTIONAL) &&
                            isAssignable(field.type, symTable.errorType)) {
                        jsonCompatibleRecord = false;
                        break;
                    }
                }
                if (jsonCompatibleRecord) {
                    return true;
                }
                break;
            default:
                if (isAssignable(type, symTable.jsonType)) {
                    return true;
                }
        }
    }
    return false;
}
/**
 * Computes the type remaining for a match expression after removing the matched type.
 * Unions, finite types and tuples are narrowed; any other type is returned unchanged.
 *
 * @param originalType the type before narrowing
 * @param typeToRemove the type matched (to be subtracted)
 * @return the remaining type
 */
public BType getRemainingMatchExprType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.TUPLE) {
        return getRemainingType((BTupleType) originalType, typeToRemove);
    }
    return originalType;
}
/**
 * Dispatches tuple-type narrowing based on the kind of type being removed; only tuple and
 * array removals can narrow a tuple type.
 *
 * @param originalType the tuple type to narrow
 * @param typeToRemove the matched type being subtracted
 * @return the remaining type
 */
private BType getRemainingType(BTupleType originalType, BType typeToRemove) {
    if (typeToRemove.tag == TypeTags.TUPLE) {
        return getRemainingType(originalType, (BTupleType) typeToRemove);
    }
    if (typeToRemove.tag == TypeTags.ARRAY) {
        return getRemainingType(originalType, (BArrayType) typeToRemove);
    }
    return originalType;
}
/**
 * Narrows a tuple type member-wise by a matched tuple type. Narrowing is skipped (the
 * original type is returned) when the original tuple has a rest type, or when the removed
 * tuple is longer than the original.
 *
 * @param originalType the tuple type to narrow (must have no rest type to be narrowed)
 * @param typeToRemove the matched tuple type being subtracted
 * @return a new tuple with each member narrowed, or the original type when not narrowable
 */
private BType getRemainingType(BTupleType originalType, BTupleType typeToRemove) {
    if (originalType.restType != null) {
        return originalType;
    }
    List<BType> originalTupleTypes = new ArrayList<>(originalType.tupleTypes);
    List<BType> typesToRemove = new ArrayList<>(typeToRemove.tupleTypes);
    if (originalTupleTypes.size() < typesToRemove.size()) {
        return originalType;
    }
    List<BType> tupleTypes = new ArrayList<>();
    // Narrow the positionally matching members.
    for (int i = 0; i < originalTupleTypes.size(); i++) {
        tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typesToRemove.get(i)));
    }
    if (typeToRemove.restType == null) {
        return new BTupleType(tupleTypes);
    }
    if (originalTupleTypes.size() == typesToRemove.size()) {
        return originalType;
    }
    // The removed tuple has a rest type: narrow the original's trailing members by it.
    for (int i = typesToRemove.size(); i < originalTupleTypes.size(); i++) {
        tupleTypes.add(getRemainingMatchExprType(originalTupleTypes.get(i), typeToRemove.restType));
    }
    return new BTupleType(tupleTypes);
}
/**
 * Narrows a tuple type by a matched array type: every tuple member (and the rest type,
 * if any) is narrowed by the array's element type.
 *
 * @param originalType the tuple type to narrow
 * @param typeToRemove the matched array type being subtracted
 * @return a new tuple type with each member narrowed
 */
private BType getRemainingType(BTupleType originalType, BArrayType typeToRemove) {
    BType removedElementType = typeToRemove.eType;
    List<BType> narrowedMemberTypes = new ArrayList<>();
    for (BType memberType : originalType.tupleTypes) {
        narrowedMemberTypes.add(getRemainingMatchExprType(memberType, removedElementType));
    }
    BTupleType remainingType = new BTupleType(narrowedMemberTypes);
    if (originalType.restType != null) {
        remainingType.restType = getRemainingMatchExprType(originalType.restType, removedElementType);
    }
    return remainingType;
}
/**
 * Computes the type remaining after type-narrowing removes {@code typeToRemove}; only
 * union and finite types can be narrowed here.
 *
 * @param originalType the type before narrowing
 * @param typeToRemove the type being subtracted
 * @return the remaining type
 */
public BType getRemainingType(BType originalType, BType typeToRemove) {
    if (originalType.tag == TypeTags.UNION) {
        return getRemainingType((BUnionType) originalType, getAllTypes(typeToRemove));
    }
    if (originalType.tag == TypeTags.FINITE) {
        return getRemainingType((BFiniteType) originalType, getAllTypes(typeToRemove));
    }
    return originalType;
}
/**
 * Computes the intersection of {@code lhsType} with each member of {@code rhsType},
 * returning the single narrowed type, a union of the narrowed member types, or
 * semanticError when no member intersects. NULL_SET acts as the "never" type.
 *
 * @param lhsType the type being narrowed
 * @param rhsType the narrowing type (flattened into its member types)
 * @param env     the current symbol environment
 * @return the narrowed intersection type, or semanticError
 */
BType getTypeIntersection(BType lhsType, BType rhsType, SymbolEnv env) {
    List<BType> narrowingTypes = getAllTypes(rhsType);
    LinkedHashSet<BType> intersection = narrowingTypes.stream().map(type -> {
        // Assignability in either direction picks the narrower side directly.
        if (isAssignable(type, lhsType)) {
            return type;
        } else if (isAssignable(lhsType, type)) {
            return lhsType;
        } else if (lhsType.tag == TypeTags.FINITE) {
            // Keep only the finite values that belong to the narrowing type.
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) lhsType, type);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.FINITE) {
            BType intersectionType = getTypeForFiniteTypeValuesAssignableToType((BFiniteType) type, lhsType);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (lhsType.tag == TypeTags.UNION) {
            // Keep only the union members that intersect the narrowing type.
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.UNION) {
            BType intersectionType = getTypeForUnionTypeMembersAssignableToType((BUnionType) type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.NULL_SET) {
            // Intersecting with "never" yields "never".
            return type;
        } else if (type.tag == TypeTags.ERROR && lhsType.tag == TypeTags.ERROR) {
            BType intersectionType = getIntersectionForErrorTypes(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.RECORD) {
            BType intersectionType = createRecordIntersection(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.RECORD) {
            BType intersectionType = createRecordAndMapIntersection(lhsType, type, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.RECORD && lhsType.tag == TypeTags.MAP) {
            BType intersectionType = createRecordAndMapIntersection(type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        } else if (type.tag == TypeTags.MAP && lhsType.tag == TypeTags.MAP) {
            BType intersectionType = createRecordAndMapIntersection(type, lhsType, env);
            if (intersectionType != symTable.semanticError) {
                return intersectionType;
            }
        }
        // null marks "no intersection" for this member; filtered out below.
        return null;
    }).filter(type -> type != null).collect(Collectors.toCollection(LinkedHashSet::new));
    if (intersection.isEmpty()) {
        if (lhsType.tag == TypeTags.NULL_SET) {
            return lhsType;
        }
        return symTable.semanticError;
    }
    if (intersection.contains(symTable.semanticError)) {
        return symTable.semanticError;
    } else if (intersection.size() == 1) {
        return intersection.toArray(new BType[0])[0];
    } else {
        return BUnionType.create(null, intersection);
    }
}
/**
 * Intersects two error types by intersecting their detail types. Sealed detail records
 * cannot be intersected here, yielding semanticError.
 *
 * @param lhsType the first error type
 * @param rhsType the second error type
 * @param env     the current symbol environment
 * @return a new error type with the intersected detail type, or semanticError
 */
private BType getIntersectionForErrorTypes(BType lhsType, BType rhsType, SymbolEnv env) {
    BType lhsDetailType = ((BErrorType) lhsType).detailType;
    BType rhsDetailType = ((BErrorType) rhsType).detailType;
    if (isSealedRecord(lhsDetailType) || isSealedRecord(rhsDetailType)) {
        return symTable.semanticError;
    }
    BType detailIntersectionType = getTypeIntersection(lhsDetailType, rhsDetailType, env);
    if (detailIntersectionType == symTable.semanticError) {
        return symTable.semanticError;
    }
    return createErrorType(lhsType, rhsType, detailIntersectionType, env);
}
/**
 * Builds an anonymous record combining the fields and rest field types of two records;
 * fails with semanticError when fields of the same name have no common type.
 *
 * @param recordTypeOne the first record type
 * @param recordTypeTwo the second record type
 * @param env           the current symbol environment
 * @return the intersected record type, or semanticError
 */
private BType createRecordIntersection(BType recordTypeOne, BType recordTypeTwo, SymbolEnv env) {
    BRecordType intersectionRecord = createAnonymousRecord(env);
    boolean fieldsValid = populateRecordFields(intersectionRecord, recordTypeOne, env, null)
            && populateRecordFields(intersectionRecord, recordTypeTwo, env, null);
    if (!fieldsValid) {
        return symTable.semanticError;
    }
    BType restIntersection = getTypeIntersection(((BRecordType) recordTypeOne).restFieldType,
                                                 ((BRecordType) recordTypeTwo).restFieldType, env);
    if (restIntersection == symTable.semanticError) {
        return symTable.semanticError;
    }
    intersectionRecord.restFieldType = restIntersection;
    return intersectionRecord;
}
/**
 * Creates an empty anonymous record type (with a fresh symbol, scope and a default
 * nil-returning init function) in the current package.
 *
 * @param env the current symbol environment
 * @return the newly created anonymous record type
 */
private BRecordType createAnonymousRecord(SymbolEnv env) {
    EnumSet<Flag> recordFlags = EnumSet.of(Flag.PUBLIC, Flag.ANONYMOUS);
    BRecordTypeSymbol recordSymbol = Symbols.createRecordSymbol(Flags.asMask(recordFlags), Names.EMPTY,
                                                                env.enclPkg.packageID, null,
                                                                env.scope.owner, null, VIRTUAL);
    recordSymbol.name = names.fromString(anonymousModelHelper.getNextAnonymousTypeKey(env.enclPkg.packageID));
    // Attach a default no-arg init function returning nil.
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(Flags.PUBLIC, Names.EMPTY,
                                                                   env.enclPkg.symbol.pkgID, initFuncType,
                                                                   env.scope.owner, false,
                                                                   symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    recordSymbol.initializerFunc = new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol,
                                                         initFuncType, symTable.builtinPos);
    recordSymbol.scope = new Scope(recordSymbol);
    // Wire the type and its symbol to each other.
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.tsymbol = recordSymbol;
    recordSymbol.type = recordType;
    return recordType;
}
/**
 * Builds an anonymous record that intersects a record (or map) type with a map type:
 * each field and the rest field type must intersect the map's constraint.
 *
 * @param type    the record or map type
 * @param mapType the map type (must be a BMapType)
 * @param env     the current symbol environment
 * @return the intersected record type, or semanticError
 */
private BType createRecordAndMapIntersection(BType type, BType mapType, SymbolEnv env) {
    BMapType constraintMapType = (BMapType) mapType;
    BRecordType intersectionRecord = createAnonymousRecord(env);
    if (!populateRecordFields(intersectionRecord, type, env, constraintMapType.constraint)) {
        return symTable.semanticError;
    }
    BType restIntersection = getRestFieldIntersectionType(type, constraintMapType, env);
    if (restIntersection == symTable.semanticError) {
        return symTable.semanticError;
    }
    intersectionRecord.restFieldType = restIntersection;
    return intersectionRecord;
}
/**
 * Intersects the "rest" portion of a record or map type with a map's constraint type.
 *
 * @param type    a record (rest field type used) or map (constraint used)
 * @param mapType the map whose constraint is intersected
 * @param env     the current symbol environment
 * @return the intersected rest field type, or semanticError
 */
private BType getRestFieldIntersectionType(BType type, BMapType mapType, SymbolEnv env) {
    BType restPortion = type.tag == TypeTags.RECORD
            ? ((BRecordType) type).restFieldType
            : ((BMapType) type).constraint;
    return getTypeIntersection(restPortion, mapType.constraint, env);
}
/**
 * Creates an intersection error type carrying the given detail type and the combined
 * flags of both source error types.
 *
 * @param lhsType    the first error type (its flags seed the new type)
 * @param rhsType    the second error type (its flags are merged in)
 * @param detailType the intersected detail type
 * @param env        the current symbol environment
 * @return the new error type
 */
private BErrorType createErrorType(BType lhsType, BType rhsType, BType detailType, SymbolEnv env) {
    BErrorType intersectionErrorType = createErrorType(detailType, lhsType.flags, env);
    intersectionErrorType.tsymbol.flags |= rhsType.flags;
    return intersectionErrorType;
}
/**
 * Creates a fresh error type with the given detail type and flags, backed by a new
 * anonymous error symbol and an empty type-id set.
 *
 * @param detailType the error's detail record/map type
 * @param flags      symbol flags for the new error type
 * @param env        the current symbol environment
 * @return the new error type
 */
public BErrorType createErrorType(BType detailType, long flags, SymbolEnv env) {
    BErrorTypeSymbol errorSymbol = Symbols.createErrorSymbol(flags, Names.EMPTY, env.enclPkg.symbol.pkgID,
                                                             null, env.scope.owner, null, VIRTUAL);
    BErrorType errorType = new BErrorType(errorSymbol, detailType);
    errorType.flags |= errorSymbol.flags;
    errorSymbol.type = errorType;
    errorType.typeIdSet = BTypeIdSet.emptySet();
    return errorType;
}
/**
 * Copies the fields of {@code originalType} (when it is a record) into the record being
 * built, validating each field against already-populated fields and an optional map
 * constraint. Non-record original types contribute no fields and succeed trivially.
 *
 * @param recordType   the record type being built; validated fields are added to it
 * @param originalType the type whose fields are copied
 * @param env          the current symbol environment
 * @param constraint   optional map-constraint each field must intersect; may be null
 * @return false when some field has no valid type in the intersection, true otherwise
 */
private boolean populateRecordFields(BRecordType recordType, BType originalType, SymbolEnv env, BType constraint) {
    BTypeSymbol intersectionRecordSymbol = recordType.tsymbol;
    if (originalType.getKind() != TypeKind.RECORD) {
        return true;
    }
    BRecordType originalRecordType = (BRecordType) originalType;
    LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
    for (BField origField : originalRecordType.fields.values()) {
        org.wso2.ballerinalang.compiler.util.Name origFieldName = origField.name;
        String nameString = origFieldName.value;
        // semanticError means the field clashes with an existing field or the constraint.
        BType recordFieldType = validateRecordField(recordType, origField, constraint, env);
        if (recordFieldType == symTable.semanticError) {
            return false;
        }
        BVarSymbol recordFieldSymbol = new BVarSymbol(origField.symbol.flags, origFieldName,
                                                      env.enclPkg.packageID, recordFieldType,
                                                      intersectionRecordSymbol, origField.pos, SOURCE);
        if (recordFieldType.tag == TypeTags.INVOKABLE && recordFieldType.tsymbol != null) {
            // NOTE(review): recordFieldSymbol is constructed above as a plain BVarSymbol, so
            // this downcast to BInvokableSymbol looks like it would fail at runtime for
            // function-typed fields — confirm whether this branch is actually reachable.
            BInvokableTypeSymbol tsymbol = (BInvokableTypeSymbol) recordFieldType.tsymbol;
            BInvokableSymbol invokableSymbol = (BInvokableSymbol) recordFieldSymbol;
            invokableSymbol.params = tsymbol.params;
            invokableSymbol.restParam = tsymbol.restParam;
            invokableSymbol.retType = tsymbol.returnType;
            invokableSymbol.flags = tsymbol.flags;
        }
        fields.put(nameString, new BField(origFieldName, null, recordFieldSymbol));
    }
    recordType.fields.putAll(fields);
    return true;
}
/**
 * Determines the type a field should take in an intersection record: it must be compatible
 * with any same-named field already present and, when given, intersect the map constraint.
 *
 * @param recordType the record being built
 * @param origField  the incoming field
 * @param constraint optional map-constraint to intersect with; may be null
 * @param env        the current symbol environment
 * @return the validated field type; null when an optional field should simply be dropped;
 *         semanticError when the field is invalid in the intersection
 */
private BType validateRecordField(BRecordType recordType, BField origField, BType constraint, SymbolEnv env) {
    BType fieldType = validateOverlappingFields(recordType, origField);
    if (fieldType == symTable.semanticError || constraint == null) {
        return fieldType;
    }
    BType constrainedFieldType = getTypeIntersection(fieldType, constraint, env);
    if (constrainedFieldType != symTable.semanticError) {
        return constrainedFieldType;
    }
    // An optional field that cannot satisfy the constraint is omitted (signalled by null).
    return Symbols.isOptional(origField.symbol) ? null : symTable.semanticError;
}
/**
 * Resolves the type of a field against any same-named field already in the record: the
 * narrower of the two assignable types wins; incompatible types yield semanticError.
 *
 * @param recordType the record being built
 * @param origField  the incoming field
 * @return the resolved field type, or semanticError for incompatible overlaps
 */
private BType validateOverlappingFields(BRecordType recordType, BField origField) {
    BField existingField = recordType.fields.get(origField.name.value);
    if (existingField == null) {
        return origField.type;
    }
    if (isAssignable(existingField.type, origField.type)) {
        return existingField.type;
    }
    return isAssignable(origField.type, existingField.type) ? origField.type : symTable.semanticError;
}
/**
 * Narrows a union type by removing every member assignable to one of {@code removeTypes};
 * finite members are additionally pruned value-by-value.
 *
 * @param originalType the union type to narrow
 * @param removeTypes  the flattened types being subtracted
 * @return the remaining type (a single type, a narrower union, or NULL_SET when empty)
 */
private BType getRemainingType(BUnionType originalType, List<BType> removeTypes) {
    List<BType> remainingTypes = getAllTypes(originalType);
    for (BType removeType : removeTypes) {
        remainingTypes.removeIf(type -> isAssignable(type, removeType));
    }
    // Finite members are replaced by versions with the matched values pruned.
    List<BType> finiteTypesToReplace = new ArrayList<>();
    List<BType> prunedFiniteTypes = new ArrayList<>();
    for (BType remainingType : remainingTypes) {
        if (remainingType.tag != TypeTags.FINITE) {
            continue;
        }
        finiteTypesToReplace.add(remainingType);
        BType prunedFiniteType = getRemainingType((BFiniteType) remainingType, removeTypes);
        if (prunedFiniteType != symTable.semanticError) {
            prunedFiniteTypes.add(prunedFiniteType);
        }
    }
    remainingTypes.removeAll(finiteTypesToReplace);
    remainingTypes.addAll(prunedFiniteTypes);
    if (remainingTypes.size() == 1) {
        return remainingTypes.get(0);
    }
    if (remainingTypes.isEmpty()) {
        return symTable.nullSet;
    }
    return BUnionType.create(null, new LinkedHashSet<>(remainingTypes));
}
/**
 * Narrows a finite type by dropping every value that belongs to one of {@code removeTypes}.
 *
 * @param originalType the finite type to narrow
 * @param removeTypes  the flattened types being subtracted
 * @return a new finite type with the remaining values, or semanticError when none remain
 */
private BType getRemainingType(BFiniteType originalType, List<BType> removeTypes) {
    Set<BLangExpression> remainingValueSpace = new LinkedHashSet<>();
    for (BLangExpression valueExpr : originalType.getValueSpace()) {
        if (!matchesAnyRemovedType(valueExpr, removeTypes)) {
            remainingValueSpace.add(valueExpr);
        }
    }
    if (remainingValueSpace.isEmpty()) {
        return symTable.semanticError;
    }
    BTypeSymbol finiteTypeSymbol = Symbols.createTypeSymbol(SymTag.FINITE_TYPE, originalType.tsymbol.flags,
                                                            names.fromString("$anonType$" + UNDERSCORE +
                                                                                     finiteTypeCount++),
                                                            originalType.tsymbol.pkgID, null,
                                                            originalType.tsymbol.owner, originalType.tsymbol.pos,
                                                            VIRTUAL);
    BFiniteType remainingFiniteType = new BFiniteType(finiteTypeSymbol, remainingValueSpace);
    finiteTypeSymbol.type = remainingFiniteType;
    return remainingFiniteType;
}

/**
 * Checks whether a finite-type value matches (is covered by) any of the removed types.
 */
private boolean matchesAnyRemovedType(BLangExpression valueExpr, List<BType> removeTypes) {
    for (BType removeType : removeTypes) {
        if (isAssignable(valueExpr.type, removeType)
                || isAssignableToFiniteType(removeType, (BLangLiteral) valueExpr)) {
            return true;
        }
    }
    return false;
}
/**
 * Returns the "safe" variant of a type with nil and/or error lifted out: nilable built-in
 * types get non-nilable copies, and unions lose their nil/error members.
 *
 * @param type      the type to lift from
 * @param liftNil   whether to remove nil
 * @param liftError whether to remove error
 * @return the lifted type (possibly the type itself when nothing applies)
 */
public BType getSafeType(BType type, boolean liftNil, boolean liftError) {
    switch (type.tag) {
        case TypeTags.JSON:
            return new BJSONType((BJSONType) type, false);
        case TypeTags.ANY:
            return new BAnyType(type.tag, type.tsymbol, false);
        case TypeTags.ANYDATA:
            return new BAnydataType((BAnydataType) type, false);
        case TypeTags.READONLY:
            return new BReadonlyType(type.tag, type.tsymbol, false);
        default:
            break;
    }
    if (type.tag != TypeTags.UNION) {
        return type;
    }
    // Copy the member set so the original union is left untouched.
    LinkedHashSet<BType> memberTypes = new LinkedHashSet<>(((BUnionType) type).getMemberTypes());
    BUnionType liftedType = BUnionType.create(null, memberTypes);
    if (liftNil) {
        liftedType.remove(symTable.nilType);
    }
    if (liftError) {
        liftedType.remove(symTable.errorType);
    }
    Set<BType> remainingMembers = liftedType.getMemberTypes();
    if (remainingMembers.size() == 1) {
        return remainingMembers.iterator().next();
    }
    return liftedType;
}
/**
 * Recursively flattens a (possibly nested) union into a list of its non-union member
 * types; a non-union type yields a singleton list.
 *
 * @param type the type to flatten
 * @return the flattened member types
 */
public List<BType> getAllTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return Lists.of(type);
    }
    List<BType> flattenedMembers = new ArrayList<>();
    for (BType memberType : ((BUnionType) type).getMemberTypes()) {
        flattenedMembers.addAll(getAllTypes(memberType));
    }
    return flattenedMembers;
}
/**
 * Checks whether a type may be used as the type of a constant declaration: simple basic
 * types, maps of allowed types, and finite types over allowed value types.
 *
 * @param type the candidate constant type
 * @return true when constants of this type are allowed
 */
public boolean isAllowedConstantType(BType type) {
    switch (type.tag) {
        case TypeTags.BOOLEAN:
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.NIL:
            return true;
        case TypeTags.MAP:
            return isAllowedConstantType(((BMapType) type).constraint);
        case TypeTags.FINITE:
            // Constant finite types carry a single value; check that value's type.
            BLangExpression finiteValue = ((BFiniteType) type).getValueSpace().iterator().next();
            return isAllowedConstantType(finiteValue.type);
        default:
            return false;
    }
}
/**
 * Checks whether a literal of one basic type is a valid value for the given target type,
 * covering implicit int→byte/float/decimal, float→decimal, the sized integer subtypes,
 * and single-character strings for `string:Char`.
 *
 * @param literal    the literal expression
 * @param targetType the type the literal must fit
 * @return true when the literal is valid for the target type
 */
public boolean isValidLiteral(BLangLiteral literal, BType targetType) {
    int literalTag = literal.type.tag;
    int targetTag = targetType.tag;
    if (literalTag == targetTag) {
        return true;
    }
    if (targetTag == TypeTags.BYTE) {
        return literalTag == TypeTags.INT && isByteLiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.DECIMAL) {
        return literalTag == TypeTags.FLOAT || literalTag == TypeTags.INT;
    }
    if (targetTag == TypeTags.FLOAT) {
        return literalTag == TypeTags.INT;
    }
    if (targetTag == TypeTags.SIGNED32_INT) {
        return literalTag == TypeTags.INT && isSigned32LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.SIGNED16_INT) {
        return literalTag == TypeTags.INT && isSigned16LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.SIGNED8_INT) {
        return literalTag == TypeTags.INT && isSigned8LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.UNSIGNED32_INT) {
        return literalTag == TypeTags.INT && isUnsigned32LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.UNSIGNED16_INT) {
        return literalTag == TypeTags.INT && isUnsigned16LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.UNSIGNED8_INT) {
        return literalTag == TypeTags.INT && isUnsigned8LiteralValue((Long) literal.value);
    }
    if (targetTag == TypeTags.CHAR_STRING) {
        return literalTag == TypeTags.STRING && isCharLiteralValue((String) literal.value);
    }
    return false;
}
/**
 * Validates that the given function's return type is a subtype of `error?` that includes
 * `()`; logs the given diagnostic otherwise.
 *
 * @param function       the function whose return type is validated
 * @param diagnosticCode the code to log when the return type is invalid
 */
public void validateErrorOrNilReturn(BLangFunction function, DiagnosticCode diagnosticCode) {
    BType returnType = function.returnTypeNode.type;
    if (returnType.tag == TypeTags.NIL) {
        return;
    }
    if (returnType.tag == TypeTags.UNION && returnType.isNullable()) {
        boolean onlyNilAndError = true;
        for (BType memberType : ((BUnionType) returnType).getMemberTypes()) {
            if (memberType.tag != TypeTags.NIL && memberType.tag != TypeTags.ERROR) {
                onlyNilAndError = false;
                break;
            }
        }
        if (onlyNilAndError) {
            return;
        }
    }
    dlog.error(function.returnTypeNode.pos, diagnosticCode, function.returnTypeNode.type.toString());
}
/**
 * An ordered (source, target) pair of types, used as a memoization key while resolving
 * recursive type relations.
 *
 * @since 0.982.0
 */
private static class TypePair {
    BType sourceType;
    BType targetType;

    public TypePair(BType sourceType, BType targetType) {
        this.sourceType = sourceType;
        this.targetType = targetType;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof TypePair)) {
            return false;
        }
        TypePair that = (TypePair) obj;
        return sourceType.equals(that.sourceType) && targetType.equals(that.targetType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(sourceType, targetType);
    }
}
/**
 * A functional interface for parameterizing the kind of type check (same-type,
 * assignability, equivalence, ...) applied to a source/target type pair, threading the
 * set of in-progress pairs to break recursion on cyclic types.
 *
 * @since 0.995.0
 */
private interface TypeEqualityPredicate {
    boolean test(BType source, BType target, Set<TypePair> unresolvedTypes);
}
/**
 * Checks whether a type has an implicit filler value (the value used to fill elements when
 * a list is lengthened), per the Ballerina spec's filler-value rules.
 *
 * @param type the type to check
 * @return true when the type has a filler value
 */
public boolean hasFillerValue(BType type) {
    switch (type.tag) {
        case TypeTags.INT:
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.STRING:
        case TypeTags.BOOLEAN:
        case TypeTags.JSON:
        case TypeTags.XML:
        case TypeTags.NIL:
        case TypeTags.TABLE:
        case TypeTags.ANYDATA:
        case TypeTags.MAP:
        case TypeTags.ANY:
            return true;
        case TypeTags.ARRAY:
            return checkFillerValue((BArrayType) type);
        case TypeTags.FINITE:
            return checkFillerValue((BFiniteType) type);
        case TypeTags.UNION:
            return checkFillerValue((BUnionType) type);
        case TypeTags.OBJECT:
            return checkFillerValue((BObjectType) type);
        case TypeTags.RECORD:
            return checkFillerValue((BRecordType) type);
        case TypeTags.TUPLE:
            // A tuple fills only if every member type fills.
            for (BType memberType : ((BTupleType) type).getTupleTypes()) {
                if (!hasFillerValue(memberType)) {
                    return false;
                }
            }
            return true;
        default:
            // The sized integer subtypes (SignedN/UnsignedN) also fill with 0.
            return TypeTags.isIntegerTypeTag(type.tag);
    }
}
/**
 * Checks whether an object type has a filler value: it must be a class whose init
 * function (if any) returns nil and takes only defaultable parameters.
 *
 * @param type the object type to check
 * @return true when `new ()` would succeed for the type
 */
private boolean checkFillerValue(BObjectType type) {
    if ((type.tsymbol.flags & Flags.CLASS) != Flags.CLASS) {
        return false;
    }
    BAttachedFunction initFunction = ((BObjectTypeSymbol) type.tsymbol).initializerFunc;
    if (initFunction == null) {
        // No user-defined init: `new ()` trivially succeeds.
        return true;
    }
    if (initFunction.symbol.getReturnType().getKind() != TypeKind.NIL) {
        return false;
    }
    for (BVarSymbol param : initFunction.symbol.getParameters()) {
        if (!param.defaultableParam) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether a finite type has a filler value. A nilable finite type fills with `()`;
 * a singleton fills with its only value; otherwise every value must share the same basic
 * type and that basic type's implicit default value must be in the value space.
 * Precondition: the value space has at least one element.
 *
 * @param type the finite type to check
 * @return true when the type has a valid filler value
 */
private boolean checkFillerValue(BFiniteType type) {
    if (type.isNullable()) {
        return true;
    }
    Set<BLangExpression> valueSpace = type.getValueSpace();
    if (valueSpace.size() == 1) {
        return true;
    }
    Iterator<BLangExpression> valueIterator = valueSpace.iterator();
    BLangExpression firstValue = valueIterator.next();
    boolean hasDefaultFillValue = isImplicitDefaultValue(firstValue);
    while (valueIterator.hasNext()) {
        BLangExpression value = valueIterator.next();
        if (!isSameBasicType(value.type, firstValue.type)) {
            // Mixed basic types cannot share a single filler value.
            return false;
        }
        hasDefaultFillValue = hasDefaultFillValue || isImplicitDefaultValue(value);
    }
    return hasDefaultFillValue;
}
/**
 * Checks whether the given value space contains the implicit default value (0, "",
 * false, or ()) of its basic type.
 *
 * @param valueSpace the finite type's value space
 * @return true when some member is an implicit default value
 */
private boolean hasImplicitDefaultValue(Set<BLangExpression> valueSpace) {
    return valueSpace.stream().anyMatch(this::isImplicitDefaultValue);
}
/**
 * Checks whether a union type has a filler value: nilable unions fill with `()`;
 * otherwise all members (with finite members expanded to their value types) must share
 * one basic type, some member must itself have a filler value, and when finite members
 * are present the implicit default value must appear in one of their value spaces.
 *
 * @param type the union type to check
 * @return true when the union has a valid filler value
 */
private boolean checkFillerValue(BUnionType type) {
    if (type.isNullable()) {
        return true;
    }
    Set<BType> memberTypes = new HashSet<>();
    boolean hasFillerValue = false;
    boolean defaultValuePresent = false;
    boolean finiteTypePresent = false;
    for (BType member : type.getMemberTypes()) {
        if (member.tag == TypeTags.FINITE) {
            // Expand finite members to the basic types of their values.
            Set<BType> uniqueValues = getValueTypes(((BFiniteType) member).getValueSpace());
            memberTypes.addAll(uniqueValues);
            if (!defaultValuePresent && hasImplicitDefaultValue(((BFiniteType) member).getValueSpace())) {
                defaultValuePresent = true;
            }
            finiteTypePresent = true;
        } else {
            memberTypes.add(member);
        }
        if (!hasFillerValue && hasFillerValue(member)) {
            hasFillerValue = true;
        }
    }
    if (!hasFillerValue) {
        return false;
    }
    // All collected member types must share the same basic type.
    Iterator<BType> iterator = memberTypes.iterator();
    BType firstMember = iterator.next();
    while (iterator.hasNext()) {
        if (!isSameBasicType(firstMember, iterator.next())) {
            return false;
        }
    }
    // With finite members, the basic type's implicit default must be among their values.
    if (finiteTypePresent) {
        return defaultValuePresent;
    }
    return true;
}
/**
 * Checks whether two types are the same, treating all integer subtypes as one basic type.
 *
 * @param source the first type
 * @param target the second type
 * @return true when the two types share the same basic type
 */
private boolean isSameBasicType(BType source, BType target) {
    return isSameType(source, target)
            || (TypeTags.isIntegerTypeTag(source.tag) && TypeTags.isIntegerTypeTag(target.tag));
}
/**
 * Collects the distinct types of the expressions in a finite type's value space.
 *
 * @param valueSpace the finite type's value space
 * @return the set of distinct value types
 */
private Set<BType> getValueTypes(Set<BLangExpression> valueSpace) {
    return valueSpace.stream().map(expression -> expression.type).collect(Collectors.toSet());
}
/**
 * Checks whether a literal expression is the implicit default (filler) value of its basic
 * type: 0 for integers, "" for strings, 0.0 for float/decimal, false for boolean, and ()
 * for nil. Non-literal expressions are never implicit defaults.
 *
 * @param expression the candidate expression
 * @return true when the expression is its type's implicit default value
 */
private boolean isImplicitDefaultValue(BLangExpression expression) {
    if ((expression.getKind() == NodeKind.LITERAL) || (expression.getKind() == NodeKind.NUMERIC_LITERAL)) {
        BLangLiteral literalExpression = (BLangLiteral) expression;
        BType literalExprType = literalExpression.type;
        Object value = literalExpression.getValue();
        switch (literalExprType.getKind()) {
            case INT:
            case BYTE:
                return value.equals(Long.valueOf(0));
            case STRING:
                return value == null || value.equals("");
            case DECIMAL:
            case FLOAT:
                // NOTE(review): compares the literal's value against the string "0.0"; this
                // assumes float/decimal literal values are stored as strings here — confirm
                // against the parser's literal representation.
                return value.equals(String.valueOf(0.0));
            case BOOLEAN:
                return value.equals(Boolean.valueOf(false));
            case NIL:
                return true;
            default:
                return false;
        }
    }
    return false;
}
/**
 * Checks whether a record type has a filler value: no field may be explicitly required
 * (optional and defaultable fields are fine).
 *
 * @param type the record type to check
 * @return true when `{}` is a valid value of the record type
 */
private boolean checkFillerValue(BRecordType type) {
    for (BField field : type.fields.values()) {
        long fieldFlags = field.symbol.flags;
        if (!Symbols.isFlagOn(fieldFlags, Flags.OPTIONAL) && Symbols.isFlagOn(fieldFlags, Flags.REQUIRED)) {
            return false;
        }
    }
    return true;
}
/**
 * Checks whether an array type has a filler value: open arrays fill with `[]`, while
 * fixed-length arrays need a filler value for their element type.
 *
 * @param type the array type to check
 * @return true when the array type has a filler value
 */
private boolean checkFillerValue(BArrayType type) {
    return type.size == -1 || hasFillerValue(type.eType);
}
/**
 * Gets the element/row type produced by a query expression's result type: the constraint
 * of a stream/table, the element type of an array, or — for a union result — the first
 * member that is a stream/table/array/string/xml.
 *
 * @param type type of the query expression.
 * @return the resolved result type, or the type itself when no case applies.
 */
public BType resolveExprType(BType type) {
    switch (type.tag) {
        case TypeTags.STREAM:
            return ((BStreamType) type).constraint;
        case TypeTags.TABLE:
            return ((BTableType) type).constraint;
        case TypeTags.ARRAY:
            return ((BArrayType) type).eType;
        case TypeTags.UNION:
            List<BType> exprTypes = new ArrayList<>(((BUnionType) type).getMemberTypes());
            for (BType returnType : exprTypes) {
                switch (returnType.tag) {
                    case TypeTags.STREAM:
                        return ((BStreamType) returnType).constraint;
                    case TypeTags.TABLE:
                        return ((BTableType) returnType).constraint;
                    case TypeTags.ARRAY:
                        return ((BArrayType) returnType).eType;
                    case TypeTags.STRING:
                    case TypeTags.XML:
                        return returnType;
                }
            }
            // Intentional fall-through: a union with no resolvable member returns the type itself.
        default:
            return type;
    }
}
/** Returns true for the simple basic types: int/string subtypes, byte, float, decimal, boolean, nil. */
private boolean isSimpleBasicType(int tag) {
    if (TypeTags.isIntegerTypeTag(tag) || TypeTags.isStringTypeTag(tag)) {
        return true;
    }
    switch (tag) {
        case TypeTags.BYTE:
        case TypeTags.FLOAT:
        case TypeTags.DECIMAL:
        case TypeTags.BOOLEAN:
        case TypeTags.NIL:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether a type is an ordered type.
 *
 * @param type type.
 * @return boolean whether the type is an ordered type or not.
 */
public boolean isOrderedType(BType type) {
    switch (type.tag) {
        case TypeTags.UNION:
            Set<BType> members = ((BUnionType) type).getMemberTypes();
            for (BType member : members) {
                if (!isOrderedType(member)) {
                    return false;
                }
            }
            // Among unions only the optional form T? is ordered: at most two
            // members, one of which must be nil.
            return members.size() <= 2 && members.contains(symTable.nilType);
        case TypeTags.ARRAY:
            // An array is ordered iff its element type is ordered.
            return isOrderedType(((BArrayType) type).eType);
        default:
            return isSimpleBasicType(type.tag);
    }
}
/** True when the type is a simple basic type, or a union made up solely of them. */
public boolean isUnionOfSimpleBasicTypes(BType type) {
    if (type.tag != TypeTags.UNION) {
        return isSimpleBasicType(type.tag);
    }
    for (BType member : ((BUnionType) type).getMemberTypes()) {
        if (!isSimpleBasicType(member.tag)) {
            return false;
        }
    }
    return true;
}
/**
 * True when the type is readonly (inherently immutable or flagged READONLY),
 * an isolated object, or a union whose every member satisfies the same rule.
 */
public boolean isSubTypeOfReadOnlyOrIsolatedObjectUnion(BType type) {
    if (isInherentlyImmutableType(type) || Symbols.isFlagOn(type.flags, Flags.READONLY)) {
        return true;
    }
    switch (type.tag) {
        case TypeTags.OBJECT:
            return isIsolated(type);
        case TypeTags.UNION:
            for (BType member : ((BUnionType) type).getMemberTypes()) {
                if (!isSubTypeOfReadOnlyOrIsolatedObjectUnion(member)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
// True when the type carries the ISOLATED flag.
private boolean isIsolated(BType type) {
return Symbols.isFlagOn(type.flags, Flags.ISOLATED);
}
/**
 * Validates that an object type satisfies the listener contract: public
 * attach/detach/start/gracefulStop/immediateStop methods with the expected
 * signatures, each returning error? (error or nil).
 */
private static class ListenerValidationModel {
    private final Types types;
    private final SymbolTable symtable;
    // string|string[]|() — the allowed type of attach()'s second parameter.
    private final BType serviceNameType;
    boolean attachFound;
    boolean detachFound;
    boolean startFound;
    boolean gracefulStopFound;
    boolean immediateStopFound;

    public ListenerValidationModel(Types types, SymbolTable symTable) {
        this.types = types;
        this.symtable = symTable;
        this.serviceNameType =
                BUnionType.create(null, symtable.stringType, symtable.arrayStringType, symtable.nilType);
    }

    /** True only once every required listener method has been seen and validated. */
    boolean isValidListener() {
        return attachFound && detachFound && startFound && gracefulStopFound && immediateStopFound;
    }

    /**
     * Walks the attached functions, validating each listener method by name.
     * Returns false as soon as a method with a listener name fails its check;
     * otherwise returns whether the full listener contract was satisfied.
     */
    private boolean checkMethods(List<BAttachedFunction> rhsFuncs) {
        for (BAttachedFunction func : rhsFuncs) {
            switch (func.funcName.value) {
                case "attach":
                    if (!checkAttachMethod(func)) {
                        return false;
                    }
                    break;
                case "detach":
                    if (!checkDetachMethod(func)) {
                        return false;
                    }
                    break;
                case "start":
                    if (!checkStartMethod(func)) {
                        // FIX: this previously returned true, reporting the
                        // listener as valid when 'start' failed validation.
                        // Every sibling case bails out with false on failure.
                        return false;
                    }
                    break;
                case "gracefulStop":
                    if (!checkGracefulStop(func)) {
                        return false;
                    }
                    break;
                case "immediateStop":
                    if (!checkImmediateStop(func)) {
                        return false;
                    }
                    break;
            }
        }
        return isValidListener();
    }

    /** True when the function takes no parameters and declares no rest parameter. */
    private boolean emptyParamList(BAttachedFunction func) {
        // FIX: the original compared with '!=', which is true only when a rest
        // parameter IS present — the opposite of an empty parameter list. An
        // absent rest parameter appears as symtable.noType here (assumption
        // based on the surrounding compiler conventions — TODO confirm against
        // BInvokableType).
        return func.type.paramTypes.isEmpty() && func.type.restType == symtable.noType;
    }

    /** True when the function is public and its return type is assignable to error?. */
    private boolean publicAndReturnsErrorOrNil(BAttachedFunction func) {
        if (!Symbols.isPublic(func.symbol)) {
            return false;
        }
        if (!types.isAssignable(func.type.retType, symtable.errorOrNilType)) {
            return false;
        }
        return true;
    }

    /** Combines the public/error? check with an empty parameter list. */
    private boolean isPublicNoParamReturnsErrorOrNil(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (!emptyParamList(func)) {
            return false;
        }
        return true;
    }

    private boolean checkImmediateStop(BAttachedFunction func) {
        return immediateStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkGracefulStop(BAttachedFunction func) {
        return gracefulStopFound = isPublicNoParamReturnsErrorOrNil(func);
    }

    private boolean checkStartMethod(BAttachedFunction func) {
        return startFound = publicAndReturnsErrorOrNil(func);
    }

    /** Expected shape: public function detach(service object) returns error?. */
    private boolean checkDetachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 1) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        boolean isMatchingSignature = firstParamType.tag == TypeTags.OBJECT
                && Symbols.isService(firstParamType.tsymbol);
        return detachFound = isMatchingSignature;
    }

    /** Expected shape: public function attach(service object, string|string[]? name) returns error?. */
    private boolean checkAttachMethod(BAttachedFunction func) {
        if (!publicAndReturnsErrorOrNil(func)) {
            return false;
        }
        if (func.type.paramTypes.size() != 2) {
            return false;
        }
        BType firstParamType = func.type.paramTypes.get(0);
        if (firstParamType.tag != TypeTags.OBJECT) {
            return false;
        }
        if (!Symbols.isService(firstParamType.tsymbol)) {
            return false;
        }
        BType secondParamType = func.type.paramTypes.get(1);
        boolean sameType = types.isAssignable(secondParamType, this.serviceNameType);
        return attachFound = sameType;
    }

    /** True when the type is an object whose symbol is a service. */
    private boolean isServiceObject(BType type) {
        if (type.tag != TypeTags.OBJECT) {
            return false;
        }
        return Symbols.isService(type.tsymbol);
    }
}
} |
BTW, it seems that the precision will always be set to the default — why not change it to a constant? | public ResourceSpec merge(final ResourceSpec other) {
checkNotNull(other, "Cannot merge with null resources");
// UNKNOWN is absorbing: merging with an unknown spec yields UNKNOWN.
if (this.equals(UNKNOWN) || other.equals(UNKNOWN)) {
return UNKNOWN;
}
// Memory fields are summed; cpuCores delegates to ResourceValue.merge.
ResourceSpec target = new ResourceSpec(
this.cpuCores.merge(other.cpuCores).getValue(),
this.heapMemoryInMB + other.heapMemoryInMB,
this.directMemoryInMB + other.directMemoryInMB,
this.nativeMemoryInMB + other.nativeMemoryInMB,
this.stateSizeInMB + other.stateSizeInMB,
this.managedMemoryInMB + other.managedMemoryInMB);
// Extended resources present in both specs are merged pairwise by name.
target.extendedResources.putAll(extendedResources);
for (Resource resource : other.extendedResources.values()) {
target.extendedResources.merge(resource.getName(), resource, (v1, v2) -> v1.merge(v2));
}
return target;
} | this.cpuCores.merge(other.cpuCores).getValue(), | public ResourceSpec merge(final ResourceSpec other) {
checkNotNull(other, "Cannot merge with null resources");
// UNKNOWN is absorbing: merging with an unknown spec yields UNKNOWN.
if (this.equals(UNKNOWN) || other.equals(UNKNOWN)) {
return UNKNOWN;
}
// cpuCores merges as a Resource; memory fields add as MemorySize values.
ResourceSpec target = new ResourceSpec(
this.cpuCores.merge(other.cpuCores),
this.taskHeapMemory.add(other.taskHeapMemory),
this.taskOffHeapMemory.add(other.taskOffHeapMemory),
this.onHeapManagedMemory.add(other.onHeapManagedMemory),
this.offHeapManagedMemory.add(other.offHeapManagedMemory));
// Extended resources present in both specs are merged pairwise by name.
target.extendedResources.putAll(extendedResources);
for (Resource resource : other.extendedResources.values()) {
target.extendedResources.merge(resource.getName(), resource, (v1, v2) -> v1.merge(v2));
}
return target;
} | class ResourceSpec implements Serializable {
private static final long serialVersionUID = 1L;
/**
* A ResourceSpec that indicates an unknown set of resources.
*/
public static final ResourceSpec UNKNOWN = new ResourceSpec();
/**
* The default ResourceSpec used for operators and transformation functions.
* Currently equal to {@link
*/
public static final ResourceSpec DEFAULT = UNKNOWN;
public static final String GPU_NAME = "GPU";
/** How many cpu cores are needed, use double so we can specify cpu like 0.1. */
@Nullable
private final ResourceValue cpuCores;
/** How many java heap memory in mb are needed. */
private final int heapMemoryInMB;
/** How many nio direct memory in mb are needed. */
private final int directMemoryInMB;
/** How many native memory in mb are needed. */
private final int nativeMemoryInMB;
/** How many state size in mb are used. */
private final int stateSizeInMB;
/** The required amount of managed memory (in MB). */
private final int managedMemoryInMB;
private final Map<String, Resource> extendedResources = new HashMap<>(1);
/**
* Creates a new ResourceSpec with full resources.
*
* @param cpuCores The number of CPU cores (possibly fractional, i.e., 0.2 cores)
* @param heapMemoryInMB The size of the java heap memory, in megabytes.
* @param directMemoryInMB The size of the java nio direct memory, in megabytes.
* @param nativeMemoryInMB The size of the native memory, in megabytes.
* @param stateSizeInMB The state size for storing in checkpoint.
* @param managedMemoryInMB The size of managed memory, in megabytes.
* @param extendedResources The extended resources, associated with the resource manager used
*/
private ResourceSpec(
double cpuCores,
int heapMemoryInMB,
int directMemoryInMB,
int nativeMemoryInMB,
int stateSizeInMB,
int managedMemoryInMB,
Resource... extendedResources) {
checkArgument(heapMemoryInMB >= 0, "The heap memory of the resource spec should not be negative");
checkArgument(directMemoryInMB >= 0, "The direct memory of the resource spec should not be negative");
checkArgument(nativeMemoryInMB >= 0, "The native memory of the resource spec should not be negative");
checkArgument(stateSizeInMB >= 0, "The state size of the resource spec should not be negative");
checkArgument(managedMemoryInMB >= 0, "The managed memory of the resource spec should not be negative");
this.cpuCores = new AdditiveResourceValue(cpuCores);
this.heapMemoryInMB = heapMemoryInMB;
this.directMemoryInMB = directMemoryInMB;
this.nativeMemoryInMB = nativeMemoryInMB;
this.stateSizeInMB = stateSizeInMB;
this.managedMemoryInMB = managedMemoryInMB;
for (Resource resource : extendedResources) {
if (resource != null) {
this.extendedResources.put(resource.getName(), resource);
}
}
}
/**
* Creates a new ResourceSpec with all fields unknown.
*/
private ResourceSpec() {
this.cpuCores = null;
this.heapMemoryInMB = -1;
this.directMemoryInMB = -1;
this.nativeMemoryInMB = -1;
this.stateSizeInMB = -1;
this.managedMemoryInMB = -1;
}
/**
* Used by system internally to merge the other resources of chained operators
* when generating the job graph or merge the resource consumed by state backend.
*
* @param other Reference to resource to merge in.
* @return The new resource with merged values.
*/
@Nullable
public ResourceValue getCpuCores() {
return this.cpuCores;
}
public int getHeapMemory() {
return this.heapMemoryInMB;
}
public int getDirectMemory() {
return this.directMemoryInMB;
}
public int getNativeMemory() {
return this.nativeMemoryInMB;
}
public int getStateSize() {
return this.stateSizeInMB;
}
public int getManagedMemory() {
return this.managedMemoryInMB;
}
public ResourceValue getGPUResource() {
Resource gpuResource = extendedResources.get(GPU_NAME);
if (gpuResource != null) {
return gpuResource.getValue();
} else {
return null;
}
}
public Map<String, Resource> getExtendedResources() {
return extendedResources;
}
/**
* Checks the current resource less than or equal with the other resource by comparing
* all the fields in the resource.
*
* @param other The resource to compare
* @return True if current resource is less than or equal with the other resource, otherwise return false.
*/
public boolean lessThanOrEqual(final ResourceSpec other) {
checkNotNull(other);
if (this.isUnknown() && other.isUnknown()) {
return true;
} else if (this.isUnknown() || other.isUnknown()) {
throw new IllegalArgumentException("Cannot compare specified resources with UNKNOWN resources.");
}
int cmp1 = this.cpuCores.compareTo(other.cpuCores);
int cmp2 = Integer.compare(this.heapMemoryInMB, other.heapMemoryInMB);
int cmp3 = Integer.compare(this.directMemoryInMB, other.directMemoryInMB);
int cmp4 = Integer.compare(this.nativeMemoryInMB, other.nativeMemoryInMB);
int cmp5 = Integer.compare(this.stateSizeInMB, other.stateSizeInMB);
int cmp6 = Integer.compare(this.managedMemoryInMB, other.managedMemoryInMB);
if (cmp1 <= 0 && cmp2 <= 0 && cmp3 <= 0 && cmp4 <= 0 && cmp5 <= 0 && cmp6 <= 0) {
for (Resource resource : extendedResources.values()) {
if (!other.extendedResources.containsKey(resource.getName()) ||
other.extendedResources.get(resource.getName()).getValue().compareTo(resource.getValue()) < 0) {
return false;
}
}
return true;
}
return false;
}
public boolean isUnknown() {
return this.equals(UNKNOWN);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (obj != null && obj.getClass() == ResourceSpec.class) {
ResourceSpec that = (ResourceSpec) obj;
return Objects.equals(this.cpuCores, that.cpuCores) &&
this.heapMemoryInMB == that.heapMemoryInMB &&
this.directMemoryInMB == that.directMemoryInMB &&
this.nativeMemoryInMB == that.nativeMemoryInMB &&
this.stateSizeInMB == that.stateSizeInMB &&
this.managedMemoryInMB == that.managedMemoryInMB &&
Objects.equals(this.extendedResources, that.extendedResources);
} else {
return false;
}
}
@Override
public int hashCode() {
int result = cpuCores == null ? 0 : cpuCores.hashCode();
result = 31 * result + heapMemoryInMB;
result = 31 * result + directMemoryInMB;
result = 31 * result + nativeMemoryInMB;
result = 31 * result + stateSizeInMB;
result = 31 * result + managedMemoryInMB;
result = 31 * result + extendedResources.hashCode();
return result;
}
@Override
public String toString() {
StringBuilder extend = new StringBuilder();
for (Resource resource : extendedResources.values()) {
extend.append(", ").append(resource.getName()).append("=").append(resource.getValue());
}
return "ResourceSpec{" +
"cpuCores=" + cpuCores +
", heapMemoryInMB=" + heapMemoryInMB +
", directMemoryInMB=" + directMemoryInMB +
", nativeMemoryInMB=" + nativeMemoryInMB +
", stateSizeInMB=" + stateSizeInMB +
", managedMemoryInMB=" + managedMemoryInMB + extend +
'}';
}
private Object readResolve() {
return this.equals(UNKNOWN) ? UNKNOWN : this;
}
public static Builder newBuilder() {
return new Builder();
}
/**
* Builder for the {@link ResourceSpec}.
*/
public static class Builder {
private double cpuCores;
private int heapMemoryInMB;
private int directMemoryInMB;
private int nativeMemoryInMB;
private int stateSizeInMB;
private int managedMemoryInMB;
private GPUResource gpuResource;
public Builder setCpuCores(double cpuCores) {
this.cpuCores = cpuCores;
return this;
}
public Builder setHeapMemoryInMB(int heapMemory) {
this.heapMemoryInMB = heapMemory;
return this;
}
public Builder setDirectMemoryInMB(int directMemory) {
this.directMemoryInMB = directMemory;
return this;
}
public Builder setNativeMemoryInMB(int nativeMemory) {
this.nativeMemoryInMB = nativeMemory;
return this;
}
public Builder setStateSizeInMB(int stateSize) {
this.stateSizeInMB = stateSize;
return this;
}
public Builder setManagedMemoryInMB(int managedMemory) {
this.managedMemoryInMB = managedMemory;
return this;
}
public Builder setGPUResource(double gpus) {
this.gpuResource = new GPUResource(gpus);
return this;
}
public ResourceSpec build() {
return new ResourceSpec(
cpuCores,
heapMemoryInMB,
directMemoryInMB,
nativeMemoryInMB,
stateSizeInMB,
managedMemoryInMB,
gpuResource);
}
}
} | class ResourceSpec implements Serializable {
private static final long serialVersionUID = 1L;
/**
* A ResourceSpec that indicates an unknown set of resources.
*/
public static final ResourceSpec UNKNOWN = new ResourceSpec();
/**
* The default ResourceSpec used for operators and transformation functions.
* Currently equal to {@link
*/
public static final ResourceSpec DEFAULT = UNKNOWN;
/** How many cpu cores are needed. Can be null only if it is unknown. */
@Nullable
private final Resource cpuCores;
/** How much task heap memory is needed. */
@Nullable
private final MemorySize taskHeapMemory;
/** How much task off-heap memory is needed. */
@Nullable
private final MemorySize taskOffHeapMemory;
/** How much on-heap managed memory is needed. */
@Nullable
private final MemorySize onHeapManagedMemory;
/** How much off-heap managed memory is needed. */
@Nullable
private final MemorySize offHeapManagedMemory;
private final Map<String, Resource> extendedResources = new HashMap<>(1);
private ResourceSpec(
final Resource cpuCores,
final MemorySize taskHeapMemory,
final MemorySize taskOffHeapMemory,
final MemorySize onHeapManagedMemory,
final MemorySize offHeapManagedMemory,
final Resource... extendedResources) {
checkNotNull(cpuCores);
checkArgument(cpuCores instanceof CPUResource, "cpuCores must be CPUResource");
this.cpuCores = cpuCores;
this.taskHeapMemory = checkNotNull(taskHeapMemory);
this.taskOffHeapMemory = checkNotNull(taskOffHeapMemory);
this.onHeapManagedMemory = checkNotNull(onHeapManagedMemory);
this.offHeapManagedMemory = checkNotNull(offHeapManagedMemory);
for (Resource resource : extendedResources) {
if (resource != null) {
this.extendedResources.put(resource.getName(), resource);
}
}
}
/**
* Creates a new ResourceSpec with all fields unknown.
*/
private ResourceSpec() {
this.cpuCores = null;
this.taskHeapMemory = null;
this.taskOffHeapMemory = null;
this.onHeapManagedMemory = null;
this.offHeapManagedMemory = null;
}
/**
* Used by system internally to merge the other resources of chained operators
* when generating the job graph.
*
* @param other Reference to resource to merge in.
* @return The new resource with merged values.
*/
public Resource getCpuCores() {
throwUnsupportedOperationExceptionIfUnknown();
return this.cpuCores;
}
public MemorySize getTaskHeapMemory() {
throwUnsupportedOperationExceptionIfUnknown();
return this.taskHeapMemory;
}
public MemorySize getTaskOffHeapMemory() {
throwUnsupportedOperationExceptionIfUnknown();
return taskOffHeapMemory;
}
public MemorySize getOnHeapManagedMemory() {
throwUnsupportedOperationExceptionIfUnknown();
return onHeapManagedMemory;
}
public MemorySize getOffHeapManagedMemory() {
throwUnsupportedOperationExceptionIfUnknown();
return offHeapManagedMemory;
}
public Resource getGPUResource() {
throwUnsupportedOperationExceptionIfUnknown();
return extendedResources.get(GPUResource.NAME);
}
public Map<String, Resource> getExtendedResources() {
throwUnsupportedOperationExceptionIfUnknown();
return extendedResources;
}
private void throwUnsupportedOperationExceptionIfUnknown() {
if (this.equals(UNKNOWN)) {
throw new UnsupportedOperationException();
}
}
/**
* Checks the current resource less than or equal with the other resource by comparing
* all the fields in the resource.
*
* @param other The resource to compare
* @return True if current resource is less than or equal with the other resource, otherwise return false.
*/
public boolean lessThanOrEqual(final ResourceSpec other) {
checkNotNull(other, "Cannot compare with null resources");
if (this.equals(UNKNOWN) && other.equals(UNKNOWN)) {
return true;
} else if (this.equals(UNKNOWN) || other.equals(UNKNOWN)) {
throw new IllegalArgumentException("Cannot compare specified resources with UNKNOWN resources.");
}
int cmp1 = this.cpuCores.getValue().compareTo(other.getCpuCores().getValue());
int cmp2 = this.taskHeapMemory.compareTo(other.taskHeapMemory);
int cmp3 = this.taskOffHeapMemory.compareTo(other.taskOffHeapMemory);
int cmp4 = this.onHeapManagedMemory.compareTo(other.onHeapManagedMemory);
int cmp5 = this.offHeapManagedMemory.compareTo(other.offHeapManagedMemory);
if (cmp1 <= 0 && cmp2 <= 0 && cmp3 <= 0 && cmp4 <= 0 && cmp5 <= 0) {
for (Resource resource : extendedResources.values()) {
if (!other.extendedResources.containsKey(resource.getName()) ||
other.extendedResources.get(resource.getName()).getValue().compareTo(resource.getValue()) < 0) {
return false;
}
}
return true;
}
return false;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (obj != null && obj.getClass() == ResourceSpec.class) {
ResourceSpec that = (ResourceSpec) obj;
return Objects.equals(this.cpuCores, that.cpuCores) &&
Objects.equals(this.taskHeapMemory, that.taskHeapMemory) &&
Objects.equals(this.taskOffHeapMemory, that.taskOffHeapMemory) &&
Objects.equals(this.onHeapManagedMemory, that.onHeapManagedMemory) &&
Objects.equals(this.offHeapManagedMemory, that.offHeapManagedMemory) &&
Objects.equals(extendedResources, that.extendedResources);
} else {
return false;
}
}
@Override
public int hashCode() {
int result = Objects.hashCode(cpuCores);
result = 31 * result + Objects.hashCode(taskHeapMemory);
result = 31 * result + Objects.hashCode(taskOffHeapMemory);
result = 31 * result + Objects.hashCode(onHeapManagedMemory);
result = 31 * result + Objects.hashCode(offHeapManagedMemory);
result = 31 * result + extendedResources.hashCode();
return result;
}
@Override
public String toString() {
if (this.equals(UNKNOWN)) {
return "ResourceSpec{UNKNOWN}";
}
final StringBuilder extResources = new StringBuilder(extendedResources.size() * 10);
for (Map.Entry<String, Resource> resource : extendedResources.entrySet()) {
extResources.append(", ").append(resource.getKey()).append('=').append(resource.getValue().getValue());
}
return "ResourceSpec{" +
"cpuCores=" + cpuCores.getValue() +
", taskHeapMemory=" + taskHeapMemory +
", taskOffHeapMemory=" + taskOffHeapMemory +
", onHeapManagedMemory=" + onHeapManagedMemory +
", offHeapManagedMemory=" + offHeapManagedMemory + extResources +
'}';
}
private Object readResolve() {
return this.equals(UNKNOWN) ? UNKNOWN : this;
}
public static Builder newBuilder(double cpuCores, int taskHeapMemoryMB) {
return new Builder(new CPUResource(cpuCores), MemorySize.parse(taskHeapMemoryMB + "m"));
}
/**
* Builder for the {@link ResourceSpec}.
*/
public static class Builder {
private Resource cpuCores;
private MemorySize taskHeapMemory;
private MemorySize taskOffHeapMemory = MemorySize.ZERO;
private MemorySize onHeapManagedMemory = MemorySize.ZERO;
private MemorySize offHeapManagedMemory = MemorySize.ZERO;
private GPUResource gpuResource;
private Builder(CPUResource cpuCores, MemorySize taskHeapMemory) {
this.cpuCores = cpuCores;
this.taskHeapMemory = taskHeapMemory;
}
public Builder setCpuCores(double cpuCores) {
this.cpuCores = new CPUResource(cpuCores);
return this;
}
public Builder setTaskHeapMemory(MemorySize taskHeapMemory) {
this.taskHeapMemory = taskHeapMemory;
return this;
}
public Builder setTaskHeapMemoryMB(int taskHeapMemoryMB) {
this.taskHeapMemory = MemorySize.parse(taskHeapMemoryMB + "m");
return this;
}
public Builder setTaskOffHeapMemory(MemorySize taskOffHeapMemory) {
this.taskOffHeapMemory = taskOffHeapMemory;
return this;
}
public Builder setOffTaskHeapMemoryMB(int taskOffHeapMemoryMB) {
this.taskOffHeapMemory = MemorySize.parse(taskOffHeapMemoryMB + "m");
return this;
}
public Builder setOnHeapManagedMemory(MemorySize onHeapManagedMemory) {
this.onHeapManagedMemory = onHeapManagedMemory;
return this;
}
public Builder setOnHeapManagedMemoryMB(int onHeapManagedMemoryMB) {
this.onHeapManagedMemory = MemorySize.parse(onHeapManagedMemoryMB + "m");
return this;
}
public Builder setOffHeapManagedMemory(MemorySize offHeapManagedMemory) {
this.offHeapManagedMemory = offHeapManagedMemory;
return this;
}
public Builder setOffHeapManagedMemoryMB(int offHeapManagedMemoryMB) {
this.offHeapManagedMemory = MemorySize.parse(offHeapManagedMemoryMB + "m");
return this;
}
public Builder setGPUResource(double gpus) {
this.gpuResource = new GPUResource(gpus);
return this;
}
public ResourceSpec build() {
return new ResourceSpec(
cpuCores,
taskHeapMemory,
taskOffHeapMemory,
onHeapManagedMemory,
offHeapManagedMemory,
gpuResource);
}
}
} |
The responses should also include 503 and 500 — see https://github.com/eclipse/microprofile-health/blob/master/spec/src/main/asciidoc/protocol-wireformat.adoc#status-codes | private APIResponses createAPIResponses() {
APIResponses responses = new APIResponsesImpl();
// Per the MicroProfile Health wire format, a health endpoint responds with
// 200 (UP), 503 (DOWN), or 500 (failure determining the health state).
// Previously only 200 was documented; all three share the same payload schema.
responses.addAPIResponse("200", createAPIResponse());
responses.addAPIResponse("503", createAPIResponse());
responses.addAPIResponse("500", createAPIResponse());
return responses;
} | responses.addAPIResponse("200", createAPIResponse()); | private APIResponses createAPIResponses() {
// Responses defined by the MicroProfile Health wire format:
// 200 = UP, 503 = DOWN, 500 = failure determining the health state.
APIResponses responses = new APIResponsesImpl();
responses.addAPIResponse("200", createAPIResponse());
responses.addAPIResponse("503", createAPIResponse());
responses.addAPIResponse("500", createAPIResponse());
return responses;
} | class HealthOpenAPIFilter implements OASFilter {
private static final List<String> MICROPROFILE_HEALTH_TAG = Collections.singletonList("MicroProfile Health");
private final String rootPath;
private final String livenessPath;
private final String readinessPath;
public HealthOpenAPIFilter(String rootPath, String livenessPath, String readinessPath) {
this.rootPath = rootPath;
this.livenessPath = livenessPath;
this.readinessPath = readinessPath;
}
@Override
public void filterOpenAPI(OpenAPI openAPI) {
if (openAPI.getComponents() == null) {
openAPI.setComponents(new ComponentsImpl());
}
openAPI.getComponents().addSchema("HealthCheckResponse", createHealthCheckResponse());
openAPI.getComponents().addSchema("State", createState());
if (openAPI.getPaths() == null) {
openAPI.setPaths(new PathsImpl());
}
Paths paths = openAPI.getPaths();
paths.addPathItem(rootPath, createHealthPathItem());
paths.addPathItem(livenessPath, createLivenessPathItem());
paths.addPathItem(readinessPath, createReadinessPathItem());
}
private PathItem createHealthPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health Endpoint");
pathItem.setSummary(
"MicroProfile Health provides a way for your application to distribute information about its healthiness state to state whether or not it is able to function properly");
pathItem.setGET(createHealthOperation());
return pathItem;
}
private PathItem createLivenessPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health - Liveness Endpoint");
pathItem.setSummary(
"Liveness checks are utilized to tell whether the application should be restarted");
pathItem.setGET(createLivenessOperation());
return pathItem;
}
private PathItem createReadinessPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health - Readiness Endpoint");
pathItem.setSummary(
"Readiness checks are used to tell whether the application is able to process requests");
pathItem.setGET(createReadinessOperation());
return pathItem;
}
private Operation createHealthOperation() {
Operation operation = new OperationImpl();
operation.setDescription("Check the health of the application");
operation.setOperationId("microprofile_health_root");
operation.setTags(MICROPROFILE_HEALTH_TAG);
operation.setSummary("An aggregated view of the Liveness and Readiness of this application");
operation.setResponses(createAPIResponses());
return operation;
}
private Operation createLivenessOperation() {
Operation operation = new OperationImpl();
operation.setDescription("Check the liveness of the application");
operation.setOperationId("microprofile_health_liveness");
operation.setTags(MICROPROFILE_HEALTH_TAG);
operation.setSummary("The Liveness check of this application");
operation.setResponses(createAPIResponses());
return operation;
}
private Operation createReadinessOperation() {
Operation operation = new OperationImpl();
operation.setDescription("Check the readiness of the application");
operation.setOperationId("microprofile_health_readiness");
operation.setTags(MICROPROFILE_HEALTH_TAG);
operation.setSummary("The Readiness check of this application");
operation.setResponses(createAPIResponses());
return operation;
}
private APIResponse createAPIResponse() {
APIResponse response = new APIResponseImpl();
response.setContent(createContent());
return response;
}
private Content createContent() {
Content content = new ContentImpl();
content.addMediaType("application/json", createMediaType());
return content;
}
private MediaType createMediaType() {
MediaType mediaType = new MediaTypeImpl();
mediaType.setSchema(new SchemaImpl().ref("
return mediaType;
}
/**
* HealthCheckResponse:
* type: object
* properties:
* data:
* type: object
* nullable: true
* name:
* type: string
* state:
* $ref: '
*
* @return Schema representing HealthCheckResponse
*/
private Schema createHealthCheckResponse() {
Schema schema = new SchemaImpl("HealthCheckResponse");
schema.setType(Schema.SchemaType.OBJECT);
schema.setProperties(createProperties());
return schema;
}
private Map<String, Schema> createProperties() {
Map<String, Schema> map = new HashMap<>();
map.put("data", createData());
map.put("name", createName());
map.put("state", new SchemaImpl().ref("
return map;
}
private Schema createData() {
Schema schema = new SchemaImpl("data");
schema.setType(Schema.SchemaType.OBJECT);
schema.setNullable(Boolean.TRUE);
return schema;
}
private Schema createName() {
Schema schema = new SchemaImpl("name");
schema.setType(Schema.SchemaType.STRING);
return schema;
}
/**
* State:
* enum:
* - DOWN
* - UP
* type: string
*
* @return Schema representing State
*/
private Schema createState() {
Schema schema = new SchemaImpl("State");
schema.setEnumeration(createStateEnumValues());
schema.setType(Schema.SchemaType.STRING);
return schema;
}
/** Enum values for the State schema, in the order DOWN, UP. */
private List<Object> createStateEnumValues() {
    List<Object> stateValues = new ArrayList<>(2);
    Collections.addAll(stateValues, "DOWN", "UP");
    return stateValues;
}
} | class HealthOpenAPIFilter implements OASFilter {
private static final List<String> MICROPROFILE_HEALTH_TAG = Collections.singletonList("MicroProfile Health");
private final String rootPath;
private final String livenessPath;
private final String readinessPath;
public HealthOpenAPIFilter(String rootPath, String livenessPath, String readinessPath) {
this.rootPath = rootPath;
this.livenessPath = livenessPath;
this.readinessPath = readinessPath;
}
@Override
public void filterOpenAPI(OpenAPI openAPI) {
if (openAPI.getComponents() == null) {
openAPI.setComponents(new ComponentsImpl());
}
openAPI.getComponents().addSchema("HealthCheckResponse", createHealthCheckResponse());
openAPI.getComponents().addSchema("State", createState());
if (openAPI.getPaths() == null) {
openAPI.setPaths(new PathsImpl());
}
Paths paths = openAPI.getPaths();
paths.addPathItem(rootPath, createHealthPathItem());
paths.addPathItem(livenessPath, createLivenessPathItem());
paths.addPathItem(readinessPath, createReadinessPathItem());
}
private PathItem createHealthPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health Endpoint");
pathItem.setSummary(
"MicroProfile Health provides a way for your application to distribute information about its healthiness state to state whether or not it is able to function properly");
pathItem.setGET(createHealthOperation());
return pathItem;
}
private PathItem createLivenessPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health - Liveness Endpoint");
pathItem.setSummary(
"Liveness checks are utilized to tell whether the application should be restarted");
pathItem.setGET(createLivenessOperation());
return pathItem;
}
private PathItem createReadinessPathItem() {
PathItem pathItem = new PathItemImpl();
pathItem.setDescription("MicroProfile Health - Readiness Endpoint");
pathItem.setSummary(
"Readiness checks are used to tell whether the application is able to process requests");
pathItem.setGET(createReadinessOperation());
return pathItem;
}
private Operation createHealthOperation() {
Operation operation = new OperationImpl();
operation.setDescription("Check the health of the application");
operation.setOperationId("microprofile_health_root");
operation.setTags(MICROPROFILE_HEALTH_TAG);
operation.setSummary("An aggregated view of the Liveness and Readiness of this application");
operation.setResponses(createAPIResponses());
return operation;
}
private Operation createLivenessOperation() {
Operation operation = new OperationImpl();
operation.setDescription("Check the liveness of the application");
operation.setOperationId("microprofile_health_liveness");
operation.setTags(MICROPROFILE_HEALTH_TAG);
operation.setSummary("The Liveness check of this application");
operation.setResponses(createAPIResponses());
return operation;
}
/**
 * Builds the GET operation documenting the readiness check.
 *
 * @return an {@link Operation} tagged as a MicroProfile Health operation
 */
private Operation createReadinessOperation() {
    final Operation op = new OperationImpl();
    op.setOperationId("microprofile_health_readiness");
    op.setTags(MICROPROFILE_HEALTH_TAG);
    op.setDescription("Check the readiness of the application");
    op.setSummary("The Readiness check of this application");
    // All three health operations share the same response definition.
    op.setResponses(createAPIResponses());
    return op;
}
/**
 * Builds the API response used by the health operations; its content is the
 * JSON payload definition produced by {@code createContent()}.
 *
 * @return an {@link APIResponse} carrying the JSON content definition
 */
private APIResponse createAPIResponse() {
    final APIResponse apiResponse = new APIResponseImpl();
    apiResponse.setContent(createContent());
    return apiResponse;
}
/**
 * Builds the response content: a single {@code application/json} media type.
 *
 * @return a {@link Content} with the JSON media type registered
 */
private Content createContent() {
    final Content jsonContent = new ContentImpl();
    jsonContent.addMediaType("application/json", createMediaType());
    return jsonContent;
}
/**
 * Builds the {@code application/json} media type whose schema is a {@code $ref}
 * to a named component schema.
 *
 * @return the {@link MediaType} for health responses
 */
private MediaType createMediaType() {
MediaType mediaType = new MediaTypeImpl();
// NOTE(review): the ref target string appears truncated in this view — presumably
// "#/components/schemas/HealthCheckResponse"; confirm against the original file.
mediaType.setSchema(new SchemaImpl().ref("
return mediaType;
}
/**
 * Builds the HealthCheckResponse schema:
 *
 * <pre>
 * HealthCheckResponse:
 *   type: object
 *   properties:
 *     data:  object, nullable
 *     name:  string
 *     state: reference to the State schema
 * </pre>
 *
 * @return Schema representing HealthCheckResponse
 */
private Schema createHealthCheckResponse() {
    final Schema response = new SchemaImpl("HealthCheckResponse");
    response.setType(Schema.SchemaType.OBJECT);
    response.setProperties(createProperties());
    return response;
}
/**
 * Builds the property map ("data", "name", "state") of the HealthCheckResponse schema.
 *
 * @return mutable map from property name to its schema
 */
private Map<String, Schema> createProperties() {
Map<String, Schema> map = new HashMap<>();
map.put("data", createData());
map.put("name", createName());
// NOTE(review): the ref target string appears truncated in this view — presumably
// "#/components/schemas/State"; confirm against the original file.
map.put("state", new SchemaImpl().ref("
return map;
}
/**
 * Builds the nullable, free-form "data" property schema.
 *
 * @return Schema for the "data" property (object, nullable)
 */
private Schema createData() {
    final Schema data = new SchemaImpl("data");
    data.setNullable(Boolean.TRUE);
    data.setType(Schema.SchemaType.OBJECT);
    return data;
}
/**
 * Builds the "name" property schema.
 *
 * @return Schema for the "name" property (string)
 */
private Schema createName() {
    final Schema name = new SchemaImpl("name");
    name.setType(Schema.SchemaType.STRING);
    return name;
}
/**
 * Builds the State schema, a string enum:
 *
 * <pre>
 * State:
 *   enum:
 *     - DOWN
 *     - UP
 *   type: string
 * </pre>
 *
 * @return Schema representing State
 */
private Schema createState() {
    final Schema state = new SchemaImpl("State");
    state.setType(Schema.SchemaType.STRING);
    state.setEnumeration(createStateEnumValues());
    return state;
}
/**
 * Enumerates the legal values of the State schema.
 *
 * @return mutable list containing "DOWN" and "UP", in that order
 */
private List<Object> createStateEnumValues() {
    final List<Object> stateValues = new ArrayList<>(2);
    stateValues.add("DOWN");
    stateValues.add("UP");
    return stateValues;
}
} |
Is this going to `toString` correctly for the headers? If you want to do associations for URL and status code show it like this: `URL: %s, Status code: %d` | public void listKeySnippets() {
KeyClient keyClient = createClient();
for (KeyBase key : keyClient.listKeys()) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
}
for (KeyBase key : keyClient.listKeys(new Context(key2, value2))) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
}
keyClient.listKeys().iterableByPage().forEach(resp -> {
System.out.printf("Response headers are %s. Url %s and status code %d %n", resp.headers(),
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
Key keyWithMaterial = keyClient.getKey(value);
System.out.printf("Received key with name %s and type %s %n", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
});
});
} | System.out.printf("Response headers are %s. Url %s and status code %d %n", resp.headers(), | public void listKeySnippets() {
KeyClient keyClient = createClient();
for (KeyBase key : keyClient.listKeys()) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
}
for (KeyBase key : keyClient.listKeys(new Context(key2, value2))) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
}
keyClient.listKeys().iterableByPage().forEach(resp -> {
System.out.printf("Got response headers . Url: %s, Status code: %d %n",
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
Key keyWithMaterial = keyClient.getKey(value);
System.out.printf("Received key with name %s and type %s %n", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty());
});
});
} | class KeyClientJavaDocCodeSnippets {
private String key1 = "key1";
private String key2 = "key2";
private String value1 = "val1";
private String value2 = "val2";
/**
* Generates code sample for creating a {@link KeyClient}
* @return An instance of {@link KeyClient}
*/
public KeyClient createClient() {
KeyClient keyClient = new KeyClientBuilder()
.endpoint("https:
.credential(new DefaultAzureCredentialBuilder().build())
.buildClient();
return keyClient;
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void createKey() {
KeyClient keyClient = createClient();
Key key = keyClient.createKey("keyName", KeyType.EC);
System.out.printf("Key is created with name %s and id %s %n", key.name(), key.id());
KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key optionsKey = keyClient.createKey(keyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id());
RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName")
.keySize(2048)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key rsaKey = keyClient.createRsaKey(rsaKeyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id());
EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName")
.curve(KeyCurveName.P_384)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key ecKey = keyClient.createEcKey(ecKeyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void deleteKeySnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
DeletedKey deletedKey = keyClient.deleteKey("keyName");
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getDeletedKeySnippets() {
KeyClient keyClient = createClient();
DeletedKey deletedKey = keyClient.getDeletedKey("keyName");
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void createKeyWithResponses() {
KeyClient keyClient = createClient();
KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key optionsKey = keyClient.createKeyWithResponse(keyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id());
RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName")
.keySize(2048)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key rsaKey = keyClient.createRsaKeyWithResponse(rsaKeyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id());
EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName")
.curve(KeyCurveName.P_384)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key ecKey = keyClient.createEcKeyWithResponse(ecKeyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
Key keyWithVersion = keyClient.getKeyWithResponse("keyName", keyVersion,
new Context(key1, value1)).value();
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id());
for (KeyBase key : keyClient.listKeys()) {
Key keyResponse = keyClient.getKeyWithResponse(key, new Context(key1, value1)).value();
System.out.printf("Received key with name %s and type %s", keyResponse.name(),
keyResponse.keyMaterial().kty());
}
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getKeySnippets() {
KeyClient keyClient = createClient();
String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
Key keyWithVersion = keyClient.getKey("keyName", keyVersion);
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id());
Key keyWithVersionValue = keyClient.getKey("keyName");
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersionValue.name(), keyWithVersionValue.id());
for (KeyBase key : keyClient.listKeys()) {
Key keyResponse = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyResponse.name(),
keyResponse.keyMaterial().kty());
}
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void updateKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBase = keyClient.updateKeyWithResponse(key,
new Context(key1, value1), KeyOperation.ENCRYPT, KeyOperation.DECRYPT).value();
Key updatedKey = keyClient.getKey(updatedKeyBase.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void updateKeySnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBase = keyClient.updateKey(key, KeyOperation.ENCRYPT, KeyOperation.DECRYPT);
Key updatedKey = keyClient.getKey(updatedKeyBase.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id());
Key updateKey = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBaseValue = keyClient.updateKey(updateKey);
Key updatedKeyValue = keyClient.getKey(updatedKeyBaseValue.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKeyValue.name(), updatedKeyValue.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void deleteKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
DeletedKey deletedKey = keyClient.deleteKeyWithResponse("keyName", new Context(key1, value1)).value();
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getDeleteKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
DeletedKey deletedKey = keyClient.getDeletedKeyWithResponse("keyName", new Context(key1, value1)).value();
System.out.printf("Deleted Key with recovery Id %s \n", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void purgeDeletedKeySnippets() {
KeyClient keyClient = createClient();
VoidResponse purgeResponse = keyClient.purgeDeletedKey("deletedKeyName");
System.out.printf("Purge Status Code: %rsaPrivateExponent", purgeResponse.statusCode());
VoidResponse purgedResponse = keyClient.purgeDeletedKey("deletedKeyName", new Context(key2, value2));
System.out.printf("Purge Status Code: %rsaPrivateExponent", purgedResponse.statusCode());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void recoverDeletedKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key recoveredKey = keyClient.recoverDeletedKeyWithResponse("deletedKeyName",
new Context(key2, value2)).value();
System.out.printf("Recovered key with name %s", recoveredKey.name());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void recoverDeletedKeySnippets() {
KeyClient keyClient = createClient();
Key recoveredKey = keyClient.recoverDeletedKey("deletedKeyName");
System.out.printf("Recovered key with name %s", recoveredKey.name());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void backupKeySnippets() {
KeyClient keyClient = createClient();
byte[] keyBackup = keyClient.backupKey("keyName");
System.out.printf("Key's Backup Byte array's length %s", keyBackup.length);
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void backupKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
byte[] keyBackup = keyClient.backupKeyWithResponse("keyName", new Context(key2, value2)).value();
System.out.printf("Key's Backup Byte array's length %s", keyBackup.length);
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void restoreKeySnippets() {
KeyClient keyClient = createClient();
byte[] keyBackupByteArray = {};
Key keyResponse = keyClient.restoreKey(keyBackupByteArray);
System.out.printf("Restored Key with name %s and id %s \n", keyResponse.name(), keyResponse.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void restoreKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
byte[] keyBackupByteArray = {};
Response<Key> keyResponse = keyClient.restoreKeyWithResponse(keyBackupByteArray, new Context(key1, value1));
System.out.printf("Restored Key with name %s and id %s \n",
keyResponse.value().name(), keyResponse.value().id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
/**
* Generates a code sample for using {@link KeyClient
*/
public void listDeletedKeysSnippets() {
KeyClient keyClient = createClient();
for (DeletedKey deletedKey : keyClient.listDeletedKeys()) {
System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId());
}
for (DeletedKey deletedKey : keyClient.listDeletedKeys(new Context(key2, value2))) {
System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId());
}
keyClient.listDeletedKeys().iterableByPage().forEach(resp -> {
System.out.printf("Response headers are %s. Url %s and status code %d %n", resp.headers(),
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
System.out.printf("Deleted key's recovery Id %s %n", value.recoveryId());
});
});
}
/**
* Generates code sample for using {@link KeyClient
*/
public void listKeyVersions() {
KeyClient keyClient = createClient();
for (KeyBase key : keyClient.listKeyVersions("keyName")) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version());
}
for (KeyBase key : keyClient.listKeyVersions("keyName", new Context(key2, value2))) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version());
}
keyClient.listKeyVersions("keyName").iterableByPage().forEach(resp -> {
System.out.printf("Response headers are %s. Url %s and status code %d %n", resp.headers(),
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
System.out.printf("Response value is %d %n", value);
});
});
}
/**
* Implementation not provided for this method
* @return {@code null}
*/
private TokenCredential getKeyVaultCredential() {
return null;
}
} | class KeyClientJavaDocCodeSnippets {
private String key1 = "key1";
private String key2 = "key2";
private String value1 = "val1";
private String value2 = "val2";
/**
* Generates code sample for creating a {@link KeyClient}
* @return An instance of {@link KeyClient}
*/
public KeyClient createClient() {
KeyClient keyClient = new KeyClientBuilder()
.endpoint("https:
.credential(new DefaultAzureCredentialBuilder().build())
.buildClient();
return keyClient;
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void createKey() {
KeyClient keyClient = createClient();
Key key = keyClient.createKey("keyName", KeyType.EC);
System.out.printf("Key is created with name %s and id %s %n", key.name(), key.id());
KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key optionsKey = keyClient.createKey(keyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id());
RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName")
.keySize(2048)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key rsaKey = keyClient.createRsaKey(rsaKeyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id());
EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName")
.curve(KeyCurveName.P_384)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key ecKey = keyClient.createEcKey(ecKeyCreateOptions);
System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void deleteKeySnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
DeletedKey deletedKey = keyClient.deleteKey("keyName");
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getDeletedKeySnippets() {
KeyClient keyClient = createClient();
DeletedKey deletedKey = keyClient.getDeletedKey("keyName");
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void createKeyWithResponses() {
KeyClient keyClient = createClient();
KeyCreateOptions keyCreateOptions = new KeyCreateOptions("keyName", KeyType.RSA)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key optionsKey = keyClient.createKeyWithResponse(keyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", optionsKey.name(), optionsKey.id());
RsaKeyCreateOptions rsaKeyCreateOptions = new RsaKeyCreateOptions("keyName")
.keySize(2048)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key rsaKey = keyClient.createRsaKeyWithResponse(rsaKeyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", rsaKey.name(), rsaKey.id());
EcKeyCreateOptions ecKeyCreateOptions = new EcKeyCreateOptions("keyName")
.curve(KeyCurveName.P_384)
.notBefore(OffsetDateTime.now().plusDays(1))
.expires(OffsetDateTime.now().plusYears(1));
Key ecKey = keyClient.createEcKeyWithResponse(ecKeyCreateOptions, new Context(key1, value1)).value();
System.out.printf("Key is created with name %s and id %s \n", ecKey.name(), ecKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
Key keyWithVersion = keyClient.getKeyWithResponse("keyName", keyVersion,
new Context(key1, value1)).value();
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id());
for (KeyBase key : keyClient.listKeys()) {
Key keyResponse = keyClient.getKeyWithResponse(key, new Context(key1, value1)).value();
System.out.printf("Received key with name %s and type %s", keyResponse.name(),
keyResponse.keyMaterial().kty());
}
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getKeySnippets() {
KeyClient keyClient = createClient();
String keyVersion = "6A385B124DEF4096AF1361A85B16C204";
Key keyWithVersion = keyClient.getKey("keyName", keyVersion);
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersion.name(), keyWithVersion.id());
Key keyWithVersionValue = keyClient.getKey("keyName");
System.out.printf("Key is returned with name %s and id %s \n", keyWithVersionValue.name(), keyWithVersionValue.id());
for (KeyBase key : keyClient.listKeys()) {
Key keyResponse = keyClient.getKey(key);
System.out.printf("Received key with name %s and type %s", keyResponse.name(),
keyResponse.keyMaterial().kty());
}
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void updateKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBase = keyClient.updateKeyWithResponse(key,
new Context(key1, value1), KeyOperation.ENCRYPT, KeyOperation.DECRYPT).value();
Key updatedKey = keyClient.getKey(updatedKeyBase.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void updateKeySnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBase = keyClient.updateKey(key, KeyOperation.ENCRYPT, KeyOperation.DECRYPT);
Key updatedKey = keyClient.getKey(updatedKeyBase.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKey.name(), updatedKey.id());
Key updateKey = keyClient.getKey("keyName");
key.expires(OffsetDateTime.now().plusDays(60));
KeyBase updatedKeyBaseValue = keyClient.updateKey(updateKey);
Key updatedKeyValue = keyClient.getKey(updatedKeyBaseValue.name());
System.out.printf("Key is updated with name %s and id %s \n", updatedKeyValue.name(), updatedKeyValue.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void deleteKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key key = keyClient.getKey("keyName");
DeletedKey deletedKey = keyClient.deleteKeyWithResponse("keyName", new Context(key1, value1)).value();
System.out.printf("Deleted Key's Recovery Id %s", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void getDeleteKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
DeletedKey deletedKey = keyClient.getDeletedKeyWithResponse("keyName", new Context(key1, value1)).value();
System.out.printf("Deleted Key with recovery Id %s \n", deletedKey.recoveryId());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void purgeDeletedKeySnippets() {
KeyClient keyClient = createClient();
VoidResponse purgeResponse = keyClient.purgeDeletedKey("deletedKeyName");
System.out.printf("Purge Status Code: %rsaPrivateExponent", purgeResponse.statusCode());
VoidResponse purgedResponse = keyClient.purgeDeletedKey("deletedKeyName", new Context(key2, value2));
System.out.printf("Purge Status Code: %rsaPrivateExponent", purgedResponse.statusCode());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void recoverDeletedKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
Key recoveredKey = keyClient.recoverDeletedKeyWithResponse("deletedKeyName",
new Context(key2, value2)).value();
System.out.printf("Recovered key with name %s", recoveredKey.name());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void recoverDeletedKeySnippets() {
KeyClient keyClient = createClient();
Key recoveredKey = keyClient.recoverDeletedKey("deletedKeyName");
System.out.printf("Recovered key with name %s", recoveredKey.name());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void backupKeySnippets() {
KeyClient keyClient = createClient();
byte[] keyBackup = keyClient.backupKey("keyName");
System.out.printf("Key's Backup Byte array's length %s", keyBackup.length);
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void backupKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
byte[] keyBackup = keyClient.backupKeyWithResponse("keyName", new Context(key2, value2)).value();
System.out.printf("Key's Backup Byte array's length %s", keyBackup.length);
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void restoreKeySnippets() {
KeyClient keyClient = createClient();
byte[] keyBackupByteArray = {};
Key keyResponse = keyClient.restoreKey(keyBackupByteArray);
System.out.printf("Restored Key with name %s and id %s \n", keyResponse.name(), keyResponse.id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
public void restoreKeyWithResponseSnippets() {
KeyClient keyClient = createClient();
byte[] keyBackupByteArray = {};
Response<Key> keyResponse = keyClient.restoreKeyWithResponse(keyBackupByteArray, new Context(key1, value1));
System.out.printf("Restored Key with name %s and id %s \n",
keyResponse.value().name(), keyResponse.value().id());
}
/**
* Generates a code sample for using {@link KeyClient
*/
/**
* Generates a code sample for using {@link KeyClient
*/
public void listDeletedKeysSnippets() {
KeyClient keyClient = createClient();
for (DeletedKey deletedKey : keyClient.listDeletedKeys()) {
System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId());
}
for (DeletedKey deletedKey : keyClient.listDeletedKeys(new Context(key2, value2))) {
System.out.printf("Deleted key's recovery Id %s", deletedKey.recoveryId());
}
keyClient.listDeletedKeys().iterableByPage().forEach(resp -> {
System.out.printf("Got response headers . Url: %s, Status code: %d %n",
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
System.out.printf("Deleted key's recovery Id %s %n", value.recoveryId());
});
});
}
/**
* Generates code sample for using {@link KeyClient
*/
public void listKeyVersions() {
KeyClient keyClient = createClient();
for (KeyBase key : keyClient.listKeyVersions("keyName")) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version());
}
for (KeyBase key : keyClient.listKeyVersions("keyName", new Context(key2, value2))) {
Key keyWithMaterial = keyClient.getKey(key);
System.out.printf("Received key's version with name %s, type %s and version %s", keyWithMaterial.name(),
keyWithMaterial.keyMaterial().kty(), keyWithMaterial.version());
}
keyClient.listKeyVersions("keyName").iterableByPage().forEach(resp -> {
System.out.printf("Got response headers . Url: %s, Status code: %d %n",
resp.request().url(), resp.statusCode());
resp.items().forEach(value -> {
System.out.printf("Key name: %s, Key version: %s %n", value.name(), value.version());
});
});
}
/**
* Implementation not provided for this method
* @return {@code null}
*/
private TokenCredential getKeyVaultCredential() {
return null;
}
} |
@geoand: This is needed in order to deal with some `tck` issues which cause `config.getValue().initAndExit` to be always true. There is a comment right above (see lines 30-31) | public void exitIfNeeded() {
boolean initAndExitConfigured = propertyConfigured(QUARKUS_INIT_AND_EXIT);
if (initAndExitConfigured && config.getValue().initAndExit) {
if (ConfigProvider.getConfig().getValue(QUARKUS_INIT_AND_EXIT, boolean.class)) {
preventFurtherRecorderSteps(5, "Error attempting to gracefully shutdown after initialization",
() -> new PreventFurtherStepsException("Gracefully exiting after initialization.", 0));
}
}
} | if (initAndExitConfigured && config.getValue().initAndExit) { | public void exitIfNeeded() {
boolean initAndExitConfigured = propertyConfigured(QUARKUS_INIT_AND_EXIT);
if (initAndExitConfigured && config.getValue().initAndExit) {
if (ConfigProvider.getConfig().getValue(QUARKUS_INIT_AND_EXIT, boolean.class)) {
preventFurtherRecorderSteps(5, "Error attempting to gracefully shutdown after initialization",
() -> new PreventFurtherStepsException("Gracefully exiting after initialization.", 0));
}
}
} | class InitializationTaskRecorder {
private static final String QUARKUS_INIT_AND_EXIT = "quarkus.init-and-exit";
private final RuntimeValue<InitRuntimeConfig> config;
public InitializationTaskRecorder(RuntimeValue<InitRuntimeConfig> config) {
this.config = config;
}
public static void preventFurtherRecorderSteps(int waitSeconds, String waitErrorMessage,
Supplier<PreventFurtherStepsException> supplier) {
CountDownLatch latch = new CountDownLatch(1);
new Thread(new Runnable() {
@Override
public void run() {
Quarkus.blockingExit();
latch.countDown();
}
}).start();
try {
latch.await(waitSeconds, TimeUnit.SECONDS);
} catch (InterruptedException e) {
System.err.println(waitErrorMessage);
}
throw supplier.get();
}
private static String propertyToEnvVar(String property) {
return StringUtil.replaceNonAlphanumericByUnderscores(property).toUpperCase();
}
private static boolean propertyConfigured(String property) {
return StreamSupport.stream(ConfigProvider.getConfig().getPropertyNames().spliterator(), false)
.anyMatch(n -> property.equals(n) || propertyToEnvVar(property).equals(n));
}
} | class InitializationTaskRecorder {
private static final String QUARKUS_INIT_AND_EXIT = "quarkus.init-and-exit";
private final RuntimeValue<InitRuntimeConfig> config;
public InitializationTaskRecorder(RuntimeValue<InitRuntimeConfig> config) {
this.config = config;
}
public static void preventFurtherRecorderSteps(int waitSeconds, String waitErrorMessage,
Supplier<PreventFurtherStepsException> supplier) {
CountDownLatch latch = new CountDownLatch(1);
new Thread(new Runnable() {
@Override
public void run() {
Quarkus.blockingExit();
latch.countDown();
}
}).start();
try {
latch.await(waitSeconds, TimeUnit.SECONDS);
} catch (InterruptedException e) {
System.err.println(waitErrorMessage);
}
throw supplier.get();
}
private static String propertyToEnvVar(String property) {
return StringUtil.replaceNonAlphanumericByUnderscores(property).toUpperCase();
}
private static boolean propertyConfigured(String property) {
return StreamSupport.stream(ConfigProvider.getConfig().getPropertyNames().spliterator(), false)
.anyMatch(n -> property.equals(n) || propertyToEnvVar(property).equals(n));
}
} |
We can optimize the code as follow: ``` return LogicalTypeMerging.findCommonType( Arrays.asList( inputDataType.getLogicalType(), nullReplacementDataType.getLogicalType())) .map(t -> t.copy(nullReplacementDataType.getLogicalType().isNullable())) .map(TypeConversions::fromLogicalToDataType); ``` | public Optional<DataType> inferType(CallContext callContext) {
final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
final DataType inputDataType = argumentDataTypes.get(0);
final DataType nullReplacementDataType = argumentDataTypes.get(1);
if (!inputDataType.getLogicalType().isNullable()) {
return Optional.of(inputDataType);
}
return LogicalTypeMerging.findCommonType(
Arrays.asList(
inputDataType.getLogicalType(),
nullReplacementDataType.getLogicalType()))
.map(
commonType ->
nullReplacementDataType.getLogicalType().isNullable()
? commonType
: commonType.copy(false))
.map(TypeConversions::fromLogicalToDataType);
} | return LogicalTypeMerging.findCommonType( | public Optional<DataType> inferType(CallContext callContext) {
final List<DataType> argumentDataTypes = callContext.getArgumentDataTypes();
final DataType inputDataType = argumentDataTypes.get(0);
final DataType nullReplacementDataType = argumentDataTypes.get(1);
if (!inputDataType.getLogicalType().isNullable()) {
return Optional.of(inputDataType);
}
return LogicalTypeMerging.findCommonType(
Arrays.asList(
inputDataType.getLogicalType(),
nullReplacementDataType.getLogicalType()))
.map(t -> t.copy(nullReplacementDataType.getLogicalType().isNullable()))
.map(TypeConversions::fromLogicalToDataType);
} | class IfNullTypeStrategy implements TypeStrategy {
@Override
} | class IfNullTypeStrategy implements TypeStrategy {
@Override
} |
Same here about simplifying the logic. | public void handle(RoutingContext event) {
    HttpServerRequest request = event.request();
    HttpServerResponse response = event.response();
    if (!graphQLRuntimeConfig.enable) {
        // GraphQL is disabled at runtime: the schema endpoint does not exist.
        response.setStatusCode(404);
        response.end();
        return;
    }
    if (request.method().equals(HttpMethod.OPTIONS)) {
        // NOTE(review): mirrors the original — the OPTIONS branch sets the
        // Allow header without calling end(); confirm a later handler ends it.
        response.headers().set(HttpHeaders.ALLOW, ALLOWED_METHODS);
    } else if (request.method().equals(HttpMethod.GET)) {
        // Resolve and print the schema only when it is actually served; the
        // original did this CDI lookup + print for every request, including
        // OPTIONS and 405 responses, where the string was never used.
        GraphQLSchema graphQLSchema = CDI.current().select(GraphQLSchema.class).get();
        SchemaPrinter schemaPrinter = CDI.current().select(SchemaPrinter.class).get();
        String schemaString = schemaPrinter.print(graphQLSchema);
        response.headers().set(HttpHeaders.CONTENT_TYPE, CONTENT_TYPE);
        response.end(Buffer.buffer(schemaString));
    } else {
        response.setStatusCode(405).end();
    }
}
HttpServerRequest request = event.request();
HttpServerResponse response = event.response();
GraphQLSchema graphQLSchema = CDI.current().select(GraphQLSchema.class).get();
SchemaPrinter schemaPrinter = CDI.current().select(SchemaPrinter.class).get();
String schemaString = schemaPrinter.print(graphQLSchema);
if (request.method().equals(HttpMethod.OPTIONS)) {
response.headers().set(HttpHeaders.ALLOW, ALLOWED_METHODS);
} else if (request.method().equals(HttpMethod.GET)) {
response.headers().set(HttpHeaders.CONTENT_TYPE, CONTENT_TYPE);
response.end(Buffer.buffer(schemaString));
} else {
response.setStatusCode(405).end();
}
} | class SmallRyeGraphQLSchemaHandler implements Handler<RoutingContext> {
private static final String ALLOWED_METHODS = "GET, OPTIONS";
private static final String CONTENT_TYPE = "text/plain; charset=UTF-8";
private SmallRyeGraphQLRuntimeConfig graphQLRuntimeConfig;
public SmallRyeGraphQLSchemaHandler() {
}
public SmallRyeGraphQLSchemaHandler(SmallRyeGraphQLRuntimeConfig graphQLRuntimeConfig) {
this.graphQLRuntimeConfig = graphQLRuntimeConfig;
}
public SmallRyeGraphQLRuntimeConfig getSmallRyeGraphQLRuntimeConfig() {
return graphQLRuntimeConfig;
}
public void setSmallRyeGraphQLRuntimeConfig(SmallRyeGraphQLRuntimeConfig graphQLRuntimeConfig) {
this.graphQLRuntimeConfig = graphQLRuntimeConfig;
}
@Override
} | class SmallRyeGraphQLSchemaHandler implements Handler<RoutingContext> {
private static final String ALLOWED_METHODS = "GET, OPTIONS";
private static final String CONTENT_TYPE = "text/plain; charset=UTF-8";
@Override
} |
No, it will return an Empty notExistClause*Context, it's EXISTS() method will return null | public ASTNode visitCreateTable(final CreateTableContext ctx) {
    // containsNotExistClause must guard both levels: when the subrule is
    // optional, ANTLR's generated accessor returns null for the context itself
    // (calling EXISTS() on it would NPE); when the subrule matches empty, the
    // context exists but its EXISTS() token accessor returns null.
    CreateTableStatement result = new CreateTableStatement(
            (SimpleTableSegment) visit(ctx.tableName()),
            null != ctx.notExistClause_() && null != ctx.notExistClause_().EXISTS());
    if (null != ctx.createDefinitionClause()) {
        // Split the parsed create definitions into column vs. constraint buckets.
        CollectionValue<CreateDefinitionSegment> createDefinitions = (CollectionValue<CreateDefinitionSegment>) visit(ctx.createDefinitionClause());
        for (CreateDefinitionSegment each : createDefinitions.getValue()) {
            if (each instanceof ColumnDefinitionSegment) {
                result.getColumnDefinitions().add((ColumnDefinitionSegment) each);
            } else if (each instanceof ConstraintDefinitionSegment) {
                result.getConstraintDefinitions().add((ConstraintDefinitionSegment) each);
            }
        }
    }
    return result;
}
CreateTableStatement result = new CreateTableStatement((SimpleTableSegment) visit(ctx.tableName()), null != ctx.notExistClause_());
if (null != ctx.createDefinitionClause()) {
CollectionValue<CreateDefinitionSegment> createDefinitions = (CollectionValue<CreateDefinitionSegment>) visit(ctx.createDefinitionClause());
for (CreateDefinitionSegment each : createDefinitions.getValue()) {
if (each instanceof ColumnDefinitionSegment) {
result.getColumnDefinitions().add((ColumnDefinitionSegment) each);
} else if (each instanceof ConstraintDefinitionSegment) {
result.getConstraintDefinitions().add((ConstraintDefinitionSegment) each);
}
}
}
return result;
} | class MySQLDDLVisitor extends MySQLVisitor implements DDLVisitor {
@Override
public ASTNode visitCreateView(final CreateViewContext ctx) {
return new CreateViewStatement();
}
@Override
public ASTNode visitDropView(final DropViewContext ctx) {
return new DropViewStatement();
}
@Override
public ASTNode visitCreateDatabase(final CreateDatabaseContext ctx) {
return new CreateDatabaseStatement(ctx.schemaName().getText());
}
@Override
public ASTNode visitAlterDatabase(final AlterDatabaseContext ctx) {
return new AlterDatabaseStatement();
}
@Override
public ASTNode visitDropDatabase(final DropDatabaseContext ctx) {
return new DropDatabaseStatement(ctx.schemaName().getText());
}
@Override
public ASTNode visitRenameTableSpecification(final RenameTableSpecificationContext ctx) {
return new RenameTableStatement();
}
@SuppressWarnings("unchecked")
@Override
@Override
public ASTNode visitCreateDefinitionClause(final CreateDefinitionClauseContext ctx) {
CollectionValue<CreateDefinitionSegment> result = new CollectionValue<>();
for (CreateDefinitionContext each : ctx.createDefinition()) {
if (null != each.columnDefinition()) {
result.getValue().add((ColumnDefinitionSegment) visit(each.columnDefinition()));
}
if (null != each.constraintDefinition()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.constraintDefinition()));
}
if (null != each.checkConstraintDefinition()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.checkConstraintDefinition()));
}
if (null != each.indexDefinition_()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.indexDefinition_()));
}
}
return result;
}
@Override
public ASTNode visitIndexDefinition_(final IndexDefinition_Context ctx) {
ConstraintDefinitionSegment result = new ConstraintDefinitionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.keyParts_());
result.getIndexColumns().addAll(columnSegments.getValue());
if (null != ctx.indexName()) {
result.setIndexName((IndexSegment) visit(ctx.indexName()));
}
return result;
}
@Override
public ASTNode visitCreateLikeClause(final CreateLikeClauseContext ctx) {
return visit(ctx.tableName());
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitAlterTable(final AlterTableContext ctx) {
AlterTableStatement result = new AlterTableStatement((SimpleTableSegment) visit(ctx.tableName()));
if (null != ctx.alterDefinitionClause()) {
for (AlterDefinitionSegment each : ((CollectionValue<AlterDefinitionSegment>) visit(ctx.alterDefinitionClause())).getValue()) {
if (each instanceof AddColumnDefinitionSegment) {
result.getAddColumnDefinitions().add((AddColumnDefinitionSegment) each);
} else if (each instanceof ModifyColumnDefinitionSegment) {
result.getModifyColumnDefinitions().add((ModifyColumnDefinitionSegment) each);
} else if (each instanceof DropColumnDefinitionSegment) {
result.getDropColumnDefinitions().add((DropColumnDefinitionSegment) each);
} else if (each instanceof ConstraintDefinitionSegment) {
result.getAddConstraintDefinitions().add((ConstraintDefinitionSegment) each);
}
}
}
return result;
}
@Override
public ASTNode visitAlterDefinitionClause(final AlterDefinitionClauseContext ctx) {
CollectionValue<AlterDefinitionSegment> result = new CollectionValue<>();
for (AlterSpecificationContext each : ctx.alterSpecification()) {
if (null != each.addColumnSpecification()) {
result.getValue().add((AddColumnDefinitionSegment) visit(each.addColumnSpecification()));
}
if (null != each.addConstraintSpecification()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.addConstraintSpecification().constraintDefinition()));
}
if (null != each.changeColumnSpecification()) {
ModifyColumnDefinitionSegment modifyColumnDefinition = new ModifyColumnDefinitionSegment(
each.changeColumnSpecification().getStart().getStartIndex(), each.changeColumnSpecification().getStop().getStopIndex(),
(ColumnDefinitionSegment) visit(each.changeColumnSpecification().columnDefinition()));
if (null != each.changeColumnSpecification().firstOrAfterColumn()) {
modifyColumnDefinition.setColumnPosition((ColumnPositionSegment) visit(each.changeColumnSpecification().firstOrAfterColumn()));
}
result.getValue().add(modifyColumnDefinition);
}
if (null != each.modifyColumnSpecification()) {
ModifyColumnDefinitionSegment modifyColumnDefinition = new ModifyColumnDefinitionSegment(
each.modifyColumnSpecification().getStart().getStartIndex(), each.modifyColumnSpecification().getStop().getStopIndex(),
(ColumnDefinitionSegment) visit(each.modifyColumnSpecification().columnDefinition()));
if (null != each.modifyColumnSpecification().firstOrAfterColumn()) {
modifyColumnDefinition.setColumnPosition((ColumnPositionSegment) visit(each.modifyColumnSpecification().firstOrAfterColumn()));
}
result.getValue().add(modifyColumnDefinition);
}
if (null != each.dropColumnSpecification()) {
result.getValue().add((DropColumnDefinitionSegment) visit(each.dropColumnSpecification()));
}
}
return result;
}
@Override
public ASTNode visitAddColumnSpecification(final AddColumnSpecificationContext ctx) {
Collection<ColumnDefinitionSegment> columnDefinitions = new LinkedList<>();
for (ColumnDefinitionContext each : ctx.columnDefinition()) {
columnDefinitions.add((ColumnDefinitionSegment) visit(each));
}
AddColumnDefinitionSegment result = new AddColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnDefinitions);
if (null != ctx.firstOrAfterColumn()) {
Preconditions.checkState(1 == columnDefinitions.size());
result.setColumnPosition(getColumnPositionSegment(columnDefinitions.iterator().next(), (ColumnPositionSegment) visit(ctx.firstOrAfterColumn())));
}
return result;
}
@Override
public ASTNode visitColumnDefinition(final ColumnDefinitionContext ctx) {
ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
DataTypeSegment dataTypeSegment = (DataTypeSegment) visit(ctx.dataType());
boolean isPrimaryKey = isPrimaryKey(ctx);
ColumnDefinitionSegment result = new ColumnDefinitionSegment(
ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, dataTypeSegment, isPrimaryKey);
result.getReferencedTables().addAll(getReferencedTables(ctx));
return result;
}
private Collection<SimpleTableSegment> getReferencedTables(final ColumnDefinitionContext ctx) {
Collection<SimpleTableSegment> result = new LinkedList<>();
for (StorageOptionContext each : ctx.storageOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().referenceDefinition()) {
result.add((SimpleTableSegment) visit(each.dataTypeGenericOption().referenceDefinition()));
}
}
for (GeneratedOptionContext each : ctx.generatedOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().referenceDefinition()) {
result.add((SimpleTableSegment) visit(each.dataTypeGenericOption().referenceDefinition()));
}
}
return result;
}
private boolean isPrimaryKey(final ColumnDefinitionContext ctx) {
for (StorageOptionContext each : ctx.storageOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().primaryKey()) {
return true;
}
}
for (GeneratedOptionContext each : ctx.generatedOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().primaryKey()) {
return true;
}
}
return false;
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitConstraintDefinition(final ConstraintDefinitionContext ctx) {
ConstraintDefinitionSegment result = new ConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.primaryKeyOption()) {
result.getPrimaryKeyColumns().addAll(((CollectionValue<ColumnSegment>) visit(ctx.primaryKeyOption().keyParts_())).getValue());
}
if (null != ctx.foreignKeyOption()) {
result.setReferencedTable((SimpleTableSegment) visit(ctx.foreignKeyOption()));
}
if (null != ctx.uniqueOption_()) {
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.uniqueOption_().keyParts_());
result.getIndexColumns().addAll(columnSegments.getValue());
if (null != ctx.uniqueOption_().indexName()) {
result.setIndexName(new IndexSegment(ctx.uniqueOption_().indexName().start.getStartIndex(), ctx.uniqueOption_().indexName().stop.getStopIndex(),
(IdentifierValue) visit(ctx.uniqueOption_().indexName())));
}
}
return result;
}
@Override
public ASTNode visitCheckConstraintDefinition(final CheckConstraintDefinitionContext ctx) {
return new ConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
}
@Override
public ASTNode visitChangeColumnSpecification(final ChangeColumnSpecificationContext ctx) {
return extractModifyColumnDefinition(ctx.getStart(), ctx.getStop(), ctx.columnDefinition(), ctx.firstOrAfterColumn());
}
@Override
public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) {
return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singletonList((ColumnSegment) visit(ctx.columnName())));
}
@Override
public ASTNode visitDropPrimaryKeySpecification(final DropPrimaryKeySpecificationContext ctx) {
return new DropPrimaryKeySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
}
@Override
public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationContext ctx) {
return extractModifyColumnDefinition(ctx.getStart(), ctx.getStop(), ctx.columnDefinition(), ctx.firstOrAfterColumn());
}
@Override
public ASTNode visitRenameColumnSpecification(final RenameColumnSpecificationContext ctx) {
return new RenameColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
(ColumnSegment) visit(ctx.columnName(0)), (ColumnSegment) visit(ctx.columnName(1)));
}
@Override
public ASTNode visitReferenceDefinition(final ReferenceDefinitionContext ctx) {
return visit(ctx.tableName());
}
@Override
public ASTNode visitForeignKeyOption(final ForeignKeyOptionContext ctx) {
return visit(ctx.referenceDefinition());
}
private ModifyColumnDefinitionSegment extractModifyColumnDefinition(final Token start, final Token stop,
final ColumnDefinitionContext columnDefinition, final FirstOrAfterColumnContext firstOrAfterColumn) {
ModifyColumnDefinitionSegment result = new ModifyColumnDefinitionSegment(start.getStartIndex(), stop.getStopIndex(),
(ColumnDefinitionSegment) visit(columnDefinition));
if (null != firstOrAfterColumn) {
result.setColumnPosition(getColumnPositionSegment(result.getColumnDefinition(), (ColumnPositionSegment) visit(firstOrAfterColumn)));
}
return result;
}
@Override
public ASTNode visitFirstOrAfterColumn(final FirstOrAfterColumnContext ctx) {
ColumnSegment columnName = null;
if (null != ctx.columnName()) {
columnName = (ColumnSegment) visit(ctx.columnName());
}
return null == ctx.columnName() ? new ColumnFirstPositionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName)
: new ColumnAfterPositionSegment(
ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName);
}
private ColumnPositionSegment getColumnPositionSegment(final ColumnDefinitionSegment columnDefinition, final ColumnPositionSegment columnPosition) {
return columnPosition instanceof ColumnFirstPositionSegment
? new ColumnFirstPositionSegment(columnPosition.getStartIndex(), columnPosition.getStopIndex(), columnPosition.getColumnName())
: new ColumnAfterPositionSegment(columnPosition.getStartIndex(), columnPosition.getStopIndex(), columnPosition.getColumnName());
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitDropTable(final DropTableContext ctx) {
DropTableStatement result = new DropTableStatement();
result.getTables().addAll(((CollectionValue<SimpleTableSegment>) visit(ctx.tableNames())).getValue());
return result;
}
@Override
public ASTNode visitTruncateTable(final TruncateTableContext ctx) {
TruncateStatement result = new TruncateStatement();
result.getTables().add((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitCreateIndex(final CreateIndexContext ctx) {
CreateIndexStatement result = new CreateIndexStatement();
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitDropIndex(final DropIndexContext ctx) {
DropIndexStatement result = new DropIndexStatement();
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitKeyParts_(final KeyParts_Context ctx) {
CollectionValue<ColumnSegment> result = new CollectionValue<>();
List<KeyPart_Context> keyParts = ctx.keyPart_();
for (KeyPart_Context each : keyParts) {
if (null != each.columnName()) {
result.getValue().add((ColumnSegment) visit(each.columnName()));
}
}
return result;
}
@Override
public ASTNode visitCreateProcedure(final CreateProcedureContext ctx) {
return new CreateProcedureStatement();
}
@Override
public ASTNode visitAlterProcedure(final AlterProcedureContext ctx) {
return new AlterProcedureStatement();
}
@Override
public ASTNode visitDropProcedure(final DropProcedureContext ctx) {
return new DropProcedureStatement();
}
@Override
public ASTNode visitCreateFunction(final CreateFunctionContext ctx) {
return new CreateFunctionStatement();
}
@Override
public ASTNode visitAlterFunction(final AlterFunctionContext ctx) {
return new AlterFunctionStatement();
}
@Override
public ASTNode visitDropFunction(final DropFunctionContext ctx) {
return new DropFunctionStatement();
}
@Override
public ASTNode visitCreateEvent(final CreateEventContext ctx) {
return new CreateEventStatement();
}
@Override
public ASTNode visitAlterEvent(final AlterEventContext ctx) {
return new AlterEventStatement();
}
@Override
public ASTNode visitDropEvent(final DropEventContext ctx) {
return new DropEventStatement();
}
@Override
public ASTNode visitAlterInstance(final AlterInstanceContext ctx) {
return new AlterInstanceStatement();
}
@Override
public ASTNode visitCreateLogfileGroup(final CreateLogfileGroupContext ctx) {
return new CreateLogfileGroupStatement();
}
@Override
public ASTNode visitAlterLogfileGroup(final AlterLogfileGroupContext ctx) {
return new AlterLogfileGroupStatement();
}
@Override
public ASTNode visitDropLogfileGroup(final DropLogfileGroupContext ctx) {
return new DropLogfileGroupStatement();
}
@Override
public ASTNode visitCreateServer(final CreateServerContext ctx) {
return new CreateServerStatement();
}
@Override
public ASTNode visitAlterServer(final AlterServerContext ctx) {
return new AlterServerStatement();
}
@Override
public ASTNode visitDropServer(final DropServerContext ctx) {
return new DropServerStatement();
}
@Override
public ASTNode visitCreateTrigger(final CreateTriggerContext ctx) {
return new CreateTriggerStatement();
}
@Override
public ASTNode visitDropTrigger(final DropTriggerContext ctx) {
return new DropTriggerStatement();
}
} | class MySQLDDLVisitor extends MySQLVisitor implements DDLVisitor {
@Override
public ASTNode visitCreateView(final CreateViewContext ctx) {
return new CreateViewStatement();
}
@Override
public ASTNode visitDropView(final DropViewContext ctx) {
return new DropViewStatement();
}
@Override
public ASTNode visitCreateDatabase(final CreateDatabaseContext ctx) {
return new CreateDatabaseStatement(ctx.schemaName().getText());
}
@Override
public ASTNode visitAlterDatabase(final AlterDatabaseContext ctx) {
return new AlterDatabaseStatement();
}
@Override
public ASTNode visitDropDatabase(final DropDatabaseContext ctx) {
return new DropDatabaseStatement(ctx.schemaName().getText());
}
@Override
public ASTNode visitRenameTableSpecification(final RenameTableSpecificationContext ctx) {
return new RenameTableStatement();
}
@SuppressWarnings("unchecked")
@Override
@Override
public ASTNode visitCreateDefinitionClause(final CreateDefinitionClauseContext ctx) {
CollectionValue<CreateDefinitionSegment> result = new CollectionValue<>();
for (CreateDefinitionContext each : ctx.createDefinition()) {
if (null != each.columnDefinition()) {
result.getValue().add((ColumnDefinitionSegment) visit(each.columnDefinition()));
}
if (null != each.constraintDefinition()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.constraintDefinition()));
}
if (null != each.checkConstraintDefinition()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.checkConstraintDefinition()));
}
if (null != each.indexDefinition_()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.indexDefinition_()));
}
}
return result;
}
@Override
public ASTNode visitIndexDefinition_(final IndexDefinition_Context ctx) {
ConstraintDefinitionSegment result = new ConstraintDefinitionSegment(ctx.start.getStartIndex(), ctx.stop.getStopIndex());
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.keyParts_());
result.getIndexColumns().addAll(columnSegments.getValue());
if (null != ctx.indexName()) {
result.setIndexName((IndexSegment) visit(ctx.indexName()));
}
return result;
}
@Override
public ASTNode visitCreateLikeClause(final CreateLikeClauseContext ctx) {
return visit(ctx.tableName());
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitAlterTable(final AlterTableContext ctx) {
AlterTableStatement result = new AlterTableStatement((SimpleTableSegment) visit(ctx.tableName()));
if (null != ctx.alterDefinitionClause()) {
for (AlterDefinitionSegment each : ((CollectionValue<AlterDefinitionSegment>) visit(ctx.alterDefinitionClause())).getValue()) {
if (each instanceof AddColumnDefinitionSegment) {
result.getAddColumnDefinitions().add((AddColumnDefinitionSegment) each);
} else if (each instanceof ModifyColumnDefinitionSegment) {
result.getModifyColumnDefinitions().add((ModifyColumnDefinitionSegment) each);
} else if (each instanceof DropColumnDefinitionSegment) {
result.getDropColumnDefinitions().add((DropColumnDefinitionSegment) each);
} else if (each instanceof ConstraintDefinitionSegment) {
result.getAddConstraintDefinitions().add((ConstraintDefinitionSegment) each);
}
}
}
return result;
}
@Override
public ASTNode visitAlterDefinitionClause(final AlterDefinitionClauseContext ctx) {
CollectionValue<AlterDefinitionSegment> result = new CollectionValue<>();
for (AlterSpecificationContext each : ctx.alterSpecification()) {
if (null != each.addColumnSpecification()) {
result.getValue().add((AddColumnDefinitionSegment) visit(each.addColumnSpecification()));
}
if (null != each.addConstraintSpecification()) {
result.getValue().add((ConstraintDefinitionSegment) visit(each.addConstraintSpecification().constraintDefinition()));
}
if (null != each.changeColumnSpecification()) {
ModifyColumnDefinitionSegment modifyColumnDefinition = new ModifyColumnDefinitionSegment(
each.changeColumnSpecification().getStart().getStartIndex(), each.changeColumnSpecification().getStop().getStopIndex(),
(ColumnDefinitionSegment) visit(each.changeColumnSpecification().columnDefinition()));
if (null != each.changeColumnSpecification().firstOrAfterColumn()) {
modifyColumnDefinition.setColumnPosition((ColumnPositionSegment) visit(each.changeColumnSpecification().firstOrAfterColumn()));
}
result.getValue().add(modifyColumnDefinition);
}
if (null != each.modifyColumnSpecification()) {
ModifyColumnDefinitionSegment modifyColumnDefinition = new ModifyColumnDefinitionSegment(
each.modifyColumnSpecification().getStart().getStartIndex(), each.modifyColumnSpecification().getStop().getStopIndex(),
(ColumnDefinitionSegment) visit(each.modifyColumnSpecification().columnDefinition()));
if (null != each.modifyColumnSpecification().firstOrAfterColumn()) {
modifyColumnDefinition.setColumnPosition((ColumnPositionSegment) visit(each.modifyColumnSpecification().firstOrAfterColumn()));
}
result.getValue().add(modifyColumnDefinition);
}
if (null != each.dropColumnSpecification()) {
result.getValue().add((DropColumnDefinitionSegment) visit(each.dropColumnSpecification()));
}
}
return result;
}
@Override
public ASTNode visitAddColumnSpecification(final AddColumnSpecificationContext ctx) {
Collection<ColumnDefinitionSegment> columnDefinitions = new LinkedList<>();
for (ColumnDefinitionContext each : ctx.columnDefinition()) {
columnDefinitions.add((ColumnDefinitionSegment) visit(each));
}
AddColumnDefinitionSegment result = new AddColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnDefinitions);
if (null != ctx.firstOrAfterColumn()) {
Preconditions.checkState(1 == columnDefinitions.size());
result.setColumnPosition(getColumnPositionSegment(columnDefinitions.iterator().next(), (ColumnPositionSegment) visit(ctx.firstOrAfterColumn())));
}
return result;
}
@Override
public ASTNode visitColumnDefinition(final ColumnDefinitionContext ctx) {
ColumnSegment column = (ColumnSegment) visit(ctx.columnName());
DataTypeSegment dataTypeSegment = (DataTypeSegment) visit(ctx.dataType());
boolean isPrimaryKey = isPrimaryKey(ctx);
ColumnDefinitionSegment result = new ColumnDefinitionSegment(
ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), column, dataTypeSegment, isPrimaryKey);
result.getReferencedTables().addAll(getReferencedTables(ctx));
return result;
}
private Collection<SimpleTableSegment> getReferencedTables(final ColumnDefinitionContext ctx) {
Collection<SimpleTableSegment> result = new LinkedList<>();
for (StorageOptionContext each : ctx.storageOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().referenceDefinition()) {
result.add((SimpleTableSegment) visit(each.dataTypeGenericOption().referenceDefinition()));
}
}
for (GeneratedOptionContext each : ctx.generatedOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().referenceDefinition()) {
result.add((SimpleTableSegment) visit(each.dataTypeGenericOption().referenceDefinition()));
}
}
return result;
}
private boolean isPrimaryKey(final ColumnDefinitionContext ctx) {
for (StorageOptionContext each : ctx.storageOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().primaryKey()) {
return true;
}
}
for (GeneratedOptionContext each : ctx.generatedOption()) {
if (null != each.dataTypeGenericOption() && null != each.dataTypeGenericOption().primaryKey()) {
return true;
}
}
return false;
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitConstraintDefinition(final ConstraintDefinitionContext ctx) {
ConstraintDefinitionSegment result = new ConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
if (null != ctx.primaryKeyOption()) {
result.getPrimaryKeyColumns().addAll(((CollectionValue<ColumnSegment>) visit(ctx.primaryKeyOption().keyParts_())).getValue());
}
if (null != ctx.foreignKeyOption()) {
result.setReferencedTable((SimpleTableSegment) visit(ctx.foreignKeyOption()));
}
if (null != ctx.uniqueOption_()) {
CollectionValue<ColumnSegment> columnSegments = (CollectionValue<ColumnSegment>) visit(ctx.uniqueOption_().keyParts_());
result.getIndexColumns().addAll(columnSegments.getValue());
if (null != ctx.uniqueOption_().indexName()) {
result.setIndexName(new IndexSegment(ctx.uniqueOption_().indexName().start.getStartIndex(), ctx.uniqueOption_().indexName().stop.getStopIndex(),
(IdentifierValue) visit(ctx.uniqueOption_().indexName())));
}
}
return result;
}
@Override
public ASTNode visitCheckConstraintDefinition(final CheckConstraintDefinitionContext ctx) {
return new ConstraintDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
}
@Override
public ASTNode visitChangeColumnSpecification(final ChangeColumnSpecificationContext ctx) {
return extractModifyColumnDefinition(ctx.getStart(), ctx.getStop(), ctx.columnDefinition(), ctx.firstOrAfterColumn());
}
@Override
public ASTNode visitDropColumnSpecification(final DropColumnSpecificationContext ctx) {
return new DropColumnDefinitionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), Collections.singletonList((ColumnSegment) visit(ctx.columnName())));
}
@Override
public ASTNode visitDropPrimaryKeySpecification(final DropPrimaryKeySpecificationContext ctx) {
return new DropPrimaryKeySegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex());
}
@Override
public ASTNode visitModifyColumnSpecification(final ModifyColumnSpecificationContext ctx) {
return extractModifyColumnDefinition(ctx.getStart(), ctx.getStop(), ctx.columnDefinition(), ctx.firstOrAfterColumn());
}
@Override
public ASTNode visitRenameColumnSpecification(final RenameColumnSpecificationContext ctx) {
return new RenameColumnSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(),
(ColumnSegment) visit(ctx.columnName(0)), (ColumnSegment) visit(ctx.columnName(1)));
}
@Override
public ASTNode visitReferenceDefinition(final ReferenceDefinitionContext ctx) {
return visit(ctx.tableName());
}
@Override
public ASTNode visitForeignKeyOption(final ForeignKeyOptionContext ctx) {
return visit(ctx.referenceDefinition());
}
private ModifyColumnDefinitionSegment extractModifyColumnDefinition(final Token start, final Token stop,
final ColumnDefinitionContext columnDefinition, final FirstOrAfterColumnContext firstOrAfterColumn) {
ModifyColumnDefinitionSegment result = new ModifyColumnDefinitionSegment(start.getStartIndex(), stop.getStopIndex(),
(ColumnDefinitionSegment) visit(columnDefinition));
if (null != firstOrAfterColumn) {
result.setColumnPosition(getColumnPositionSegment(result.getColumnDefinition(), (ColumnPositionSegment) visit(firstOrAfterColumn)));
}
return result;
}
@Override
public ASTNode visitFirstOrAfterColumn(final FirstOrAfterColumnContext ctx) {
ColumnSegment columnName = null;
if (null != ctx.columnName()) {
columnName = (ColumnSegment) visit(ctx.columnName());
}
return null == ctx.columnName() ? new ColumnFirstPositionSegment(ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName)
: new ColumnAfterPositionSegment(
ctx.getStart().getStartIndex(), ctx.getStop().getStopIndex(), columnName);
}
private ColumnPositionSegment getColumnPositionSegment(final ColumnDefinitionSegment columnDefinition, final ColumnPositionSegment columnPosition) {
return columnPosition instanceof ColumnFirstPositionSegment
? new ColumnFirstPositionSegment(columnPosition.getStartIndex(), columnPosition.getStopIndex(), columnPosition.getColumnName())
: new ColumnAfterPositionSegment(columnPosition.getStartIndex(), columnPosition.getStopIndex(), columnPosition.getColumnName());
}
@SuppressWarnings("unchecked")
@Override
public ASTNode visitDropTable(final DropTableContext ctx) {
DropTableStatement result = new DropTableStatement();
result.getTables().addAll(((CollectionValue<SimpleTableSegment>) visit(ctx.tableNames())).getValue());
return result;
}
@Override
public ASTNode visitTruncateTable(final TruncateTableContext ctx) {
TruncateStatement result = new TruncateStatement();
result.getTables().add((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitCreateIndex(final CreateIndexContext ctx) {
CreateIndexStatement result = new CreateIndexStatement();
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitDropIndex(final DropIndexContext ctx) {
DropIndexStatement result = new DropIndexStatement();
result.setTable((SimpleTableSegment) visit(ctx.tableName()));
return result;
}
@Override
public ASTNode visitKeyParts_(final KeyParts_Context ctx) {
CollectionValue<ColumnSegment> result = new CollectionValue<>();
List<KeyPart_Context> keyParts = ctx.keyPart_();
for (KeyPart_Context each : keyParts) {
if (null != each.columnName()) {
result.getValue().add((ColumnSegment) visit(each.columnName()));
}
}
return result;
}
@Override
public ASTNode visitCreateProcedure(final CreateProcedureContext ctx) {
return new CreateProcedureStatement();
}
@Override
public ASTNode visitAlterProcedure(final AlterProcedureContext ctx) {
return new AlterProcedureStatement();
}
@Override
public ASTNode visitDropProcedure(final DropProcedureContext ctx) {
return new DropProcedureStatement();
}
@Override
public ASTNode visitCreateFunction(final CreateFunctionContext ctx) {
return new CreateFunctionStatement();
}
@Override
public ASTNode visitAlterFunction(final AlterFunctionContext ctx) {
return new AlterFunctionStatement();
}
@Override
public ASTNode visitDropFunction(final DropFunctionContext ctx) {
return new DropFunctionStatement();
}
@Override
public ASTNode visitCreateEvent(final CreateEventContext ctx) {
return new CreateEventStatement();
}
@Override
public ASTNode visitAlterEvent(final AlterEventContext ctx) {
return new AlterEventStatement();
}
@Override
public ASTNode visitDropEvent(final DropEventContext ctx) {
return new DropEventStatement();
}
@Override
public ASTNode visitAlterInstance(final AlterInstanceContext ctx) {
return new AlterInstanceStatement();
}
@Override
public ASTNode visitCreateLogfileGroup(final CreateLogfileGroupContext ctx) {
return new CreateLogfileGroupStatement();
}
@Override
public ASTNode visitAlterLogfileGroup(final AlterLogfileGroupContext ctx) {
return new AlterLogfileGroupStatement();
}
@Override
public ASTNode visitDropLogfileGroup(final DropLogfileGroupContext ctx) {
return new DropLogfileGroupStatement();
}
@Override
public ASTNode visitCreateServer(final CreateServerContext ctx) {
return new CreateServerStatement();
}
@Override
public ASTNode visitAlterServer(final AlterServerContext ctx) {
return new AlterServerStatement();
}
@Override
public ASTNode visitDropServer(final DropServerContext ctx) {
return new DropServerStatement();
}
@Override
public ASTNode visitCreateTrigger(final CreateTriggerContext ctx) {
return new CreateTriggerStatement();
}
@Override
public ASTNode visitDropTrigger(final DropTriggerContext ctx) {
return new DropTriggerStatement();
}
} |
Ok, no problem, I didn't know it. I'm gonna do like this. | private void detectAndLogSpecificSpringPropertiesIfExist() {
Config config = ConfigProvider.getConfig();
Map<String, String> springJpaToQuarkusOrmPropertiesMap = new HashMap<>();
springJpaToQuarkusOrmPropertiesMap.put("spring.jpa.show-sql", "quarkus.hibernate-orm.log.sql");
springJpaToQuarkusOrmPropertiesMap.put("spring.jpa.properties.hibernate.dialect ", "quarkus.hibernate-orm.dialect");
springJpaToQuarkusOrmPropertiesMap.put("spring.jpa.properties.hibernate.dialect.storage_engine",
"quarkus.hibernate-orm.dialect.storage-engine");
springJpaToQuarkusOrmPropertiesMap.put("spring.jpa.generate-ddl", "quarkus.hibernate-orm.database.generation");
Iterable<String> iterablePropertyNames = config.getPropertyNames();
List<String> propertyNames = new ArrayList<String>();
iterablePropertyNames.forEach(propertyNames::add);
Pattern pattern = Pattern.compile("spring\\.jpa\\..*");
Matcher matcher = pattern.matcher("");
List<String> springProperties = propertyNames.stream().filter(s -> matcher.reset(s).matches()).collect(toList());
if (!springProperties.isEmpty()) {
String warningLog = "Quarkus does not support the ";
for (String springProperty : springProperties) {
String quarkusProperty = springJpaToQuarkusOrmPropertiesMap.get(springProperty);
if (quarkusProperty != null) {
warningLog = warningLog + springProperty + " property " + "you may try to use the Quarkus equivalent one : "
+ quarkusProperty + ".";
}
LOGGER.warn(warningLog + springProperty + " property. ");
}
}
} | LOGGER.warn(warningLog + springProperty + " property. "); | private void detectAndLogSpecificSpringPropertiesIfExist() {
Config config = ConfigProvider.getConfig();
Iterable<String> iterablePropertyNames = config.getPropertyNames();
List<String> propertyNames = new ArrayList<String>();
iterablePropertyNames.forEach(propertyNames::add);
List<String> springProperties = propertyNames.stream().filter(s -> pattern.matcher(s).matches()).collect(toList());
String notSupportedProperties = "";
if (!springProperties.isEmpty()) {
for (String sp : springProperties) {
switch (sp) {
case SPRING_JPA_SHOW_SQL:
notSupportedProperties = notSupportedProperties + "\t- " + SPRING_JPA_SHOW_SQL
+ " should be replaced by " + QUARKUS_HIBERNATE_ORM_LOG_SQL + "\n";
break;
case SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT:
notSupportedProperties = notSupportedProperties + "\t- " + SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT
+ " should be replaced by " + QUARKUS_HIBERNATE_ORM_DIALECT + "\n";
break;
case SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT_STORAGE_ENGINE:
notSupportedProperties = notSupportedProperties + "\t- "
+ SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT_STORAGE_ENGINE + " should be replaced by "
+ QUARKUS_HIBERNATE_ORM_DIALECT_STORAGE_ENGINE + "\n";
break;
case SPRING_JPA_GENERATE_DDL:
notSupportedProperties = notSupportedProperties + "\t- " + SPRING_JPA_GENERATE_DDL
+ " should be replaced by " + QUARKUS_HIBERNATE_ORM_DATABASE_GENERATION + "\n";
break;
default:
notSupportedProperties = notSupportedProperties + "\t- " + sp + "\n";
break;
}
}
LOGGER.warnf(
"Quarkus does not support the following Spring Boot configuration properties: %n%s",
notSupportedProperties);
}
} | class SpringDataJPAProcessor {
private static final Logger LOGGER = Logger.getLogger(SpringDataJPAProcessor.class.getName());
@BuildStep
FeatureBuildItem registerFeature() {
return new FeatureBuildItem(FeatureBuildItem.SPRING_DATA_JPA);
}
@BuildStep
IgnorableNonIndexedClasses ignorable() {
Set<String> ignorable = new HashSet<>();
ignorable.add(Auditable.class.getName());
ignorable.add(Persistable.class.getName());
return new IgnorableNonIndexedClasses(ignorable);
}
@BuildStep
void build(CombinedIndexBuildItem index,
BuildProducer<GeneratedClassBuildItem> generatedClasses,
BuildProducer<GeneratedBeanBuildItem> generatedBeans,
BuildProducer<AdditionalBeanBuildItem> additionalBeans, BuildProducer<ReflectiveClassBuildItem> reflectiveClasses) {
detectAndLogSpecificSpringPropertiesIfExist();
IndexView indexIndex = index.getIndex();
List<ClassInfo> interfacesExtendingCrudRepository = getAllInterfacesExtending(DotNames.SUPPORTED_REPOSITORIES,
indexIndex);
removeNoRepositoryBeanClasses(interfacesExtendingCrudRepository);
implementCrudRepositories(generatedBeans, generatedClasses, additionalBeans, reflectiveClasses,
interfacesExtendingCrudRepository, indexIndex);
}
private void removeNoRepositoryBeanClasses(List<ClassInfo> interfacesExtendingCrudRepository) {
Iterator<ClassInfo> iterator = interfacesExtendingCrudRepository.iterator();
while (iterator.hasNext()) {
ClassInfo next = iterator.next();
if (next.classAnnotation(DotNames.SPRING_DATA_NO_REPOSITORY_BEAN) != null) {
iterator.remove();
}
}
}
private List<ClassInfo> getAllInterfacesExtending(Collection<DotName> targets, IndexView index) {
List<ClassInfo> result = new ArrayList<>();
Collection<ClassInfo> knownClasses = index.getKnownClasses();
for (ClassInfo clazz : knownClasses) {
if (!Modifier.isInterface(clazz.flags())) {
continue;
}
List<DotName> interfaceNames = clazz.interfaceNames();
boolean found = false;
for (DotName interfaceName : interfaceNames) {
if (targets.contains(interfaceName)) {
found = true;
break;
}
}
if (found) {
result.add(clazz);
}
}
return result;
}
private void implementCrudRepositories(BuildProducer<GeneratedBeanBuildItem> generatedBeans,
BuildProducer<GeneratedClassBuildItem> generatedClasses,
BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
List<ClassInfo> crudRepositoriesToImplement, IndexView index) {
ClassOutput beansClassOutput = new GeneratedBeanGizmoAdaptor(generatedBeans);
ClassOutput otherClassOutput = new GeneratedClassGizmoAdaptor(generatedClasses, true);
Indexer indexer = new Indexer();
Set<DotName> additionalIndex = new HashSet<>();
indexRepositoryInterface(index, indexer, additionalIndex, Repository.class);
indexRepositoryInterface(index, indexer, additionalIndex, CrudRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, PagingAndSortingRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, JpaRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, QueryByExampleExecutor.class);
CompositeIndex compositeIndex = CompositeIndex.create(index, indexer.complete());
SpringDataRepositoryCreator repositoryCreator = new SpringDataRepositoryCreator(beansClassOutput, otherClassOutput,
compositeIndex, (n) -> {
additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(n));
},
(className -> {
reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, className));
}));
for (ClassInfo crudRepositoryToImplement : crudRepositoriesToImplement) {
repositoryCreator.implementCrudRepository(crudRepositoryToImplement);
}
}
private void indexRepositoryInterface(IndexView index, Indexer indexer, Set<DotName> additionalIndex,
Class<?> repoClass) {
IndexingUtil.indexClass(repoClass.getName(), indexer, index, additionalIndex,
SpringDataJPAProcessor.class.getClassLoader());
}
} | class SpringDataJPAProcessor {
private static final Logger LOGGER = Logger.getLogger(SpringDataJPAProcessor.class.getName());
private static final Pattern pattern = Pattern.compile("spring\\.jpa\\..*");
public static final String SPRING_JPA_SHOW_SQL = "spring.jpa.show-sql";
public static final String SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT = "spring.jpa.properties.hibernate.dialect";
public static final String SPRING_JPA_PROPERTIES_HIBERNATE_DIALECT_STORAGE_ENGINE = "spring.jpa.properties.hibernate.dialect.storage_engine";
public static final String SPRING_JPA_GENERATE_DDL = "spring.jpa.generate-ddl";
public static final String QUARKUS_HIBERNATE_ORM_DIALECT = "quarkus.hibernate-orm.dialect";
public static final String QUARKUS_HIBERNATE_ORM_LOG_SQL = "quarkus.hibernate-orm.log.sql";
public static final String QUARKUS_HIBERNATE_ORM_DIALECT_STORAGE_ENGINE = "quarkus.hibernate-orm.dialect.storage-engine";
public static final String QUARKUS_HIBERNATE_ORM_DATABASE_GENERATION = "quarkus.hibernate-orm.database.generation";
@BuildStep
FeatureBuildItem registerFeature() {
return new FeatureBuildItem(FeatureBuildItem.SPRING_DATA_JPA);
}
@BuildStep
IgnorableNonIndexedClasses ignorable() {
Set<String> ignorable = new HashSet<>();
ignorable.add(Auditable.class.getName());
ignorable.add(Persistable.class.getName());
return new IgnorableNonIndexedClasses(ignorable);
}
@BuildStep
void build(CombinedIndexBuildItem index,
BuildProducer<GeneratedClassBuildItem> generatedClasses,
BuildProducer<GeneratedBeanBuildItem> generatedBeans,
BuildProducer<AdditionalBeanBuildItem> additionalBeans, BuildProducer<ReflectiveClassBuildItem> reflectiveClasses) {
detectAndLogSpecificSpringPropertiesIfExist();
IndexView indexIndex = index.getIndex();
List<ClassInfo> interfacesExtendingCrudRepository = getAllInterfacesExtending(DotNames.SUPPORTED_REPOSITORIES,
indexIndex);
removeNoRepositoryBeanClasses(interfacesExtendingCrudRepository);
implementCrudRepositories(generatedBeans, generatedClasses, additionalBeans, reflectiveClasses,
interfacesExtendingCrudRepository, indexIndex);
}
private void removeNoRepositoryBeanClasses(List<ClassInfo> interfacesExtendingCrudRepository) {
Iterator<ClassInfo> iterator = interfacesExtendingCrudRepository.iterator();
while (iterator.hasNext()) {
ClassInfo next = iterator.next();
if (next.classAnnotation(DotNames.SPRING_DATA_NO_REPOSITORY_BEAN) != null) {
iterator.remove();
}
}
}
private List<ClassInfo> getAllInterfacesExtending(Collection<DotName> targets, IndexView index) {
List<ClassInfo> result = new ArrayList<>();
Collection<ClassInfo> knownClasses = index.getKnownClasses();
for (ClassInfo clazz : knownClasses) {
if (!Modifier.isInterface(clazz.flags())) {
continue;
}
List<DotName> interfaceNames = clazz.interfaceNames();
boolean found = false;
for (DotName interfaceName : interfaceNames) {
if (targets.contains(interfaceName)) {
found = true;
break;
}
}
if (found) {
result.add(clazz);
}
}
return result;
}
private void implementCrudRepositories(BuildProducer<GeneratedBeanBuildItem> generatedBeans,
BuildProducer<GeneratedClassBuildItem> generatedClasses,
BuildProducer<AdditionalBeanBuildItem> additionalBeans,
BuildProducer<ReflectiveClassBuildItem> reflectiveClasses,
List<ClassInfo> crudRepositoriesToImplement, IndexView index) {
ClassOutput beansClassOutput = new GeneratedBeanGizmoAdaptor(generatedBeans);
ClassOutput otherClassOutput = new GeneratedClassGizmoAdaptor(generatedClasses, true);
Indexer indexer = new Indexer();
Set<DotName> additionalIndex = new HashSet<>();
indexRepositoryInterface(index, indexer, additionalIndex, Repository.class);
indexRepositoryInterface(index, indexer, additionalIndex, CrudRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, PagingAndSortingRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, JpaRepository.class);
indexRepositoryInterface(index, indexer, additionalIndex, QueryByExampleExecutor.class);
CompositeIndex compositeIndex = CompositeIndex.create(index, indexer.complete());
SpringDataRepositoryCreator repositoryCreator = new SpringDataRepositoryCreator(beansClassOutput, otherClassOutput,
compositeIndex, (n) -> {
additionalBeans.produce(AdditionalBeanBuildItem.unremovableOf(n));
},
(className -> {
reflectiveClasses.produce(new ReflectiveClassBuildItem(true, false, className));
}));
for (ClassInfo crudRepositoryToImplement : crudRepositoriesToImplement) {
repositoryCreator.implementCrudRepository(crudRepositoryToImplement);
}
}
private void indexRepositoryInterface(IndexView index, Indexer indexer, Set<DotName> additionalIndex,
Class<?> repoClass) {
IndexingUtil.indexClass(repoClass.getName(), indexer, index, additionalIndex,
SpringDataJPAProcessor.class.getClassLoader());
}
} |
Shall we use `configValueMap.get(v1).getValue()` inside the assertion calls? Creating new variables can be redundant if we don't use them multiple times. | public void testCliWhenUnsupportedTypesWithinToml() {
ArrayType arrayType = TypeCreator.createArrayType(TYPE_STRING);
VariableKey v1 = new VariableKey(module, "v1", PredefinedTypes.TYPE_INT, true);
Type v2Type = new BIntersectionType(module, new Type[]{arrayType, PredefinedTypes.TYPE_READONLY},
arrayType, 0, true);
VariableKey v2 = new VariableKey(module, "v2", v2Type, true);
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
ConfigResolver configResolver =
new ConfigResolver(Map.ofEntries(Map.entry(module, new VariableKey[]{v1, v2})), diagnosticLog,
List.of(new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.v1=87"),
new TomlFileProvider(ROOT_MODULE,
getConfigPath("UnsupportedCLITypeConfig.toml"), Set.of(module))));
Map<VariableKey, ConfigValue> configValueMap = configResolver.resolveConfigs();
Object v1Value = configValueMap.get(v1).getValue();
Object v2Value = configValueMap.get(v2).getValue();
Assert.assertEquals(v1Value, 87L);
Assert.assertEquals(v2Value.toString(), "[\"hello\",\"world\"]");
Assert.assertEquals(0, diagnosticLog.getErrorCount());
} | Object v2Value = configValueMap.get(v2).getValue(); | public void testCliWhenUnsupportedTypesWithinToml() {
ArrayType arrayType = TypeCreator.createArrayType(TYPE_STRING);
VariableKey v1 = new VariableKey(module, "v1", PredefinedTypes.TYPE_INT, true);
Type v2Type = new BIntersectionType(module, new Type[]{arrayType, PredefinedTypes.TYPE_READONLY},
arrayType, 0, true);
VariableKey v2 = new VariableKey(module, "v2", v2Type, true);
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
ConfigResolver configResolver =
new ConfigResolver(Map.ofEntries(Map.entry(module, new VariableKey[]{v1, v2})), diagnosticLog,
List.of(new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.v1=87"),
new TomlFileProvider(ROOT_MODULE,
getConfigPath("UnsupportedCLITypeConfig.toml"), Set.of(module))));
Map<VariableKey, ConfigValue> configValueMap = configResolver.resolveConfigs();
Assert.assertEquals(configValueMap.get(v1).getValue(), 87L);
Assert.assertEquals(configValueMap.get(v2).getValue().toString(), "[\"hello\",\"world\"]");
Assert.assertEquals(0, diagnosticLog.getErrorCount());
} | class ConfigTest {
private static final Module module = new Module("myOrg", "test_module", "1");
private static final Module ROOT_MODULE = new Module("rootOrg", "mod12", "1");
private static final List<Type> COLOR_ENUM_MEMBERS = List.of(
new BFiniteType("COLOR_RED", Set.of(StringUtils.fromString("RED")), 0),
new BFiniteType("COLOR_GREEN", Set.of(StringUtils.fromString("GREEN")), 0));
public static final Type COLOR_ENUM_UNION = new BUnionType(COLOR_ENUM_MEMBERS, "Colors", ROOT_MODULE,
0, false, SymbolFlags.ENUM);
public static final Type COLOR_ENUM = new BIntersectionType(module, new Type[]{}, COLOR_ENUM_UNION, 0, true);
public static final Type AMBIGUOUS_UNION = new BUnionType(Arrays.asList(TypeCreator.createMapType(TYPE_ANYDATA),
TypeCreator.createMapType(TYPE_STRING)), true);
private final Set<Module> moduleSet = Set.of(module);
@Test(dataProvider = "simple-type-values-data-provider")
public void testTomlConfigProviderWithSimpleTypes(VariableKey key, Class<?> expectedJClass,
Object expectedValue, ConfigProvider... configProvider) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey[] keys = {key};
configVarMap.put(module, keys);
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
Arrays.asList(configProvider));
Map<VariableKey, ConfigValue> configValueMap = configResolver.resolveConfigs();
Assert.assertTrue(expectedJClass.isInstance(configValueMap.get(key).getValue()),
"Invalid value provided for variable : " + key.variable);
Assert.assertEquals(configValueMap.get(key).getValue(), expectedValue);
}
@DataProvider(name = "simple-type-values-data-provider")
public Object[][] simpleTypeConfigProviders() {
return new Object[][]{
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 5,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 3.5,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("abc"), new TomlFileProvider(ROOT_MODULE,
getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, true,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("24.87"), new TomlFileProvider(ROOT_MODULE,
getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 123L,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=123")},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 7,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.byteVar=7")},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 99.9,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.floatVar=99.9")},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("efg"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.stringVar=efg")},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, false,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.booleanVar=0")},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("876.54"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.decimalVar=876.54")},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13579"),
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 13579L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13579")},
{new VariableKey(module, "color", COLOR_ENUM,
true), BString.class, StringUtils.fromString("GREEN"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.color=GREEN")},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 123L,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "123"))},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 7,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_BYTEVAR", "7"))},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 99.9,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_FLOATVAR", "99.9"))},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("efg"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_STRINGVAR", "efg"))},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, false,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_BOOLEANVAR", "0"))},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("876.54"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_DECIMALVAR",
"876.54"))},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "13579")),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13677"),
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 13579L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13677"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "13579"))},
{new VariableKey(module, "color", COLOR_ENUM,
true), BString.class, StringUtils.fromString("GREEN"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_COLOR", "GREEN"))},
};
}
@Test(dataProvider = "not-supported-provider")
public void testCLIArgUnsupportedErrors(String variableName, Type type, String expectedValue, int errors) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey variableKey = new VariableKey(ROOT_MODULE, variableName, type, null, true);
configVarMap.put(ROOT_MODULE,
new VariableKey[]{new VariableKey(ROOT_MODULE, "intVar", PredefinedTypes.TYPE_INT, null
, false), variableKey});
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
List.of(new CliProvider(ROOT_MODULE, "-CintVar=22"),
new TomlFileProvider(ROOT_MODULE, getConfigPathForNegativeCases(
"UnsupportedCliConfig.toml"), Set.of(ROOT_MODULE))));
Map<VariableKey, ConfigValue> valueMap = configResolver.resolveConfigs();
Assert.assertEquals(diagnosticLog.getWarningCount(), 0);
Assert.assertEquals(diagnosticLog.getErrorCount(), errors);
ConfigValue configValue = valueMap.get(variableKey);
if (configValue == null) {
Assert.assertNull(expectedValue);
} else {
Assert.assertEquals(configValue.getValue().toString(), expectedValue);
}
}
@DataProvider(name = "not-supported-provider")
public Object[][] notSupportedDataProvider() {
ArrayType arrayType = TypeCreator.createArrayType(PredefinedTypes.TYPE_INT);
TupleType tupleType =
TypeCreator.createTupleType(List.of(PredefinedTypes.TYPE_INT, PredefinedTypes.TYPE_STRING),
null, 0, true);
Field name = TypeCreator.createField(PredefinedTypes.TYPE_STRING, "name", SymbolFlags.REQUIRED);
Map<String, Field> fields = Map.ofEntries(Map.entry("name", name));
RecordType recordType = TypeCreator.createRecordType("Person", ROOT_MODULE, SymbolFlags.READONLY, fields,
null, true, 6);
MapType mapType = TypeCreator.createMapType(PredefinedTypes.TYPE_INT, true);
TableType tableType = TypeCreator.createTableType(mapType, true);
return new Object[][]{
{"a", new BIntersectionType(ROOT_MODULE, new Type[]{arrayType, PredefinedTypes.TYPE_READONLY},
arrayType, 0, true), "[2,3,4]", 5},
{"b", new BIntersectionType(ROOT_MODULE, new Type[]{tupleType, PredefinedTypes.TYPE_READONLY},
tupleType, 0, true), "[5,\"hello\"]", 5},
{"c", new BIntersectionType(ROOT_MODULE, new Type[]{recordType, PredefinedTypes.TYPE_READONLY},
recordType, 0, true), null, 7},
{"d", new BIntersectionType(ROOT_MODULE, new Type[]{mapType, PredefinedTypes.TYPE_READONLY},
mapType, 0, true), "{\"a\":1}", 4},
{"e", new BIntersectionType(ROOT_MODULE, new Type[]{tableType, PredefinedTypes.TYPE_READONLY},
tableType, 0, true), "[{\"aa\":2}]", 4},
};
}
@Test(dataProvider = "not-supported-provider")
public void testEnvVarUnsupportedErrors(String variableName, Type type, String expectedValue, int errors) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey variableKey = new VariableKey(ROOT_MODULE, variableName, type, null, true);
configVarMap.put(ROOT_MODULE,
new VariableKey[]{new VariableKey(ROOT_MODULE, "intVar", PredefinedTypes.TYPE_INT, null
, false), variableKey});
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
List.of(new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_INTVAR", "22")),
new TomlFileProvider(ROOT_MODULE, getConfigPathForNegativeCases(
"UnsupportedCliConfig.toml"), Set.of(ROOT_MODULE))));
Map<VariableKey, ConfigValue> valueMap = configResolver.resolveConfigs();
Assert.assertEquals(diagnosticLog.getWarningCount(), 0);
Assert.assertEquals(diagnosticLog.getErrorCount(), errors);
ConfigValue configValue = valueMap.get(variableKey);
if (configValue == null) {
Assert.assertNull(expectedValue);
} else {
Assert.assertEquals(configValue.getValue().toString(), expectedValue);
}
}
@Test
} | class ConfigTest {
private static final Module module = new Module("myOrg", "test_module", "1");
private static final Module ROOT_MODULE = new Module("rootOrg", "mod12", "1");
private static final List<Type> COLOR_ENUM_MEMBERS = List.of(
new BFiniteType("COLOR_RED", Set.of(StringUtils.fromString("RED")), 0),
new BFiniteType("COLOR_GREEN", Set.of(StringUtils.fromString("GREEN")), 0));
public static final Type COLOR_ENUM_UNION = new BUnionType(COLOR_ENUM_MEMBERS, "Colors", ROOT_MODULE,
0, false, SymbolFlags.ENUM);
public static final Type COLOR_ENUM = new BIntersectionType(module, new Type[]{}, COLOR_ENUM_UNION, 0, true);
public static final Type AMBIGUOUS_UNION = new BUnionType(Arrays.asList(TypeCreator.createMapType(TYPE_ANYDATA),
TypeCreator.createMapType(TYPE_STRING)), true);
private final Set<Module> moduleSet = Set.of(module);
@Test(dataProvider = "simple-type-values-data-provider")
public void testTomlConfigProviderWithSimpleTypes(VariableKey key, Class<?> expectedJClass,
Object expectedValue, ConfigProvider... configProvider) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey[] keys = {key};
configVarMap.put(module, keys);
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
Arrays.asList(configProvider));
Map<VariableKey, ConfigValue> configValueMap = configResolver.resolveConfigs();
Assert.assertTrue(expectedJClass.isInstance(configValueMap.get(key).getValue()),
"Invalid value provided for variable : " + key.variable);
Assert.assertEquals(configValueMap.get(key).getValue(), expectedValue);
}
@DataProvider(name = "simple-type-values-data-provider")
public Object[][] simpleTypeConfigProviders() {
return new Object[][]{
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 5,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 3.5,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("abc"), new TomlFileProvider(ROOT_MODULE,
getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, true,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("24.87"), new TomlFileProvider(ROOT_MODULE,
getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 123L,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=123")},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 7,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.byteVar=7")},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 99.9,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.floatVar=99.9")},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("efg"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.stringVar=efg")},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, false,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.booleanVar=0")},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("876.54"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.decimalVar=876.54")},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13579"),
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 13579L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13579")},
{new VariableKey(module, "color", COLOR_ENUM,
true), BString.class, StringUtils.fromString("GREEN"),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.color=GREEN")},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 123L,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "123"))},
{new VariableKey(module, "byteVar", PredefinedTypes.TYPE_BYTE, true), Integer.class, 7,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_BYTEVAR", "7"))},
{new VariableKey(module, "floatVar", PredefinedTypes.TYPE_FLOAT, true), Double.class, 99.9,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_FLOATVAR", "99.9"))},
{new VariableKey(module, "stringVar", PredefinedTypes.TYPE_STRING, true), BString.class,
StringUtils.fromString("efg"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_STRINGVAR", "efg"))},
{new VariableKey(module, "booleanVar", PredefinedTypes.TYPE_BOOLEAN, true), Boolean.class, false,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_BOOLEANVAR", "0"))},
{new VariableKey(module, "decimalVar", PredefinedTypes.TYPE_DECIMAL, true), DecimalValue.class,
new DecimalValue("876.54"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_DECIMALVAR",
"876.54"))},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 42L,
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "13579")),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13677"),
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet)},
{new VariableKey(module, "intVar", PredefinedTypes.TYPE_INT, true), Long.class, 13579L,
new TomlFileProvider(ROOT_MODULE, getConfigPath("SimpleTypesConfig.toml"), moduleSet),
new CliProvider(ROOT_MODULE, "-CmyOrg.test_module.intVar=13677"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_INTVAR", "13579"))},
{new VariableKey(module, "color", COLOR_ENUM,
true), BString.class, StringUtils.fromString("GREEN"),
new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_MYORG_TEST_MODULE_COLOR", "GREEN"))},
};
}
@Test(dataProvider = "not-supported-provider")
public void testCLIArgUnsupportedErrors(String variableName, Type type, String expectedValue, int errors) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey variableKey = new VariableKey(ROOT_MODULE, variableName, type, null, true);
configVarMap.put(ROOT_MODULE,
new VariableKey[]{new VariableKey(ROOT_MODULE, "intVar", PredefinedTypes.TYPE_INT, null
, false), variableKey});
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
List.of(new CliProvider(ROOT_MODULE, "-CintVar=22"),
new TomlFileProvider(ROOT_MODULE, getConfigPathForNegativeCases(
"UnsupportedCliConfig.toml"), Set.of(ROOT_MODULE))));
Map<VariableKey, ConfigValue> valueMap = configResolver.resolveConfigs();
Assert.assertEquals(diagnosticLog.getWarningCount(), 0);
Assert.assertEquals(diagnosticLog.getErrorCount(), errors);
ConfigValue configValue = valueMap.get(variableKey);
if (configValue == null) {
Assert.assertNull(expectedValue);
} else {
Assert.assertEquals(configValue.getValue().toString(), expectedValue);
}
}
@DataProvider(name = "not-supported-provider")
public Object[][] notSupportedDataProvider() {
ArrayType arrayType = TypeCreator.createArrayType(PredefinedTypes.TYPE_INT);
TupleType tupleType =
TypeCreator.createTupleType(List.of(PredefinedTypes.TYPE_INT, PredefinedTypes.TYPE_STRING),
null, 0, true);
Field name = TypeCreator.createField(PredefinedTypes.TYPE_STRING, "name", SymbolFlags.REQUIRED);
Map<String, Field> fields = Map.ofEntries(Map.entry("name", name));
RecordType recordType = TypeCreator.createRecordType("Person", ROOT_MODULE, SymbolFlags.READONLY, fields,
null, true, 6);
MapType mapType = TypeCreator.createMapType(PredefinedTypes.TYPE_INT, true);
TableType tableType = TypeCreator.createTableType(mapType, true);
return new Object[][]{
{"a", new BIntersectionType(ROOT_MODULE, new Type[]{arrayType, PredefinedTypes.TYPE_READONLY},
arrayType, 0, true), "[2,3,4]", 5},
{"b", new BIntersectionType(ROOT_MODULE, new Type[]{tupleType, PredefinedTypes.TYPE_READONLY},
tupleType, 0, true), "[5,\"hello\"]", 5},
{"c", new BIntersectionType(ROOT_MODULE, new Type[]{recordType, PredefinedTypes.TYPE_READONLY},
recordType, 0, true), null, 7},
{"d", new BIntersectionType(ROOT_MODULE, new Type[]{mapType, PredefinedTypes.TYPE_READONLY},
mapType, 0, true), "{\"a\":1}", 4},
{"e", new BIntersectionType(ROOT_MODULE, new Type[]{tableType, PredefinedTypes.TYPE_READONLY},
tableType, 0, true), "[{\"aa\":2}]", 4},
};
}
@Test(dataProvider = "not-supported-provider")
public void testEnvVarUnsupportedErrors(String variableName, Type type, String expectedValue, int errors) {
RuntimeDiagnosticLog diagnosticLog = new RuntimeDiagnosticLog();
Map<Module, VariableKey[]> configVarMap = new HashMap<>();
VariableKey variableKey = new VariableKey(ROOT_MODULE, variableName, type, null, true);
configVarMap.put(ROOT_MODULE,
new VariableKey[]{new VariableKey(ROOT_MODULE, "intVar", PredefinedTypes.TYPE_INT, null
, false), variableKey});
ConfigResolver configResolver = new ConfigResolver(configVarMap, diagnosticLog,
List.of(new EnvVarProvider(ROOT_MODULE, Map.of("BAL_CONFIG_VAR_INTVAR", "22")),
new TomlFileProvider(ROOT_MODULE, getConfigPathForNegativeCases(
"UnsupportedCliConfig.toml"), Set.of(ROOT_MODULE))));
Map<VariableKey, ConfigValue> valueMap = configResolver.resolveConfigs();
Assert.assertEquals(diagnosticLog.getWarningCount(), 0);
Assert.assertEquals(diagnosticLog.getErrorCount(), errors);
ConfigValue configValue = valueMap.get(variableKey);
if (configValue == null) {
Assert.assertNull(expectedValue);
} else {
Assert.assertEquals(configValue.getValue().toString(), expectedValue);
}
}
@Test
} |
Due to the concurrent access the synchronization is actually quite hard. I think I have pushed a good solution now. | public void setCurrentKey(Object key) {
if (stateful && usesTimers) {
synchronized (getKeyedStateBackend()) {
super.setCurrentKey(key);
}
} else if (usesTimers) {
super.setCurrentKey(key);
} else {
throw new UnsupportedOperationException(
"Current key for state backend can only be set by state requests from SDK workers or when processing timers.");
}
} | if (stateful && usesTimers) { | public void setCurrentKey(Object key) {
if (!usesTimers) {
throw new UnsupportedOperationException(
"Current key for state backend can only be set by state requests from SDK workers or when processing timers.");
}
} | class BagUserStateFactory
implements StateRequestHandlers.BagUserStateHandlerFactory {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
private BagUserStateFactory(
StateInternals stateInternals, KeyedStateBackend<ByteBuffer> keyedStateBackend) {
this.stateInternals = stateInternals;
this.keyedStateBackend = keyedStateBackend;
}
@Override
public <K, V, W extends BoundedWindow>
StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
String pTransformId,
String userStateId,
Coder<K> keyCoder,
Coder<V> valueCoder,
Coder<W> windowCoder) {
return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
@Override
public Iterable<V> get(K key, W window) {
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
return bagState.read();
}
}
@Override
public void append(K key, W window, Iterator<V> values) {
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
while (values.hasNext()) {
bagState.add(values.next());
}
}
}
@Override
public void clear(K key, W window) {
synchronized (keyedStateBackend) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
bagState.clear();
}
}
private void prepareStateBackend(K key, Coder<K> keyCoder) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
keyCoder.encode(key, baos);
} catch (IOException e) {
throw new RuntimeException("Failed to encode key for Flink state backend", e);
}
keyedStateBackend.setCurrentKey(ByteBuffer.wrap(baos.toByteArray()));
}
};
}
} | class BagUserStateFactory
implements StateRequestHandlers.BagUserStateHandlerFactory {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
private BagUserStateFactory(
StateInternals stateInternals, KeyedStateBackend<ByteBuffer> keyedStateBackend) {
this.stateInternals = stateInternals;
this.keyedStateBackend = keyedStateBackend;
}
@Override
public <K, V, W extends BoundedWindow>
StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
String pTransformId,
String userStateId,
Coder<K> keyCoder,
Coder<V> valueCoder,
Coder<W> windowCoder) {
return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
@Override
public Iterable<V> get(K key, W window) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
return bagState.read();
}
@Override
public void append(K key, W window, Iterator<V> values) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
while (values.hasNext()) {
bagState.add(values.next());
}
}
@Override
public void clear(K key, W window) {
prepareStateBackend(key, keyCoder);
StateNamespace namespace = StateNamespaces.window(windowCoder, window);
BagState<V> bagState =
stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
bagState.clear();
}
private void prepareStateBackend(K key, Coder<K> keyCoder) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
try {
keyCoder.encode(key, baos);
} catch (IOException e) {
throw new RuntimeException("Failed to encode key for Flink state backend", e);
}
keyedStateBackend.setCurrentKey(ByteBuffer.wrap(baos.toByteArray()));
}
};
}
} |
Type of some `StatementExpressions` when created during desugar can be null. | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
whileNode.body.stmts.add(1, resultAssignment);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(resultVariableDefinition);
blockNode.addStatement(whileNode);
return blockNode;
}
private BLangType getUserDefineTypeNode(BType type) {
BLangUserDefinedType recordType =
new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
ASTBuilderUtil.createIdentifier(null, ""));
recordType.setBType(type);
return recordType;
}
@Override
public void visit(BLangWhile whileNode) {
if (whileNode.onFailClause != null) {
BLangOnFailClause onFailClause = whileNode.onFailClause;
whileNode.onFailClause = null;
whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
result = rewrite(doStmt, env);
} else {
whileNode.expr = rewriteExpr(whileNode.expr);
whileNode.body = rewrite(whileNode.body, env);
result = whileNode;
}
}
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
BLangOnFailClause onFailClause) {
BLangDo bLDo = (BLangDo) TreeBuilder.createDoNode();
BLangBlockStmt doBlock = ASTBuilderUtil.createBlockStmt(location);
doBlock.scope = new Scope(env.scope.owner);
bLDo.body = doBlock;
bLDo.pos = location;
bLDo.onFailClause = onFailClause;
bLDo.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
doBlock.stmts.add(statement);
return bLDo;
}
@Override
public void visit(BLangLock lockNode) {
BLangOnFailClause currentOnFailClause = this.onFailClause;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
if (lockNode.onFailClause != null) {
blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
rewrite(lockNode.onFailClause, env);
}
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
enclLocks.push(lockStmt);
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.setBType(symTable.nilType);
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.setBType(nillableError);
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError,
this.env.scope.owner, lockNode.pos, VIRTUAL);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr,
nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
unLockStmt.relatedLock = lockStmt;
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
this.onFailClause = currentOnFailClause;
}
@Override
public void visit(BLangLockStmt lockStmt) {
result = lockStmt;
}
@Override
public void visit(BLangUnLockStmt unLockStmt) {
result = unLockStmt;
}
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
BLangSimpleVarRef shouldRetryRef) {
BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
trxOnFailClause.pos = pos;
trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
trxOnFailClause.body.scope = new Scope(env.scope.owner);
trxOnFailClause.isInternal = true;
BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
"$trxError$", symTable.errorType, null, trxOnFailErrorSym);
trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
trxOnFailError);
trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
trxBlockId, shouldRetryRef);
BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
shouldNotPanic.setBType(symTable.booleanType);
shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = pos;
panicNode.expr = caughtError;
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
trxOnFailClause.body.stmts.add(exitIf);
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = caughtError;
failBlock.stmts.add(failStmt);
trxOnFailClause.bodyContainsFail = true;
return trxOnFailClause;
}
@Override
public void visit(BLangTransaction transactionNode) {
if (transactionNode.onFailClause != null) {
BLangOnFailClause onFailClause = transactionNode.onFailClause;
transactionNode.onFailClause = null;
transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
result = rewrite(doStmt, env);
} else {
BLangLiteral currentTrxBlockId = this.trxBlockId;
String uniqueId = String.valueOf(++transactionBlockCount);
this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
boolean currShouldReturnErrors = this.shouldReturnErrors;
this.shouldReturnErrors = true;
BLangOnFailClause currOnFailClause = this.onFailClause;
BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
shouldPanicVarSymbol.closure = true;
BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
"$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
shouldPanicVariable);
BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
shouldPanicVarSymbol);
BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
this.shouldRetryRef);
enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
boolean userDefinedOnFailAvbl = this.onFailClause != null;
analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
BLangBlockStmt transactionStmtBlock =
transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
transactionStmtBlock.stmts.add(0, shouldPanicDef);
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
result = rewrite(transactionStmtBlock, this.env);
this.shouldReturnErrors = currShouldReturnErrors;
this.trxBlockId = currentTrxBlockId;
swapAndResetEnclosingOnFail(currOnFailClause);
}
}
@Override
public void visit(BLangRollback rollbackNode) {
BLangBlockStmt rollbackStmtExpr = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
result = rewrite(rollbackStmtExpr, env);
}
private BLangOnFailClause createRetryInternalOnFail(Location pos,
BLangSimpleVarRef retryResultRef,
BLangSimpleVarRef retryManagerRef,
BLangSimpleVarRef shouldRetryRef,
BLangSimpleVarRef continueLoopRef,
BLangSimpleVarRef returnResult) {
BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
internalOnFail.pos = pos;
internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
internalOnFail.body.scope = new Scope(env.scope.owner);
BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
"$caughtError$", symTable.errorType, null, caughtErrorSym);
internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
caughtError);
env.scope.define(caughtErrorSym.name, caughtErrorSym);
BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
internalOnFail.body.stmts.add(errorAssignment);
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
internalOnFail.body.stmts.add(continueLoopTrue);
BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
retryManagerRef, caughtErrorRef);
BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
shouldRetryInvocation);
internalOnFail.body.stmts.add(shouldRetryAssignment);
BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
shouldNotRetryCheck.setBType(symTable.booleanType);
shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
BLangGroupExpr exitCheck = new BLangGroupExpr();
exitCheck.setBType(symTable.booleanType);
exitCheck.expression = shouldNotRetryCheck;
BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
if (this.onFailClause != null) {
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = retryResultRef;
exitLogicBlock.stmts.add(failStmt);
internalOnFail.bodyContainsFail = true;
internalOnFail.body.stmts.add(exitIf);
BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
loopContinueStmt.pos = pos;
internalOnFail.body.stmts.add(loopContinueStmt);
} else {
BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
exitLogicBlock.stmts.add(returnErrorTrue);
internalOnFail.body.stmts.add(exitIf);
}
return internalOnFail;
}
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
List<BType> paramTypes = new ArrayList<>();
paramTypes.add(symTable.booleanType);
BInvokableType type = new BInvokableType(paramTypes, symTable.booleanType,
null);
BOperatorSymbol notOperatorSymbol = new BOperatorSymbol(
names.fromString(OperatorKind.NOT.value()), symTable.rootPkgSymbol.pkgID, type, symTable.rootPkgSymbol,
symTable.builtinPos, VIRTUAL);
return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType,
OperatorKind.NOT, notOperatorSymbol);
}
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, BLangFunctionBody lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null));
return lambdaFunction;
}
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
this.forceCastReturnType = ((BLangType) returnType).getBType();
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
this.forceCastReturnType = null;
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
private BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
TypeNode returnType) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
func.desugared = false;
lambdaFunction.pos = pos;
return lambdaFunction;
}
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
final BPackageSymbol packageSymbol = targetPkg.symbol;
final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(packageSymbol);
symbolEnter.defineNode(funcNode, packageEnv);
packageEnv.enclPkg.functions.add(funcNode);
packageEnv.enclPkg.topLevelNodes.add(funcNode);
}
@Override
public void visit(BLangForkJoin forkJoin) {
result = forkJoin;
}
@Override
public void visit(BLangLiteral literalExpr) {
if (literalExpr.getBType().tag == TypeTags.ARRAY
&& ((BArrayType) literalExpr.getBType()).eType.tag == TypeTags.BYTE) {
result = rewriteBlobLiteral(literalExpr);
return;
}
result = literalExpr;
}
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
String[] result = getBlobTextValue((String) literalExpr.value);
byte[] values;
if (BASE_64.equals(result[0])) {
values = Base64.getDecoder().decode(result[1].getBytes(StandardCharsets.UTF_8));
} else {
values = hexStringToByteArray(result[1]);
}
BLangArrayLiteral arrayLiteralNode = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
arrayLiteralNode.setBType(literalExpr.getBType());
arrayLiteralNode.pos = literalExpr.pos;
arrayLiteralNode.exprs = new ArrayList<>();
for (byte b : values) {
arrayLiteralNode.exprs.add(createByteLiteral(literalExpr.pos, b));
}
return arrayLiteralNode;
}
private String[] getBlobTextValue(String blobLiteralNodeText) {
String nodeText = blobLiteralNodeText.replaceAll(" ", "");
String[] result = new String[2];
result[0] = nodeText.substring(0, nodeText.indexOf('`'));
result[1] = nodeText.substring(nodeText.indexOf('`') + 1, nodeText.lastIndexOf('`'));
return result;
}
private static byte[] hexStringToByteArray(String str) {
int len = str.length();
byte[] data = new byte[len / 2];
for (int i = 0; i < len; i += 2) {
data[i / 2] = (byte) ((Character.digit(str.charAt(i), 16) << 4) + Character.digit(str.charAt(i + 1), 16));
}
return data;
}
@Override
public void visit(BLangListConstructorExpr listConstructor) {
listConstructor.exprs = rewriteExprs(listConstructor.exprs);
BLangExpression expr;
if (listConstructor.getBType().tag == TypeTags.TUPLE) {
expr = new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructor.getBType().tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listConstructor.getBType()));
result = rewriteExpr(expr);
} else if (getElementType(listConstructor.getBType()).tag == TypeTags.JSON) {
expr = new BLangJSONArrayLiteral(listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
} else if (listConstructor.getBType().tag == TypeTags.TYPEDESC) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = listConstructor.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
} else {
expr = new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listConstructor.getBType());
result = rewriteExpr(expr);
}
}
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
rewriteExprs(tableConstructorExpr.recordLiteralList);
result = tableConstructorExpr;
}
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
if (arrayLiteral.getBType().tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayLiteral.getBType()));
return;
} else if (getElementType(arrayLiteral.getBType()).tag == TypeTags.JSON) {
result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayLiteral.getBType());
return;
}
result = arrayLiteral;
}
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
tupleLiteral.exprs.forEach(expr -> {
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
types.setImplicitCastExpr(expr, expType, symTable.anyType);
});
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
@Override
public void visit(BLangGroupExpr groupExpr) {
if (groupExpr.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = groupExpr.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
} else {
result = rewriteExpr(groupExpr.expression);
}
}
@Override
public void visit(BLangRecordLiteral recordLiteral) {
List<RecordLiteralNode.RecordField> fields = recordLiteral.fields;
fields.sort((v1, v2) -> Boolean.compare(isComputedKey(v1), isComputedKey(v2)));
result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.setBType(symTable.stringType);
result = qnameExpr;
return;
}
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
!((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
if (!enclLocks.isEmpty()) {
BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
BLangLockStmt lockStmt = enclLocks.peek();
lockStmt.addLockVariable(symbol);
lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
}
}
genVarRefExpr.setBType(varRefExpr.getBType());
genVarRefExpr.pos = varRefExpr.pos;
if ((varRefExpr.isLValue)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.isLValue = varRefExpr.isLValue;
genVarRefExpr.setBType(varRefExpr.symbol.type);
result = genVarRefExpr;
return;
}
genVarRefExpr.isLValue = varRefExpr.isLValue;
BType targetType = genVarRefExpr.getBType();
genVarRefExpr.setBType(genVarRefExpr.symbol.type);
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
/**
 * Desugars a field-based access expression (e.g. {@code a.b}) into the concrete
 * access node matching the static type of the accessed expression: object method
 * access becomes a lambda delegate, object/record field access becomes a struct
 * field access, lax (json/xml-in-map) access becomes JSON/XML access, and map and
 * xml accesses get their dedicated node types.
 */
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    // Safe navigation (a?.b / chained accesses over error|nil) is rewritten first
    // into equivalent conditional logic.
    if (safeNavigate(fieldAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
        return;
    }
    BLangAccessExpression targetVarRef = fieldAccessExpr;
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
    fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
    if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
        fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
    }
    // The field name becomes a string-literal key; unescape so the runtime sees
    // the actual characters, not the source-level escapes.
    BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
            StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
    int varRefTypeTag = varRefType.tag;
    // NOTE(review): the union branches only inspect the FIRST member type —
    // presumably upstream type checking guarantees homogeneous unions here; confirm.
    if (varRefTypeTag == TypeTags.OBJECT ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
                ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            // Accessing an attached method as a value: wrap it in a closure lambda.
            result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
            return;
        } else {
            boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
            // Field writes inside the (generated) init function count as
            // store-on-creation so final/readonly checks are not violated.
            if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
                BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
                BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol;
                BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
                BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
                if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
                        (initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
                    isStoreOnCreation = true;
                }
            }
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false,
                    isStoreOnCreation);
        }
    } else if (varRefTypeTag == TypeTags.RECORD ||
            (varRefTypeTag == TypeTags.UNION &&
                    ((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
        if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
                && ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
            targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
        } else {
            targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                    (BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
        }
    } else if (types.isLax(varRefType)) {
        // Lax typed access: json-like values go through JSON access; xml values go
        // through the xml attribute/element-name langlib path.
        if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
            if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
                result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
                return;
            }
            fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
            targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
        } else {
            BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
            xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
            result = xmlAccessInvocation;
            return;
        }
    } else if (varRefTypeTag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.isStoreOnCreation);
    } else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
        targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
                fieldAccessExpr.fieldKind);
    }
    // Carry over lvalue-ness, static type and optional-access flag to the new node.
    targetVarRef.isLValue = fieldAccessExpr.isLValue;
    targetVarRef.setBType(fieldAccessExpr.getBType());
    targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
    result = targetVarRef;
}
/**
 * Rewrites an access to an attached object method as a value (e.g. {@code var f = obj.foo;})
 * into an anonymous delegate lambda that closes over the receiver and forwards all
 * parameters (including a rest parameter) to the original method.
 *
 * @param fieldAccessExpr the method-as-field access being rewritten
 * @return the rewritten lambda, or a statement expression that first evaluates the
 *         receiver into a temporary when the receiver is not a simple variable reference
 */
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
    Location pos = fieldAccessExpr.pos;
    BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
    // Build the anonymous delegate function shell; its type mirrors the original method.
    BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
    String funcName = "$annon$method$delegate$" + lambdaFunctionCount++;
    BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
            names.fromString(funcName),
            env.enclPkg.packageID, originalMemberFuncSymbol.type, env.scope.owner, pos, VIRTUAL);
    funcSymbol.retType = originalMemberFuncSymbol.retType;
    funcSymbol.bodyExist = true;
    funcSymbol.params = new ArrayList<>();
    funcSymbol.scope = new Scope(funcSymbol);
    func.pos = pos;
    func.name = createIdentifier(pos, funcName);
    func.flagSet.add(Flag.LAMBDA);
    func.flagSet.add(Flag.ANONYMOUS);
    func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
    func.symbol = funcSymbol;
    func.setBType(funcSymbol.type);
    func.closureVarSymbols = new LinkedHashSet<>();
    BLangExpression receiver = fieldAccessExpr.expr;
    BLangSimpleVariableDef intermediateObjDef = null;
    if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
        // Simple receiver: capture the existing variable as a closure.
        BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
        receiverSymbol.closure = true;
        func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
    } else {
        // Complex receiver expression: evaluate it once into a temp variable and
        // capture the temp, so the receiver is not re-evaluated on every call.
        BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
                receiver, pos);
        intermediateObjDef = varDef;
        varDef.var.symbol.closure = true;
        env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
        BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
        func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
        receiver = variableRef;
    }
    // Mirror each required parameter of the original method on the delegate and
    // forward it by reference.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    for (BVarSymbol param : originalMemberFuncSymbol.params) {
        BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
                VIRTUAL);
        fParam.pos = pos;
        fParam.name = createIdentifier(pos, param.name.value);
        fParam.setBType(param.type);
        func.requiredParams.add(fParam);
        funcSymbol.params.add(fParam.symbol);
        funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
        BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
        requiredArgs.add(paramRef);
    }
    // Mirror the rest parameter (if any) and forward it as a spread (...rest) argument.
    ArrayList<BLangExpression> restArgs = new ArrayList<>();
    if (originalMemberFuncSymbol.restParam != null) {
        BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
        func.restParam = restParam;
        BVarSymbol restSym = originalMemberFuncSymbol.restParam;
        restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
        restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
                VIRTUAL);
        restParam.pos = pos;
        restParam.setBType(restSym.type);
        funcSymbol.restParam = restParam.symbol;
        funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
        BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
        BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
        restArgExpr.expr = restArg;
        restArgExpr.pos = pos;
        restArgExpr.setBType(restSym.type);
        restArgExpr.expectedType = restArgExpr.getBType();
        restArgs.add(restArgExpr);
    }
    // Delegate body: return receiver.method(args..., ...rest);
    BLangIdentifier field = fieldAccessExpr.field;
    BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
    retStmt.expr = createObjectMethodInvocation(
            receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
    ((BLangBlockFunctionBody) func.body).addStatement(retStmt);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = func;
    lambdaFunction.capturedClosureEnv = env.createClone();
    // Register the generated function at package level so later phases emit it.
    env.enclPkg.functions.add(func);
    env.enclPkg.topLevelNodes.add(func);
    lambdaFunction.parent = env.enclInvokable;
    lambdaFunction.setBType(func.getBType());
    if (intermediateObjDef == null) {
        return rewrite(lambdaFunction, env);
    } else {
        // Evaluate the receiver temp first, then yield the lambda.
        BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
        expr.setBType(lambdaFunction.getBType());
        return rewrite(expr, env);
    }
}
/**
 * Builds an invocation node that calls the method named {@code field} on
 * {@code receiver}, bound to {@code invocableSymbol}, passing the given
 * required and rest arguments. The node's static type is the callee's
 * declared return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation methodCall = (BLangInvocation) TreeBuilder.createInvocationNode();
    methodCall.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    methodCall.name = field;
    methodCall.symbol = invocableSymbol;
    methodCall.expr = receiver;
    methodCall.requiredArgs = requiredArgs;
    methodCall.restArgs = restArgs;
    methodCall.setBType(((BInvokableType) invocableSymbol.type).retType);
    return methodCall;
}
/**
 * Rewrites a lax field access on a map of xml (e.g. {@code m.key} where m is
 * {@code map<xml>}) into a statement expression equivalent to:
 * <pre>
 *   T|error $mapAccessResult$;
 *   T? $mapAccess = m["key"];
 *   if $mapAccess is () { $mapAccessResult$ = error("{ballerina/lang.map}InvalidKey", key = "key"); }
 *   else { $mapAccessResult$ = $mapAccess; }
 *   // value: $mapAccessResult$
 * </pre>
 */
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
    BLangStatementExpression statementExpression = new BLangStatementExpression();
    BLangBlockStmt block = new BLangBlockStmt();
    statementExpression.stmt = block;
    // Overall result type is T|error: missing keys surface as an error, not nil.
    BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
    Location pos = fieldAccessExpr.pos;
    BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
    block.addStatement(result);
    BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
    resultRef.setBType(fieldAccessType);
    statementExpression.setBType(fieldAccessType);
    // Raw map lookup: yields T|() since the key may be absent.
    BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
            fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
    BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
    BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
    mapAccessExpr.setBType(xmlOrNil);
    BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
    BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
    block.addStatement(mapResult);
    // if $mapAccess is () ... else ...
    BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
    BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
    ifStmt.expr = isLikeNilExpr;
    BLangBlockStmt resultNilBody = new BLangBlockStmt();
    ifStmt.body = resultNilBody;
    BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
    ifStmt.elseStmt = resultHasValueBody;
    // then-branch: construct the InvalidKey error carrying the missing key name.
    BLangErrorConstructorExpr errorConstructorExpr =
            (BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
    BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
            names.fromString(""), names.fromString("error"));
    errorConstructorExpr.setBType(symbol.type);
    List<BLangExpression> positionalArgs = new ArrayList<>();
    List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
    positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
    BLangNamedArgsExpression message = new BLangNamedArgsExpression();
    message.name = ASTBuilderUtil.createIdentifier(pos, "key");
    message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
    namedArgs.add(message);
    errorConstructorExpr.positionalArgs = positionalArgs;
    errorConstructorExpr.namedArgs = namedArgs;
    BLangSimpleVariableDef errorDef =
            createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
    resultNilBody.addStatement(errorDef);
    BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
    BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
    errorVarAssignment.varRef = resultRef;
    errorVarAssignment.expr = errorRef;
    // else-branch: the key exists; pass the looked-up value through.
    BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
            pos, resultHasValueBody);
    mapResultAssignment.varRef = resultRef;
    mapResultAssignment.expr = mapResultRef;
    statementExpression.expr = resultRef;
    return statementExpression;
}
/**
 * Rewrites a lax field access on an xml value into the matching langlib call:
 * {@code x._} becomes the element-name lookup, anything else becomes an
 * attribute lookup by (optionally namespace-expanded) name.
 */
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
    ArrayList<BLangExpression> args = new ArrayList<>();
    String fieldName = fieldAccessExpr.field.value;
    // ns:attr accesses carry a namespace symbol; expand to "{nsURI}attr" form.
    if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
        BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
                (BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
        fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
    }
    // "_" is the special element-name accessor, not an attribute name.
    if (fieldName.equals("_")) {
        return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
                fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
    }
    BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
    args.add(attributeNameLiteral);
    // Second argument tells the langlib whether this was optional access (x?.attr).
    args.add(isOptionalAccessToLiteral(fieldAccessExpr));
    return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
            new ArrayList<>());
}
/**
 * Produces a rewritten boolean literal stating whether the given field access
 * uses optional access (a?.b).
 */
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    BLangExpression optionalFlagLiteral =
            createLiteral(fieldAccessExpr.pos, symTable.booleanType, fieldAccessExpr.isOptionalFieldAccess());
    return rewrite(optionalFlagLiteral, env);
}
/**
 * Expands a namespace URI and local name into W3C expanded-name form,
 * i.e. {@code "{nsURI}localName"}.
 */
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder expandedName = new StringBuilder();
    expandedName.append('{').append(nsURI).append('}').append(localName);
    return expandedName.toString();
}
/**
 * Desugars an index-based access expression ({@code e[i]}) into the concrete
 * access node for the container's static type: map, mapping, list, xml, string
 * or table access. Falls through unchanged for other types.
 */
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
    // Safe navigation chains are rewritten into conditional logic first.
    if (safeNavigate(indexAccessExpr)) {
        result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
        return;
    }
    BLangIndexBasedAccess targetVarRef = indexAccessExpr;
    indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
    BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
    indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
    if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
    }
    if (varRefType.tag == TypeTags.MAP) {
        targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
    } else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
        // Record-like containers (after lifting nil/error) use struct field access.
        targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr,
                (BVarSymbol) indexAccessExpr.symbol, false);
    } else if (types.isSubTypeOfList(varRefType)) {
        targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
        targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (types.isAssignable(varRefType, symTable.stringType)) {
        indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
        targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    } else if (varRefType.tag == TypeTags.TABLE) {
        // A multi-key table access (t[k1, k2]) is packed into a tuple key first.
        if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) {
            BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral();
            listConstructorExpr.exprs = ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs;
            List<BType> memberTypes = new ArrayList<>();
            ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs.
                    forEach(expression -> memberTypes.add(expression.getBType()));
            listConstructorExpr.setBType(new BTupleType(memberTypes));
            indexAccessExpr.indexExpr = listConstructorExpr;
        }
        targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
                indexAccessExpr.indexExpr);
    }
    // Carry over lvalue-ness and static type to the replacement node.
    targetVarRef.isLValue = indexAccessExpr.isLValue;
    targetVarRef.setBType(indexAccessExpr.getBType());
    result = targetVarRef;
}
/**
 * Desugars a multi-key table index expression (t[k1, k2, ...]) by rewriting
 * each individual key expression in place.
 */
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    rewriteExprs(tableMultiKeyExpr.multiKeyIndexExprs);
    result = tableMultiKeyExpr;
}
/**
 * Desugars a plain (non-async) function or method invocation.
 */
@Override
public void visit(BLangInvocation iExpr) {
    rewriteInvocation(iExpr, false);
}
/**
 * Desugars an error constructor expression. Ensures the cause argument (second
 * positional arg) exists — defaulting to nil — and materialises the named args
 * into an immutable detail record literal.
 */
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
    // Only the message was given: add nil as the implicit cause argument.
    if (errorConstructorExpr.positionalArgs.size() == 1) {
        errorConstructorExpr.positionalArgs.add(createNilLiteral());
    }
    errorConstructorExpr.positionalArgs.set(1,
            addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
    rewriteExprs(errorConstructorExpr.positionalArgs);
    BLangExpression errorDetail;
    BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
            ((BErrorType) errorConstructorExpr.getBType()).detailType);
    if (errorConstructorExpr.namedArgs.isEmpty()) {
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
    } else {
        // Each named arg becomes a key-value field of the detail record.
        for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
            BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
            member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
                    symTable.stringType, namedArg.name.value));
            if (recordLiteral.getBType().tag == TypeTags.RECORD) {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
            } else {
                member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
            }
            recordLiteral.fields.add(member);
        }
        // Detail records are immutable: clone-readonly the constructed literal.
        errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
                ((BErrorType) errorConstructorExpr.getBType()).detailType);
    }
    errorConstructorExpr.errorDetail = errorDetail;
    result = errorConstructorExpr;
}
/**
 * Desugars a remote action invocation. A synchronous action call issued inside
 * a transaction needs the transaction coordinator started first.
 */
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    boolean isAsync = actionInvocation.async;
    if (actionInvocation.invokedInsideTransaction && !isAsync) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    rewriteInvocation(actionInvocation, isAsync);
}
/**
 * Common desugaring for all invocation forms: records lock dependencies,
 * reorders and rewrites arguments, resolves parameterized return types, and
 * converts object/record method calls into attached-function invocations.
 *
 * @param invocation the invocation being rewritten (mutated in place)
 * @param async      whether this is an async (start/worker) call
 */
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
    BLangInvocation invRef = invocation;
    // Inside a lock, the callee's global-variable dependencies must be locked too.
    if (!enclLocks.isEmpty()) {
        BLangLockStmt lock = enclLocks.peek();
        lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
    }
    // Named/defaultable args are reordered into positional form before rewriting.
    reorderArguments(invocation);
    invocation.requiredArgs = rewriteExprs(invocation.requiredArgs);
    fixStreamTypeCastsInInvocationParams(invocation);
    fixNonRestArgTypeCastInTypeParamInvocation(invocation);
    invocation.restArgs = rewriteExprs(invocation.restArgs);
    annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
            invocation.symbol.pkgID, invocation.symbol.owner, env);
    if (invocation.functionPointerInvocation) {
        visitFunctionPointerInvocation(invocation);
        return;
    }
    invocation.expr = rewriteExpr(invocation.expr);
    result = invRef;
    // Parameterized return types (dependent typing) are resolved to a concrete type here.
    BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
    if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
        BType retType = unifier.build(invSym.retType);
        invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
    }
    if (invocation.expr == null) {
        fixTypeCastInTypeParamInvocation(invocation, invRef);
        if (invocation.exprSymbol == null) {
            return;
        }
        // An implicit receiver (e.g. self) is materialised as a variable reference.
        invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
        invocation.expr = rewriteExpr(invocation.expr);
    }
    switch (invocation.expr.getBType().tag) {
        case TypeTags.OBJECT:
        case TypeTags.RECORD:
            // Attached method call: the receiver becomes the first argument.
            if (!invocation.langLibInvocation) {
                List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
                argExprs.add(0, invocation.expr);
                BLangAttachedFunctionInvocation attachedFunctionInvocation =
                        new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
                                invocation.symbol, invocation.getBType(),
                                invocation.expr, async);
                attachedFunctionInvocation.name = invocation.name;
                attachedFunctionInvocation.annAttachments = invocation.annAttachments;
                result = invRef = attachedFunctionInvocation;
            }
            break;
    }
    fixTypeCastInTypeParamInvocation(invocation, invRef);
}
/**
 * For langlib invocations, wraps each required argument in a conversion to its
 * corresponding parameter type, so type-param substitution does not leave
 * mismatched argument types. Non-langlib calls are left untouched.
 */
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    for (int index = 0; index < requiredArgs.size(); index++) {
        BLangExpression arg = requiredArgs.get(index);
        requiredArgs.set(index, addConversionExprIfRequired(arg, params.get(index).type));
    }
}
/* This function is a workaround and needs improvement.
 * Notes for improvement:
 * 1. Both arguments refer to the same invocation.
 * 2. Due to the current type-param logic we put the type-param flag on the original type.
 * 3. For error types carrying the Cloneable type with the type-param flag, this code changes
 *    the expression type.
 * 4. Using the error type is a problem because Cloneable is itself a type param
 *    (e.g. ExprBodiedFunctionTest); `never` was added to the CloneableType type param:
 *    @typeParam type
 *    CloneableType Cloneable|never;
 */
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
    var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
    // Only langlib calls or calls whose return type contains a type param need fixing.
    if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
        return;
    }
    // Reset the node to the declared return type, then cast back to the
    // originally inferred type so downstream phases see the expected type.
    BType originalInvType = genIExpr.getBType();
    if (!genIExpr.async) {
        genIExpr.setBType(returnTypeOfInvokable);
    }
    this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
/**
 * Wraps required arguments whose corresponding parameter is a stream type in a
 * conversion to that parameter type. Other arguments are left as-is.
 */
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    for (int index = 0; index < requiredArgs.size(); index++) {
        BVarSymbol param = params.get(index);
        if (param.type.tag != TypeTags.STREAM) {
            continue;
        }
        requiredArgs.set(index, addConversionExprIfRequired(requiredArgs.get(index), param.type));
    }
}
/**
 * Creates a literal node representing the nil value {@code ()}.
 */
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
/**
 * Desugars a type-init expression ({@code new T(...)}). Stream constructors go
 * through the internal stream-construct function; everything else goes through
 * object-init desugaring.
 */
public void visit(BLangTypeInit typeInitExpr) {
    if (typeInitExpr.getBType().tag != TypeTags.STREAM) {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    } else {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    }
}
/**
 * Desugars {@code new T(...)} for object types into a statement expression:
 * <pre>
 *   T $obj$ = new;                     // allocation
 *   var $temp$ = $obj$.init(...);      // generated initializer call
 *   T|error $result$;
 *   if $temp$ is error { $result$ = $temp$; } else { $result$ = $obj$; }
 *   // value: $result$
 * </pre>
 * When the initializer cannot return an error, only the allocation and init
 * call are emitted and the object itself is the value.
 */
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
    typeInitExpr.desugared = true;
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
    // For T|error targets, pick the object member type out of the union.
    BType objType = getObjectType(typeInitExpr.getBType());
    BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
    objVarDef.var.name.pos = symTable.builtinPos;
    BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
    blockStmt.addStatement(objVarDef);
    // Bind the init invocation to the generated initializer of the object type.
    typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
    typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
    // Initializer returns nil: no error path, so no if/else wiring is needed.
    if (typeInitExpr.initInvocation.getBType().tag == TypeTags.NIL) {
        BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
        initInvExpr.expr = typeInitExpr.initInvocation;
        typeInitExpr.initInvocation.name.value = Names.GENERATED_INIT_SUFFIX.value;
        BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
        stmtExpr.setBType(objVarRef.symbol.type);
        return stmtExpr;
    }
    // Initializer may return an error: capture its result and branch on it.
    BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.getBType(),
            typeInitExpr.initInvocation, typeInitExpr.pos);
    blockStmt.addStatement(initInvRetValVarDef);
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
    blockStmt.addStatement(resultVarDef);
    BLangSimpleVarRef initRetValVarRefInCondition =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    BLangTypeTestExpr isErrorTest =
            ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
    isErrorTest.setBType(symTable.booleanType);
    // then-branch: init failed, propagate the error as the overall value.
    BLangSimpleVarRef thenInitRetValVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
    BLangSimpleVarRef thenResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment errAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
    thenStmt.addStatement(errAssignment);
    // else-branch: init succeeded, the constructed object is the value.
    BLangSimpleVarRef elseResultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangAssignment objAssignment =
            ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
    BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
    elseStmt.addStatement(objAssignment);
    BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
    blockStmt.addStatement(ifelse);
    BLangSimpleVarRef resultVarRef =
            ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(resultVarRef.symbol.type);
    return stmtExpr;
}
/**
 * Desugars {@code new stream<C, E>(...)} into a call to the internal
 * {@code constructStream} function, passing typedesc values for the constraint
 * and completion types plus the optional iterator argument.
 */
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CONSTRUCT_STREAM).symbol;
    // typedesc argument for the stream's constraint type C.
    BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
    BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
    constraintTdExpr.resolvedType = constraintType;
    constraintTdExpr.setBType(constraintTdType);
    // typedesc argument for the stream's completion type E.
    BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
    BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
    BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
    completionTdExpr.resolvedType = completionType;
    completionTdExpr.setBType(completionTdType);
    List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
    // Optional third argument: the user-supplied iterator object.
    if (!typeInitExpr.argsExpr.isEmpty()) {
        args.add(typeInitExpr.argsExpr.get(0));
    }
    BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
            typeInitExpr.pos, symbol, args, symResolver);
    streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
    return streamConstructInvocation;
}
/**
 * Creates a variable definition statement {@code type name = expr;}. An
 * existing symbol with the same name in the current scope is reused; otherwise
 * a fresh virtual symbol owned by the current scope owner is minted.
 */
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean symbolMissing = varSym == null || varSym == symTable.notFoundSymbol;
    if (symbolMissing) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef varDefStmt = ASTBuilderUtil.createVariableDef(location);
    varDefStmt.var = variable;
    varDefStmt.setBType(variable.getBType());
    return varDefStmt;
}
/**
 * Extracts the object type from the given type: the type itself when it is an
 * object, or the first object member of a union ({@code noType} if the union
 * has none). Any other type indicates a compiler bug at object-init
 * desugaring and raises an IllegalStateException.
 */
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
/**
 * Creates a type node representing the built-in {@code error} type, positioned
 * at the built-in location.
 */
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.pos = symTable.builtinPos;
    errorTypeNode.setBType(symTable.errorType);
    return errorTypeNode;
}
/**
 * Creates a type node representing {@code error?} (error or nil).
 */
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    // Set the built-in position for parity with getErrorTypeNode(); generated
    // nodes otherwise carry a null position.
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
/**
 * Desugars a ternary expression {@code cond ? thenExpr : elseExpr} into a
 * statement expression built around an if/else.
 */
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
    /*
     * First desugar to if-else:
     *
     * T $result$;
     * if () {
     *    $result$ = thenExpr;
     * } else {
     *    $result$ = elseExpr;
     * }
     *
     */
    BLangSimpleVariableDef resultVarDef =
            createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
    // then-branch: $result$ = thenExpr;
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment thenAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
    thenBody.addStatement(thenAssignment);
    // else-branch: $result$ = elseExpr;
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangAssignment elseAssignment =
            ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
    elseBody.addStatement(elseAssignment);
    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(ternaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Desugars a wait expression. A single future is rewritten as-is; an alternate
 * wait ({@code wait f1 | f2 | ...}) is flattened from its binary-expression
 * tree into a list of rewritten futures.
 */
@Override
public void visit(BLangWaitExpr waitExpr) {
    if (waitExpr.getExpression().getKind() != NodeKind.BINARY_EXPR) {
        // Single future: wait f
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitExpr.getExpression()));
    } else {
        // Alternate wait: flatten the `|` tree, preserving source order.
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitExpr.getExpression(), new ArrayList<>());
    }
    result = waitExpr;
}
/**
 * Flattens a (possibly nested) alternate-wait binary expression into the given
 * list of rewritten futures, left to right, and returns that list.
 */
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    visitBinaryExprOfWait(binaryExpr.lhsExpr, exprs);
    visitBinaryExprOfWait(binaryExpr.rhsExpr, exprs);
    return exprs;
}
/**
 * Recursive helper for flattening alternate-wait expressions: nested binary
 * expressions are descended into; leaf futures are rewritten and collected.
 */
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
    } else {
        collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
    }
}
/**
 * Desugars a multiple-wait expression ({@code wait {a: f1, b: f2}}) by
 * rewriting each field's future and replacing the node with a wait literal.
 */
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    waitExpr.keyValuePairs.forEach(pair -> {
        // Shorthand fields ({a}) carry the future in keyExpr instead of valueExpr.
        if (pair.valueExpr == null) {
            pair.keyExpr = rewriteExpr(pair.keyExpr);
        } else {
            pair.valueExpr = rewriteExpr(pair.valueExpr);
        }
    });
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    result = rewriteExpr(waitLiteral);
}
/**
 * Desugars a trap expression: rewrites the trapped expression and, unless it
 * is nil-typed, converts it to the trap's {@code T|error} type.
 */
@Override
public void visit(BLangTrapExpr trapExpr) {
    BLangExpression rewritten = rewriteExpr(trapExpr.expr);
    if (rewritten.getBType().tag != TypeTags.NIL) {
        rewritten = addConversionExprIfRequired(rewritten, trapExpr.getBType());
    }
    trapExpr.expr = rewritten;
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
    // Binary expressions over nullable (optional) operands are nil-lifted: expand
    // into a statement expression that checks both operands for nil first.
    if (isNullableBinaryExpr(binaryExpr)) {
        BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
        result = rewrite(stmtExpr, env);
        return;
    }
    // Range operators (`...` / `..<`) become an int-range object construction;
    // the half-open form first adjusts the end expression.
    if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
        if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
            binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
        }
        result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
        return;
    }
    // Logical AND/OR are handled separately (short-circuit semantics).
    if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
        visitBinaryLogicalExpr(binaryExpr);
        return;
    }
    OperatorKind binaryOpKind = binaryExpr.opKind;
    // Arithmetic and bitwise operators may need byte operands widened to int when
    // the expected result type is int (see checkByteTypeIncompatibleOperations).
    if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
            binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
            binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
            binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
        checkByteTypeIncompatibleOperations(binaryExpr);
    }
    binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
    binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
    result = binaryExpr;
    int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
    int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
    // (In)equality between an int operand and a byte operand: widen the byte side to int.
    if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
            binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_EQUAL ||
            binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
        if (lhsExprTypeTag == TypeTags.INT && rhsExprTypeTag == TypeTags.BYTE) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
            return;
        }
        if (lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.INT) {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
            return;
        }
    }
    boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
    boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
    // Same type tag on both sides: no casts needed, except that shift/arithmetic
    // operators over non-value types still fall through to the helpers below.
    if (lhsExprTypeTag == rhsExprTypeTag) {
        if (!isBinaryShiftOperator && !isArithmeticOperator) {
            return;
        }
        if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
            return;
        }
    }
    // `string + x`: if `x` is xml, wrap the string side as an xml text literal;
    // otherwise cast the other side to the string side's type.
    if (TypeTags.isStringTypeTag(lhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    // Mirror of the above for `x + string`.
    if (TypeTags.isStringTypeTag(rhsExprTypeTag) && binaryExpr.opKind == OperatorKind.ADD) {
        if (TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    // Numeric promotion: decimal dominates float, which dominates int-like types.
    if (lhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.DECIMAL) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    if (lhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
        return;
    }
    if (rhsExprTypeTag == TypeTags.FLOAT) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
        return;
    }
    // Remaining mixed-type cases are delegated per operator category.
    if (isArithmeticOperator) {
        createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (isBinaryShiftOperator) {
        createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
        return;
    }
    if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
        createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
    }
}
/**
 * Expands a nil-lifting binary expression into a block statement expression.
 * Both operands are captured in temporaries, and an if/else assigns nil to the
 * result when either operand is nil, or the ordinary binary result otherwise:
 *
 *   { $temp_1 = lhs; result = (); $temp_2 = rhs;
 *     if ($temp_1 is () || $temp_2 is ()) { result = (); }
 *     else { result = <T> ($temp_1 op $temp_2); } } -> result
 *
 * (The redundant no-argument {@code String.format(...)} wrappers around the
 * temporary variable names were removed; they added nothing.)
 *
 * @param binaryExpr a binary expression for which {@code isNullableBinaryExpr} holds
 * @return the equivalent statement expression
 */
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangExpression tempExpr;
    // The expression type is `T?`; the first member is taken as the non-nil lift type.
    BUnionType exprBType = (BUnionType) binaryExpr.getBType();
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    BType nullableType = BUnionType.create(null, nilLiftType, symTable.nilType);
    ((BUnionType) nullableType).setNullable(true);
    BLangSimpleVariableDef tempOne;
    if (binaryExpr.lhsExpr.getBType().isNullable()) {
        tempExpr = rewriteExpr(binaryExpr.lhsExpr);
        tempOne = createVarDef("$temp_1", nullableType, tempExpr, binaryExpr.pos);
    } else {
        tempExpr = binaryExpr.lhsExpr;
        tempOne = createVarDef("$temp_1", tempExpr.getBType(), tempExpr, binaryExpr.pos);
    }
    BLangSimpleVarRef tempOneRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempOne.var.symbol);
    blockStmt.addStatement(tempOne);
    if (tempOneRef.getBType().isNullable()) {
        ((BUnionType) tempOne.getBType()).setNullable(false);
    }
    // Result variable, pre-initialised to nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("result",
            binaryExpr.getBType(), createNilLiteral(), binaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    BLangSimpleVariableDef tempTwo;
    if (binaryExpr.rhsExpr.getBType().isNullable()) {
        tempTwo = createVarDef("$temp_2",
                nullableType, binaryExpr.rhsExpr, binaryExpr.pos);
    } else {
        tempTwo = createVarDef("$temp_2",
                binaryExpr.rhsExpr.getBType(), binaryExpr.rhsExpr, binaryExpr.pos);
    }
    BLangSimpleVarRef tempTwoRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempTwo.var.symbol);
    blockStmt.addStatement(tempTwo);
    // `$temp_1 is ()` and `$temp_2 is ()` checks, OR-ed as the if condition.
    BLangTypeTestExpr typeTestExprOne = ASTBuilderUtil.createTypeTestExpr(binaryExpr.pos, tempOneRef,
            new BLangValueType(TypeKind.NIL));
    typeTestExprOne.setBType(symTable.booleanType);
    typeTestExprOne.typeNode.setBType(symTable.nilType);
    BLangTypeTestExpr typeTestExprTwo = ASTBuilderUtil.createTypeTestExpr(binaryExpr.pos,
            tempTwoRef, new BLangValueType(TypeKind.NIL));
    typeTestExprTwo.setBType(symTable.booleanType);
    typeTestExprTwo.typeNode.setBType(symTable.nilType);
    BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
            typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
    // Then-branch: result = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // Else-branch: result = <T> ($temp_1 op $temp_2);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    if (tempTwoRef.getBType().tag == TypeTags.UNION) {
        ((BUnionType) tempTwoRef.getBType()).setNullable(false);
    }
    BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, tempOneRef,
            tempTwoRef, nilLiftType, binaryExpr.opKind, binaryExpr.opSymbol);
    bLangAssignmentElse.expr = createTypeCastExpr(newBinaryExpr, binaryExpr.getBType());
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
    ifStatement.expr = ifBlockCondition;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when the binary expression must be nil-lifted: at least one
 * operand has a nullable union type and the operator is arithmetic or bitwise.
 */
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    boolean lhsNullableUnion = lhsType.isNullable() && lhsType.getKind() == TypeKind.UNION;
    boolean rhsNullableUnion = rhsType.isNullable() && rhsType.getKind() == TypeKind.UNION;
    if (!lhsNullableUnion && !rhsNullableUnion) {
        return false;
    }
    // Only arithmetic and bitwise operators participate in nil-lifting.
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
/**
 * Inserts the casts needed to bring both operands of a mixed-type arithmetic
 * expression to a common type. Matching int-like, string-like or xml operand
 * pairs need nothing; an xml operand paired with a string-containing type wraps
 * the string side in an xml text literal; any other pair is cast to the
 * expression's own inferred type.
 *
 * Bug fix: the xml text literal wrapping the LHS operand was created with the
 * RHS operand's position ({@code binaryExpr.rhsExpr.pos}), so diagnostics and
 * debug info pointed at the wrong operand. It now uses the LHS position,
 * mirroring the RHS branch above it.
 *
 * @param binaryExpr     the arithmetic binary expression to adjust in place
 * @param lhsExprTypeTag type tag of the (already rewritten) LHS operand
 * @param rhsExprTypeTag type tag of the (already rewritten) RHS operand
 */
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    // Already compatible: both int-like, both string-like, or both xml.
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        // xml + string-like: wrap the string side as an xml text literal;
        // otherwise cast the non-xml side to xml.
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Fixed: use the LHS operand's own position (was rhsExpr.pos).
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Fallback: cast both operands to the expression's inferred result type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
/**
 * Ensures both operands of a shift expression are int-like; any operand that is
 * neither an integer subtype nor a byte is cast to int.
 */
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Inserts the casts needed so both operands of a relational (comparison)
 * expression share a comparable type: mixed int/non-int pairs are widened to
 * int, a lone byte operand forces both sides to int, and mixed string/non-string
 * pairs are cast to string.
 */
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean lhsIsInt = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean rhsIsInt = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    boolean bothBytes = lhsExprTypeTag == TypeTags.BYTE && rhsExprTypeTag == TypeTags.BYTE;
    // int vs int and byte vs byte are directly comparable.
    if ((lhsIsInt && rhsIsInt) || bothBytes) {
        return;
    }
    // Exactly one int-like side: widen the other side to int.
    if (lhsIsInt != rhsIsInt) {
        if (lhsIsInt) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        }
        return;
    }
    // A remaining byte operand (paired with a non-int type): widen both to int.
    if (lhsExprTypeTag == TypeTags.BYTE || rhsExprTypeTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean lhsIsString = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean rhsIsString = TypeTags.isStringTypeTag(rhsExprTypeTag);
    // Exactly one string-like side: cast the other side to string.
    if (lhsIsString && !rhsIsString) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
    } else if (!lhsIsString && rhsIsString) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
    }
}
// Builds an invocation of the lang.__internal `createIntRange(lhs, rhs)` function,
// producing the IntRange value that `...`/`..<` expressions desugar to.
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    BLangInvocation createIntRangeInvocation = ASTBuilderUtil.createInvocationExprForMethod(location, symbol,
            new ArrayList<>(Lists.of(lhsExpr, rhsExpr)), symResolver);
    createIntRangeInvocation.setBType(symTable.intRangeType);
    return createIntRangeInvocation;
}
/**
 * When the expression's expected type is int, widens any byte operand of the
 * binary expression to int (no-op when there is no expected type or it is not int).
 */
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (binaryExpr.lhsExpr.getBType().tag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
    if (binaryExpr.rhsExpr.getBType().tag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression uses one of the bitwise shift
 * operators ({@code <<}, {@code >>} or {@code >>>}). Shift operands are later
 * converted to {@code int} before the operation is applied.
 *
 * @param binaryExpr the binary expression to inspect
 * @return true if the operator is a shift operator
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
/**
 * Desugars an elvis expression {@code lhs ?: rhs} into a match expression over
 * {@code lhs} whose nil pattern yields the (already desugared) {@code rhs}.
 * Added the missing {@code @Override}, consistent with the sibling visit methods.
 */
@Override
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.setBType(elvisExpr.getBType());
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    // Nullable operand: nil-lift via an if/else statement expression.
    if (isNullableUnaryExpr(unaryExpr)) {
        result = rewrite(createStmtExprForNullableUnaryExpr(unaryExpr), env);
        return;
    }
    // `~x` has no direct runtime operator; it is rewritten to an XOR (see below).
    if (unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT) {
        rewriteBitwiseComplementOperator(unaryExpr);
        return;
    }
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    result = unaryExpr;
}
/**
 * Desugars a bitwise complement ({@code ~}) unary expression into a bitwise XOR
 * against the all-ones bit pattern of the operand's width:
 * {@code ~a} becomes {@code a ^ -1} for int, or {@code a ^ 0xff} for byte.
 * For example, {@code ~ 11110011} yields {@code 00001100}.
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = pos;
    binaryExpr.opKind = OperatorKind.BITWISE_XOR;
    binaryExpr.lhsExpr = unaryExpr.expr;
    boolean isByte = TypeTags.BYTE == unaryExpr.getBType().tag;
    BType operandType = isByte ? symTable.byteType : symTable.intType;
    // XOR with all ones of the operand width flips every bit.
    long mask = isByte ? 0xffL : -1L;
    binaryExpr.setBType(operandType);
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, operandType, mask);
    binaryExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(binaryExpr);
}
/**
 * Expands a nil-lifting unary expression into a block statement expression:
 * the operand is captured in a temporary, and an if/else assigns nil to the
 * result when the operand is nil, or the ordinary unary result otherwise.
 *
 * (The redundant no-argument {@code String.format(...)} wrappers around the
 * temporary variable names were removed; they added nothing.)
 *
 * @param unaryExpr a unary expression for which {@code isNullableUnaryExpr} holds
 * @return the equivalent statement expression
 */
private BLangStatementExpression createStmtExprForNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangExpression tempExpr;
    // The expression type is `T?`; the first member is taken as the non-nil lift type.
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    BType nullableType = BUnionType.create(null, nilLiftType, symTable.nilType);
    ((BUnionType) nullableType).setNullable(true);
    tempExpr = rewriteExpr(unaryExpr.expr);
    BLangSimpleVariableDef tempOne = createVarDef("$temp_1", nullableType, tempExpr, unaryExpr.pos);
    BLangSimpleVarRef tempOneRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempOne.var.symbol);
    blockStmt.addStatement(tempOne);
    if (tempOneRef.getBType().isNullable()) {
        ((BUnionType) tempOne.getBType()).setNullable(false);
    }
    // Result variable, pre-initialised to nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // `$temp_1 is ()` check as the if condition.
    BLangTypeTestExpr typeTestExpr = ASTBuilderUtil.createTypeTestExpr(unaryExpr.pos, tempOneRef,
            new BLangValueType(TypeKind.NIL));
    typeTestExpr.setBType(symTable.booleanType);
    typeTestExpr.typeNode.setBType(symTable.nilType);
    // Then-branch: $result = ();
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // Else-branch: $result = <T> (op <liftType>$temp_1);
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangUnaryExpr newUnaryExpr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, tempOneRef,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    newUnaryExpr.expr = createTypeCastExpr(newUnaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = createTypeCastExpr(newUnaryExpr, unaryExpr.getBType());
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
/**
 * Returns true when the unary expression must be nil-lifted: its type is a
 * nullable union and the operator is +, - or ~.
 */
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable() || exprType.getKind() != TypeKind.UNION) {
        return false;
    }
    OperatorKind op = unaryExpr.operator;
    return op == OperatorKind.ADD || op == OperatorKind.SUB || op == OperatorKind.BITWISE_COMPLEMENT;
}
/**
 * Desugars a type conversion expression. A conversion with no type node but
 * with annotation attachments carries only annotations, so just its inner
 * expression is rewritten; otherwise both the type node and the expression are
 * rewritten and the node itself is kept.
 * (Removed an unused local that read {@code conversionExpr.targetType} and
 * never used it.)
 */
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package exactly once so later
    // phases see it; the node itself needs no further rewriting here.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    // An arrow function `(p) => expr` is desugared into a full lambda function:
    // a synthetic BLangFunction wrapping a block body that returns `expr`.
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // The return type is the type of the arrow function's body expression.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and define the function symbol for the synthesized function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Re-home each parameter symbol in the new function's scope while collecting them.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    // Build the invokable type from the collected parameter types.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Capture the current env so closures over enclosing variables keep working.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
// Attaches the given function symbol to the invokable node and gives the symbol
// a fresh scope, which also becomes the scope of the invokable's symbol env.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // XML qualified names need no desugaring.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Desugar both sides of the attribute: its (possibly qualified) name and
    // its quoted value.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Desugar the element's tag names, children and attributes.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Collect the namespace-declaring attributes (xmlns:...) as inline namespace
    // nodes; whether the owner is a package decides the XMLNS node flavour.
    for (BLangXMLAttribute nsAttribute : xmlElementLiteral.attributes) {
        if (!nsAttribute.isNamespaceDeclr) {
            continue;
        }
        boolean packageLevel = (xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE;
        BLangXMLNS xmlnsNode = packageLevel ? new BLangPackageXMLNS() : new BLangLocalXMLNS();
        xmlnsNode.namespaceURI = nsAttribute.value.concatExpr;
        xmlnsNode.prefix = ((BLangXMLQName) nsAttribute.name).localname;
        xmlnsNode.symbol = nsAttribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlnsNode);
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    // Desugar each item of the sequence.
    // NOTE(review): the value returned by rewriteExpr is discarded here, so the
    // xmlItems list keeps its original node references; confirm the rewrite
    // mutates each item in place as intended.
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments (literal chunks and interpolations) into one
    // string-concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Same fragment-folding for the comment's content.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Processing instruction: rewrite the target, then fold the data fragments.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Quoted attribute values fold to a concatenation expression as well.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template is simply the concatenation of its parts.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) rawTemplateLiteral.getBType();
    // Generate (and immediately desugar) the concrete object class for this literal.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Collect the interpolated expressions into the list passed to init().
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // Replace the literal with `new <generated-class>([insertions...])`.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 * public string[] strings = ["Hello ", "!"];
 * public (any|error)[] insertions;
 *
 * function init((any|error)[] insertions) {
 * self.insertions = insertions;
 * }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    // Create a fresh class symbol/type that mirrors the abstract object type.
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    // Carry over the type IDs so the class remains a subtype of the template type.
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // Initialise the `strings` field with the literal's string portions.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Synthesize the user-visible init() and register it with the package.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    // Synthesize and analyze the generated (internal) initializer as well.
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
            tempGeneratedInitFunction.symbol.scope, env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    // Register the function as the class's initializer on the type symbol.
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // Each field without a default value becomes an init() parameter plus a
    // `self.<field> = <param>` assignment in the init body.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // Values sent between workers are cloned so each worker owns an independent copy.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Synchronous sends clone the payload the same way as async sends.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Receive nodes need no further desugaring here.
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Derive the list of workers to flush from the cached send statements,
    // de-duplicated while preserving order.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // `transactional` becomes a call to the internal transaction module's
    // isTransactional() function.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                    Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // Commit desugaring is delegated entirely to the transaction desugar.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    // Mark a qualified-name index so later phases know it is used in an XML context.
    if (xmlAttributeAccessExpr.indexExpr != null
            && xmlAttributeAccessExpr.indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) xmlAttributeAccessExpr.indexExpr).isUsedInXML = true;
    }
    // Flag the node as desugared before the possible re-dispatch below.
    // NOTE(review): presumably this flag prevents the second rewriteExpr call
    // from looping back here — confirm against the rewrite dispatch logic.
    xmlAttributeAccessExpr.desugared = true;
    // An lvalue access or an indexed access is returned as-is; a whole-attribute
    // read goes through another rewrite pass.
    if (xmlAttributeAccessExpr.isLValue || xmlAttributeAccessExpr.indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
@Override
public void visit(BLangFail failNode) {
    if (this.onFailClause != null) {
        // Inside an on-fail scope: route the failure to the enclosing on-fail
        // handler, using the nested variant when the handler's own body also
        // contains a fail statement.
        if (this.onFailClause.bodyContainsFail) {
            result = rewriteNestedOnFail(this.onFailClause, failNode);
        } else {
            result = createOnFailInvocation(onFailClause, failNode);
        }
    } else {
        // No enclosing on-fail: a fail statement becomes a return of the error value.
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
    }
}
// The visitors below handle node kinds that are produced by earlier desugaring
// steps (or need no further lowering); each is an identity transform that just
// hands the node back as the result.
@Override
public void visit(BLangLocalVarRef localVarRef) {
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    // Element filters become fully-qualified name strings passed as rest args.
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);

    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
/**
 * Expands each XML element filter into a string-literal argument carrying the
 * namespace-expanded (fully qualified) element name.
 *
 * @param filters element filters from an XML element/step access
 * @return one string literal expression per filter, in order
 */
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> namespaces = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNsSym = namespaces.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNamespaceUri = defaultNsSym == null ? null : defaultNsSym.namespaceURI;

    ArrayList<BLangExpression> expandedNames = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol prefixSym = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (prefixSym != symTable.notFoundSymbol) {
            // Explicit prefix: expand against the prefix's declared namespace URI.
            String qName = createExpandedQName(((BXMLNSSymbol) prefixSym).namespaceURI, filter.name);
            expandedNames.add(createStringLiteral(filter.elemNamePos, qName));
        } else if (defaultNamespaceUri != null && !filter.name.equals("*")) {
            // No prefix: a concrete name picks up the in-scope default namespace.
            expandedNames.add(createStringLiteral(filter.elemNamePos,
                    createExpandedQName(defaultNamespaceUri, filter.name)));
        } else {
            // No prefix and no applicable default namespace, or a wildcard name.
            expandedNames.add(createStringLiteral(filter.elemNamePos, filter.name));
        }
    }
    return expandedNames;
}
/**
 * Builds a lang.xml langlib invocation {@code invokeOnExpr.functionName(args..., restArgs...)}.
 * The receiver expression is rewritten first and also passed as the first required argument,
 * per langlib calling convention.
 *
 * @param pos          position to attach to the generated nodes
 * @param functionName langlib function name to look up on the xml type
 * @param invokeOnExpr receiver expression
 * @param args         extra required arguments (after the receiver)
 * @param restArgs     rest arguments (rewritten here)
 * @return the constructed invocation node
 */
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);

    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    invocationNode.expr = invokeOnExpr;

    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));

    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);

    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);

    // Each navigation kind maps to a dedicated internal langlib function.
    if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.DESCENDANTS) {
        // x/**/<name>
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters);
        result = rewriteExpr(invocationNode);
    } else if (xmlNavigation.navAccessType == XMLNavigationAccess.NavAccessType.CHILDREN) {
        // x/* — all children; filters do not apply here.
        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                xmlNavigation.expr, new ArrayList<>(), new ArrayList<>());
        result = rewriteExpr(invocationNode);
    } else {
        // x/<name>[i] — filtered children; -1 means "no index given".
        BLangExpression childIndexExpr;
        if (xmlNavigation.childIndex == null) {
            childIndexExpr = new BLangLiteral(Long.valueOf(-1), symTable.intType);
        } else {
            childIndexExpr = xmlNavigation.childIndex;
        }
        ArrayList<BLangExpression> args = new ArrayList<>();
        args.add(rewriteExpr(childIndexExpr));

        BLangInvocation invocationNode = createLanglibXMLInvocation(xmlNavigation.pos,
                XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters);
        result = rewriteExpr(invocationNode);
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the LHS needs rewriting; the target type is static.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}

@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already produced by visitFunctionPointerInvocation; nothing left to do.
    result = fpInvocation;
}

@Override
public void visit(BLangTypedescExpr typedescExpr) {
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}

@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Normalize exclusive bounds into inclusive ones before rewriting the bounds.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }

    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}

@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // A rest-args wrapper desugars to its inner expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}

@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // A named-arg wrapper desugars to its inner expression; the name was
    // already consumed during argument reordering.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Desugars a match expression into:
    //   { T $temp_result$; match expr { pattern_i => $temp_result$ = clause_i expr; } } -> $temp_result$
    addMatchExprDefaultCase(bLangMatchExpression);

    // Temporary variable that receives the value of the matched clause.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName,
                                          bLangMatchExpression.getBType(), null,
                                          new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                                         this.env.scope.owner.pkgID,
                                                         bLangMatchExpression.getBType(),
                                                         this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;

    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();

    // Turn each expression clause into a statement clause that assigns the temp var.
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);

        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);

        // Clause result may need an up-cast to the overall match expression type.
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.getBType());
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));

        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }

    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
                                                           patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.setBType(bLangMatchExpression.getBType());
    result = rewriteExpr(statementExpr);
}
// check / checkpanic share one desugaring; the flag selects panic-vs-return on error.

@Override
public void visit(BLangCheckedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}

@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
/**
 * Desugars {@code check}/{@code checkpanic} into a statement expression:
 * a temp variable plus a match statement with a success clause (assign the value)
 * and an error clause (return the error, or panic when {@code isCheckPanic}).
 *
 * @param checkedExpr  the check/checkpanic expression
 * @param isCheckPanic true for checkpanic semantics (panic instead of returning the error)
 */
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    // Temp variable holding the success value of the checked expression.
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.getBType(), null,
                                          new BVarSymbol(0, names.fromString(checkedExprVarName),
                                                         this.env.scope.owner.pkgID, checkedExpr.getBType(),
                                                         this.env.scope.owner, checkedExpr.pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;

    // Success clause: bind the non-error value to the temp variable.
    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                                        checkedExprVar.symbol, null);
    // Error clause: return the error to the caller, or panic for checkpanic.
    BLangMatchTypedBindingPatternClause patternErrorCase =
            getSafeAssignErrorPattern(checkedExpr.pos, this.env.enclInvokable.symbol,
                                      checkedExpr.equivalentErrorTypeList, isCheckPanic);

    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
                                                               new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                                                                   add(patternSuccessCase);
                                                                   add(patternErrorCase);
                                                               }});

    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
                                                                       new ArrayList<BLangStatement>() {{
                                                                           add(checkedExprVarDef);
                                                                           add(matchStmt);
                                                                       }});

    // The whole construct evaluates to the temp variable.
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);

    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.setBType(checkedExpr.getBType());
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor desugars to instantiating the generated service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment ->  rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.getBType())) {
        // NOTE(review): the converted expression returned here is discarded, so this
        // call currently has no effect — confirm whether the result should be
        // assigned back to typeTestExpr.expr.
        addConversionExprIfRequired(expr, symTable.anyType);
    }

    if (typeTestExpr.isNegation) {
        // `x !is T` desugars to `!(x is T)`.
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                                                                  typeTestExpr.getBType(),
                                                                  OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    // Annotation access desugars to a synthetic binary expression:
    //   <expr> ANNOT_ACCESS "<annotation alias>"
    // which the code generator lowers to the actual annotation lookup.
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
                                                      annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthetic operator symbol; there is no user-visible ANNOT_ACCESS operator.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                                              new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                                                                          binaryExpr.rhsExpr.getBType()),
                                                                 annotAccessExpr.getBType(), null), null,
                                              symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}

@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}

@Override
public void visit(BLangQueryExpr queryExpr) {
    // Query pipelines are lowered by the dedicated query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangQueryAction queryAction) {
    // Query actions (do-clauses) are lowered by the dedicated query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
    result = rewrite(stmtExpr, env);
}

@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    // Simple-typed constants (tags up to BOOLEAN, plus NIL) are replaced by a
    // literal built from the resolved constant value; others keep their expression.
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value by this phase.
        if (constSymbol.literalType.tag != TypeTags.NIL && constSymbol.value.value == null) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                                                            constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    constant.annAttachments.forEach(attachment ->  rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Placeholder for skipped arguments; passes through unchanged.
    result = ignoreExpr;
}

@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}

@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference is folded to a literal with the constant's value.
    result = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(), constantRef.value);
}
/**
 * Builds {@code var $iterator$ = collection.iterator();} for foreach desugaring.
 *
 * @param pos                       position for the generated nodes
 * @param collectionSymbol          symbol of the collection being iterated
 * @param iteratorInvokableSymbol   the iterator() function to invoke
 * @param isIteratorFuncFromLangLib whether iterator() is a langlib function
 * @return the variable definition for the generated iterator
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The receiver doubles as the first argument, per langlib convention.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
                                               iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
                                                                         iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds {@code var $result$ = $iterator$.next();} — the first next() call of a
 * desugared foreach loop.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVariable = ASTBuilderUtil.createVariable(pos, "$result$",
                                                                       nillableResultType, nextInvocation, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVariable);
}

/**
 * Builds {@code $result$ = $iterator$.next();} — the per-iteration next() call.
 * The receiver's type is narrowed to its non-nil form before the call.
 */
BLangAssignment getIteratorNextAssignment(Location pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Strip nil from the iterator's type for the subsequent calls.
    nextInvocation.expr.setBType(types.getSafeType(nextInvocation.expr.getBType(), true, false));
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds the {@code $iterator$.next()} invocation node using the iterator object's
 * attached {@code next} function.
 *
 * @param pos            position for the generated nodes
 * @param iteratorSymbol symbol of the iterator variable (must be an object type with a next function)
 * @return the constructed invocation
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The receiver is also passed as the first required argument.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Looks up the attached function named {@code next} on the given iterator object type.
 *
 * @param iteratorType iterator object type to search
 * @return the {@code next} function, or {@code null} when the type has none
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    for (BAttachedFunction attachedFunc : objectSymbol.attachedFuncs) {
        if (attachedFunc.funcName.value.equals("next")) {
            return attachedFunc;
        }
    }
    return null;
}
/**
 * Builds {@code $result$.value} — shorthand for the common "value" field access.
 */
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}

/**
 * Builds a field access expression {@code <resultSymbol>.<fieldName>} of the given type.
 *
 * @param pos          position for the generated nodes
 * @param fieldName    field to access
 * @param varType      static type of the access result
 * @param resultSymbol symbol of the variable being accessed
 * @return the field access expression
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);

    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}
/**
 * Converts an arrow function's expression body into a block body containing a
 * single {@code return <expr>;} statement.
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}
/**
 * Builds an invocation of a function resolved from the root scope (built-in functions).
 *
 * @param functionName name to look up in the root scope
 * @param args         required arguments
 * @param retType      static return type to set on the node
 * @return the constructed invocation (no position is set here)
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    // Function is always resolved from the root scope.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds a langlib invocation {@code onExpr.functionName(args...)} where the langlib
 * method is resolved from the receiver's type. The receiver is also passed as the
 * first required argument, per langlib convention.
 *
 * @param functionName langlib method name
 * @param onExpr       receiver expression
 * @param args         additional required arguments
 * @param retType      return type, or {@code null} to use the resolved symbol's return type
 * @param pos          position for the generated nodes
 * @return the constructed invocation
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName));

    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;

    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an invocation of a function from the {@code lang.__internal} module
 * (no receiver expression; the function is resolved by name from that module).
 *
 * @param functionName internal-module function name
 * @param args         required arguments
 * @param retType      return type, or {@code null} to use the resolved symbol's return type
 * @param pos          position for the generated nodes
 * @return the constructed invocation
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();

    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
                                                             names.fromString(functionName), env);

    // Copy the caller's args into a fresh list so later mutations don't alias
    // (replaces the previous empty-list-then-addAll construction).
    invocationNode.requiredArgs = new ArrayList<>(args);

    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an empty {@code any[]} array literal node with a fresh, mutable expr list.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}
/**
 * Desugars an invocation through a function pointer: the callee is materialized as a
 * variable/field reference expression, rewritten, and wrapped in a
 * {@link BFunctionPointerInvocation}.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        // Plain `fp(...)` — the pointer is a simple variable reference.
        expr = new BLangSimpleVarRef();
    } else {
        // `x.fp(...)` — the pointer is a field of the receiver.
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);

    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps an expression in a {@code clone()} langlib call, then casts to the LHS type.
 * Value types and errors are immutable-by-value/by-reference respectively and are
 * returned unchanged.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.getBType())) {
        return expr;
    }
    if (expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}

/**
 * Wraps an expression in a {@code cloneReadOnly()} langlib call, then casts to the
 * LHS type. Same value-type/error short-circuits as {@link #visitCloneInvocation}.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    if (types.isValueType(expr.getBType())) {
        return expr;
    }
    if (expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneInvok = createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(),
                                                             expr.getBType(),
                                                             expr.pos);
    return addConversionExprIfRequired(cloneInvok, lhsType);
}
/**
 * Core rewrite driver: visits the node in the given environment and returns the
 * desugared replacement. Already-desugared nodes pass through untouched, which
 * makes repeated rewriting idempotent.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }

    if (node.desugared) {
        return node;
    }

    // Swap in the node's environment for the duration of the visit.
    SymbolEnv previousEnv = this.env;
    this.env = env;

    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;

    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Rewrites an expression in the current environment. If the expression carries an
 * implicit conversion, the conversion wrapper is rewritten in its place (and the
 * link is cleared to avoid re-wrapping).
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }

    if (node.desugared) {
        return node;
    }

    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }

    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Rewrites a statement, maintaining the statement-link chain used to splice
 * generated statements relative to their originals.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    // Push a link for this statement onto the chain.
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    // Connect the rewritten statement to its link, then pop the chain.
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
// Rewrites every statement in the list in place and returns the same list.
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}

// Rewrites every node in the list in place and returns the same list.
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}

// Rewrites every expression in the list in place and returns the same list.
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
// Builds a positioned string literal node.
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral stringLiteral = new BLangLiteral(value, symTable.stringType);
    stringLiteral.pos = pos;
    return stringLiteral;
}

// Builds an int literal node; no position is attached here.
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.value = value;
    intLiteral.setBType(symTable.intType);
    return intLiteral;
}

// Builds a positioned byte literal node; the byte is widened to its unsigned int value.
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral byteLiteral = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    byteLiteral.pos = pos;
    return byteLiteral;
}
/**
 * Wraps an expression in a synthetic type-conversion node targeting {@code targetType}.
 * Returns the expression unchanged when it already has the same type.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }

    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    // Marked internal: compiler-generated, not user-written.
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Strips array dimensions and returns the innermost element type; a non-array
 * type is returned as-is.
 */
private BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Appends an implicit {@code return ();} to a block-bodied invokable whose return
 * type is nillable and whose body does not already end with a return. Native
 * functions and non-block bodies are skipped. The generated return is positioned
 * at the invokable's end line.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }

    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    // Only add when there are no workers, the return type admits nil, and the last
    // statement (if any) is not already a return.
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1 ||
            funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        Location invPos = invokableNode.pos;
        Location returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                                                             invPos.lineRange().endLine().line(),
                                                             invPos.lineRange().endLine().line(),
                                                             invPos.lineRange().startLine().offset(),
                                                             invPos.lineRange().startLine().offset());
        BLangReturn returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature,
 * filling in named args, included-record params, defaultable params sourced from a
 * spread vararg, and packing positional rest arguments into the rest-parameter array.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;

    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }

    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;

    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();

    // Vararg spread (`...x`) handling: the spread expression is evaluated once into
    // a temp variable so individual members can be distributed to parameters.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;

    int restArgCount = restArgs.size();

    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // A spread var-arg is present and not all required/defaultable params are
        // given: capture the spread expression in a temp var for member access.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;

        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                                                    varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);

        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);

        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);

        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }

    if (!invokableSymbol.params.isEmpty()) {
        // Distribute positional/named args (and vararg members) onto the params.
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }

    // Case 1: no trailing spread vararg — pack any positional rest args into the
    // rest-parameter array literal.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }

        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();

        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;

        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }

        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);

        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }

        iExpr.restArgs.add(arrayLiteral);
        return;
    }

    // Case 2: the only rest arg is a spread vararg.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        // If the reorder didn't consume any vararg members, nothing to redistribute.
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            return;
        }

        // Splice the temp-var block in front of the first argument so the spread
        // expression is evaluated before any member accesses.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);

        // If there's no rest param, the vararg only fed required/defaultable params.
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }

        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (restArgsExpression.getBType().tag == TypeTags.RECORD) {
            // A record-typed vararg supplies only named params; rest array is empty.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);

        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                                              restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);

        // Copy the leftover vararg members (those beyond the consumed params) into
        // the rest array with a generated foreach over an int range.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                                                                 getModifiedIntRangeEndExpr(lengthInvocation));

        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);

        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                                                                                  foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                                                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                                                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);

        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);

        if (varargVarType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) varargVarType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // A closed array fully consumed by params: remaining member type is
                // the rest param's element type.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            valueExpr.setBType(symTable.anyOrErrorType);
        }

        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                     List.of(pushExpr),
                                                                     restParamType, pos);
        // push() takes the value as a rest arg, not a second required arg.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);

        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);

        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }

    // Case 3: positional rest args followed by a spread vararg — build an array of
    // the positional args, then push the spread members onto it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);

    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;

    List<BLangExpression> exprs = new ArrayList<>();

    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;

    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);

    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
                                          this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);

    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);

    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);

    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                 new ArrayList<BLangExpression>() {{
                                                                     add(pushRestArgsExpr);
                                                                 }}, restParamType, pos);
    // push() takes the spread as a rest arg, not a second required arg.
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;

    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);

    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Reorders the required args of an invocation (a mix of positional and named args) into
 * strict declared-parameter order, materializing exactly one expression per parameter.
 * Missing values come from, in priority order: the positional arg at that index, a named
 * arg of the same name, a fresh record literal for an included-record param, an ignore
 * expression (when there is no vararg), or a member of the supplied vararg.
 *
 * @param iExpr           invocation whose {@code requiredArgs} list is rewritten in place
 * @param invokableSymbol symbol providing the declared parameter list
 * @param varargRef       reference to the spread vararg value, or {@code null} if none
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
List<BLangExpression> args = new ArrayList<>();
// Index the named args by name so they can be pulled out in parameter order.
Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
iExpr.requiredArgs.stream()
.filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
.forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
List<BVarSymbol> params = invokableSymbol.params;
// Record literals synthesized for included-record params; leftover named args are
// distributed into these afterwards (see setFieldsForIncRecordLiterals).
List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
int varargIndex = 0;
BType varargType = null;
boolean tupleTypedVararg = false;
if (varargRef != null) {
varargType = varargRef.getBType();
tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
}
for (int i = 0; i < params.size(); i++) {
BVarSymbol param = params.get(i);
if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
// A positional arg was provided at this parameter's index: take it as is.
args.add(iExpr.requiredArgs.get(i));
} else if (namedArgs.containsKey(param.name.value)) {
// A named arg matches this parameter; remove it so leftovers can be detected.
args.add(namedArgs.remove(param.name.value));
} else if (param.getFlags().contains(Flag.INCLUDED)) {
// Included-record param with no explicit arg: start an empty record literal
// that will absorb any remaining named args that match its fields.
BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
BType paramType = param.type;
recordLiteral.setBType(paramType);
args.add(recordLiteral);
incRecordLiterals.add(recordLiteral);
if (((BRecordType) paramType).restFieldType != symTable.noType) {
// An open included record can also take named args that match no declared field.
incRecordParamAllowAdditionalFields = recordLiteral;
}
} else if (varargRef == null) {
// No value available at all: fill the slot with an ignore expression
// (the parameter's default is applied downstream).
BLangExpression expr = new BLangIgnoreExpr();
expr.setBType(param.type);
args.add(expr);
} else {
if (varargRef.getBType().tag == TypeTags.RECORD) {
// Record-typed vararg: pull the value out of the record by field name.
if (param.isDefaultable) {
// Defaultable param: only read the field if the record actually has it,
// guarded via `hasKey(...) ? rec[name] : <ignore>`.
BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
BLangIndexBasedAccess memberAccessExpr =
ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
hasKeyInvocation, memberAccessExpr, ignoreExpr);
args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
} else {
// Required param: the field must be present, so access it directly.
BLangFieldBasedAccess fieldBasedAccessExpression =
ASTBuilderUtil.createFieldAccessExpr(varargRef,
ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
fieldBasedAccessExpression.setBType(param.type);
args.add(fieldBasedAccessExpression);
}
} else {
// Tuple/array-typed vararg: consume the next member positionally.
BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
BType memberAccessExprType = tupleTypedVararg ?
((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
varargRef, indexExpr), param.type));
varargIndex++;
}
}
}
if (namedArgs.size() > 0) {
// Remaining named args belong to included-record params (or the open one).
setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
}
iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named args into the record literals synthesized for
 * included-record parameters. A named arg goes into the first literal whose record
 * type declares a (non-never) field of that name; otherwise it is treated as an
 * additional field of the open included-record parameter.
 *
 * @param namedArgs                          named args not matched to a declared parameter
 * @param incRecordLiterals                  literals created for included-record params
 * @param incRecordParamAllowAdditionalFields literal of the open included-record param,
 *                                           or {@code null} if there is none
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (Map.Entry<String, BLangExpression> namedArg : namedArgs.entrySet()) {
        String argName = namedArg.getKey();
        BLangNamedArgsExpression argExpr = (BLangNamedArgsExpression) namedArg.getValue();
        // Find the first included-record literal declaring a usable field of this name.
        BLangRecordLiteral targetLiteral = null;
        for (BLangRecordLiteral candidate : incRecordLiterals) {
            LinkedHashMap<String, BField> candidateFields = ((BRecordType) candidate.getBType()).fields;
            BField declaredField = candidateFields.get(argName);
            if (declaredField != null && declaredField.type.tag != TypeTags.NEVER) {
                targetLiteral = candidate;
                break;
            }
        }
        if (targetLiteral != null) {
            createAndAddRecordFieldForIncRecordLiteral(targetLiteral, argExpr);
        } else {
            // No declared field matched: goes into the open record's rest fields.
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, argExpr);
        }
    }
}
/**
 * Appends the named arg {@code name = value} to the given record literal as a
 * {@code name: value} key-value field.
 *
 * @param recordLiteral literal to extend
 * @param expr          named arg supplying the field name and value
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    // The key of a record key-value field is modeled as a simple var ref of the name.
    BLangSimpleVarRef keyRef = new BLangSimpleVarRef();
    keyRef.variableName = expr.name;
    recordLiteral.fields.add(ASTBuilderUtil.createBLangRecordKeyValue(keyRef, expr.expr));
}
/**
 * Builds the error-arm of a desugared checked expression:
 * {@code error $t_failure => <fail | panic>}.
 * The arm fails (and possibly returns the error) when the expression is not a
 * check-panic and either the enclosing function can return every matched error type
 * or an on-fail clause is in scope; otherwise it panics.
 *
 * @param location             position for all generated nodes
 * @param invokableSymbol      enclosing invokable; its return type decides fail-vs-panic
 * @param equivalentErrorTypes error types the checked expression can produce
 * @param isCheckPanicExpr     true for `checkpanic`, which always panics
 * @return the typed binding pattern clause for the error case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(Location location,
BSymbol invokableSymbol,
List<BType> equivalentErrorTypes,
boolean isCheckPanicExpr) {
// Collect the enclosing function's return type(s) as a flat set.
BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
((BUnionType) enclosingFuncReturnType).getMemberTypes() :
new LinkedHashSet<BType>() {{
add(enclosingFuncReturnType);
}};
// The error can be returned only if every possible error type is assignable
// to some member of the return type set.
boolean returnOnError = equivalentErrorTypes.stream()
.allMatch(errorType -> returnTypeSet.stream()
.anyMatch(retType -> types.isAssignable(errorType, retType)));
// Binding variable `$t_failure` that captures the matched error value.
String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
BLangSimpleVariable patternFailureCaseVar =
ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType, null,
new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
this.env.scope.owner.pkgID, symTable.errorType,
this.env.scope.owner, location, VIRTUAL));
BLangVariableReference patternFailureCaseVarRef =
ASTBuilderUtil.createVariableRef(location, patternFailureCaseVar.symbol);
BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
patternBlockFailureCase.pos = location;
if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
// `fail $t_failure;` — optionally carrying a return statement so the error
// propagates out of the function when returning errors is enabled.
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = location;
failStmt.expr = patternFailureCaseVarRef;
patternBlockFailureCase.stmts.add(failStmt);
if (returnOnError && this.shouldReturnErrors) {
BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location,
rewrite(patternFailureCaseVarRef, env));
errorReturn.desugared = true;
failStmt.exprStmt = errorReturn;
}
} else {
// Cannot fail/return: `panic $t_failure;`
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = location;
panicNode.expr = patternFailureCaseVarRef;
patternBlockFailureCase.stmts.add(panicNode);
}
return ASTBuilderUtil.createMatchStatementPattern(location, patternFailureCaseVar, patternBlockFailureCase);
}
/**
 * Builds the success-arm of a desugared checked expression:
 * {@code T $t_match => <lhs> = $t_match;}.
 *
 * @param location position for all generated nodes
 * @param lhsType  type bound by the success pattern
 * @param isVarDef true when assigning to a freshly defined variable ({@code varSymbol})
 * @param varSymbol symbol of the target variable when {@code isVarDef}
 * @param lhsExpr  target expression when not a variable definition
 * @return the typed binding pattern clause for the success case
 */
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(Location location,
BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    // Binding variable `$t_match` that captures the successful value.
    String successVarName = GEN_VAR_PREFIX.value + "t_match";
    BVarSymbol successVarSym = new BVarSymbol(0, names.fromString(successVarName),
            this.env.scope.owner.pkgID, lhsType, this.env.scope.owner, location, VIRTUAL);
    BLangSimpleVariable successVar =
            ASTBuilderUtil.createVariable(location, successVarName, lhsType, null, successVarSym);
    // Assign either to the new variable or to the caller-supplied LHS expression.
    BLangExpression assignTarget =
            isVarDef ? ASTBuilderUtil.createVariableRef(location, varSymbol) : lhsExpr;
    BLangVariableReference successVarRef =
            ASTBuilderUtil.createVariableRef(location, successVar.symbol);
    BLangAssignment successAssignment =
            ASTBuilderUtil.createAssignmentStmt(location, assignTarget, successVarRef, false);
    List<BLangStatement> blockStmts = new ArrayList<>();
    blockStmts.add(successAssignment);
    BLangBlockStmt successBlock = ASTBuilderUtil.createBlockStmt(location, blockStmts);
    return ASTBuilderUtil.createMatchStatementPattern(location, successVar, successBlock);
}
/**
 * Lowers a whole match statement into a chain of if-else statements, one link per
 * pattern clause. A trailing catch-all pattern becomes the final else block.
 *
 * @param matchStmt    match statement being desugared
 * @param matchExprVar temp variable holding the evaluated match expression
 * @return the root if-statement of the generated chain
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> clauses = matchStmt.patternClauses;
    BLangIf rootIf = generateIfElseStmt(clauses.get(0), matchExprVar);
    BLangIf tailIf = rootIf;
    int clauseCount = clauses.size();
    for (int idx = 1; idx < clauseCount; idx++) {
        BLangMatchBindingPatternClause clause = clauses.get(idx);
        boolean isFinalCatchAll = idx == clauseCount - 1 && clause.isLastPattern;
        if (isFinalCatchAll) {
            // A final `_`-style pattern needs no condition: attach its body as the else block.
            tailIf.elseStmt = getMatchPatternElseBody(clause, matchExprVar);
        } else {
            tailIf.elseStmt = generateIfElseStmt(clause, matchExprVar);
            tailIf = (BLangIf) tailIf.elseStmt;
        }
    }
    return rootIf;
}
/**
 * Generates a single if-statement for one match pattern clause: the condition tests
 * whether the match value fits the pattern, and the then-block is the clause body,
 * prefixed with the variable definitions that bind the pattern variables.
 *
 * @param pattern      match pattern clause node
 * @param matchExprVar temp variable holding the evaluated match expression
 * @return if statement node (else branch is wired up by the caller)
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
// Typed pattern (`T t => ...`): the body only needs the single typed binding.
BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
}
// For structured patterns, the cast target is the type implied by the binding
// pattern itself rather than the match expression's type.
BType expectedType = matchExprVar.getBType();
if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
}
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
// Cast the matched value to the pattern's type, then destructure it into the
// binding-pattern variable (tuple/record/error/simple).
BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
if (structuredPattern.typeGuardExpr != null) {
// With a type guard, the bindings must be in scope while evaluating the guard:
// fold them into a statement-expression and AND it onto the pattern condition.
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
blockStmt.addStatement(varDef);
blockStmt.addStatement(varDefStmt);
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
structuredPattern.typeGuardExpr);
stmtExpr.setBType(symTable.booleanType);
ifCondition = ASTBuilderUtil
.createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
symTable.booleanType));
} else {
// No guard: the cast and the destructuring simply lead the clause body.
structuredPattern.body.stmts.add(0, varDef);
structuredPattern.body.stmts.add(1, varDefStmt);
}
}
return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
/**
 * Returns the body of a typed pattern clause, prepending a definition of the pattern
 * variable initialized from the match-expression temp (with a conversion if needed).
 * A `_` pattern binds nothing, so its body is returned untouched.
 *
 * @param pattern      typed pattern clause
 * @param matchExprVar temp variable holding the evaluated match expression
 * @return the (possibly augmented) clause body
 */
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedPattern = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedPattern.variable.name.value.equals(Names.IGNORE.value)) {
        return typedPattern.body;
    }
    // `<patternType> <patternVar> = <convert>(matchExprVar);` inserted at the top of the body.
    BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(typedPattern.pos,
            matchExprVar.symbol);
    BLangExpression initExpr = addConversionExprIfRequired(matchExprVarRef,
            typedPattern.variable.getBType());
    BLangSimpleVariable patternVar =
            ASTBuilderUtil.createVariable(typedPattern.pos, "", typedPattern.variable.getBType(),
                    initExpr, typedPattern.variable.symbol);
    typedPattern.body.stmts.add(0, ASTBuilderUtil.createVariableDef(patternVar.pos, patternVar));
    return typedPattern.body;
}
/**
 * Returns the body for the final (catch-all) pattern clause of a lowered match.
 * For a structured pattern the body is prefixed with the destructuring definition
 * that binds the pattern variables to the match-expression temp; other pattern
 * kinds need no extra bindings here.
 *
 * @param pattern      last pattern clause of the match
 * @param matchExprVar temp variable holding the evaluated match expression
 * @return the (possibly augmented) clause body
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
BLangSimpleVariable matchExprVar) {
BLangBlockStmt body = pattern.body;
if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
// Destructure the match value directly (no cast needed in the catch-all arm).
BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) pattern;
structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
// Pick the variable-def node kind matching the binding pattern's shape.
BLangStatement varDefStmt;
if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
(BLangTupleVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
(BLangRecordVariable) structuredPattern.bindingPatternVariable);
} else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
(BLangErrorVariable) structuredPattern.bindingPatternVariable);
} else {
varDefStmt = ASTBuilderUtil
.createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
}
structuredPattern.body.stmts.add(0, varDefStmt);
body = structuredPattern.body;
}
return body;
}
/**
 * Wraps {@code expr} in a type-conversion node targeting {@code lhsType}, unless the
 * conversion is unnecessary or already handled. The guard order matters: same-type,
 * implicit-cast, and the specific json/nil, nil/nullable and array/tuple exemptions
 * are all checked before a conversion node is created.
 *
 * @param expr    expression to convert
 * @param lhsType target type
 * @return the original expression, or a conversion node wrapping it
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
// No concrete target type: nothing to convert to.
if (lhsType.tag == TypeTags.NONE) {
return expr;
}
BType rhsType = expr.getBType();
if (types.isSameType(rhsType, lhsType)) {
return expr;
}
// If an implicit cast can be attached directly, prefer that over an explicit node.
types.setImplicitCastExpr(expr, rhsType, lhsType);
if (expr.impConversionExpr != null) {
return expr;
}
// nil is a valid json value; no conversion node needed.
if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
return expr;
}
// A nullable value flowing into nil needs no conversion either.
if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
return expr;
}
// Tuples flowing into arrays are left as-is here.
if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
return expr;
}
// Fall-through: synthesize an internal, unchecked conversion node.
BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
TreeBuilder.createTypeConversionNode();
conversionExpr.expr = expr;
conversionExpr.targetType = lhsType;
conversionExpr.setBType(lhsType);
conversionExpr.pos = expr.pos;
// Generated by the desugar itself, so skip re-checking and mark internal.
conversionExpr.checkTypes = false;
conversionExpr.internal = true;
return conversionExpr;
}
/**
 * Builds the boolean condition for one match pattern clause. The pattern's type is
 * determined per clause kind; if that type is a union, one test per member type is
 * generated and the tests are folded together with OR.
 *
 * @param patternClause pattern clause to build a condition for
 * @param varSymbol     symbol of the match-expression temp variable
 * @return a boolean-typed expression testing whether the value matches
 */
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
BVarSymbol varSymbol) {
BType patternType;
switch (patternClause.getKind()) {
case MATCH_STATIC_PATTERN_CLAUSE:
BLangMatchStaticBindingPatternClause staticPattern =
(BLangMatchStaticBindingPatternClause) patternClause;
patternType = staticPattern.literal.getBType();
break;
case MATCH_STRUCTURED_PATTERN_CLAUSE:
BLangMatchStructuredBindingPatternClause structuredPattern =
(BLangMatchStructuredBindingPatternClause) patternClause;
patternType = getStructuredBindingPatternType(structuredPattern.bindingPatternVariable);
break;
default:
// Typed pattern clause: use the declared variable's type.
BLangMatchTypedBindingPatternClause simplePattern = (BLangMatchTypedBindingPatternClause) patternClause;
patternType = simplePattern.variable.getBType();
break;
}
BLangExpression binaryExpr;
// Expand a union pattern type into its members; a non-union is a one-element array.
BType[] memberTypes;
if (patternType.tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) patternType;
memberTypes = unionType.getMemberTypes().toArray(new BType[0]);
} else {
memberTypes = new BType[1];
memberTypes[0] = patternType;
}
if (memberTypes.length == 1) {
binaryExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
} else {
// Fold the per-member tests with OR. Each later member's test becomes the LHS
// and the accumulated expression the RHS, yielding (tN || (... (t1 || t0))).
BLangExpression lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
BLangExpression rhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[1]);
binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
lhsExpr.getBType(), rhsExpr.getBType()));
for (int i = 2; i < memberTypes.length; i++) {
lhsExpr = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
rhsExpr = binaryExpr;
binaryExpr = ASTBuilderUtil.createBinaryExpr(patternClause.pos, lhsExpr, rhsExpr,
symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
lhsExpr.getBType(), rhsExpr.getBType()));
}
}
return binaryExpr;
}
/**
 * Computes the semantic type implied by a structured binding pattern, synthesizing
 * anonymous types where needed: a tuple pattern yields a {@link BTupleType}; a record
 * pattern yields a fresh anonymous record type whose type definition is registered in
 * the current package; an error pattern yields a fresh anonymous error type (with a
 * detail record type when detail bindings are present). A simple binding pattern just
 * returns its own type.
 *
 * NOTE: record and error patterns have the side effect of adding type definitions to
 * the enclosing package via TypeDefBuilderHelper.addTypeDefinition.
 *
 * @param bindingPatternVariable binding pattern to type
 * @return the computed (possibly freshly synthesized) type
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
// Tuple pattern: member types are computed recursively; a rest binding
// contributes its array element type as the tuple's rest type.
BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
List<BType> memberTypes = new ArrayList<>();
for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
}
BTupleType tupleType = new BTupleType(memberTypes);
if (tupleVariable.restVariable != null) {
BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
tupleType.restType = restArrayType.eType;
}
return tupleType;
}
if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
// Record pattern: synthesize `$anonRecordType$_N` with one required field per
// key in the pattern, plus an init function and a registered type definition.
BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
BRecordTypeSymbol recordSymbol =
Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
VIRTUAL);
recordSymbol.initializerFunc = createRecordInitFunc();
recordSymbol.scope = new Scope(recordSymbol);
recordSymbol.scope.define(
names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
recordSymbol.initializerFunc.symbol);
LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
for (int i = 0; i < recordVariable.variableList.size(); i++) {
String fieldNameStr = recordVariable.variableList.get(i).key.value;
Name fieldName = names.fromString(fieldNameStr);
// Field types come from the nested binding patterns, recursively.
BType fieldType = getStructuredBindingPatternType(
recordVariable.variableList.get(i).valueBindingPattern);
BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
recordSymbol, bindingPatternVariable.pos, VIRTUAL);
fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
recordSymbol.scope.define(fieldName, fieldSymbol);
}
BRecordType recordVarType = new BRecordType(recordSymbol);
recordVarType.fields = fields;
// The rest field type follows the pattern's rest binding when present,
// defaulting to anydata otherwise.
recordVarType.restFieldType = recordVariable.restParam != null ?
((BRecordType) ((BLangSimpleVariable) recordVariable.restParam).getBType()).restFieldType :
symTable.anydataType;
recordSymbol.type = recordVarType;
recordVarType.tsymbol = recordSymbol;
BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
recordVarType,
bindingPatternVariable.pos);
recordTypeNode.initFunction =
rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
env);
TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
return recordVarType;
}
if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
// Error pattern: synthesize `$anonErrorType$_N`; with no detail bindings (only a
// rest binding) the default detail type is reused instead of a fresh record type.
BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
SymTag.ERROR,
Flags.PUBLIC,
names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
env.enclPkg.symbol.pkgID,
null, null, errorVariable.pos, VIRTUAL);
BType detailType;
if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
detailType = symTable.detailType;
} else {
detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
errorVariable.pos);
BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
recordTypeNode.initFunction = TypeDefBuilderHelper
.createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
}
BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
errorTypeSymbol.type = errorType;
TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
return errorType;
}
// Simple binding pattern: its own type is the answer.
return bindingPatternVariable.getBType();
}
/**
 * Builds the AST record-type node for an error binding pattern's detail record,
 * with one field per detail entry. Entries whose binding pattern has no symbol get
 * a synthesized public symbol named {@code <key>$} typed as pure.
 *
 * @param errorVariable error binding pattern supplying the detail entries
 * @param detailType    semantic record type of the detail
 * @return the created record type node
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> detailFields = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // Binding pattern carries no symbol; synthesize one for the type node.
            fieldSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString(entry.key.value + "$"),
                    this.env.enclPkg.packageID, symTable.pureType, null,
                    entry.valueBindingPattern.pos, VIRTUAL);
        }
        detailFields.add(ASTBuilderUtil.createVariable(
                entry.valueBindingPattern.pos,
                fieldSymbol.name.value,
                entry.valueBindingPattern.getBType(),
                entry.valueBindingPattern.expr,
                fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(detailFields, detailType, errorVariable.pos);
}
/**
 * Builds the semantic record type for an error binding pattern's detail mapping.
 * Each detail entry becomes a public field typed from its nested binding pattern;
 * the record is sealed when there is no rest binding.
 *
 * @param detail     detail entries of the error pattern
 * @param restDetail rest binding of the detail mapping, or {@code null}
 * @param errorNo    sequence number (unused in the construction itself)
 * @param pos        position for the synthesized type
 * @return the anonymous detail record type
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecord = createAnonRecordType(pos);
    if (restDetail == null) {
        // No rest pattern: the detail record is closed.
        detailRecord.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name fieldName = names.fromIdNode(entry.key);
        BType fieldType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecord.tsymbol.pkgID,
                fieldType, detailRecord.tsymbol, entry.key.pos, VIRTUAL);
        detailRecord.fields.put(fieldName.value, new BField(fieldName, entry.key.pos, fieldSymbol));
        detailRecord.tsymbol.scope.define(fieldName, fieldSymbol);
    }
    return detailRecord;
}
/**
 * Synthesizes an empty anonymous record type (rest field type anydata) together with
 * its symbol, scope, and a no-op init function registered in that scope.
 *
 * @param pos position for the synthesized symbol
 * @return the new anonymous record type
 */
private BRecordType createAnonRecordType(Location pos) {
    Name anonTypeName = names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID));
    BRecordTypeSymbol recordSymbol = new BRecordTypeSymbol(SymTag.RECORD, Flags.PUBLIC, anonTypeName,
            env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    recordSymbol.initializerFunc = createRecordInitFunc();
    recordSymbol.scope = new Scope(recordSymbol);
    // Register the init function in the record's scope under `<typeName>.<initName>`.
    Name initFuncName = names.fromString(
            recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value);
    recordSymbol.scope.define(initFuncName, recordSymbol.initializerFunc.symbol);
    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.restFieldType = symTable.anydataType;
    return recordType;
}
/**
 * Creates the attached init function used by synthesized record types: a public,
 * no-argument function returning nil.
 *
 * @return the attached init function descriptor
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(Flags.PUBLIC, Names.EMPTY, Names.EMPTY,
            env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType,
            symTable.builtinPos);
}
/**
 * Wraps a semantic error type in a fresh AST error-type node.
 *
 * @param errorType semantic error type
 * @return the AST node carrying {@code errorType}
 */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType typeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    typeNode.setBType(errorType);
    return typeNode;
}
/**
 * Builds the boolean test for one member type of a pattern clause: equality against
 * the literal (static patterns), an is-like check (structured patterns), `== ()` for
 * the nil type, or an is-assignable check otherwise.
 *
 * @param patternClause pattern clause the test belongs to
 * @param varSymbol     symbol of the match-expression temp variable
 * @param patternType   member type being tested
 * @return a boolean-typed test expression
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    Location pos = patternClause.pos;
    BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            BLangMatchStaticBindingPatternClause staticClause =
                    (BLangMatchStaticBindingPatternClause) patternClause;
            return createBinaryExpression(pos, varRef, staticClause.literal);
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
        default:
            break;
    }
    if (patternType == symTable.nilType) {
        // `<var> == ()`
        BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, varRef, nilLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    }
    return createIsAssignableExpression(pos, varSymbol, patternType);
}
/**
 * Builds the equality test used by static match patterns: recursively expands group
 * and binary (OR-of-alternatives) literals, turns the `_` var-ref into an `is any`
 * type test, and otherwise produces `<varRef> == <expression>` with an equality
 * operator resolved for the operand types (falling back to the anydata type-set
 * equality when no direct operator exists).
 *
 * @param pos        position for the generated nodes
 * @param varRef     reference to the match-expression temp variable
 * @param expression static pattern literal (possibly a group or binary expression)
 * @return a boolean-typed comparison expression
 */
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
BLangExpression expression) {
BLangBinaryExpr binaryExpr;
if (NodeKind.GROUP_EXPR == expression.getKind()) {
// Parenthesized pattern: recurse into the inner expression.
return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
}
if (NodeKind.BINARY_EXPR == expression.getKind()) {
// `a | b`-style alternatives: test each side and OR the results.
binaryExpr = (BLangBinaryExpr) expression;
BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
(BOperatorSymbol) symResolver
.resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
} else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
&& ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
// `_` matches any value: emit `<varRef> is any`.
BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
anyType.setBType(symTable.anyType);
anyType.typeKind = TypeKind.ANY;
return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
} else {
// Plain literal: `<varRef> == <literal>`. Resolve the equality operator for the
// operand types, falling back to anydata type-set equality when none is found.
binaryExpr = ASTBuilderUtil
.createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
expression.getBType());
if (opSymbol == symTable.notFoundSymbol) {
opSymbol = symResolver
.getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
binaryExpr, env);
}
binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
}
return binaryExpr;
}
/**
 * Builds a boolean-typed is-assignable check of the given variable against
 * {@code patternType}.
 *
 * @param pos         position for the generated node
 * @param varSymbol   variable whose value is tested
 * @param patternType type tested against
 * @return the is-assignable expression
 */
private BLangIsAssignableExpr createIsAssignableExpression(Location pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    return ASTBuilderUtil.createIsAssignableExpr(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol),
            patternType, symTable.booleanType, names, symTable.builtinPos);
}
/**
 * Builds a boolean-typed is-like check of {@code expr} against {@code type}.
 *
 * @param pos  position for the generated node
 * @param expr expression whose value is tested
 * @param type type tested against
 * @return the is-like expression
 */
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
/**
 * Splits a variable definition into the assignment part: builds `v = <initExpr>;`
 * referencing the variable's existing symbol.
 *
 * @param variable variable whose initializer becomes the assignment's RHS
 * @return the assignment statement
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    // LHS: a var-ref node mirroring the variable's name, symbol and type.
    BLangSimpleVarRef lhsRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    lhsRef.pos = variable.pos;
    lhsRef.variableName = variable.name;
    lhsRef.symbol = variable.symbol;
    lhsRef.setBType(variable.getBType());
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.expr = variable.expr;
    assignment.setVariable(lhsRef);
    return assignment;
}
/**
 * Convenience overload: builds `self.<field> = <init>;` from a field variable node,
 * delegating to the expression-based overload.
 *
 * @param function   init function the assignment belongs to
 * @param variable   field variable supplying symbol, type, name and initializer
 * @param selfSymbol symbol of the `self` receiver
 * @return the desugared assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    BLangExpression initExpr = variable.expr;
    BVarSymbol fieldSymbol = variable.symbol;
    BType fieldType = variable.getBType();
    BLangIdentifier fieldName = variable.name;
    return createStructFieldUpdate(function, initExpr, fieldSymbol, fieldType, selfSymbol, fieldName);
}
/**
 * Builds `self.<fieldName> = <expr>;` for a struct/object init function and desugars
 * it within the init function's environment.
 *
 * @param function    init function the assignment belongs to
 * @param expr        RHS expression
 * @param fieldSymbol symbol of the field being assigned
 * @param fieldType   static type of the field
 * @param selfSymbol  symbol of the `self` receiver
 * @param fieldName   name of the field
 * @return the rewritten assignment statement
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    // LHS: `self.<fieldName>`, flagged as a store-on-creation access.
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfRef, fieldName);
    fieldAccess.symbol = fieldSymbol;
    fieldAccess.setBType(fieldType);
    fieldAccess.isStoreOnCreation = true;
    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = function.pos;
    assignment.expr = expr;
    assignment.setVariable(fieldAccess);
    // Desugar in the init function's own scope so symbol lookups resolve correctly.
    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Appends a pass-through default pattern to a match expression covering every type
 * of the matched expression that no existing pattern clause can handle. If all types
 * are covered, nothing is added.
 *
 * @param bLangMatchExpression match expression to complete
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
List<BType> exprTypes;
List<BType> unmatchedTypes = new ArrayList<>();
if (bLangMatchExpression.expr.getBType().tag == TypeTags.UNION) {
BUnionType unionType = (BUnionType) bLangMatchExpression.expr.getBType();
exprTypes = new ArrayList<>(unionType.getMemberTypes());
} else {
// NOTE(review): this branch uses the match expression node's own type while the
// union branch above uses `expr.getBType()` — looks inconsistent; confirm intent.
exprTypes = Lists.of(bLangMatchExpression.getBType());
}
// Collect the types no existing pattern variable can accept.
for (BType type : exprTypes) {
boolean assignable = false;
for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
if (this.types.isAssignable(type, pattern.variable.getBType())) {
assignable = true;
break;
}
}
if (!assignable) {
unmatchedTypes.add(type);
}
}
if (unmatchedTypes.isEmpty()) {
return;
}
// The default pattern binds the (union of the) unmatched types and returns the
// bound value unchanged.
BType defaultPatternType;
if (unmatchedTypes.size() == 1) {
defaultPatternType = unmatchedTypes.get(0);
} else {
defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
}
String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
BLangSimpleVariable patternMatchCaseVar =
ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null,
new BVarSymbol(0, names.fromString(patternCaseVarName),
this.env.scope.owner.pkgID, defaultPatternType,
this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
BLangMatchExprPatternClause defaultPattern =
(BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
defaultPattern.variable = patternMatchCaseVar;
defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
defaultPattern.pos = bLangMatchExpression.pos;
bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * Decides whether an access expression needs safe-navigation desugaring: true when
 * it (or any access expression in its receiver chain) is marked error- or
 * nil-safe-navigated, and it is neither an lvalue nor receiver-less.
 *
 * @param accessExpr field or index access expression to inspect
 * @return true if the expression requires safe-navigation rewriting
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    // lvalues and receiver-less accesses never use safe navigation.
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Walk down chained accesses: e.g. in `a?.b.c` only an inner link carries the flag.
    NodeKind receiverKind = accessExpr.expr.getKind();
    boolean isChainedAccess = receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR
            || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR;
    return isChainedAccess && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Rewrites a safe-navigated access chain (`a?.b?.c`) into a statement expression:
 * a temp result variable, a match statement populated by handleSafeNavigation (via
 * {@code this.matchStmtStack}), and the temp as the resulting value. Resets the
 * navigation bookkeeping fields afterwards so the next chain starts clean.
 *
 * @param accessExpr root of the safe-navigated access chain
 * @return statement expression yielding the navigated value (or nil/error)
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
BType originalExprType = accessExpr.getBType();
// Temp variable that each match arm assigns the navigation result into.
String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
BLangSimpleVariable tempResultVar =
ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
new BVarSymbol(0, names.fromString(matchTempResultVarName),
this.env.scope.owner.pkgID, accessExpr.getBType(),
this.env.scope.owner, accessExpr.pos, VIRTUAL));
BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
// Builds nested match statements for the chain and pushes them onto matchStmtStack.
handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
BLangMatch matcEXpr = this.matchStmtStack.firstElement();
BLangBlockStmt blockStmt =
ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
stmtExpression.setBType(originalExprType);
// Reset per-chain state used by handleSafeNavigation.
this.matchStmtStack = new Stack<>();
this.accessExprStack = new Stack<>();
this.successPattern = null;
this.safeNavigationAssignment = null;
return stmtExpression;
}
/**
 * Recursively desugars one link of a safe-navigation chain into a match statement.
 * Recursion happens innermost-first: the receiver chain is processed before the
 * current access expression, so parent match statements end up deepest on the stack.
 *
 * @param accessExpr    the access expression currently being desugared
 * @param type          the (nillable) result type assigned to generated match statements
 * @param tempResultVar the temp variable all generated patterns assign into
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
if (accessExpr.expr == null) {
return;
}
// First, desugar the receiver chain (bottom-up recursion).
NodeKind kind = accessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
}
// Non-safe link: just restore its original type (widened with error for XML access)
// and, if a success-pattern assignment exists, re-point its RHS at this expression.
if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
BType originalType = accessExpr.originalType;
if (TypeTags.isXMLTypeTag(originalType.tag)) {
accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
} else {
accessExpr.setBType(originalType);
}
if (this.safeNavigationAssignment != null) {
this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
}
return;
}
/*
* If the field access is a safe navigation, create a match expression.
* Then chain the current expression as the success-pattern of the parent
* match expr, if available.
* eg:
* x but { <--- parent match expr
* error e => e,
* T t => t.y but { <--- current expr
* error e => e,
* R r => r.z
* }
* }
*/
BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
boolean isAllTypesRecords = false;
LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
if (accessExpr.expr.getBType().tag == TypeTags.UNION) {
memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.getBType()).getMemberTypes());
isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
}
// `?.` navigation: add a pattern matching nil; nil is then handled, so drop it
// from the remaining member types.
if (accessExpr.nilSafeNavigation) {
matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
matchStmt.setBType(type);
memTypes.remove(symTable.nilType);
}
// `!.`-style navigation: add a pattern matching error, and drop error likewise.
if (accessExpr.errorSafeNavigation) {
matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
matchStmt.setBType(type);
matchStmt.pos = accessExpr.pos;
memTypes.remove(symTable.errorType);
}
BLangMatchTypedBindingPatternClause successPattern = null;
Name field = getFieldName(accessExpr);
// Field name not statically known (e.g. computed index): one generic success pattern.
if (field == Names.EMPTY) {
successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.patternClauses.add(successPattern);
pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
return;
}
// Union of records: emit a success pattern per member that can carry the field,
// plus a catch-all that assigns nil.
if (isAllTypesRecords) {
for (BType memberType : memTypes) {
BRecordType recordType = (BRecordType) memberType;
if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
successPattern = getSuccessPattern(memberType, accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.patternClauses.add(successPattern);
}
}
matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar));
pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
return;
}
// Default: single success pattern over the receiver's type.
successPattern =
getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
accessExpr.errorSafeNavigation);
matchStmt.patternClauses.add(successPattern);
pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
}
/**
 * Pushes {@code matchStmt} onto the match-statement stack. If a success pattern
 * from an earlier (parent) link exists, the new match statement becomes that
 * pattern's body, chaining nested safe navigations into nested match statements.
 */
private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr,
                                       BLangMatchTypedBindingPatternClause successPattern) {
    this.matchStmtStack.push(matchStmt);
    BLangMatchTypedBindingPatternClause parentPattern = this.successPattern;
    if (parentPattern != null) {
        parentPattern.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
/**
 * Extracts the statically-known field name from a field- or index-based access
 * expression. Returns {@code Names.EMPTY} when the name cannot be determined
 * (e.g. a non-literal index expression or any other expression kind).
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    switch (accessExpr.getKind()) {
        case FIELD_BASED_ACCESS_EXPR:
            return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
        case INDEX_BASED_ACCESS_EXPR:
            BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
            if (indexExpr.getKind() == NodeKind.LITERAL) {
                return new Name(((BLangLiteral) indexExpr).value.toString());
            }
            return Names.EMPTY;
        default:
            return Names.EMPTY;
    }
}
/**
 * Returns true when every member of the union is a record, error, or nil type —
 * i.e. the union can be desugared with per-record success patterns.
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    for (BType memberType : memTypes) {
        int tag = memberType.tag;
        boolean allowed = tag == TypeTags.RECORD || tag == TypeTags.ERROR || tag == TypeTags.NIL;
        if (!allowed) {
            return false;
        }
    }
    return true;
}
/**
 * Builds the match pattern {@code error $t_match_error$ => $result$ = $t_match_error$}
 * used when an error value flows out of a safe-navigation link.
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
BLangSimpleVariable tempResultVar) {
    String errorVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString(errorVarName),
                                               this.env.scope.owner.pkgID, symTable.errorType,
                                               this.env.scope.owner, expr.pos, VIRTUAL);
    BLangSimpleVariable errorPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, errorVarName, symTable.errorType, null, errorVarSymbol);

    // Pattern body: $result$ = $t_match_error$;
    BLangSimpleVarRef errorVarRef = ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignError =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultVarRef, errorVarRef, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignError));

    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
}
/**
 * Builds a match-expression pattern that binds nil to the ignore variable
 * ({@code _}) and evaluates to the given expression.
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(Location pos,
BLangExpression expr) {
    String ignoreVarName = IGNORE.toString();
    BVarSymbol ignoreVarSymbol = new BVarSymbol(0, names.fromString(ignoreVarName),
                                                this.env.scope.owner.pkgID, symTable.nilType,
                                                this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable ignoreVar =
            ASTBuilderUtil.createVariable(pos, ignoreVarName, symTable.nilType, null, ignoreVarSymbol);

    BLangMatchExprPatternClause nilPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nilPattern.variable = ignoreVar;
    nilPattern.expr = expr;
    nilPattern.pos = pos;
    return nilPattern;
}
/**
 * Builds the match pattern {@code () $t_match_null$ => $result$ = $t_match_null$}
 * used when nil flows out of a nil-safe navigation link.
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
BLangSimpleVariable tempResultVar) {
    String nilVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nilVarSymbol = new BVarSymbol(0, names.fromString(nilVarName),
                                             this.env.scope.owner.pkgID, symTable.nilType,
                                             this.env.scope.owner, expr.pos, VIRTUAL);
    BLangSimpleVariable nilPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, nilVarName, symTable.nilType, null, nilVarSymbol);

    // Pattern body: $result$ = $t_match_null$;
    BLangSimpleVarRef nilVarRef = ASTBuilderUtil.createVariableRef(expr.pos, nilPatternVar.symbol);
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignNil =
            ASTBuilderUtil.createAssignmentStmt(expr.pos, resultVarRef, nilVarRef, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignNil));

    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nilPatternVar, patternBody);
}
/**
 * Builds the catch-all static pattern {@code _ => $result$ = ()} used as the final
 * clause when matching over a union of record types.
 */
private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr,
BLangSimpleVariable tempResultVar) {
    // Pattern body: $result$ = ();
    BLangVariableReference resultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignNil = ASTBuilderUtil.createAssignmentStmt(expr.pos, resultVarRef,
            createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignNil));

    BLangMatchStaticBindingPatternClause matchAllPattern =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    BVarSymbol wildcardSymbol = new BVarSymbol(0, names.fromString("_"),
                                               this.env.scope.owner.pkgID, symTable.anyType,
                                               this.env.scope.owner, expr.pos, VIRTUAL);
    matchAllPattern.literal = ASTBuilderUtil.createVariableRef(expr.pos, wildcardSymbol);
    matchAllPattern.body = patternBody;
    return matchAllPattern;
}
/**
 * Builds the success pattern {@code T $t_match_success$ => $result$ = $t_match_success$.field}
 * for a safe-navigation link: the matched value is bound to a fresh variable, the
 * original access is re-targeted at that variable, and its result is assigned to the
 * temp result variable. Side effect: stores the generated assignment in
 * {@code this.safeNavigationAssignment} so outer links can rewrite its RHS.
 *
 * @param liftError whether error should be removed from the safe type
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr,
BLangSimpleVariable tempResultVar,
boolean liftError) {
// Strip nil (and error, if liftError) from the matched type.
type = types.getSafeType(type, true, liftError);
String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
BVarSymbol successPatternSymbol;
// Invokable-typed bindings need an invokable symbol so calls through them resolve.
if (type.tag == TypeTags.INVOKABLE) {
successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner,
accessExpr.pos, VIRTUAL);
} else {
successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
this.env.scope.owner.pkgID, type, this.env.scope.owner,
accessExpr.pos, VIRTUAL);
}
BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
type, null, successPatternSymbol);
// Clone the access expression so the original AST node is left untouched.
BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
}
if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
}
// Re-target the clone at the pattern variable; the clone itself is no longer safe-navigated.
tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
tempAccessExpr.errorSafeNavigation = false;
tempAccessExpr.nilSafeNavigation = false;
accessExpr.cloneRef = null;
// XML access may still yield error or nil; widen the clone's type accordingly.
if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.getBType().tag)) {
tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
symTable.nilType));
} else {
tempAccessExpr.setBType(accessExpr.originalType);
}
tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
// Pattern body: $result$ = <converted clone>;
BLangVariableReference tempResultVarRef =
ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
BLangAssignment assignmentStmt =
ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
BLangMatchTypedBindingPatternClause successPattern =
ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
// Remember the assignment so enclosing links can rewrite its RHS.
this.safeNavigationAssignment = assignmentStmt;
return successPattern;
}
/**
 * Returns true when an lvalue access expression has a nullable receiver anywhere
 * in its chain, i.e. the assignment needs LHS safe-navigation desugaring.
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    return receiver.getBType().isNullable() || safeNavigateLHS(receiver);
}
/**
 * Rewrites an assignment whose LHS chain contains nullable receivers into a block:
 * per-receiver nil checks (which panic or default-initialize) followed by the
 * original assignment over a cloned, non-safe LHS.
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
boolean safeAssignment) {
    // NOTE: safeAssignment is unused here; it is retained for signature compatibility.
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();

    // Emit nil-guard statements for every receiver in the chain.
    createLHSSafeNavigation(stmts, accessExpr.expr);

    // Finally, perform the assignment itself on a clone of the original LHS.
    stmts.add(ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, cloneExpression(accessExpr), rhsExpr));
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
/**
 * Recursively emits nil-guard statements for every receiver in an LHS access chain.
 * For each nullable link: if it is a defaultable mapping (and not the chain root),
 * a nil value is replaced by an empty mapping literal; otherwise a nil value panics
 * with a nil-reference error. Invocation results are first captured into an
 * intermediate variable. The processed (possibly replaced) expression for each link
 * is communicated to the caller via {@code accessExprStack}.
 */
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
NodeKind kind = expr.getKind();
boolean root = false;
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
// Process the receiver first, then splice its processed form back in.
createLHSSafeNavigation(stmts, accessExpr.expr);
accessExpr.expr = accessExprStack.pop();
} else {
// No receiver: this is the root of the chain.
root = true;
}
// Invocation results must be evaluated once into an intermediate variable
// before they can be nil-tested and reused.
if (expr.getKind() == NodeKind.INVOCATION) {
BLangInvocation invocation = (BLangInvocation) expr;
BVarSymbol interMediateSymbol = new BVarSymbol(0,
names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"),
this.env.scope.owner.pkgID, invocation.getBType(),
this.env.scope.owner, expr.pos, VIRTUAL);
BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
interMediateSymbol.name.value,
invocation.getBType(), invocation,
interMediateSymbol);
BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
intermediateVariable);
stmts.add(intermediateVariableDefinition);
expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
}
if (expr.getBType().isNullable()) {
// if (expr is ()) { <default-or-panic> }
BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
isNillTest.setBType(symTable.booleanType);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
// Work on a clone narrowed to the non-nil type from here on.
expr = cloneExpression(expr);
expr.setBType(types.getSafeType(expr.getBType(), true, false));
if (isDefaultableMappingType(expr.getBType()) && !root) {
// Defaultable mapping (non-root): replace nil with an empty mapping literal.
BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
jsonLiteral.setBType(expr.getBType());
jsonLiteral.pos = expr.pos;
BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
expr, jsonLiteral);
thenStmt.addStatement(assignment);
} else {
// Otherwise: panic with a nil-reference error.
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
literal.setBType(symTable.stringType);
BLangErrorConstructorExpr errorConstructorExpr =
(BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
names.fromString(""), names.fromString("error"));
errorConstructorExpr.setBType(symbol.type);
errorConstructorExpr.pos = expr.pos;
List<BLangExpression> positionalArgs = new ArrayList<>();
positionalArgs.add(literal);
errorConstructorExpr.positionalArgs = positionalArgs;
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.expr = errorConstructorExpr;
panicNode.pos = expr.pos;
thenStmt.addStatement(panicNode);
}
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
stmts.add(ifelse);
}
// Hand the processed expression back to the parent link.
accessExprStack.push(expr);
}
/**
 * Builds a type node representing the nil type {@code ()}.
 */
BLangValueType getNillTypeNode() {
    BLangValueType nilTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
    nilTypeNode.typeKind = TypeKind.NIL;
    nilTypeNode.setBType(symTable.nilType);
    return nilTypeNode;
}
/**
 * Clones a variable reference or access expression for reuse on the LHS.
 * Only simple variable references and field/index access expressions are
 * supported; anything else indicates a desugaring bug.
 */
private BLangValueExpression cloneExpression(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind == NodeKind.SIMPLE_VARIABLE_REF) {
        return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
    }
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return cloneAccessExpr((BLangAccessExpression) expr);
    }
    throw new IllegalStateException();
}
/**
 * Deep-clones a field/index access expression chain, narrowing each receiver to
 * its non-nil type and clearing all safe-navigation flags on the clone.
 */
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
    if (originalAccessExpr.expr == null) {
        return originalAccessExpr;
    }

    // Clone the receiver first, recursing through nested access expressions.
    NodeKind receiverKind = originalAccessExpr.expr.getKind();
    BLangExpression receiverClone;
    if (receiverKind == NodeKind.FIELD_BASED_ACCESS_EXPR || receiverKind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        receiverClone = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
    } else {
        receiverClone = cloneExpression(originalAccessExpr.expr);
    }
    // The cloned receiver is used after a nil guard, so narrow it to the non-nil type.
    receiverClone.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));

    BLangAccessExpression accessExpr;
    NodeKind kind = originalAccessExpr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        accessExpr = ASTBuilderUtil.createFieldAccessExpr(receiverClone,
                ((BLangFieldBasedAccess) originalAccessExpr).field);
    } else if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        accessExpr = ASTBuilderUtil.createIndexAccessExpr(receiverClone,
                ((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
    } else {
        throw new IllegalStateException();
    }

    accessExpr.originalType = originalAccessExpr.originalType;
    accessExpr.pos = originalAccessExpr.pos;
    accessExpr.isLValue = originalAccessExpr.isLValue;
    accessExpr.symbol = originalAccessExpr.symbol;
    // The clone is used after guards, so it no longer needs safe navigation.
    accessExpr.errorSafeNavigation = false;
    accessExpr.nilSafeNavigation = false;
    accessExpr.setBType(originalAccessExpr.originalType);
    return accessExpr;
}
/**
 * Builds {@code expr + 1}, used to convert an exclusive range start into an
 * inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol addOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
                                                                                symTable.intType,
                                                                                symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.ADD, addOp);
}
/**
 * Builds {@code expr - 1}, used to convert an exclusive range end into an
 * inclusive one.
 */
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
    BLangLiteral one = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
    BOperatorSymbol subOp = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
                                                                                symTable.intType,
                                                                                symTable.intType);
    return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, one, symTable.intType, OperatorKind.SUB, subOp);
}
/**
 * Builds a boolean literal node carrying the given value, positioned at the
 * built-in (synthetic) location.
 */
private BLangLiteral getBooleanLiteral(boolean value) {
    BLangLiteral booleanLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    booleanLiteral.value = value;
    booleanLiteral.setBType(symTable.booleanType);
    booleanLiteral.pos = symTable.builtinPos;
    return booleanLiteral;
}
/**
 * Returns true when the non-nil form of the type is a mapping (json, map, or
 * record) that can be default-initialized with an empty mapping literal.
 */
private boolean isDefaultableMappingType(BType type) {
    int safeTag = types.getSafeType(type, true, false).tag;
    return safeTag == TypeTags.JSON || safeTag == TypeTags.MAP || safeTag == TypeTags.RECORD;
}
/**
 * Creates the generated {@code $init$} function for a class definition, registers
 * it on the class's object type symbol as the generated initializer, and returns
 * the rewritten (desugared) function. If the user defined an init method, its
 * return type is propagated to the generated initializer; otherwise it returns nil.
 */
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
BType returnType = symTable.nilType;
BLangFunction userDefinedInitMethod = classDefinition.initFunction;
if (userDefinedInitMethod != null) {
returnType = userDefinedInitMethod.getBType().getReturnType();
}
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.pos, classDefinition.symbol,
env, names, Names.GENERATED_INIT_SUFFIX,
classDefinition.getBType(), returnType);
// Register the generated initializer on the object type symbol so callers can find it.
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
typeSymbol.generatedInitializerFunc = new BAttachedFunction(Names.GENERATED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(),
classDefinition.pos);
classDefinition.generatedInitFunction = initFunction;
initFunction.returnTypeNode.setBType(returnType);
return rewrite(initFunction, env);
}
/**
 * Desugars a logical binary expression into an if/else over a temp variable,
 * preserving short-circuit semantics:
 * <pre>
 *   T $result$;
 *   AND: if (lhs) { $result$ = rhs;  } else { $result$ = false; }
 *   OR:  if (lhs) { $result$ = true; } else { $result$ = rhs;   }
 * </pre>
 * The whole construct is wrapped in a statement expression yielding {@code $result$}.
 */
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
    BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
                                                       symTable.builtinPos);
    boolean isLogicalAnd = binaryExpr.opKind == OperatorKind.AND;

    // then-branch: rhs for AND, `true` for OR.
    BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    BLangExpression thenResult = isLogicalAnd ? binaryExpr.rhsExpr : getBooleanLiteral(true);
    thenBody.addStatement(ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult));

    // else-branch: `false` for AND, rhs for OR.
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
    BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
                                                                          resultVarDef.var.symbol);
    BLangExpression elseResult = isLogicalAnd ? getBooleanLiteral(false) : binaryExpr.rhsExpr;
    elseBody.addStatement(ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult));

    BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
    BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
    BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
    stmtExpr.setBType(binaryExpr.getBType());
    result = rewriteExpr(stmtExpr);
}
/**
 * Splits the package init function into several smaller functions so that each
 * generated function stays under the JVM method-size limit. Each generated
 * function {@code check}-invokes the next one, so the chain behaves like the
 * original single body.
 *
 * @param packageNode package node
 * @param env symbol environment
 * @return initial init function but trimmed in size
 */
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
    int methodSize = INIT_METHOD_SPLIT_SIZE;
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
    if (!isJvmTarget) {
        // Splitting is only needed to stay under the JVM's method size limit.
        return packageNode.initFunction;
    }
    BLangFunction initFunction = packageNode.initFunction;

    List<BLangFunction> generatedFunctions = new ArrayList<>();
    List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
    funcBody.stmts.clear();
    BLangFunction newFunc = initFunction;
    BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;

    // Phase 1: statements before the first variable definition. Rotate to a new
    // function every methodSize statements, or at init/record-literal assignments.
    int varDefIndex = 0;
    for (int i = 0; i < stmts.size(); i++) {
        BLangStatement statement = stmts.get(i);
        if (statement.getKind() == NodeKind.VARIABLE_DEF) {
            break;
        }
        varDefIndex++;
        if (i > 0 && (i % methodSize == 0 || isAssignmentWithInitOrRecordLiteralExpr(statement))) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }

    // Phase 2: accumulate statements into chunks, flushing a chunk into a fresh
    // function whenever a service-constructor assignment would push the current
    // function past methodSize. Stop at the first listener assignment.
    List<BLangStatement> chunkStmts = new ArrayList<>();
    for (int i = varDefIndex; i < stmts.size(); i++) {
        BLangStatement stmt = stmts.get(i);
        chunkStmts.add(stmt);
        varDefIndex++;
        if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
                (newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
            // Fix: the original code re-checked the size condition in a nested if;
            // it is already guaranteed by the enclosing condition, so the
            // duplicate check has been removed.
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
            newFuncBody.stmts.addAll(chunkStmts);
            chunkStmts.clear();
        } else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
                (((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
                Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
                        Flags.LISTENER)
        ) {
            break;
        }
    }
    newFuncBody.stmts.addAll(chunkStmts);

    // Phase 3: remaining statements, rotating every methodSize statements.
    for (int i = varDefIndex; i < stmts.size(); i++) {
        if (i > 0 && i % methodSize == 0) {
            generatedFunctions.add(newFunc);
            newFunc = createIntermediateInitFunction(packageNode, env);
            newFuncBody = (BLangBlockFunctionBody) newFunc.body;
            symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
        }
        newFuncBody.stmts.add(stmts.get(i));
    }
    generatedFunctions.add(newFunc);

    // Chain the generated functions: each one ends with `check <next>();`.
    for (int j = 0; j < generatedFunctions.size() - 1; j++) {
        BLangFunction thisFunction = generatedFunctions.get(j);

        BLangCheckedExpr checkedExpr =
                ASTBuilderUtil.createCheckExpr(initFunction.pos,
                        createInvocationNode(generatedFunctions.get(j + 1).name.value,
                                new ArrayList<>(), symTable.errorOrNilType),
                        symTable.nilType);
        checkedExpr.equivalentErrorTypeList.add(symTable.errorType);

        BLangExpressionStmt expressionStmt = ASTBuilderUtil
                .createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
        expressionStmt.expr = checkedExpr;
        expressionStmt.expr.pos = initFunction.pos;

        // j == 0 is the original init function, which is already part of the package.
        if (j > 0) {
            thisFunction = rewrite(thisFunction, env);
            packageNode.functions.add(thisFunction);
            packageNode.topLevelNodes.add(thisFunction);
        }
    }

    // Register the last generated function as well (it got no invocation appended).
    if (generatedFunctions.size() > 1) {
        BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
        lastFunc = rewrite(lastFunc, env);
        packageNode.functions.add(lastFunc);
        packageNode.topLevelNodes.add(lastFunc);
    }

    return generatedFunctions.get(0);
}
/**
 * Returns true when the statement is an assignment whose RHS is (or wraps) an
 * object initializer or mapping constructor.
 */
private boolean isAssignmentWithInitOrRecordLiteralExpr(BLangStatement statement) {
    return statement.getKind() == NodeKind.ASSIGNMENT
            && isMappingOrObjectConstructorOrObjInit(((BLangAssignment) statement).getExpression());
}
/**
 * Returns true when the expression is an object initializer, mapping constructor,
 * or object-constructor expression — possibly wrapped in check or type-conversion
 * expressions, which are unwrapped recursively.
 */
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
    NodeKind kind = expression.getKind();
    if (kind == NodeKind.TYPE_INIT_EXPR || kind == NodeKind.RECORD_LITERAL_EXPR
            || kind == NodeKind.OBJECT_CTOR_EXPRESSION) {
        return true;
    }
    if (kind == NodeKind.CHECK_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
    }
    if (kind == NodeKind.TYPE_CONVERSION_EXPR) {
        return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
    }
    return false;
}
/**
 * Creates an intermediate package init function with an error-or-nil return type,
 * using a monotonically increasing suffix to keep names unique.
 *
 * @param pkgNode package node
 * @param env symbol environment of package
 * @return the newly created intermediate init function
 */
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
    String alias = pkgNode.symbol.pkgID.toString();
    Name funcName = new Name(Names.INIT_FUNCTION_SUFFIX.value + this.initFuncIndex++);
    BLangFunction intermediateInitFunc =
            ASTBuilderUtil.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias, funcName, symTable);
    createInvokableSymbol(intermediateInitFunc, env);
    return intermediateInitFunc;
}
/**
 * Returns the rest parameter's type of the given invokable symbol, or null when
 * the symbol is absent or has no rest parameter.
 */
private BType getRestType(BInvokableSymbol invokableSymbol) {
    boolean hasRestParam = invokableSymbol != null && invokableSymbol.restParam != null;
    return hasRestParam ? invokableSymbol.restParam.type : null;
}
/**
 * Returns the rest parameter's type of the given function node, or null when the
 * function is absent or has no rest parameter.
 */
private BType getRestType(BLangFunction function) {
    boolean hasRestParam = function != null && function.restParam != null;
    return hasRestParam ? function.restParam.getBType() : null;
}
/**
 * Returns the rest parameter's symbol of the given function node, or null when
 * the function is absent or has no rest parameter.
 */
private BVarSymbol getRestSymbol(BLangFunction function) {
    boolean hasRestParam = function != null && function.restParam != null;
    return hasRestParam ? function.restParam.symbol : null;
}
/**
 * Returns true when the record field is a key-value field whose key is a
 * computed (bracketed) expression.
 */
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
    return field.isKeyValueField()
            && ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
/**
 * Rewrites a mapping constructor's fields: non-computed keys are normalized to
 * string literals, values and spread operands are rewritten, and the result is
 * repackaged as a struct literal (for record types) or a map literal otherwise.
 * The original field list is cleared as a side effect.
 */
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
BType type = mappingConstructorExpr.getBType();
Location pos = mappingConstructorExpr.pos;
List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
(BLangRecordLiteral.BLangRecordKeyValueField) field;
BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
BLangExpression origKey = key.expr;
BLangExpression keyExpr;
if (key.computedKey) {
// Computed keys are kept as-is; they are evaluated at runtime.
keyExpr = origKey;
} else {
// Identifier keys become string literals; literal keys are kept.
keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
((BLangLiteral) origKey);
}
BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
rewriteExpr(keyValueField.valueExpr));
rewrittenField.pos = keyValueField.pos;
rewrittenField.key.pos = key.pos;
rewrittenFields.add(rewrittenField);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
// Shorthand field `x` expands to `"x": x`.
BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
rewriteExpr(createStringLiteral(pos,
StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
rewriteExpr(varRefField)));
} else {
// Spread operator field: rewrite the spread expression in place.
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
spreadOpField.expr = rewriteExpr(spreadOpField.expr);
rewrittenFields.add(spreadOpField);
}
}
fields.clear();
return type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type, rewrittenFields) :
new BLangMapLiteral(pos, type, rewrittenFields);
}
/**
 * Adds an implicit import of the internal {@code ballerinai/transaction} module
 * (aliased as {@code trx}) to the enclosing package, unless the enclosing package
 * IS that module itself.
 */
protected void addTransactionInternalModuleImport() {
    PackageID transactionPkgId = new PackageID(Names.BALLERINA_INTERNAL_ORG, Lists.of(Names.TRANSACTION),
            Names.TRANSACTION_INTERNAL_VERSION);
    if (env.enclPkg.packageID.equals(transactionPkgId)) {
        // The internal transaction module must not import itself.
        return;
    }

    BLangImportPackage transactionImport = (BLangImportPackage) TreeBuilder.createImportPackageNode();
    List<BLangIdentifier> pkgNameComps = new ArrayList<>();
    pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
    transactionImport.pkgNameComps = pkgNameComps;
    transactionImport.pos = env.enclPkg.symbol.pos;
    transactionImport.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
    transactionImport.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
    transactionImport.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
    transactionImport.symbol = symTable.internalTransactionModuleSymbol;

    env.enclPkg.imports.add(transactionImport);
    env.enclPkg.symbol.imports.add(transactionImport.symbol);
}
} | if (unaryExpr.getBType() != null && unaryExpr.getBType().isNullable() && | private BLangBlockStmt desugarForeachToWhile(BLangForeach foreach, BLangSimpleVariableDef varDef) {
BVarSymbol iteratorSymbol = varDef.var.symbol;
BVarSymbol resultSymbol = new BVarSymbol(0, names.fromString("$result$"), this.env.scope.owner.pkgID,
foreach.nillableResultType, this.env.scope.owner, foreach.pos,
VIRTUAL);
BLangSimpleVariableDef resultVariableDefinition = getIteratorNextVariableDefinition(foreach.pos,
foreach.nillableResultType, iteratorSymbol, resultSymbol);
BLangType userDefineType = getUserDefineTypeNode(foreach.resultType);
BLangSimpleVarRef resultReferenceInWhile = ASTBuilderUtil.createVariableRef(foreach.pos, resultSymbol);
BLangTypeTestExpr typeTestExpr = ASTBuilderUtil
.createTypeTestExpr(foreach.pos, resultReferenceInWhile, userDefineType);
BLangWhile whileNode = (BLangWhile) TreeBuilder.createWhileNode();
whileNode.pos = foreach.pos;
whileNode.expr = typeTestExpr;
whileNode.body = foreach.body;
BLangAssignment resultAssignment = getIteratorNextAssignment(foreach.pos, iteratorSymbol, resultSymbol);
VariableDefinitionNode variableDefinitionNode = foreach.variableDefinitionNode;
BLangFieldBasedAccess valueAccessExpr = getValueAccessExpression(foreach.pos, foreach.varType, resultSymbol);
BLangExpression expr = valueAccessExpr.expr;
valueAccessExpr.expr = addConversionExprIfRequired(expr, symTable.mapAllType);
variableDefinitionNode.getVariable()
.setInitialExpression(addConversionExprIfRequired(valueAccessExpr, foreach.varType));
whileNode.body.stmts.add(0, (BLangStatement) variableDefinitionNode);
whileNode.body.stmts.add(1, resultAssignment);
BLangBlockStmt blockNode = ASTBuilderUtil.createBlockStmt(foreach.pos);
blockNode.addStatement(varDef);
blockNode.addStatement(resultVariableDefinition);
blockNode.addStatement(whileNode);
return blockNode;
}
/**
 * Wraps a resolved type in an anonymous user-defined type node (empty package
 * alias and type name) so it can be used where a type node is required, e.g. in
 * a generated type-test expression.
 */
private BLangType getUserDefineTypeNode(BType type) {
    BLangUserDefinedType typeNode =
            new BLangUserDefinedType(ASTBuilderUtil.createIdentifier(null, ""),
                                     ASTBuilderUtil.createIdentifier(null, ""));
    typeNode.setBType(type);
    return typeNode;
}
@Override
public void visit(BLangWhile whileNode) {
    // Plain while loop: rewrite condition and body in place.
    if (whileNode.onFailClause == null) {
        whileNode.expr = rewriteExpr(whileNode.expr);
        whileNode.body = rewrite(whileNode.body, env);
        result = whileNode;
        return;
    }

    // while-with-on-fail: detach the on-fail clause and desugar the loop into a
    // wrapping do-statement carrying that clause.
    BLangOnFailClause onFailClause = whileNode.onFailClause;
    whileNode.onFailClause = null;
    whileNode.body.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
    BLangDo doStmt = wrapStatementWithinDo(whileNode.pos, whileNode, onFailClause);
    result = rewrite(doStmt, env);
}
/**
 * Wraps a single statement in a {@code do { ... } on fail ...} construct whose
 * body breaks to the outer block on failure.
 */
private BLangDo wrapStatementWithinDo(Location location, BLangStatement statement,
                                      BLangOnFailClause onFailClause) {
    BLangDo doStmt = (BLangDo) TreeBuilder.createDoNode();
    doStmt.pos = location;
    doStmt.onFailClause = onFailClause;

    BLangBlockStmt doBody = ASTBuilderUtil.createBlockStmt(location);
    doBody.scope = new Scope(env.scope.owner);
    doBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
    doBody.stmts.add(statement);
    doStmt.body = doBody;
    return doStmt;
}
// Desugars `lock { body }` into:
//   lock; var $errorResult = trap { body; () }; unlock;
//   if $errorResult is error { panic $errorResult; }
// Running the body under `trap` guarantees the unlock executes even when the
// body errors; the error is then re-panicked after the lock is released.
@Override
public void visit(BLangLock lockNode) {
BLangOnFailClause currentOnFailClause = this.onFailClause;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(lockNode.pos);
if (lockNode.onFailClause != null) {
blockStmt.failureBreakMode = BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
rewrite(lockNode.onFailClause, env);
}
BLangLockStmt lockStmt = new BLangLockStmt(lockNode.pos);
blockStmt.addStatement(lockStmt);
// Pushed so that nested variable references can register themselves as
// lock variables (see visit(BLangSimpleVarRef)); popped after rewrite.
enclLocks.push(lockStmt);
BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(lockNode.pos, symTable.nilType, Names.NIL_VALUE);
BType nillableError = BUnionType.create(null, symTable.errorType, symTable.nilType);
// The lock body becomes a statement-expression yielding nil, so it can be
// placed under a trap expression.
BLangStatementExpression statementExpression = createStatementExpression(lockNode.body, nilLiteral);
statementExpression.setBType(symTable.nilType);
BLangTrapExpr trapExpr = (BLangTrapExpr) TreeBuilder.createTrapExpressionNode();
trapExpr.setBType(nillableError);
trapExpr.expr = statementExpression;
BVarSymbol nillableErrorVarSymbol = new BVarSymbol(0, names.fromString("$errorResult"),
this.env.scope.owner.pkgID, nillableError,
this.env.scope.owner, lockNode.pos, VIRTUAL);
BLangSimpleVariable simpleVariable = ASTBuilderUtil.createVariable(lockNode.pos, "$errorResult",
nillableError, trapExpr,
nillableErrorVarSymbol);
BLangSimpleVariableDef simpleVariableDef = ASTBuilderUtil.createVariableDef(lockNode.pos, simpleVariable);
blockStmt.addStatement(simpleVariableDef);
// Unlock runs unconditionally, before any trapped error is re-panicked.
BLangUnLockStmt unLockStmt = new BLangUnLockStmt(lockNode.pos);
unLockStmt.relatedLock = lockStmt;
blockStmt.addStatement(unLockStmt);
BLangSimpleVarRef varRef = ASTBuilderUtil.createVariableRef(lockNode.pos, nillableErrorVarSymbol);
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(lockNode.pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = lockNode.pos;
panicNode.expr = addConversionExprIfRequired(varRef, symTable.errorType);
ifBody.addStatement(panicNode);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(lockNode.pos, varRef, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(lockNode.pos, isErrorTest, ifBody, null);
blockStmt.addStatement(ifelse);
result = rewrite(blockStmt, env);
enclLocks.pop();
this.onFailClause = currentOnFailClause;
}
/**
 * Low-level lock statements are generated by this desugar itself and need no
 * further transformation.
 */
@Override
public void visit(BLangLockStmt lockStmt) {
    this.result = lockStmt;
}
/**
 * Low-level unlock statements are generated by this desugar itself and need
 * no further transformation.
 */
@Override
public void visit(BLangUnLockStmt unLockStmt) {
    this.result = unLockStmt;
}
// Builds the compiler-internal `on fail` clause attached to a transaction
// body. The clause captures the failure into $trxError$, rolls the
// transaction back, and then either re-fails (normal case) or panics when
// $shouldPanic$ was set by the transaction machinery.
private BLangOnFailClause createTrxInternalOnFail(Location pos, BLangSimpleVarRef shouldPanicRef,
BLangSimpleVarRef shouldRetryRef) {
BLangOnFailClause trxOnFailClause = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
trxOnFailClause.pos = pos;
trxOnFailClause.body = ASTBuilderUtil.createBlockStmt(pos);
trxOnFailClause.body.scope = new Scope(env.scope.owner);
trxOnFailClause.isInternal = true;
// $trxError$ is the variable bound to the caught failure.
BVarSymbol trxOnFailErrorSym = new BVarSymbol(0, names.fromString("$trxError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable trxOnFailError = ASTBuilderUtil.createVariable(pos,
"$trxError$", symTable.errorType, null, trxOnFailErrorSym);
trxOnFailClause.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
trxOnFailError);
trxOnFailClause.body.scope.define(trxOnFailErrorSym.name, trxOnFailErrorSym);
// Roll back before deciding whether to fail or panic.
transactionDesugar.createRollbackIfFailed(pos, trxOnFailClause.body, trxOnFailErrorSym,
trxBlockId, shouldRetryRef);
BLangGroupExpr shouldNotPanic = new BLangGroupExpr();
shouldNotPanic.setBType(symTable.booleanType);
shouldNotPanic.expression = createNotBinaryExpression(pos, shouldPanicRef);
BLangSimpleVarRef caughtError = ASTBuilderUtil.createVariableRef(pos, trxOnFailErrorSym);
BLangBlockStmt failBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.pos = pos;
panicNode.expr = caughtError;
// if (!$shouldPanic$) { fail $trxError$; } else { panic $trxError$; }
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, shouldNotPanic, failBlock, panicNode);
trxOnFailClause.body.stmts.add(exitIf);
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = caughtError;
failBlock.stmts.add(failStmt);
trxOnFailClause.bodyContainsFail = true;
return trxOnFailClause;
}
// Desugars a `transaction` statement. A user-supplied `on fail` clause is
// handled by wrapping the transaction in a `do`; otherwise the transaction is
// rewritten via TransactionDesugar with an internal on-fail clause that rolls
// back and routes the failure to either `fail` or `panic` (via $shouldPanic$).
// Note the save/restore of desugar-level state (trxBlockId, shouldReturnErrors,
// onFailClause) around the rewrite — order matters here.
@Override
public void visit(BLangTransaction transactionNode) {
if (transactionNode.onFailClause != null) {
// Detach the user's on-fail clause and re-enter via the `do` path.
BLangOnFailClause onFailClause = transactionNode.onFailClause;
transactionNode.onFailClause = null;
transactionNode.transactionBody.failureBreakMode = BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE;
BLangDo doStmt = wrapStatementWithinDo(transactionNode.pos, transactionNode, onFailClause);
result = rewrite(doStmt, env);
} else {
BLangLiteral currentTrxBlockId = this.trxBlockId;
// Each transaction block gets a unique string id used by the runtime.
String uniqueId = String.valueOf(++transactionBlockCount);
this.trxBlockId = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.stringType, uniqueId);
boolean currShouldReturnErrors = this.shouldReturnErrors;
this.shouldReturnErrors = true;
BLangOnFailClause currOnFailClause = this.onFailClause;
// $shouldPanic$ is a closure flag set by the transaction machinery to
// force a panic (rather than a fail) out of the internal on-fail clause.
BLangLiteral falseLiteral = ASTBuilderUtil.createLiteral(transactionNode.pos, symTable.booleanType, false);
BVarSymbol shouldPanicVarSymbol = new BVarSymbol(0, names.fromString("$shouldPanic$"),
env.scope.owner.pkgID, symTable.booleanType, this.env.scope.owner, transactionNode.pos, VIRTUAL);
shouldPanicVarSymbol.closure = true;
BLangSimpleVariable shouldPanicVariable = ASTBuilderUtil.createVariable(transactionNode.pos,
"$shouldPanic$", symTable.booleanType, falseLiteral, shouldPanicVarSymbol);
BLangSimpleVariableDef shouldPanicDef = ASTBuilderUtil.createVariableDef(transactionNode.pos,
shouldPanicVariable);
BLangSimpleVarRef shouldPanicRef = ASTBuilderUtil.createVariableRef(transactionNode.pos,
shouldPanicVarSymbol);
BLangOnFailClause trxInternalOnFail = createTrxInternalOnFail(transactionNode.pos, shouldPanicRef,
this.shouldRetryRef);
enclosingShouldPanic.put(trxInternalOnFail, shouldPanicRef);
boolean userDefinedOnFailAvbl = this.onFailClause != null;
analyzeOnFailClause(trxInternalOnFail, transactionNode.transactionBody);
BLangBlockStmt transactionStmtBlock =
transactionDesugar.rewrite(transactionNode, trxBlockId, env, uniqueId);
// $shouldPanic$ must be declared before anything else in the block.
transactionStmtBlock.stmts.add(0, shouldPanicDef);
transactionStmtBlock.scope.define(shouldPanicVarSymbol.name, shouldPanicVarSymbol);
transactionStmtBlock.failureBreakMode = userDefinedOnFailAvbl ?
BLangBlockStmt.FailureBreakMode.NOT_BREAKABLE :
BLangBlockStmt.FailureBreakMode.BREAK_TO_OUTER_BLOCK;
result = rewrite(transactionStmtBlock, this.env);
// Restore desugar-level state saved above.
this.shouldReturnErrors = currShouldReturnErrors;
this.trxBlockId = currentTrxBlockId;
swapAndResetEnclosingOnFail(currOnFailClause);
}
}
/**
 * Desugars a {@code rollback} statement: the transaction desugar expands it
 * into a statement block, which is then rewritten like any other block.
 */
@Override
public void visit(BLangRollback rollbackNode) {
    BLangBlockStmt desugaredRollback = transactionDesugar.desugar(rollbackNode, trxBlockId, this.shouldRetryRef);
    result = rewrite(desugaredRollback, env);
}
// Builds the compiler-internal `on fail` clause for a retry loop. On failure
// it records the error, marks the loop for continuation, and asks the retry
// manager whether another attempt should be made. When no retry is allowed,
// the error is either re-failed into an enclosing user on-fail clause or
// flagged for return, depending on whether such a clause exists.
private BLangOnFailClause createRetryInternalOnFail(Location pos,
BLangSimpleVarRef retryResultRef,
BLangSimpleVarRef retryManagerRef,
BLangSimpleVarRef shouldRetryRef,
BLangSimpleVarRef continueLoopRef,
BLangSimpleVarRef returnResult) {
BLangOnFailClause internalOnFail = (BLangOnFailClause) TreeBuilder.createOnFailClauseNode();
internalOnFail.pos = pos;
internalOnFail.body = ASTBuilderUtil.createBlockStmt(pos);
internalOnFail.body.scope = new Scope(env.scope.owner);
BVarSymbol caughtErrorSym = new BVarSymbol(0, names.fromString("$caughtError$"),
env.scope.owner.pkgID, symTable.errorType, env.scope.owner, pos, VIRTUAL);
BLangSimpleVariable caughtError = ASTBuilderUtil.createVariable(pos,
"$caughtError$", symTable.errorType, null, caughtErrorSym);
internalOnFail.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos,
caughtError);
env.scope.define(caughtErrorSym.name, caughtErrorSym);
BLangSimpleVarRef caughtErrorRef = ASTBuilderUtil.createVariableRef(pos, caughtErrorSym);
// $retryResult$ = $caughtError$;
BLangAssignment errorAssignment = ASTBuilderUtil.createAssignmentStmt(pos, retryResultRef, caughtErrorRef);
internalOnFail.body.stmts.add(errorAssignment);
// $continueLoop$ = true;
BLangAssignment continueLoopTrue = ASTBuilderUtil.createAssignmentStmt(pos, continueLoopRef,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
internalOnFail.body.stmts.add(continueLoopTrue);
// $shouldRetry$ = retryManager.shouldRetry($caughtError$);
BLangInvocation shouldRetryInvocation = createRetryManagerShouldRetryInvocation(pos,
retryManagerRef, caughtErrorRef);
BLangAssignment shouldRetryAssignment = ASTBuilderUtil.createAssignmentStmt(pos, shouldRetryRef,
shouldRetryInvocation);
internalOnFail.body.stmts.add(shouldRetryAssignment);
BLangGroupExpr shouldNotRetryCheck = new BLangGroupExpr();
shouldNotRetryCheck.setBType(symTable.booleanType);
shouldNotRetryCheck.expression = createNotBinaryExpression(pos, shouldRetryRef);
BLangGroupExpr exitCheck = new BLangGroupExpr();
exitCheck.setBType(symTable.booleanType);
exitCheck.expression = shouldNotRetryCheck;
BLangBlockStmt exitLogicBlock = ASTBuilderUtil.createBlockStmt(pos);
BLangIf exitIf = ASTBuilderUtil.createIfElseStmt(pos, exitCheck, exitLogicBlock, null);
if (this.onFailClause != null) {
// An enclosing user on-fail clause exists: when retries are exhausted,
// fail into it; otherwise continue the retry loop.
BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
failStmt.pos = pos;
failStmt.expr = retryResultRef;
exitLogicBlock.stmts.add(failStmt);
internalOnFail.bodyContainsFail = true;
internalOnFail.body.stmts.add(exitIf);
BLangContinue loopContinueStmt = (BLangContinue) TreeBuilder.createContinueNode();
loopContinueStmt.pos = pos;
internalOnFail.body.stmts.add(loopContinueStmt);
} else {
// No enclosing on-fail: mark the result for return instead.
BLangAssignment returnErrorTrue = ASTBuilderUtil.createAssignmentStmt(pos, returnResult,
ASTBuilderUtil.createLiteral(pos, symTable.booleanType, true));
exitLogicBlock.stmts.add(returnErrorTrue);
internalOnFail.body.stmts.add(exitIf);
}
return internalOnFail;
}
/**
 * Builds a boolean negation ({@code !expr}) unary expression, synthesizing
 * the {@code (boolean) -> boolean} operator symbol it requires.
 *
 * @param pos        position for the generated node
 * @param expression the boolean expression to negate
 * @return the negation expression
 */
BLangUnaryExpr createNotBinaryExpression(Location pos, BLangExpression expression) {
    List<BType> argTypes = new ArrayList<>();
    argTypes.add(symTable.booleanType);
    BInvokableType notOpType = new BInvokableType(argTypes, symTable.booleanType, null);
    BOperatorSymbol notOpSymbol = new BOperatorSymbol(names.fromString(OperatorKind.NOT.value()),
            symTable.rootPkgSymbol.pkgID, notOpType, symTable.rootPkgSymbol, symTable.builtinPos, VIRTUAL);
    return ASTBuilderUtil.createUnaryExpr(pos, expression, symTable.booleanType, OperatorKind.NOT, notOpSymbol);
}
// Creates a lambda with a pre-built function body. The function is defined in
// the enclosing package BEFORE its symbol types are read below, because
// defineFunction populates func.symbol.
BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, BLangFunctionBody lambdaBody) {
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
BLangFunction func =
ASTBuilderUtil.createFunction(pos, functionNamePrefix + UNDERSCORE + lambdaFunctionCount++);
lambdaFunction.function = func;
func.requiredParams.addAll(lambdaFunctionVariable);
func.setReturnTypeNode(returnType);
func.desugaredReturnType = true;
defineFunction(func, env.enclPkg);
// Re-read the params from the function so we pick up symbols created by
// defineFunction rather than the caller-supplied variable list.
lambdaFunctionVariable = func.requiredParams;
func.body = lambdaBody;
func.desugared = false;
lambdaFunction.pos = pos;
List<BType> paramTypes = new ArrayList<>();
lambdaFunctionVariable.forEach(variable -> paramTypes.add(variable.symbol.type));
lambdaFunction.setBType(new BInvokableType(paramTypes, func.symbol.type.getReturnType(),
null));
return lambdaFunction;
}
// Creates a lambda from raw body statements: wraps them into a block body,
// rewrites them in a fresh function-body env, then delegates to the
// body-taking overload. forceCastReturnType is set for the duration of the
// statement rewrite and must be reset afterwards — note the set/null pair.
protected BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
List<BLangSimpleVariable> lambdaFunctionVariable,
TypeNode returnType, List<BLangStatement> fnBodyStmts,
SymbolEnv env, Scope bodyScope) {
BLangBlockFunctionBody body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
body.scope = bodyScope;
SymbolEnv bodyEnv = SymbolEnv.createFuncBodyEnv(body, env);
this.forceCastReturnType = ((BLangType) returnType).getBType();
body.stmts = rewriteStmt(fnBodyStmts, bodyEnv);
this.forceCastReturnType = null;
return createLambdaFunction(pos, functionNamePrefix, lambdaFunctionVariable, returnType, body);
}
/**
 * Creates a bodiless lambda shell: a uniquely-named function with only its
 * return type set, defined in the enclosing package. Callers are expected to
 * attach parameters and a body afterwards.
 *
 * @param functionNamePrefix prefix for the generated unique function name
 */
private BLangLambdaFunction createLambdaFunction(Location pos, String functionNamePrefix,
                                                 TypeNode returnType) {
    String generatedName = functionNamePrefix + UNDERSCORE + lambdaFunctionCount++;
    BLangFunction function = ASTBuilderUtil.createFunction(pos, generatedName);
    function.setReturnTypeNode(returnType);
    function.desugaredReturnType = true;
    defineFunction(function, env.enclPkg);
    function.desugared = false;
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.function = function;
    lambdaFunction.pos = pos;
    return lambdaFunction;
}
/**
 * Defines a generated function in the target package: enters its symbol via
 * the symbol enter phase and registers the node in the package's function and
 * top-level-node lists.
 */
private void defineFunction(BLangFunction funcNode, BLangPackage targetPkg) {
    final SymbolEnv packageEnv = this.symTable.pkgEnvMap.get(targetPkg.symbol);
    symbolEnter.defineNode(funcNode, packageEnv);
    BLangPackage enclPkg = packageEnv.enclPkg;
    enclPkg.functions.add(funcNode);
    enclPkg.topLevelNodes.add(funcNode);
}
/**
 * Fork statements pass through unchanged at this stage.
 */
@Override
public void visit(BLangForkJoin forkJoin) {
    this.result = forkJoin;
}
/**
 * Desugars a literal. Byte-array literals ({@code byte[]}-typed) are expanded
 * into element-wise array literals; every other literal passes through.
 */
@Override
public void visit(BLangLiteral literalExpr) {
    BType literalType = literalExpr.getBType();
    boolean isByteArrayLiteral = literalType.tag == TypeTags.ARRAY
            && ((BArrayType) literalType).eType.tag == TypeTags.BYTE;
    if (isByteArrayLiteral) {
        result = rewriteBlobLiteral(literalExpr);
    } else {
        result = literalExpr;
    }
}
/**
 * Expands a blob literal (e.g. {@code base64 `AAAA`} or {@code base16 `FF`})
 * into an array literal holding one byte literal per decoded byte.
 */
private BLangNode rewriteBlobLiteral(BLangLiteral literalExpr) {
    String[] blobParts = getBlobTextValue((String) literalExpr.value);
    byte[] decodedBytes = BASE_64.equals(blobParts[0])
            ? Base64.getDecoder().decode(blobParts[1].getBytes(StandardCharsets.UTF_8))
            : hexStringToByteArray(blobParts[1]);
    BLangArrayLiteral byteArrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    byteArrayLiteral.setBType(literalExpr.getBType());
    byteArrayLiteral.pos = literalExpr.pos;
    byteArrayLiteral.exprs = new ArrayList<>();
    for (byte decodedByte : decodedBytes) {
        byteArrayLiteral.exprs.add(createByteLiteral(literalExpr.pos, decodedByte));
    }
    return byteArrayLiteral;
}
/**
 * Splits a blob-literal source text such as {@code base64 `AAAA`} into its
 * two parts after stripping spaces.
 *
 * @param blobLiteralNodeText source text of the blob literal
 * @return a two-element array: [0] the format keyword before the first
 *         backtick (e.g. "base16"/"base64"), [1] the payload between the
 *         first and last backticks
 */
private String[] getBlobTextValue(String blobLiteralNodeText) {
    // String.replace does a plain literal replacement; replaceAll would
    // needlessly compile " " as a regex on every call.
    String nodeText = blobLiteralNodeText.replace(" ", "");
    int openingBacktick = nodeText.indexOf('`');
    String[] result = new String[2];
    result[0] = nodeText.substring(0, openingBacktick);
    result[1] = nodeText.substring(openingBacktick + 1, nodeText.lastIndexOf('`'));
    return result;
}
/**
 * Decodes a base16 string (e.g. "0A1F") into its raw bytes, two hex digits
 * per byte. Assumes the input has even length and contains only valid hex
 * digits — presumably guaranteed by the lexer; TODO confirm.
 */
private static byte[] hexStringToByteArray(String str) {
    int length = str.length();
    byte[] decoded = new byte[length / 2];
    for (int idx = 0; idx < length; idx += 2) {
        int highNibble = Character.digit(str.charAt(idx), 16);
        int lowNibble = Character.digit(str.charAt(idx + 1), 16);
        decoded[idx / 2] = (byte) ((highNibble << 4) + lowNibble);
    }
    return decoded;
}
/**
 * Desugars a list constructor into the concrete literal node for its
 * contextually-expected type: tuple literal, JSON array literal, typedesc
 * load, or plain array literal. The check order mirrors type precedence:
 * tuple, JSON, JSON-element arrays, typedesc, then everything else.
 */
@Override
public void visit(BLangListConstructorExpr listConstructor) {
    listConstructor.exprs = rewriteExprs(listConstructor.exprs);
    BType listType = listConstructor.getBType();
    if (listType.tag == TypeTags.TUPLE) {
        result = rewriteExpr(new BLangTupleLiteral(listConstructor.pos, listConstructor.exprs, listType));
        return;
    }
    if (listType.tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, new BArrayType(listType)));
        return;
    }
    if (getElementType(listType).tag == TypeTags.JSON) {
        result = rewriteExpr(new BLangJSONArrayLiteral(listConstructor.exprs, listType));
        return;
    }
    if (listType.tag == TypeTags.TYPEDESC) {
        // A typedesc-typed constructor denotes a type value, not a list.
        BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
        typedescExpr.resolvedType = listConstructor.typedescType;
        typedescExpr.setBType(symTable.typeDesc);
        result = rewriteExpr(typedescExpr);
        return;
    }
    result = rewriteExpr(new BLangArrayLiteral(listConstructor.pos, listConstructor.exprs, listType));
}
/**
 * Desugars a table constructor: member record literals are rewritten in
 * place; the constructor node itself is retained.
 */
@Override
public void visit(BLangTableConstructorExpr tableConstructorExpr) {
    rewriteExprs(tableConstructorExpr.recordLiteralList);
    this.result = tableConstructorExpr;
}
/**
 * Desugars an array literal. JSON-typed arrays (or arrays whose element type
 * is JSON) become JSON array literals; other arrays pass through unchanged.
 */
@Override
public void visit(BLangArrayLiteral arrayLiteral) {
    arrayLiteral.exprs = rewriteExprs(arrayLiteral.exprs);
    BType arrayType = arrayLiteral.getBType();
    if (arrayType.tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, new BArrayType(arrayType));
    } else if (getElementType(arrayType).tag == TypeTags.JSON) {
        result = new BLangJSONArrayLiteral(arrayLiteral.exprs, arrayType);
    } else {
        result = arrayLiteral;
    }
}
// Desugars a tuple literal. A typedesc-typed tuple literal denotes a type
// value and is replaced with a typedesc expression. Otherwise each member
// gets an implicit cast to its corresponding tuple member type (or the rest
// type for members past the fixed part) before the members are rewritten.
@Override
public void visit(BLangTupleLiteral tupleLiteral) {
if (tupleLiteral.isTypedescExpr) {
final BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
typedescExpr.resolvedType = tupleLiteral.typedescType;
typedescExpr.setBType(symTable.typeDesc);
result = rewriteExpr(typedescExpr);
return;
}
List<BLangExpression> exprs = tupleLiteral.exprs;
BTupleType tupleType = (BTupleType) tupleLiteral.getBType();
List<BType> tupleMemberTypes = tupleType.tupleTypes;
int tupleMemberTypeSize = tupleMemberTypes.size();
int tupleExprSize = exprs.size();
for (int i = 0; i < tupleExprSize; i++) {
BLangExpression expr = exprs.get(i);
// If an implicit conversion was already attached, cast from its result
// type rather than the expression's own type.
BType expType = expr.impConversionExpr == null ? expr.getBType() : expr.impConversionExpr.getBType();
BType targetType = i < tupleMemberTypeSize ? tupleMemberTypes.get(i) : tupleType.restType;
types.setImplicitCastExpr(expr, expType, targetType);
}
tupleLiteral.exprs = rewriteExprs(tupleLiteral.exprs);
result = tupleLiteral;
}
/**
 * Desugars a parenthesized expression. A typedesc-typed group denotes a type
 * value and becomes a typedesc expression; otherwise the parentheses are
 * dropped and the inner expression is rewritten directly.
 */
@Override
public void visit(BLangGroupExpr groupExpr) {
    if (!groupExpr.isTypedescExpr) {
        result = rewriteExpr(groupExpr.expression);
        return;
    }
    BLangTypedescExpr typedescExpr = new BLangTypedescExpr();
    typedescExpr.resolvedType = groupExpr.typedescType;
    typedescExpr.setBType(symTable.typeDesc);
    result = rewriteExpr(typedescExpr);
}
/**
 * Desugars a mapping constructor. Fields are stably reordered so that all
 * statically-keyed fields precede computed-key fields before the constructor
 * is rewritten.
 */
@Override
public void visit(BLangRecordLiteral recordLiteral) {
    recordLiteral.fields.sort(
            (first, second) -> Boolean.compare(isComputedKey(first), isComputedKey(second)));
    result = rewriteExpr(rewriteMappingConstructor(recordLiteral));
}
// Desugars a simple variable reference into the concrete ref node matching
// the kind of symbol it resolves to: XML qualified name, function ref, type
// load, local/field/package var ref, or an inlined constant literal. Also
// registers package-level variables accessed inside a lock with the
// enclosing lock statement.
@Override
public void visit(BLangSimpleVarRef varRefExpr) {
BLangSimpleVarRef genVarRefExpr = varRefExpr;
// A reference through an XMLNS prefix becomes a string-typed QName node.
if (varRefExpr.pkgSymbol != null && varRefExpr.pkgSymbol.tag == SymTag.XMLNS) {
BLangXMLQName qnameExpr = new BLangXMLQName(varRefExpr.variableName);
qnameExpr.nsSymbol = (BXMLNSSymbol) varRefExpr.pkgSymbol;
qnameExpr.localname = varRefExpr.variableName;
qnameExpr.prefix = varRefExpr.pkgAlias;
qnameExpr.namespaceURI = qnameExpr.nsSymbol.namespaceURI;
qnameExpr.isUsedInXML = false;
qnameExpr.pos = varRefExpr.pos;
qnameExpr.setBType(symTable.stringType);
result = qnameExpr;
return;
}
if (varRefExpr.symbol == null) {
result = varRefExpr;
return;
}
// Prefer the original symbol over any desugar-introduced alias.
if ((varRefExpr.symbol.tag & SymTag.VARIABLE) == SymTag.VARIABLE) {
BVarSymbol varSymbol = (BVarSymbol) varRefExpr.symbol;
if (varSymbol.originalSymbol != null) {
varRefExpr.symbol = varSymbol.originalSymbol;
}
}
BSymbol ownerSymbol = varRefExpr.symbol.owner;
if ((varRefExpr.symbol.tag & SymTag.FUNCTION) == SymTag.FUNCTION &&
varRefExpr.symbol.type.tag == TypeTags.INVOKABLE) {
genVarRefExpr = new BLangFunctionVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((varRefExpr.symbol.tag & SymTag.TYPE) == SymTag.TYPE &&
!((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT)) {
genVarRefExpr = new BLangTypeLoad(varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.INVOKABLE) == SymTag.INVOKABLE ||
(ownerSymbol.tag & SymTag.LET) == SymTag.LET) {
genVarRefExpr = new BLangLocalVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.STRUCT) == SymTag.STRUCT) {
genVarRefExpr = new BLangFieldVarRef((BVarSymbol) varRefExpr.symbol);
} else if ((ownerSymbol.tag & SymTag.PACKAGE) == SymTag.PACKAGE ||
(ownerSymbol.tag & SymTag.SERVICE) == SymTag.SERVICE) {
// Simple-typed constants are inlined as literals instead of var refs.
if ((varRefExpr.symbol.tag & SymTag.CONSTANT) == SymTag.CONSTANT) {
BConstantSymbol constSymbol = (BConstantSymbol) varRefExpr.symbol;
if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
BLangLiteral literal = ASTBuilderUtil.createLiteral(varRefExpr.pos, constSymbol.literalType,
constSymbol.value.value);
result = rewriteExpr(addConversionExprIfRequired(literal, varRefExpr.getBType()));
return;
}
}
genVarRefExpr = new BLangPackageVarRef((BVarSymbol) varRefExpr.symbol);
// Inside a lock, the global and everything it depends on must be locked.
if (!enclLocks.isEmpty()) {
BVarSymbol symbol = (BVarSymbol) varRefExpr.symbol;
BLangLockStmt lockStmt = enclLocks.peek();
lockStmt.addLockVariable(symbol);
lockStmt.addLockVariable(this.globalVariablesDependsOn.getOrDefault(symbol, new HashSet<>()));
}
}
genVarRefExpr.setBType(varRefExpr.getBType());
genVarRefExpr.pos = varRefExpr.pos;
// LValues (and `_`) keep the symbol's own type and skip conversion.
if ((varRefExpr.isLValue)
|| genVarRefExpr.symbol.name.equals(IGNORE)) {
genVarRefExpr.isLValue = varRefExpr.isLValue;
genVarRefExpr.setBType(varRefExpr.symbol.type);
result = genVarRefExpr;
return;
}
genVarRefExpr.isLValue = varRefExpr.isLValue;
// RValues read at the symbol's type and are converted to the expected type.
BType targetType = genVarRefExpr.getBType();
genVarRefExpr.setBType(genVarRefExpr.symbol.type);
BLangExpression expression = addConversionExprIfRequired(genVarRefExpr, targetType);
result = expression.impConversionExpr != null ? expression.impConversionExpr : expression;
}
/**
 * Namespace-prefixed field access (e.g. {@code x.ns:attr}) shares the common
 * field-access desugaring path.
 */
@Override
public void visit(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixedFieldBasedAccess) {
    this.rewriteFieldBasedAccess(nsPrefixedFieldBasedAccess);
}
// Desugars a field access (`x.field`) into the concrete access node for the
// static type of `x`: struct field access for objects/records, JSON/map/XML
// access for lax and map types, or an invocation for XML attribute access.
// Safe navigation (`x?.field` chains) is handled up front by rewriting into
// the safe-navigation form.
private void rewriteFieldBasedAccess(BLangFieldBasedAccess fieldAccessExpr) {
if (safeNavigate(fieldAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(fieldAccessExpr));
return;
}
BLangAccessExpression targetVarRef = fieldAccessExpr;
BType varRefType = types.getTypeWithEffectiveIntersectionTypes(fieldAccessExpr.expr.getBType());
fieldAccessExpr.expr = rewriteExpr(fieldAccessExpr.expr);
if (!types.isSameType(fieldAccessExpr.expr.getBType(), varRefType)) {
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, varRefType);
}
BLangLiteral stringLit = createStringLiteral(fieldAccessExpr.field.pos,
StringEscapeUtils.unescapeJava(fieldAccessExpr.field.value));
int varRefTypeTag = varRefType.tag;
if (varRefTypeTag == TypeTags.OBJECT ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.OBJECT)) {
// Accessing an attached method as a value becomes a closure over it.
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE &&
((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
result = rewriteObjectMemberAccessAsField(fieldAccessExpr);
return;
} else {
boolean isStoreOnCreation = fieldAccessExpr.isStoreOnCreation;
// Field writes inside the (generated) initializer count as
// store-on-creation even when not flagged as such.
if (!isStoreOnCreation && varRefTypeTag == TypeTags.OBJECT && env.enclInvokable != null) {
BInvokableSymbol originalFuncSymbol = ((BLangFunction) env.enclInvokable).originalFuncSymbol;
BObjectTypeSymbol objectTypeSymbol = (BObjectTypeSymbol) varRefType.tsymbol;
BAttachedFunction initializerFunc = objectTypeSymbol.initializerFunc;
BAttachedFunction generatedInitializerFunc = objectTypeSymbol.generatedInitializerFunc;
if ((generatedInitializerFunc != null && originalFuncSymbol == generatedInitializerFunc.symbol) ||
(initializerFunc != null && originalFuncSymbol == initializerFunc.symbol)) {
isStoreOnCreation = true;
}
}
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false,
isStoreOnCreation);
}
} else if (varRefTypeTag == TypeTags.RECORD ||
(varRefTypeTag == TypeTags.UNION &&
((BUnionType) varRefType).getMemberTypes().iterator().next().tag == TypeTags.RECORD)) {
if (fieldAccessExpr.symbol != null && fieldAccessExpr.symbol.type.tag == TypeTags.INVOKABLE
&& ((fieldAccessExpr.symbol.flags & Flags.ATTACHED) == Flags.ATTACHED)) {
targetVarRef = new BLangStructFunctionVarRef(fieldAccessExpr.expr, (BVarSymbol) fieldAccessExpr.symbol);
} else {
targetVarRef = new BLangStructFieldAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
(BVarSymbol) fieldAccessExpr.symbol, false, fieldAccessExpr.isStoreOnCreation);
}
} else if (types.isLax(varRefType)) {
// Lax access: JSON-like field access that may produce an error.
if (!(varRefType.tag == TypeTags.XML || varRefType.tag == TypeTags.XML_ELEMENT)) {
if (varRefType.tag == TypeTags.MAP && TypeTags.isXMLTypeTag(((BMapType) varRefType).constraint.tag)) {
result = rewriteExpr(rewriteLaxMapAccess(fieldAccessExpr));
return;
}
fieldAccessExpr.expr = addConversionExprIfRequired(fieldAccessExpr.expr, symTable.jsonType);
targetVarRef = new BLangJSONAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit);
} else {
// XML values: field access means attribute / element-name access.
BLangInvocation xmlAccessInvocation = rewriteXMLAttributeOrElemNameAccess(fieldAccessExpr);
xmlAccessInvocation.setBType(fieldAccessExpr.getBType());
result = xmlAccessInvocation;
return;
}
} else if (varRefTypeTag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.isStoreOnCreation);
} else if (TypeTags.isXMLTypeTag(varRefTypeTag)) {
targetVarRef = new BLangXMLAccessExpr(fieldAccessExpr.pos, fieldAccessExpr.expr, stringLit,
fieldAccessExpr.fieldKind);
}
targetVarRef.isLValue = fieldAccessExpr.isLValue;
targetVarRef.setBType(fieldAccessExpr.getBType());
targetVarRef.optionalFieldAccess = fieldAccessExpr.optionalFieldAccess;
result = targetVarRef;
}
/**
 * Plain field access shares the common field-access desugaring path.
 */
@Override
public void visit(BLangFieldBasedAccess fieldAccessExpr) {
    this.rewriteFieldBasedAccess(fieldAccessExpr);
}
// Desugars `obj.method` (accessed as a value, not invoked) into a generated
// delegate lambda that closes over the receiver and forwards its parameters
// to the real method. Non-variable receivers are first bound to a temporary
// so they are evaluated exactly once; in that case the lambda is wrapped in
// a statement expression that defines the temporary.
private BLangNode rewriteObjectMemberAccessAsField(BLangFieldBasedAccess fieldAccessExpr) {
Location pos = fieldAccessExpr.pos;
BInvokableSymbol originalMemberFuncSymbol = (BInvokableSymbol) fieldAccessExpr.symbol;
// Synthesize the delegate function and its symbol.
BLangFunction func = (BLangFunction) TreeBuilder.createFunctionNode();
String funcName = "$annon$method$delegate$" + lambdaFunctionCount++;
BInvokableSymbol funcSymbol = new BInvokableSymbol(SymTag.INVOKABLE, (Flags.ANONYMOUS | Flags.LAMBDA),
names.fromString(funcName),
env.enclPkg.packageID, originalMemberFuncSymbol.type, env.scope.owner, pos, VIRTUAL);
funcSymbol.retType = originalMemberFuncSymbol.retType;
funcSymbol.bodyExist = true;
funcSymbol.params = new ArrayList<>();
funcSymbol.scope = new Scope(funcSymbol);
func.pos = pos;
func.name = createIdentifier(pos, funcName);
func.flagSet.add(Flag.LAMBDA);
func.flagSet.add(Flag.ANONYMOUS);
func.body = (BLangBlockFunctionBody) TreeBuilder.createBlockFunctionBodyNode();
func.symbol = funcSymbol;
func.setBType(funcSymbol.type);
func.closureVarSymbols = new LinkedHashSet<>();
BLangExpression receiver = fieldAccessExpr.expr;
// intermediateObjDef is non-null only when the receiver is not a plain
// variable reference and had to be captured into a temporary.
BLangSimpleVariableDef intermediateObjDef = null;
if (receiver.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BSymbol receiverSymbol = ((BLangVariableReference) receiver).symbol;
receiverSymbol.closure = true;
func.closureVarSymbols.add(new ClosureVarSymbol(receiverSymbol, pos));
} else {
BLangSimpleVariableDef varDef = createVarDef("$$temp$obj$" + annonVarCount++, receiver.getBType(),
receiver, pos);
intermediateObjDef = varDef;
varDef.var.symbol.closure = true;
env.scope.define(varDef.var.symbol.name, varDef.var.symbol);
BLangSimpleVarRef variableRef = createVariableRef(pos, varDef.var.symbol);
func.closureVarSymbols.add(new ClosureVarSymbol(varDef.var.symbol, pos));
receiver = variableRef;
}
// Mirror the target method's required parameters onto the delegate and
// pass them straight through as arguments.
ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
for (BVarSymbol param : originalMemberFuncSymbol.params) {
BLangSimpleVariable fParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
fParam.symbol = new BVarSymbol(0, param.name, env.enclPkg.packageID, param.type, funcSymbol, pos,
VIRTUAL);
fParam.pos = pos;
fParam.name = createIdentifier(pos, param.name.value);
fParam.setBType(param.type);
func.requiredParams.add(fParam);
funcSymbol.params.add(fParam.symbol);
funcSymbol.scope.define(fParam.symbol.name, fParam.symbol);
BLangSimpleVarRef paramRef = createVariableRef(pos, fParam.symbol);
requiredArgs.add(paramRef);
}
// Mirror the rest parameter, forwarded as a rest-args (`...x`) expression.
ArrayList<BLangExpression> restArgs = new ArrayList<>();
if (originalMemberFuncSymbol.restParam != null) {
BLangSimpleVariable restParam = (BLangSimpleVariable) TreeBuilder.createSimpleVariableNode();
func.restParam = restParam;
BVarSymbol restSym = originalMemberFuncSymbol.restParam;
restParam.name = ASTBuilderUtil.createIdentifier(pos, restSym.name.value);
restParam.symbol = new BVarSymbol(0, restSym.name, env.enclPkg.packageID, restSym.type, funcSymbol, pos,
VIRTUAL);
restParam.pos = pos;
restParam.setBType(restSym.type);
funcSymbol.restParam = restParam.symbol;
funcSymbol.scope.define(restParam.symbol.name, restParam.symbol);
BLangSimpleVarRef restArg = createVariableRef(pos, restParam.symbol);
BLangRestArgsExpression restArgExpr = new BLangRestArgsExpression();
restArgExpr.expr = restArg;
restArgExpr.pos = pos;
restArgExpr.setBType(restSym.type);
restArgExpr.expectedType = restArgExpr.getBType();
restArgs.add(restArgExpr);
}
// Delegate body: return receiver.method(args...);
BLangIdentifier field = fieldAccessExpr.field;
BLangReturn retStmt = (BLangReturn) TreeBuilder.createReturnNode();
retStmt.expr = createObjectMethodInvocation(
receiver, field, fieldAccessExpr.symbol, requiredArgs, restArgs);
((BLangBlockFunctionBody) func.body).addStatement(retStmt);
BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
lambdaFunction.function = func;
lambdaFunction.capturedClosureEnv = env.createClone();
env.enclPkg.functions.add(func);
env.enclPkg.topLevelNodes.add(func);
lambdaFunction.parent = env.enclInvokable;
lambdaFunction.setBType(func.getBType());
if (intermediateObjDef == null) {
return rewrite(lambdaFunction, env);
} else {
// Evaluate the receiver once, then yield the lambda.
BLangStatementExpression expr = createStatementExpression(intermediateObjDef, rewrite(lambdaFunction, env));
expr.setBType(lambdaFunction.getBType());
return rewrite(expr, env);
}
}
/**
 * Builds a {@code receiver.method(requiredArgs..., restArgs...)} invocation
 * node bound to the given invocable symbol; the invocation's type is the
 * symbol's return type.
 */
private BLangInvocation createObjectMethodInvocation(BLangExpression receiver, BLangIdentifier field,
                                                     BSymbol invocableSymbol,
                                                     List<BLangExpression> requiredArgs,
                                                     List<BLangExpression> restArgs) {
    BLangInvocation invocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocation.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocation.name = field;
    invocation.expr = receiver;
    invocation.symbol = invocableSymbol;
    invocation.requiredArgs = requiredArgs;
    invocation.restArgs = restArgs;
    invocation.setBType(((BInvokableType) invocableSymbol.type).retType);
    return invocation;
}
// Desugars lax field access on a map (e.g. `m.field` where `m` is
// `map<xml>`) into a statement expression:
//   T|error $mapAccessResult$;
//   var $mapAccess = m["field"];            // T|()
//   if $mapAccess is () { $mapAccessResult$ = error("{map}InvalidKey", key=...); }
//   else { $mapAccessResult$ = $mapAccess; }
//   => $mapAccessResult$
private BLangStatementExpression rewriteLaxMapAccess(BLangFieldBasedAccess fieldAccessExpr) {
BLangStatementExpression statementExpression = new BLangStatementExpression();
BLangBlockStmt block = new BLangBlockStmt();
statementExpression.stmt = block;
BUnionType fieldAccessType = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.errorType);
Location pos = fieldAccessExpr.pos;
BLangSimpleVariableDef result = createVarDef("$mapAccessResult$", fieldAccessType, null, pos);
block.addStatement(result);
BLangSimpleVarRef resultRef = ASTBuilderUtil.createVariableRef(pos, result.var.symbol);
resultRef.setBType(fieldAccessType);
statementExpression.setBType(fieldAccessType);
BLangLiteral mapIndex = ASTBuilderUtil.createLiteral(
fieldAccessExpr.field.pos, symTable.stringType, fieldAccessExpr.field.value);
// The raw map access yields value-or-nil; nil signals a missing key.
BLangMapAccessExpr mapAccessExpr = new BLangMapAccessExpr(pos, fieldAccessExpr.expr, mapIndex);
BUnionType xmlOrNil = BUnionType.create(null, fieldAccessExpr.getBType(), symTable.nilType);
mapAccessExpr.setBType(xmlOrNil);
BLangSimpleVariableDef mapResult = createVarDef("$mapAccess", xmlOrNil, mapAccessExpr, pos);
BLangSimpleVarRef mapResultRef = ASTBuilderUtil.createVariableRef(pos, mapResult.var.symbol);
block.addStatement(mapResult);
BLangIf ifStmt = ASTBuilderUtil.createIfStmt(pos, block);
BLangIsLikeExpr isLikeNilExpr = createIsLikeExpression(pos, mapResultRef, symTable.nilType);
ifStmt.expr = isLikeNilExpr;
BLangBlockStmt resultNilBody = new BLangBlockStmt();
ifStmt.body = resultNilBody;
BLangBlockStmt resultHasValueBody = new BLangBlockStmt();
ifStmt.elseStmt = resultHasValueBody;
// Construct the `{ballerina/lang.map}InvalidKey` error for the nil branch.
BLangErrorConstructorExpr errorConstructorExpr =
(BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
names.fromString(""), names.fromString("error"));
errorConstructorExpr.setBType(symbol.type);
List<BLangExpression> positionalArgs = new ArrayList<>();
List<BLangNamedArgsExpression> namedArgs = new ArrayList<>();
positionalArgs.add(createStringLiteral(pos, "{" + RuntimeConstants.MAP_LANG_LIB + "}InvalidKey"));
BLangNamedArgsExpression message = new BLangNamedArgsExpression();
message.name = ASTBuilderUtil.createIdentifier(pos, "key");
message.expr = createStringLiteral(pos, fieldAccessExpr.field.value);
namedArgs.add(message);
errorConstructorExpr.positionalArgs = positionalArgs;
errorConstructorExpr.namedArgs = namedArgs;
BLangSimpleVariableDef errorDef =
createVarDef("$_invalid_key_error", symTable.errorType, errorConstructorExpr, pos);
resultNilBody.addStatement(errorDef);
BLangSimpleVarRef errorRef = ASTBuilderUtil.createVariableRef(pos, errorDef.var.symbol);
BLangAssignment errorVarAssignment = ASTBuilderUtil.createAssignmentStmt(pos, resultNilBody);
errorVarAssignment.varRef = resultRef;
errorVarAssignment.expr = errorRef;
BLangAssignment mapResultAssignment = ASTBuilderUtil.createAssignmentStmt(
pos, resultHasValueBody);
mapResultAssignment.varRef = resultRef;
mapResultAssignment.expr = mapResultRef;
statementExpression.expr = resultRef;
return statementExpression;
}
// Desugars field-based access on an xml value (`x.attr`, `x.'ns:attr`, `x._`) into the
// corresponding xml lang-lib invocation: `_` maps to element-name access, everything else
// to attribute access by (possibly namespace-expanded) name.
private BLangInvocation rewriteXMLAttributeOrElemNameAccess(BLangFieldBasedAccess fieldAccessExpr) {
ArrayList<BLangExpression> args = new ArrayList<>();
String fieldName = fieldAccessExpr.field.value;
if (fieldAccessExpr.fieldKind == FieldKind.WITH_NS) {
// Namespace-qualified access: expand the local name into `{namespaceURI}localName` form.
BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess nsPrefixAccess =
(BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) fieldAccessExpr;
fieldName = createExpandedQName(nsPrefixAccess.nsSymbol.namespaceURI, fieldName);
}
// The special field `_` accesses the element name (nil-lifting variant) instead of an attribute.
if (fieldName.equals("_")) {
return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ELEMENT_NAME_NIL_LIFTING,
fieldAccessExpr.expr, new ArrayList<>(), new ArrayList<>());
}
BLangLiteral attributeNameLiteral = createStringLiteral(fieldAccessExpr.field.pos, fieldName);
args.add(attributeNameLiteral);
// Second argument tells the runtime whether this was an optional access (`x?.attr`).
args.add(isOptionalAccessToLiteral(fieldAccessExpr));
return createLanglibXMLInvocation(fieldAccessExpr.pos, XML_INTERNAL_GET_ATTRIBUTE, fieldAccessExpr.expr, args,
new ArrayList<>());
}
// Materializes the optional-access flag of the given field access as a rewritten
// boolean literal expression.
private BLangExpression isOptionalAccessToLiteral(BLangFieldBasedAccess fieldAccessExpr) {
    boolean optionalAccess = fieldAccessExpr.isOptionalFieldAccess();
    BLangLiteral flagLiteral = createLiteral(fieldAccessExpr.pos, symTable.booleanType, optionalAccess);
    return rewrite(flagLiteral, env);
}
// Builds the expanded QName form `{namespaceURI}localName` used for namespace-qualified
// xml attribute lookups.
private String createExpandedQName(String nsURI, String localName) {
    StringBuilder qName = new StringBuilder();
    qName.append('{').append(nsURI).append('}').append(localName);
    return qName.toString();
}
@Override
public void visit(BLangIndexBasedAccess indexAccessExpr) {
// Safe-navigation chains (e.g. `a?.b[i]`) are rewritten first and then re-desugared.
if (safeNavigate(indexAccessExpr)) {
result = rewriteExpr(rewriteSafeNavigationExpr(indexAccessExpr));
return;
}
BLangIndexBasedAccess targetVarRef = indexAccessExpr;
indexAccessExpr.indexExpr = rewriteExpr(indexAccessExpr.indexExpr);
// Work against the effective type when the container has an intersection type;
// insert a conversion if rewriting changed the expression's static type.
BType varRefType = types.getTypeWithEffectiveIntersectionTypes(indexAccessExpr.expr.getBType());
indexAccessExpr.expr = rewriteExpr(indexAccessExpr.expr);
if (!types.isSameType(indexAccessExpr.expr.getBType(), varRefType)) {
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, varRefType);
}
// Replace the generic index access with a container-specific access node.
if (varRefType.tag == TypeTags.MAP) {
targetVarRef = new BLangMapAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr, indexAccessExpr.isStoreOnCreation);
} else if (types.isSubTypeOfMapping(types.getSafeType(varRefType, true, false))) {
// Record (or other mapping) member access keyed by the resolved field symbol.
targetVarRef = new BLangStructFieldAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr,
(BVarSymbol) indexAccessExpr.symbol, false);
} else if (types.isSubTypeOfList(varRefType)) {
targetVarRef = new BLangArrayAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (TypeTags.isXMLTypeTag(varRefType.tag)) {
targetVarRef = new BLangXMLAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (types.isAssignable(varRefType, symTable.stringType)) {
// String member access: normalize the receiver to the string type first.
indexAccessExpr.expr = addConversionExprIfRequired(indexAccessExpr.expr, symTable.stringType);
targetVarRef = new BLangStringAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
} else if (varRefType.tag == TypeTags.TABLE) {
if (targetVarRef.indexExpr.getKind() == NodeKind.TABLE_MULTI_KEY) {
// Multi-key table access: wrap the key expressions into a tuple literal
// whose member types mirror the key expression types.
BLangTupleLiteral listConstructorExpr = new BLangTupleLiteral();
listConstructorExpr.exprs = ((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs;
List<BType> memberTypes = new ArrayList<>();
((BLangTableMultiKeyExpr) indexAccessExpr.indexExpr).multiKeyIndexExprs.
forEach(expression -> memberTypes.add(expression.getBType()));
listConstructorExpr.setBType(new BTupleType(memberTypes));
indexAccessExpr.indexExpr = listConstructorExpr;
}
targetVarRef = new BLangTableAccessExpr(indexAccessExpr.pos, indexAccessExpr.expr,
indexAccessExpr.indexExpr);
}
// The replacement node keeps the original node's lvalue-ness and static type.
targetVarRef.isLValue = indexAccessExpr.isLValue;
targetVarRef.setBType(indexAccessExpr.getBType());
result = targetVarRef;
}
@Override
public void visit(BLangTableMultiKeyExpr tableMultiKeyExpr) {
    // Rewrite every key expression of the multi-key table access in place.
    List<BLangExpression> keyExprs = tableMultiKeyExpr.multiKeyIndexExprs;
    rewriteExprs(keyExprs);
    result = tableMultiKeyExpr;
}
@Override
public void visit(BLangInvocation iExpr) {
    // Plain function/method call: desugar through the shared invocation path, not async.
    final boolean async = false;
    rewriteInvocation(iExpr, async);
}
@Override
public void visit(BLangErrorConstructorExpr errorConstructorExpr) {
// Desugars `error(msg, cause, k1 = v1, ...)`. The second positional argument is the
// cause; when omitted it is filled with a nil literal so downstream code can rely on
// its presence, and it is then converted to the error type.
if (errorConstructorExpr.positionalArgs.size() == 1) {
errorConstructorExpr.positionalArgs.add(createNilLiteral());
}
errorConstructorExpr.positionalArgs.set(1,
addConversionExprIfRequired(errorConstructorExpr.positionalArgs.get(1), symTable.errorType));
rewriteExprs(errorConstructorExpr.positionalArgs);
BLangExpression errorDetail;
BLangRecordLiteral recordLiteral = ASTBuilderUtil.createEmptyRecordLiteral(errorConstructorExpr.pos,
((BErrorType) errorConstructorExpr.getBType()).detailType);
if (errorConstructorExpr.namedArgs.isEmpty()) {
// No named args: the detail is an immutable empty record of the detail type.
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral), recordLiteral.getBType());
} else {
// Build the detail mapping from the named arguments, one key-value field each.
for (BLangNamedArgsExpression namedArg : errorConstructorExpr.namedArgs) {
BLangRecordLiteral.BLangRecordKeyValueField member = new BLangRecordLiteral.BLangRecordKeyValueField();
member.key = new BLangRecordLiteral.BLangRecordKey(ASTBuilderUtil.createLiteral(namedArg.name.pos,
symTable.stringType, namedArg.name.value));
if (recordLiteral.getBType().tag == TypeTags.RECORD) {
// Record-typed detail: store the value as `any`.
member.valueExpr = addConversionExprIfRequired(namedArg.expr, symTable.anyType);
} else {
member.valueExpr = addConversionExprIfRequired(namedArg.expr, namedArg.expr.getBType());
}
recordLiteral.fields.add(member);
}
// The detail mapping is made immutable via a read-only clone.
errorDetail = visitCloneReadonly(rewriteExpr(recordLiteral),
((BErrorType) errorConstructorExpr.getBType()).detailType);
}
errorConstructorExpr.errorDetail = errorDetail;
result = errorConstructorExpr;
}
@Override
public void visit(BLangInvocation.BLangActionInvocation actionInvocation) {
    // A synchronous action invoked inside a transaction must first make sure the
    // transaction coordinator has been started.
    boolean startCoordinator = actionInvocation.invokedInsideTransaction && !actionInvocation.async;
    if (startCoordinator) {
        transactionDesugar.startTransactionCoordinatorOnce(env, actionInvocation.pos);
    }
    rewriteInvocation(actionInvocation, actionInvocation.async);
}
// Shared desugar path for function, method and (possibly async) action invocations:
// reorders named/defaultable arguments into positional order, rewrites them, and replaces
// object/record method calls with attached-function invocation nodes.
private void rewriteInvocation(BLangInvocation invocation, boolean async) {
BLangInvocation invRef = invocation;
if (!enclLocks.isEmpty()) {
// Inside a lock statement: the invoked function's dependent global variables
// must be added to the lock's variable set.
BLangLockStmt lock = enclLocks.peek();
lock.lockVariables.addAll(((BInvokableSymbol) invocation.symbol).dependentGlobalVars);
}
reorderArguments(invocation);
invocation.requiredArgs = rewriteExprs(invocation.requiredArgs);
fixStreamTypeCastsInInvocationParams(invocation);
fixNonRestArgTypeCastInTypeParamInvocation(invocation);
invocation.restArgs = rewriteExprs(invocation.restArgs);
annotationDesugar.defineStatementAnnotations(invocation.annAttachments, invocation.pos,
invocation.symbol.pkgID, invocation.symbol.owner, env);
if (invocation.functionPointerInvocation) {
// Function-pointer calls have a dedicated desugar path.
visitFunctionPointerInvocation(invocation);
return;
}
invocation.expr = rewriteExpr(invocation.expr);
result = invRef;
BInvokableSymbol invSym = (BInvokableSymbol) invocation.symbol;
if (Symbols.isFlagOn(invSym.retType.flags, Flags.PARAMETERIZED)) {
// Dependently-typed return: materialize the concrete return type; async calls
// yield a future of that type.
BType retType = unifier.build(invSym.retType);
invocation.setBType(invocation.async ? new BFutureType(TypeTags.FUTURE, retType, null) : retType);
}
if (invocation.expr == null) {
fixTypeCastInTypeParamInvocation(invocation, invRef);
if (invocation.exprSymbol == null) {
return;
}
// Attached call with an implicit receiver symbol: synthesize the receiver reference.
invocation.expr = ASTBuilderUtil.createVariableRef(invocation.pos, invocation.exprSymbol);
invocation.expr = rewriteExpr(invocation.expr);
}
switch (invocation.expr.getBType().tag) {
case TypeTags.OBJECT:
case TypeTags.RECORD:
if (!invocation.langLibInvocation) {
// Method call on an object/record: pass the receiver as the first argument.
List<BLangExpression> argExprs = new ArrayList<>(invocation.requiredArgs);
argExprs.add(0, invocation.expr);
BLangAttachedFunctionInvocation attachedFunctionInvocation =
new BLangAttachedFunctionInvocation(invocation.pos, argExprs, invocation.restArgs,
invocation.symbol, invocation.getBType(),
invocation.expr, async);
attachedFunctionInvocation.name = invocation.name;
attachedFunctionInvocation.annAttachments = invocation.annAttachments;
result = invRef = attachedFunctionInvocation;
}
break;
}
fixTypeCastInTypeParamInvocation(invocation, invRef);
}
// For lang-lib invocations, casts each required argument to its declared parameter type
// so that type-param substitution is reflected in the generated code.
private void fixNonRestArgTypeCastInTypeParamInvocation(BLangInvocation iExpr) {
    if (!iExpr.langLibInvocation) {
        return;
    }
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    int argCount = requiredArgs.size();
    for (int i = 0; i < argCount; i++) {
        BLangExpression arg = requiredArgs.get(i);
        requiredArgs.set(i, addConversionExprIfRequired(arg, params.get(i).type));
    }
}
/* This function is a workaround and need improvement
* Notes for improvement :
* 1. Both arguments are same.
* 2. Due to current type param logic we put type param flag on the original type.
* 3. Error type having Cloneable type with type param flag, change expression type by this code.
* 4. using error type is a problem as Cloneable type is an typeparm eg: ExprBodiedFunctionTest
* added never to CloneableType type param
* @typeParam type
* CloneableType Cloneable|never;
*
*/
private void fixTypeCastInTypeParamInvocation(BLangInvocation iExpr, BLangInvocation genIExpr) {
var returnTypeOfInvokable = ((BInvokableSymbol) iExpr.symbol).retType;
// Only lang-lib calls or calls whose return type carries a type param need fixing.
if (!iExpr.langLibInvocation && !TypeParamAnalyzer.containsTypeParam(returnTypeOfInvokable)) {
return;
}
// Give the generated invocation the invokable's declared (type-param) return type,
// then cast it back to the originally-inferred type of the expression.
// (Async invocations keep their future type; only the cast is added.)
BType originalInvType = genIExpr.getBType();
if (!genIExpr.async) {
genIExpr.setBType(returnTypeOfInvokable);
}
this.result = addConversionExprIfRequired(genIExpr, originalInvType);
}
// Adds explicit conversions on arguments whose corresponding parameter is stream-typed.
private void fixStreamTypeCastsInInvocationParams(BLangInvocation iExpr) {
    List<BVarSymbol> params = ((BInvokableSymbol) iExpr.symbol).params;
    if (params.isEmpty()) {
        return;
    }
    List<BLangExpression> requiredArgs = iExpr.requiredArgs;
    for (int i = 0; i < requiredArgs.size(); i++) {
        BVarSymbol param = params.get(i);
        if (param.type.tag != TypeTags.STREAM) {
            continue;
        }
        requiredArgs.set(i, addConversionExprIfRequired(requiredArgs.get(i), param.type));
    }
}
// Creates a literal `()` (nil) expression typed with the nil type.
private BLangLiteral createNilLiteral() {
    BLangLiteral nilLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    nilLiteral.setBType(symTable.nilType);
    nilLiteral.value = null;
    return nilLiteral;
}
public void visit(BLangTypeInit typeInitExpr) {
    // `new stream<...>(...)` has a dedicated construction path; every other
    // `new T(...)` goes through object-initialization desugaring.
    boolean isStreamInit = typeInitExpr.getBType().tag == TypeTags.STREAM;
    if (isStreamInit) {
        result = rewriteExpr(desugarStreamTypeInit(typeInitExpr));
    } else {
        result = rewrite(desugarObjectTypeInit(typeInitExpr), env);
    }
}
// Desugars `new T(args)` into a statement expression roughly of the form:
// T $obj$ = new; <initRet> $temp$ = $obj$.init(args);
// $result$ = ($temp$ is error) ? $temp$ : $obj$;
// When init() cannot return an error, the invocation is emitted as a plain statement
// and the object itself is the value of the expression.
private BLangStatementExpression desugarObjectTypeInit(BLangTypeInit typeInitExpr) {
typeInitExpr.desugared = true;
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(typeInitExpr.pos);
BType objType = getObjectType(typeInitExpr.getBType());
BLangSimpleVariableDef objVarDef = createVarDef("$obj$", objType, typeInitExpr, typeInitExpr.pos);
objVarDef.var.name.pos = symTable.builtinPos;
BLangSimpleVarRef objVarRef = ASTBuilderUtil.createVariableRef(typeInitExpr.pos, objVarDef.var.symbol);
blockStmt.addStatement(objVarDef);
// The init call is dispatched to the compiler-generated initializer, with the new
// object as its receiver.
typeInitExpr.initInvocation.exprSymbol = objVarDef.var.symbol;
typeInitExpr.initInvocation.symbol = ((BObjectTypeSymbol) objType.tsymbol).generatedInitializerFunc.symbol;
if (typeInitExpr.initInvocation.getBType().tag == TypeTags.NIL) {
// init() cannot fail: invoke it for its side effects and yield the object.
BLangExpressionStmt initInvExpr = ASTBuilderUtil.createExpressionStmt(typeInitExpr.pos, blockStmt);
initInvExpr.expr = typeInitExpr.initInvocation;
typeInitExpr.initInvocation.name.value = GENERATED_INIT_SUFFIX.value;
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, objVarRef);
stmtExpr.setBType(objVarRef.symbol.type);
return stmtExpr;
}
// init() may return an error: capture its result and select between error and object.
BLangSimpleVariableDef initInvRetValVarDef = createVarDef("$temp$", typeInitExpr.initInvocation.getBType(),
typeInitExpr.initInvocation, typeInitExpr.pos);
blockStmt.addStatement(initInvRetValVarDef);
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", typeInitExpr.getBType(), null, typeInitExpr.pos);
blockStmt.addStatement(resultVarDef);
BLangSimpleVarRef initRetValVarRefInCondition =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
BLangTypeTestExpr isErrorTest =
ASTBuilderUtil.createTypeTestExpr(symTable.builtinPos, initRetValVarRefInCondition, getErrorTypeNode());
isErrorTest.setBType(symTable.booleanType);
// then-branch: propagate the error returned by init().
BLangSimpleVarRef thenInitRetValVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, initInvRetValVarDef.var.symbol);
BLangSimpleVarRef thenResultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangAssignment errAssignment =
ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, thenResultVarRef, thenInitRetValVarRef);
thenStmt.addStatement(errAssignment);
// else-branch: initialization succeeded; the result is the new object.
BLangSimpleVarRef elseResultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangAssignment objAssignment =
ASTBuilderUtil.createAssignmentStmt(symTable.builtinPos, elseResultVarRef, objVarRef);
BLangBlockStmt elseStmt = ASTBuilderUtil.createBlockStmt(symTable.builtinPos);
elseStmt.addStatement(objAssignment);
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(symTable.builtinPos, isErrorTest, thenStmt, elseStmt);
blockStmt.addStatement(ifelse);
BLangSimpleVarRef resultVarRef =
ASTBuilderUtil.createVariableRef(symTable.builtinPos, resultVarDef.var.symbol);
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(resultVarRef.symbol.type);
return stmtExpr;
}
// Desugars `new stream<C, E>(iterator?)` into a call to the lang.internal
// stream-construction function, passing typedescs for the constraint and completion
// types plus the optional iterator argument.
private BLangInvocation desugarStreamTypeInit(BLangTypeInit typeInitExpr) {
BInvokableSymbol symbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
.lookup(Names.CONSTRUCT_STREAM).symbol;
// typedesc<C> for the stream's constraint type.
BType constraintType = ((BStreamType) typeInitExpr.getBType()).constraint;
BType constraintTdType = new BTypedescType(constraintType, symTable.typeDesc.tsymbol);
BLangTypedescExpr constraintTdExpr = new BLangTypedescExpr();
constraintTdExpr.resolvedType = constraintType;
constraintTdExpr.setBType(constraintTdType);
// typedesc<E> for the stream's completion type.
BType completionType = ((BStreamType) typeInitExpr.getBType()).completionType;
BType completionTdType = new BTypedescType(completionType, symTable.typeDesc.tsymbol);
BLangTypedescExpr completionTdExpr = new BLangTypedescExpr();
completionTdExpr.resolvedType = completionType;
completionTdExpr.setBType(completionTdType);
List<BLangExpression> args = new ArrayList<>(Lists.of(constraintTdExpr, completionTdExpr));
// Optional iterator argument: `new stream<...>(itr)`.
if (!typeInitExpr.argsExpr.isEmpty()) {
args.add(typeInitExpr.argsExpr.get(0));
}
BLangInvocation streamConstructInvocation = ASTBuilderUtil.createInvocationExprForMethod(
typeInitExpr.pos, symbol, args, symResolver);
streamConstructInvocation.setBType(new BStreamType(TypeTags.STREAM, constraintType, completionType, null));
return streamConstructInvocation;
}
// Creates a variable definition `T name = expr;`, reusing an in-scope symbol with the
// same name when one exists and otherwise minting a fresh virtual variable symbol.
private BLangSimpleVariableDef createVarDef(String name, BType type, BLangExpression expr,
                                            Location location) {
    BSymbol varSym = symResolver.lookupSymbolInMainSpace(env, names.fromString(name));
    boolean symbolMissing = varSym == null || varSym == symTable.notFoundSymbol;
    if (symbolMissing) {
        varSym = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, type,
                this.env.scope.owner, location, VIRTUAL);
    }
    BLangSimpleVariable variable = ASTBuilderUtil.createVariable(location, name, type, expr, (BVarSymbol) varSym);
    BLangSimpleVariableDef variableDef = ASTBuilderUtil.createVariableDef(location);
    variableDef.var = variable;
    variableDef.setBType(variable.getBType());
    return variableDef;
}
// Extracts the object component of the given type: the type itself when it is an object,
// or the first object member of a union (e.g. `Obj` from `Obj|error`). Any other type
// is illegal in an object-init context.
private BType getObjectType(BType type) {
    if (type.tag == TypeTags.OBJECT) {
        return type;
    }
    if (type.tag == TypeTags.UNION) {
        for (BType memberType : ((BUnionType) type).getMemberTypes()) {
            if (memberType.tag == TypeTags.OBJECT) {
                return memberType;
            }
        }
        return symTable.noType;
    }
    throw new IllegalStateException("None object type '" + type.toString() + "' found in object init context");
}
// Creates a synthetic `error` type node positioned at the builtin location.
BLangErrorType getErrorTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.pos = symTable.builtinPos;
    errorTypeNode.setBType(symTable.errorType);
    return errorTypeNode;
}
// Creates a synthetic `error?` type node.
// Fix: also set the builtin position, for parity with getErrorTypeNode(); previously
// the node was left with a null position.
BLangErrorType getErrorOrNillTypeNode() {
    BLangErrorType errorTypeNode = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    errorTypeNode.setBType(symTable.errorOrNilType);
    errorTypeNode.pos = symTable.builtinPos;
    return errorTypeNode;
}
@Override
public void visit(BLangTernaryExpr ternaryExpr) {
/*
* First desugar to if-else:
*
* T $result$;
* if () {
* $result$ = thenExpr;
* } else {
* $result$ = elseExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef =
createVarDef("$ternary_result$", ternaryExpr.getBType(), null, ternaryExpr.pos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos);
// Each branch assigns its expression into $ternary_result$.
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, thenResultVarRef, ternaryExpr.thenExpr);
thenBody.addStatement(thenAssignment);
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(ternaryExpr.pos, elseResultVarRef, ternaryExpr.elseExpr);
elseBody.addStatement(elseAssignment);
// The statement expression evaluates the block and yields $ternary_result$.
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(ternaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(ternaryExpr.pos, ternaryExpr.expr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(ternaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(ternaryExpr.getBType());
result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangWaitExpr waitExpr) {
    // `wait a | b | ...` is parsed as a binary tree; flatten it into the list of
    // alternative expressions. A single-expression wait becomes a singleton list.
    BLangExpression waitedExpr = waitExpr.getExpression();
    if (waitedExpr.getKind() == NodeKind.BINARY_EXPR) {
        waitExpr.exprList = collectAllBinaryExprs((BLangBinaryExpr) waitedExpr, new ArrayList<>());
    } else {
        waitExpr.exprList = Collections.singletonList(rewriteExpr(waitedExpr));
    }
    result = waitExpr;
}
// In-order walk over the wait alternative tree, accumulating rewritten leaves
// (left subtree first, then right) into the given list.
private List<BLangExpression> collectAllBinaryExprs(BLangBinaryExpr binaryExpr, List<BLangExpression> exprs) {
    for (BLangExpression operand : new BLangExpression[]{binaryExpr.lhsExpr, binaryExpr.rhsExpr}) {
        visitBinaryExprOfWait(operand, exprs);
    }
    return exprs;
}
// Recurses into nested wait alternatives; leaves are rewritten and collected.
private void visitBinaryExprOfWait(BLangExpression expr, List<BLangExpression> exprs) {
    if (expr.getKind() != NodeKind.BINARY_EXPR) {
        exprs.add(rewriteExpr(expr));
        return;
    }
    collectAllBinaryExprs((BLangBinaryExpr) expr, exprs);
}
@Override
public void visit(BLangWaitForAllExpr waitExpr) {
    // Rewrite every `key: value` entry (or the key itself for the shorthand form),
    // then replace the node with an internal wait literal carrying the same type.
    for (var keyValue : waitExpr.keyValuePairs) {
        if (keyValue.valueExpr == null) {
            keyValue.keyExpr = rewriteExpr(keyValue.keyExpr);
        } else {
            keyValue.valueExpr = rewriteExpr(keyValue.valueExpr);
        }
    }
    BLangExpression waitLiteral =
            new BLangWaitForAllExpr.BLangWaitLiteral(waitExpr.keyValuePairs, waitExpr.getBType());
    waitLiteral.pos = waitExpr.pos;
    result = rewriteExpr(waitLiteral);
}
@Override
public void visit(BLangTrapExpr trapExpr) {
    // Rewrite the trapped expression; a non-nil result may need conversion to the
    // trap expression's (error-containing) type.
    trapExpr.expr = rewriteExpr(trapExpr.expr);
    boolean trappedIsNil = trapExpr.expr.getBType().tag == TypeTags.NIL;
    if (!trappedIsNil) {
        trapExpr.expr = addConversionExprIfRequired(trapExpr.expr, trapExpr.getBType());
    }
    result = trapExpr;
}
@Override
public void visit(BLangBinaryExpr binaryExpr) {
// Desugars binary expressions: nil-lifting for nullable operands, int-range
// construction, logical short-circuit, and operand-type alignment casts for
// the remaining operators.
if (isNullableBinaryExpr(binaryExpr)) {
BLangStatementExpression stmtExpr = createStmtExprForNullableBinaryExpr(binaryExpr);
result = rewrite(stmtExpr, env);
return;
}
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE || binaryExpr.opKind == OperatorKind.CLOSED_RANGE) {
// `a ..< b` / `a ... b` become int-range constructions; the half-open end is
// adjusted before building the range.
if (binaryExpr.opKind == OperatorKind.HALF_OPEN_RANGE) {
binaryExpr.rhsExpr = getModifiedIntRangeEndExpr(binaryExpr.rhsExpr);
}
result = rewriteExpr(replaceWithIntRange(binaryExpr.pos, binaryExpr.lhsExpr, binaryExpr.rhsExpr));
return;
}
// Logical && / || require short-circuit desugaring.
if (binaryExpr.opKind == OperatorKind.AND || binaryExpr.opKind == OperatorKind.OR) {
visitBinaryLogicalExpr(binaryExpr);
return;
}
OperatorKind binaryOpKind = binaryExpr.opKind;
if (binaryOpKind == OperatorKind.ADD || binaryOpKind == OperatorKind.SUB ||
binaryOpKind == OperatorKind.MUL || binaryOpKind == OperatorKind.DIV ||
binaryOpKind == OperatorKind.MOD || binaryOpKind == OperatorKind.BITWISE_AND ||
binaryOpKind == OperatorKind.BITWISE_OR || binaryOpKind == OperatorKind.BITWISE_XOR) {
checkByteTypeIncompatibleOperations(binaryExpr);
}
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
binaryExpr.rhsExpr = rewriteExpr(binaryExpr.rhsExpr);
result = binaryExpr;
int rhsExprTypeTag = binaryExpr.rhsExpr.getBType().tag;
int lhsExprTypeTag = binaryExpr.lhsExpr.getBType().tag;
// Equality between a byte and an integer subtype: widen the byte side to int.
if (rhsExprTypeTag != lhsExprTypeTag && (binaryExpr.opKind == OperatorKind.EQUAL ||
binaryExpr.opKind == OperatorKind.NOT_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_EQUAL ||
binaryExpr.opKind == OperatorKind.REF_NOT_EQUAL)) {
if (TypeTags.isIntegerTypeTag(lhsExprTypeTag) && rhsExprTypeTag == TypeTags.BYTE) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
return;
}
if (lhsExprTypeTag == TypeTags.BYTE && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
return;
}
}
boolean isBinaryShiftOperator = symResolver.isBinaryShiftOperator(binaryOpKind);
boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryOpKind);
// Operands with the same static tag need no alignment, except for arithmetic/shift
// operators on non-value types.
if (lhsExprTypeTag == rhsExprTypeTag) {
if (!isBinaryShiftOperator && !isArithmeticOperator) {
return;
}
if (types.isValueType(binaryExpr.lhsExpr.getBType())) {
return;
}
}
// string + xml concatenation: wrap the string operand as an xml text literal.
if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(lhsExprTypeTag) &&
(rhsExprTypeTag == TypeTags.XML || rhsExprTypeTag == TypeTags.XML_TEXT)) {
binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
binaryExpr.lhsExpr.pos, symTable.xmlType);
return;
}
if (binaryExpr.opKind == OperatorKind.ADD && TypeTags.isStringTypeTag(rhsExprTypeTag) &&
(lhsExprTypeTag == TypeTags.XML || lhsExprTypeTag == TypeTags.XML_TEXT)) {
binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
binaryExpr.rhsExpr.pos, symTable.xmlType);
return;
}
// Numeric widening: decimal takes precedence over float, which takes precedence
// over the remaining numeric types — cast the other side up.
if (lhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
return;
}
if (rhsExprTypeTag == TypeTags.DECIMAL) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
return;
}
if (lhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.lhsExpr.getBType());
return;
}
if (rhsExprTypeTag == TypeTags.FLOAT) {
binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.rhsExpr.getBType());
return;
}
// Remaining mixed-type cases are handled per operator category.
if (isArithmeticOperator) {
createTypeCastExprForArithmeticExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
return;
}
if (isBinaryShiftOperator) {
createTypeCastExprForBinaryShiftExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
return;
}
if (symResolver.isBinaryComparisonOperator(binaryOpKind)) {
createTypeCastExprForRelationalExpr(binaryExpr, lhsExprTypeTag, rhsExprTypeTag);
}
}
private BLangStatementExpression createStmtExprForNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
/*
* int? x = 3;
* int? y = 5;
* int? z = x + y;
* Above is desugared to
* int? $result$;
* if (x is () or y is ()) {
* $result$ = ();
* } else {
* $result$ = x + y;
* }
* int z = $result$;
*/
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
// The non-nil member of the nullable result union is the type the actual
// operation is performed at.
BUnionType exprBType = (BUnionType) binaryExpr.getBType();
BType nonNilType = exprBType.getMemberTypes().iterator().next();
boolean isArithmeticOperator = symResolver.isArithmeticOperator(binaryExpr.opKind);
boolean isShiftOperator = symResolver.isBinaryShiftOperator(binaryExpr.opKind);
boolean isBitWiseOperator = !isArithmeticOperator && !isShiftOperator;
// For bitwise operators each operand keeps its own (nil-stripped) type rather
// than being cast to the common non-nil result type.
BType rhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.rhsExpr.getBType().isNullable()) {
rhsType = types.getSafeType(binaryExpr.rhsExpr.getBType(), true, false);
} else {
rhsType = binaryExpr.rhsExpr.getBType();
}
}
BType lhsType = nonNilType;
if (isBitWiseOperator) {
if (binaryExpr.lhsExpr.getBType().isNullable()) {
lhsType = types.getSafeType(binaryExpr.lhsExpr.getBType(), true, false);
} else {
lhsType = binaryExpr.lhsExpr.getBType();
}
}
if (binaryExpr.lhsExpr.getBType().isNullable()) {
binaryExpr.lhsExpr = rewriteExpr(binaryExpr.lhsExpr);
}
BLangSimpleVariableDef tempVarDef = createVarDef("result",
binaryExpr.getBType(), null, binaryExpr.pos);
BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, tempVarDef.var.symbol);
blockStmt.addStatement(tempVarDef);
// Condition: lhs is () || rhs is ().
BLangTypeTestExpr typeTestExprOne = createTypeCheckExpr(binaryExpr.pos, binaryExpr.lhsExpr,
getNillTypeNode());
typeTestExprOne.setBType(symTable.booleanType);
BLangTypeTestExpr typeTestExprTwo = createTypeCheckExpr(binaryExpr.pos,
binaryExpr.rhsExpr, getNillTypeNode());
typeTestExprTwo.setBType(symTable.booleanType);
BLangBinaryExpr ifBlockCondition = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, typeTestExprOne,
typeTestExprTwo, symTable.booleanType, OperatorKind.OR, binaryExpr.opSymbol);
// then-branch: either operand is nil, so the whole expression is nil.
BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, ifBody);
bLangAssignmentIf.varRef = tempVarRef;
bLangAssignmentIf.expr = createNilLiteral();
// else-branch: both operands are non-nil; cast them and apply the operator.
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseBody);
bLangAssignmentElse.varRef = tempVarRef;
BLangBinaryExpr newBinaryExpr = ASTBuilderUtil.createBinaryExpr(binaryExpr.pos, binaryExpr.lhsExpr,
binaryExpr.rhsExpr, nonNilType, binaryExpr.opKind, binaryExpr.opSymbol);
newBinaryExpr.lhsExpr = createTypeCastExpr(newBinaryExpr.lhsExpr, lhsType);
newBinaryExpr.rhsExpr = createTypeCastExpr(newBinaryExpr.rhsExpr, rhsType);
bLangAssignmentElse.expr = newBinaryExpr;
BLangIf ifStatement = ASTBuilderUtil.createIfStmt(binaryExpr.pos, blockStmt);
ifStatement.expr = ifBlockCondition;
ifStatement.body = ifBody;
ifStatement.elseStmt = elseBody;
BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
stmtExpr.setBType(binaryExpr.getBType());
return stmtExpr;
}
// A binary expression needs nil-lifting desugaring when either operand's type is
// nullable and the operator is arithmetic, shift, or bitwise.
private boolean isNullableBinaryExpr(BLangBinaryExpr binaryExpr) {
    BType lhsType = binaryExpr.lhsExpr.getBType();
    BType rhsType = binaryExpr.rhsExpr.getBType();
    if (lhsType == null || rhsType == null) {
        return false;
    }
    if (!lhsType.isNullable() && !rhsType.isNullable()) {
        return false;
    }
    switch (binaryExpr.getOperatorKind()) {
        case ADD:
        case SUB:
        case MUL:
        case DIV:
        case MOD:
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
        case BITWISE_AND:
        case BITWISE_OR:
        case BITWISE_XOR:
            return true;
        default:
            return false;
    }
}
// Aligns operand types for a mixed-type arithmetic expression by inserting casts or,
// for xml + string-like operands, wrapping the string side as an xml text literal.
private void createTypeCastExprForArithmeticExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    // Same family on both sides (integer, string, or xml): the operator is defined
    // directly on them, no cast needed.
    if ((TypeTags.isIntegerTypeTag(lhsExprTypeTag) && TypeTags.isIntegerTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isStringTypeTag(lhsExprTypeTag) && TypeTags.isStringTypeTag(rhsExprTypeTag)) ||
            (TypeTags.isXMLTypeTag(lhsExprTypeTag) && TypeTags.isXMLTypeTag(rhsExprTypeTag))) {
        return;
    }
    if (TypeTags.isXMLTypeTag(lhsExprTypeTag) && !TypeTags.isXMLTypeTag(rhsExprTypeTag)) {
        // xml <op> string-like: wrap the rhs as an xml text literal; otherwise cast it to xml.
        if (types.checkTypeContainString(binaryExpr.rhsExpr.getBType())) {
            binaryExpr.rhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.rhsExpr,
                    binaryExpr.rhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.xmlType);
        return;
    }
    if (TypeTags.isXMLTypeTag(rhsExprTypeTag) && !TypeTags.isXMLTypeTag(lhsExprTypeTag)) {
        if (types.checkTypeContainString(binaryExpr.lhsExpr.getBType())) {
            // Fix: position the generated literal at the lhs operand (previously this
            // used binaryExpr.rhsExpr.pos, pointing diagnostics at the wrong operand;
            // compare with the symmetric lhs-xml branch above).
            binaryExpr.lhsExpr = ASTBuilderUtil.createXMLTextLiteralNode(binaryExpr, binaryExpr.lhsExpr,
                    binaryExpr.lhsExpr.pos, symTable.xmlType);
            return;
        }
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.xmlType);
        return;
    }
    // Remaining mixed numeric case: cast both operands to the expression's result type.
    binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, binaryExpr.getBType());
    binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, binaryExpr.getBType());
}
// Shift operands must be int-like; casts any side that is neither an integer subtype
// nor a byte up to int.
private void createTypeCastExprForBinaryShiftExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                  int rhsExprTypeTag) {
    boolean lhsIsIntLike = TypeTags.isIntegerTypeTag(lhsExprTypeTag) || lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsIntLike = TypeTags.isIntegerTypeTag(rhsExprTypeTag) || rhsExprTypeTag == TypeTags.BYTE;
    if (!lhsIsIntLike) {
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
    }
    if (!rhsIsIntLike) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
    }
}
// Aligns operand types for comparison operators: integer subtypes and byte are widened
// to int where they are mixed, and string subtypes are aligned to string.
private void createTypeCastExprForRelationalExpr(BLangBinaryExpr binaryExpr, int lhsExprTypeTag,
                                                 int rhsExprTypeTag) {
    boolean lhsIsInteger = TypeTags.isIntegerTypeTag(lhsExprTypeTag);
    boolean rhsIsInteger = TypeTags.isIntegerTypeTag(rhsExprTypeTag);
    boolean lhsIsByte = lhsExprTypeTag == TypeTags.BYTE;
    boolean rhsIsByte = rhsExprTypeTag == TypeTags.BYTE;
    // Same integer family on both sides: nothing to do.
    if ((lhsIsInteger && rhsIsInteger) || (lhsIsByte && rhsIsByte)) {
        return;
    }
    // Exactly one side is an integer subtype: widen the other side to int.
    if (lhsIsInteger != rhsIsInteger) {
        if (lhsIsInteger) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        }
        return;
    }
    // A byte mixed with a non-integer side: lift both operands to int.
    if (lhsIsByte || rhsIsByte) {
        binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.intType);
        binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.intType);
        return;
    }
    boolean lhsIsString = TypeTags.isStringTypeTag(lhsExprTypeTag);
    boolean rhsIsString = TypeTags.isStringTypeTag(rhsExprTypeTag);
    // Mixed string subtypes: align the non-string-subtype side to string.
    if (lhsIsString != rhsIsString) {
        if (lhsIsString) {
            binaryExpr.rhsExpr = createTypeCastExpr(binaryExpr.rhsExpr, symTable.stringType);
        } else {
            binaryExpr.lhsExpr = createTypeCastExpr(binaryExpr.lhsExpr, symTable.stringType);
        }
    }
}
// Desugars `a ... b` / `a ..< b` into a lang.internal createIntRange(a, b) invocation.
private BLangInvocation replaceWithIntRange(Location location, BLangExpression lhsExpr,
                                            BLangExpression rhsExpr) {
    BInvokableSymbol rangeSymbol = (BInvokableSymbol) symTable.langInternalModuleSymbol.scope
            .lookup(Names.CREATE_INT_RANGE).symbol;
    List<BLangExpression> rangeArgs = new ArrayList<>(Lists.of(lhsExpr, rhsExpr));
    BLangInvocation intRangeInvocation =
            ASTBuilderUtil.createInvocationExprForMethod(location, rangeSymbol, rangeArgs, symResolver);
    intRangeInvocation.setBType(symTable.intRangeType);
    return intRangeInvocation;
}
// When a byte operand participates in an expression whose expected type is int,
// widens that operand to int with an explicit conversion.
private void checkByteTypeIncompatibleOperations(BLangBinaryExpr binaryExpr) {
    if (binaryExpr.expectedType == null) {
        return;
    }
    int lhsTag = binaryExpr.lhsExpr.getBType().tag;
    int rhsTag = binaryExpr.rhsExpr.getBType().tag;
    boolean hasByteOperand = lhsTag == TypeTags.BYTE || rhsTag == TypeTags.BYTE;
    if (!hasByteOperand || binaryExpr.expectedType.tag != TypeTags.INT) {
        return;
    }
    if (rhsTag == TypeTags.BYTE) {
        binaryExpr.rhsExpr = addConversionExprIfRequired(binaryExpr.rhsExpr, symTable.intType);
    }
    if (lhsTag == TypeTags.BYTE) {
        binaryExpr.lhsExpr = addConversionExprIfRequired(binaryExpr.lhsExpr, symTable.intType);
    }
}
/**
 * Checks whether the given binary expression uses a bitwise shift operator
 * ({@code <<}, {@code >>} or {@code >>>}). When it does and the expected type is
 * {@code int}, both operands are later converted to {@code int}.
 * <p>
 * byte a = 12;
 * byte b = 34;
 * int i = 234;
 * int j = -4;
 * <p>
 * true: where binary expression's expected type is 'int'
 * int i1 = a >> b;
 * int i2 = a << b;
 * int i3 = a >> i;
 * int i4 = a << i;
 * int i5 = i >> j;
 * int i6 = i << j;
 *
 * @param binaryExpr the binary expression to inspect
 * @return true if the operator is one of the three shift operators
 */
private boolean isBitwiseShiftOperation(BLangBinaryExpr binaryExpr) {
    switch (binaryExpr.opKind) {
        case BITWISE_LEFT_SHIFT:
        case BITWISE_RIGHT_SHIFT:
        case BITWISE_UNSIGNED_RIGHT_SHIFT:
            return true;
        default:
            return false;
    }
}
// Desugars the elvis operator (lhs ?: rhs) into a match expression: a nil match on
// the lhs yields the (desugared) rhs; otherwise the lhs value is used.
public void visit(BLangElvisExpr elvisExpr) {
    BLangMatchExpression matchExpr = ASTBuilderUtil.createMatchExpression(elvisExpr.lhsExpr);
    matchExpr.patternClauses.add(getMatchNullPatternGivenExpression(elvisExpr.pos,
            rewriteExpr(elvisExpr.rhsExpr)));
    matchExpr.setBType(elvisExpr.getBType());
    matchExpr.pos = elvisExpr.pos;
    result = rewriteExpr(matchExpr);
}
@Override
public void visit(BLangUnaryExpr unaryExpr) {
    if (isNullableUnaryExpr(unaryExpr)) {
        // A nil-lifting unary op becomes a conditional statement expression.
        BLangStatementExpression statementExpression = createStmtExprForNilableUnaryExpr(unaryExpr);
        result = rewrite(statementExpression, env);
    } else if (OperatorKind.BITWISE_COMPLEMENT == unaryExpr.operator) {
        // ~x is turned into an equivalent XOR binary expression.
        rewriteBitwiseComplementOperator(unaryExpr);
    } else {
        unaryExpr.expr = rewriteExpr(unaryExpr.expr);
        result = unaryExpr;
    }
}
/**
 * Desugars a bitwise complement (~) unary expression into a bitwise XOR binary
 * expression, e.g. {@code ~a} becomes {@code a ^ -1} for ints and
 * {@code a ^ 0xff} for bytes:
 * ~ 11110011 -> 00001100
 * 11110011 ^ 11111111 -> 00001100
 *
 * @param unaryExpr the bitwise complement expression
 */
private void rewriteBitwiseComplementOperator(BLangUnaryExpr unaryExpr) {
    final Location pos = unaryExpr.pos;
    final BLangBinaryExpr xorExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    xorExpr.pos = pos;
    xorExpr.opKind = OperatorKind.BITWISE_XOR;
    xorExpr.lhsExpr = unaryExpr.expr;
    // Choose the operand type and all-ones mask based on byte vs int.
    boolean isByte = TypeTags.BYTE == unaryExpr.getBType().tag;
    BType operandType = isByte ? symTable.byteType : symTable.intType;
    long mask = isByte ? 0xffL : -1L;
    xorExpr.setBType(operandType);
    xorExpr.rhsExpr = ASTBuilderUtil.createLiteral(pos, operandType, mask);
    xorExpr.opSymbol = (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.BITWISE_XOR,
            operandType, operandType);
    result = rewriteExpr(xorExpr);
}
// Desugars a unary expression over a nilable operand into a statement expression
// that nil-checks the operand first.
private BLangStatementExpression createStmtExprForNilableUnaryExpr(BLangUnaryExpr unaryExpr) {
    /*
     * int? x = 3;
     * int? y = +x;
     *
     *
     * Above is desugared to
     * int? $result$;
     * if (x is ()) {
     *     $result$ = ();
     * } else {
     *     $result$ = +x;
     * }
     * int y = $result$
     */
    BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    // First member of the union is used as the nil-lifted (non-nil) type.
    BUnionType exprBType = (BUnionType) unaryExpr.getBType();
    BType nilLiftType = exprBType.getMemberTypes().iterator().next();
    unaryExpr.expr = rewriteExpr(unaryExpr.expr);
    // $result$ temp var, initialized to nil.
    BLangSimpleVariableDef tempVarDef = createVarDef("$result",
            unaryExpr.getBType(), createNilLiteral(), unaryExpr.pos);
    BLangSimpleVarRef tempVarRef = ASTBuilderUtil.createVariableRef(unaryExpr.pos, tempVarDef.var.symbol);
    blockStmt.addStatement(tempVarDef);
    // if (expr is ()) { $result$ = (); }
    BLangTypeTestExpr typeTestExpr = createTypeCheckExpr(unaryExpr.pos, unaryExpr.expr,
            getNillTypeNode());
    typeTestExpr.setBType(symTable.booleanType);
    BLangBlockStmt ifBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentIf = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, ifBody);
    bLangAssignmentIf.varRef = tempVarRef;
    bLangAssignmentIf.expr = createNilLiteral();
    // else { $result$ = <op>(<nilLiftType>expr); }
    BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(unaryExpr.pos);
    BLangAssignment bLangAssignmentElse = ASTBuilderUtil.createAssignmentStmt(unaryExpr.pos, elseBody);
    bLangAssignmentElse.varRef = tempVarRef;
    BLangExpression expr = createTypeCastExpr(unaryExpr.expr, nilLiftType);
    bLangAssignmentElse.expr = ASTBuilderUtil.createUnaryExpr(unaryExpr.pos, expr,
            nilLiftType, unaryExpr.operator, unaryExpr.opSymbol);
    BLangIf ifStatement = ASTBuilderUtil.createIfStmt(unaryExpr.pos, blockStmt);
    ifStatement.expr = typeTestExpr;
    ifStatement.body = ifBody;
    ifStatement.elseStmt = elseBody;
    // The whole block evaluates to $result$.
    BLangStatementExpression stmtExpr = ASTBuilderUtil.createStatementExpression(blockStmt, tempVarRef);
    stmtExpr.setBType(unaryExpr.getBType());
    return stmtExpr;
}
// True when the unary expression has a nilable type and its operator is one of
// the nil-lifting operators (+, -, ~).
private boolean isNullableUnaryExpr(BLangUnaryExpr unaryExpr) {
    BType exprType = unaryExpr.getBType();
    if (exprType == null || !exprType.isNullable()) {
        return false;
    }
    return unaryExpr.operator == OperatorKind.ADD
            || unaryExpr.operator == OperatorKind.SUB
            || unaryExpr.operator == OperatorKind.BITWISE_COMPLEMENT;
}
@Override
public void visit(BLangTypeConversionExpr conversionExpr) {
    // A conversion node with no target type node but with annotation attachments was
    // created only to carry annotations; replace it with its desugared sub-expression.
    if (conversionExpr.typeNode == null && !conversionExpr.annAttachments.isEmpty()) {
        result = rewriteExpr(conversionExpr.expr);
        return;
    }
    // Note: the original read conversionExpr.targetType into an unused local; removed.
    conversionExpr.typeNode = rewrite(conversionExpr.typeNode, env);
    conversionExpr.expr = rewriteExpr(conversionExpr.expr);
    result = conversionExpr;
}
@Override
public void visit(BLangLambdaFunction bLangLambdaFunction) {
    // Register the lambda with the enclosing package exactly once; the node itself
    // needs no further rewriting here.
    if (!env.enclPkg.lambdaFunctions.contains(bLangLambdaFunction)) {
        env.enclPkg.lambdaFunctions.add(bLangLambdaFunction);
    }
    result = bLangLambdaFunction;
}
// Desugars an arrow function expression into a full lambda function: a synthetic
// BLangFunction is created, given a function symbol and scope, registered with the
// package, and the arrow function is replaced by the resulting lambda.
@Override
public void visit(BLangArrowFunction bLangArrowFunction) {
    BLangFunction bLangFunction = (BLangFunction) TreeBuilder.createFunctionNode();
    bLangFunction.setName(bLangArrowFunction.functionName);
    BLangLambdaFunction lambdaFunction = (BLangLambdaFunction) TreeBuilder.createLambdaFunctionNode();
    lambdaFunction.pos = bLangArrowFunction.pos;
    bLangFunction.addFlag(Flag.LAMBDA);
    lambdaFunction.function = bLangFunction;
    // Return type is taken from the arrow body expression's type.
    BLangValueType returnType = (BLangValueType) TreeBuilder.createValueTypeNode();
    returnType.setBType(bLangArrowFunction.body.expr.getBType());
    bLangFunction.setReturnTypeNode(returnType);
    // The single-expression body is wrapped into a block body.
    bLangFunction.setBody(populateArrowExprBodyBlock(bLangArrowFunction));
    bLangArrowFunction.params.forEach(bLangFunction::addParameter);
    lambdaFunction.parent = bLangArrowFunction.parent;
    lambdaFunction.setBType(bLangArrowFunction.funcType);
    // Create and wire up the invokable symbol for the synthetic function.
    BLangFunction funcNode = lambdaFunction.function;
    BInvokableSymbol funcSymbol = Symbols.createFunctionSymbol(Flags.asMask(funcNode.flagSet),
                                                               new Name(funcNode.name.value),
                                                               new Name(funcNode.name.originalValue),
                                                               env.enclPkg.symbol.pkgID,
                                                               bLangArrowFunction.funcType,
                                                               env.enclEnv.enclVarSym, true,
                                                               bLangArrowFunction.pos, VIRTUAL);
    funcSymbol.originalName = new Name(funcNode.name.originalValue);
    SymbolEnv invokableEnv = SymbolEnv.createFunctionEnv(funcNode, funcSymbol.scope, env);
    defineInvokableSymbol(funcNode, funcSymbol, invokableEnv);
    // Re-home each parameter symbol into the new function's scope.
    List<BVarSymbol> paramSymbols = funcNode.requiredParams.stream().peek(varNode -> {
        Scope enclScope = invokableEnv.scope;
        varNode.symbol.kind = SymbolKind.FUNCTION;
        varNode.symbol.owner = invokableEnv.scope.owner;
        enclScope.define(varNode.symbol.name, varNode.symbol);
    }).map(varNode -> varNode.symbol).collect(Collectors.toList());
    funcSymbol.params = paramSymbols;
    funcSymbol.restParam = getRestSymbol(funcNode);
    funcSymbol.retType = funcNode.returnTypeNode.getBType();
    // Derive the function's BInvokableType from the parameter symbols.
    List<BType> paramTypes = paramSymbols.stream().map(paramSym -> paramSym.type).collect(Collectors.toList());
    funcNode.setBType(
            new BInvokableType(paramTypes, getRestType(funcSymbol), funcNode.returnTypeNode.getBType(), null));
    lambdaFunction.function.pos = bLangArrowFunction.pos;
    lambdaFunction.function.body.pos = bLangArrowFunction.pos;
    // Keep the defining environment so captured closure variables resolve later.
    lambdaFunction.capturedClosureEnv = env;
    rewrite(lambdaFunction.function, env);
    env.enclPkg.addFunction(lambdaFunction.function);
    bLangArrowFunction.function = lambdaFunction.function;
    result = rewriteExpr(lambdaFunction);
}
// Attaches the given function symbol to the invokable node and gives the symbol a
// fresh scope, which also becomes the invokable environment's scope.
private void defineInvokableSymbol(BLangInvokableNode invokableNode, BInvokableSymbol funcSymbol,
                                   SymbolEnv invokableEnv) {
    invokableNode.symbol = funcSymbol;
    funcSymbol.scope = new Scope(funcSymbol);
    invokableEnv.scope = funcSymbol.scope;
}
@Override
public void visit(BLangXMLQName xmlQName) {
    // Already in desugared form; nothing to rewrite.
    result = xmlQName;
}
@Override
public void visit(BLangXMLAttribute xmlAttribute) {
    // Desugar the attribute's name and value expressions in place.
    xmlAttribute.name = rewriteExpr(xmlAttribute.name);
    xmlAttribute.value = rewriteExpr(xmlAttribute.value);
    result = xmlAttribute;
}
@Override
public void visit(BLangXMLElementLiteral xmlElementLiteral) {
    // Desugar the tag names, children and attributes first.
    xmlElementLiteral.startTagName = rewriteExpr(xmlElementLiteral.startTagName);
    xmlElementLiteral.endTagName = rewriteExpr(xmlElementLiteral.endTagName);
    xmlElementLiteral.modifiedChildren = rewriteExprs(xmlElementLiteral.modifiedChildren);
    xmlElementLiteral.attributes = rewriteExprs(xmlElementLiteral.attributes);
    // Lift every namespace-declaration attribute into an inline XMLNS node.
    for (BLangXMLAttribute attribute : xmlElementLiteral.attributes) {
        if (!attribute.isNamespaceDeclr) {
            continue;
        }
        // Package-owned scopes get package-level namespace nodes; others are local.
        BLangXMLNS xmlns;
        if ((xmlElementLiteral.scope.owner.tag & SymTag.PACKAGE) == SymTag.PACKAGE) {
            xmlns = new BLangPackageXMLNS();
        } else {
            xmlns = new BLangLocalXMLNS();
        }
        xmlns.namespaceURI = attribute.value.concatExpr;
        xmlns.prefix = ((BLangXMLQName) attribute.name).localname;
        xmlns.symbol = attribute.symbol;
        xmlElementLiteral.inlineNamespaces.add(xmlns);
    }
    result = xmlElementLiteral;
}
@Override
public void visit(BLangXMLSequenceLiteral xmlSequenceLiteral) {
    // NOTE(review): the rewritten node returned by rewriteExpr is discarded here;
    // presumably rewriteExpr mutates the item in place — confirm.
    for (BLangExpression xmlItem : xmlSequenceLiteral.xmlItems) {
        rewriteExpr(xmlItem);
    }
    result = xmlSequenceLiteral;
}
@Override
public void visit(BLangXMLTextLiteral xmlTextLiteral) {
    // Fold the text fragments into a single string concatenation expression.
    xmlTextLiteral.concatExpr = rewriteExpr(constructStringTemplateConcatExpression(xmlTextLiteral.textFragments));
    result = xmlTextLiteral;
}
@Override
public void visit(BLangXMLCommentLiteral xmlCommentLiteral) {
    // Fold the comment's text fragments into a single concatenation expression.
    xmlCommentLiteral.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlCommentLiteral.textFragments));
    result = xmlCommentLiteral;
}
@Override
public void visit(BLangXMLProcInsLiteral xmlProcInsLiteral) {
    // Desugar the target and fold the data fragments into one concatenation.
    xmlProcInsLiteral.target = rewriteExpr(xmlProcInsLiteral.target);
    xmlProcInsLiteral.dataConcatExpr =
            rewriteExpr(constructStringTemplateConcatExpression(xmlProcInsLiteral.dataFragments));
    result = xmlProcInsLiteral;
}
@Override
public void visit(BLangXMLQuotedString xmlQuotedString) {
    // Fold the quoted string's fragments into a single concatenation expression.
    xmlQuotedString.concatExpr = rewriteExpr(
            constructStringTemplateConcatExpression(xmlQuotedString.textFragments));
    result = xmlQuotedString;
}
@Override
public void visit(BLangStringTemplateLiteral stringTemplateLiteral) {
    // A string template becomes a chain of string concatenations of its parts.
    result = rewriteExpr(constructStringTemplateConcatExpression(stringTemplateLiteral.exprs));
}
/**
 * The raw template literal gets desugared to a type init expression. For each literal, a new object class type
 * def is generated from the object type. The type init expression creates an instance of this generated object
 * type. For example, consider the following statements:
 * string name = "Pubudu";
 * 'object:RawTemplate rt = `Hello ${name}!`;
 *
 * The raw template literal above is desugared to:
 * type RawTemplate$Impl$0 object {
 *     public string[] strings = ["Hello ", "!"];
 *     public (any|error)[] insertions;
 *
 *     function init((any|error)[] insertions) {
 *         self.insertions = insertions;
 *     }
 * };
 *
 *
 * 'object:RawTemplate rt = new RawTemplate$Impl$0([name]);
 *
 * @param rawTemplateLiteral The raw template literal to be desugared.
 */
@Override
public void visit(BLangRawTemplateLiteral rawTemplateLiteral) {
    Location pos = rawTemplateLiteral.pos;
    BObjectType objType = (BObjectType) rawTemplateLiteral.getBType();
    // Generate the concrete object class for this literal's string parts.
    BLangClassDefinition objClassDef =
            desugarTemplateLiteralObjectTypedef(rawTemplateLiteral.strings, objType, pos);
    BObjectType classObjType = (BObjectType) objClassDef.getBType();
    // Build the list constructor carrying the interpolated expressions.
    BVarSymbol insertionsSym = classObjType.fields.get("insertions").symbol;
    BLangListConstructorExpr insertionsList = ASTBuilderUtil.createListConstructorExpr(pos, insertionsSym.type);
    insertionsList.exprs.addAll(rawTemplateLiteral.insertions);
    insertionsList.expectedType = insertionsSym.type;
    // Replace the literal with `new <generated-class>([insertions...])`.
    BLangTypeInit typeNewExpr = ASTBuilderUtil.createEmptyTypeInit(pos, classObjType);
    typeNewExpr.argsExpr.add(insertionsList);
    typeNewExpr.initInvocation.argExprs.add(insertionsList);
    typeNewExpr.initInvocation.requiredArgs.add(insertionsList);
    result = rewriteExpr(typeNewExpr);
}
/**
 * This method desugars a raw template literal object class for the provided raw template object type as follows.
 * A literal defined as 'object:RawTemplate rt = `Hello ${name}!`;
 * is desugared to,
 * type $anonType$0 object {
 *     public string[] strings = ["Hello ", "!"];
 *     public (any|error)[] insertions;
 *
 *     function init((any|error)[] insertions) {
 *         self.insertions = insertions;
 *     }
 * };
 * @param strings The string portions of the literal
 * @param objectType The abstract object type for which an object class needs to be generated
 * @param pos The diagnostic position info for the type node
 * @return Returns the generated concrete object class def
 */
private BLangClassDefinition desugarTemplateLiteralObjectTypedef(List<BLangLiteral> strings, BObjectType objectType,
                                                                 Location pos) {
    BObjectTypeSymbol tSymbol = (BObjectTypeSymbol) objectType.tsymbol;
    // Create a fresh class symbol/type mirroring the abstract object type's fields.
    Name objectClassName = names.fromString(
            anonModelHelper.getNextRawTemplateTypeKey(env.enclPkg.packageID, tSymbol.name));
    BObjectTypeSymbol classTSymbol = Symbols.createClassSymbol(tSymbol.flags, objectClassName,
                                                               env.enclPkg.packageID, null, env.enclPkg.symbol,
                                                               pos, VIRTUAL, false);
    classTSymbol.flags |= Flags.CLASS;
    BObjectType objectClassType = new BObjectType(classTSymbol, classTSymbol.flags);
    objectClassType.fields = objectType.fields;
    classTSymbol.type = objectClassType;
    objectClassType.typeIdSet.add(objectType.typeIdSet);
    BLangClassDefinition classDef = TypeDefBuilderHelper.createClassDef(pos, classTSymbol, env);
    classDef.name = ASTBuilderUtil.createIdentifier(pos, objectClassType.tsymbol.name.value);
    // Initialize the `strings` field (assumed to be field index 0 here) with the
    // literal's string parts.
    BType stringsType = objectClassType.fields.get("strings").symbol.type;
    BLangListConstructorExpr stringsList = ASTBuilderUtil.createListConstructorExpr(pos, stringsType);
    stringsList.exprs.addAll(strings);
    stringsList.expectedType = stringsType;
    classDef.fields.get(0).expr = stringsList;
    // Generate both the user-visible init() and the compiler-generated initializer,
    // registering each at package level.
    BLangFunction userDefinedInitFunction = createUserDefinedObjectInitFn(classDef, env);
    classDef.initFunction = userDefinedInitFunction;
    env.enclPkg.functions.add(userDefinedInitFunction);
    env.enclPkg.topLevelNodes.add(userDefinedInitFunction);
    BLangFunction tempGeneratedInitFunction = createGeneratedInitializerFunction(classDef, env);
    tempGeneratedInitFunction.clonedEnv = SymbolEnv.createFunctionEnv(tempGeneratedInitFunction,
                                                                     tempGeneratedInitFunction.symbol.scope, env);
    this.semanticAnalyzer.analyzeNode(tempGeneratedInitFunction, env);
    classDef.generatedInitFunction = tempGeneratedInitFunction;
    env.enclPkg.functions.add(classDef.generatedInitFunction);
    env.enclPkg.topLevelNodes.add(classDef.generatedInitFunction);
    return rewrite(classDef, env);
}
/**
 * Creates a user-defined init() method for the provided object type node. If there are fields without default
 * values specified in the type node, this will add parameters for those fields in the init() method and assign the
 * param values to the respective fields in the method body.
 *
 * @param classDefn The object type node for which the init() method is generated
 * @param env The symbol env for the object type node
 * @return The generated init() method
 */
private BLangFunction createUserDefinedObjectInitFn(BLangClassDefinition classDefn, SymbolEnv env) {
    BLangFunction initFunction =
            TypeDefBuilderHelper.createInitFunctionForStructureType(classDefn.pos, classDefn.symbol, env,
                                                                    names, Names.USER_DEFINED_INIT_SUFFIX,
                                                                    symTable, classDefn.getBType());
    BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefn.getBType().tsymbol);
    typeSymbol.initializerFunc = new BAttachedFunction(Names.USER_DEFINED_INIT_SUFFIX, initFunction.symbol,
                                                       (BInvokableType) initFunction.getBType(), classDefn.pos);
    classDefn.initFunction = initFunction;
    initFunction.returnTypeNode.setBType(symTable.nilType);
    BLangBlockFunctionBody initFuncBody = (BLangBlockFunctionBody) initFunction.body;
    BInvokableType initFnType = (BInvokableType) initFunction.getBType();
    // For each field without a default value, add an init() parameter and an
    // assignment of that parameter to the field.
    for (BLangSimpleVariable field : classDefn.fields) {
        if (field.expr != null) {
            // Field already has a default value; no parameter needed.
            continue;
        }
        BVarSymbol fieldSym = field.symbol;
        BVarSymbol paramSym = new BVarSymbol(Flags.FINAL, fieldSym.name, this.env.scope.owner.pkgID, fieldSym.type,
                                             initFunction.symbol, classDefn.pos, VIRTUAL);
        BLangSimpleVariable param = ASTBuilderUtil.createVariable(classDefn.pos, fieldSym.name.value,
                                                                  fieldSym.type, null, paramSym);
        param.flagSet.add(Flag.FINAL);
        initFunction.symbol.scope.define(paramSym.name, paramSym);
        initFunction.symbol.params.add(paramSym);
        initFnType.paramTypes.add(param.getBType());
        initFunction.requiredParams.add(param);
        // self.<field> = <param>;
        BLangSimpleVarRef paramRef = ASTBuilderUtil.createVariableRef(initFunction.pos, paramSym);
        BLangAssignment fieldInit = createStructFieldUpdate(initFunction, paramRef, fieldSym, field.getBType(),
                                                            initFunction.receiver.symbol, field.name);
        initFuncBody.addStatement(fieldInit);
    }
    return initFunction;
}
@Override
public void visit(BLangWorkerSend workerSendNode) {
    // The sent value is cloned so the receiving worker gets its own copy.
    workerSendNode.expr = visitCloneInvocation(rewriteExpr(workerSendNode.expr), workerSendNode.expr.getBType());
    result = workerSendNode;
}
@Override
public void visit(BLangWorkerSyncSendExpr syncSendExpr) {
    // Same as async send: clone the value before handing it to the other worker.
    syncSendExpr.expr = visitCloneInvocation(rewriteExpr(syncSendExpr.expr), syncSendExpr.expr.getBType());
    result = syncSendExpr;
}
@Override
public void visit(BLangWorkerReceive workerReceiveNode) {
    // Nothing to desugar on the receive side.
    result = workerReceiveNode;
}
@Override
public void visit(BLangWorkerFlushExpr workerFlushExpr) {
    // Collect the distinct worker names referenced by the cached send statements.
    workerFlushExpr.workerIdentifierList = workerFlushExpr.cachedWorkerSendStmts
            .stream().map(send -> send.workerIdentifier).distinct().collect(Collectors.toList());
    result = workerFlushExpr;
}
@Override
public void visit(BLangTransactionalExpr transactionalExpr) {
    // `transactional` is replaced by a call to the internal isTransactional() helper.
    BInvokableSymbol isTransactionalSymbol =
            (BInvokableSymbol) transactionDesugar.getInternalTransactionModuleInvokableSymbol(IS_TRANSACTIONAL);
    result = ASTBuilderUtil
            .createInvocationExprMethod(transactionalExpr.pos, isTransactionalSymbol, Collections.emptyList(),
                                        Collections.emptyList(), symResolver);
}
@Override
public void visit(BLangCommitExpr commitExpr) {
    // Delegate to the transaction desugar, then rewrite the resulting expression.
    BLangStatementExpression stmtExpr = transactionDesugar.desugar(commitExpr, env);
    result = rewriteExpr(stmtExpr);
}
@Override
public void visit(BLangXMLAttributeAccess xmlAttributeAccessExpr) {
    xmlAttributeAccessExpr.indexExpr = rewriteExpr(xmlAttributeAccessExpr.indexExpr);
    xmlAttributeAccessExpr.expr = rewriteExpr(xmlAttributeAccessExpr.expr);
    BLangExpression indexExpr = xmlAttributeAccessExpr.indexExpr;
    // Mark QName index expressions as used inside XML so they expand correctly.
    if (indexExpr != null && indexExpr.getKind() == NodeKind.XML_QNAME) {
        ((BLangXMLQName) indexExpr).isUsedInXML = true;
    }
    xmlAttributeAccessExpr.desugared = true;
    // lvalue accesses and keyed accesses stay as-is; a whole-attribute-map read
    // goes through another rewrite round.
    if (xmlAttributeAccessExpr.isLValue || indexExpr != null) {
        result = xmlAttributeAccessExpr;
    } else {
        result = rewriteExpr(xmlAttributeAccessExpr);
    }
}
@Override
public void visit(BLangFail failNode) {
    if (this.onFailClause == null) {
        // No enclosing on-fail clause: a fail statement simply returns the error.
        BLangReturn stmt = ASTBuilderUtil.createReturnStmt(failNode.pos, rewrite(failNode.expr, env));
        stmt.desugared = true;
        result = stmt;
        return;
    }
    // Route the error through the innermost on-fail clause; a clause whose body
    // itself contains fail statements needs the dedicated nested rewrite.
    result = this.onFailClause.bodyContainsFail
            ? rewriteNestedOnFail(this.onFailClause, failNode)
            : createOnFailInvocation(onFailClause, failNode);
}
@Override
public void visit(BLangLocalVarRef localVarRef) {
    // Already in desugared form; nothing to rewrite.
    result = localVarRef;
}
@Override
public void visit(BLangFieldVarRef fieldVarRef) {
    // Already in desugared form; nothing to rewrite.
    result = fieldVarRef;
}
@Override
public void visit(BLangPackageVarRef packageVarRef) {
    // Already in desugared form; nothing to rewrite.
    result = packageVarRef;
}
@Override
public void visit(BLangFunctionVarRef functionVarRef) {
    // Already in desugared form; nothing to rewrite.
    result = functionVarRef;
}
@Override
public void visit(BLangStructFieldAccessExpr fieldAccessExpr) {
    // Already in desugared form; nothing to rewrite.
    result = fieldAccessExpr;
}
@Override
public void visit(BLangStructFunctionVarRef functionVarRef) {
    // Already in desugared form; nothing to rewrite.
    result = functionVarRef;
}
@Override
public void visit(BLangMapAccessExpr mapKeyAccessExpr) {
    // Already in desugared form; nothing to rewrite.
    result = mapKeyAccessExpr;
}
@Override
public void visit(BLangArrayAccessExpr arrayIndexAccessExpr) {
    // Already in desugared form; nothing to rewrite.
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTupleAccessExpr arrayIndexAccessExpr) {
    // Already in desugared form; nothing to rewrite.
    result = arrayIndexAccessExpr;
}
@Override
public void visit(BLangTableAccessExpr tableKeyAccessExpr) {
    // Already in desugared form; nothing to rewrite.
    result = tableKeyAccessExpr;
}
@Override
public void visit(BLangMapLiteral mapLiteral) {
    // Already in desugared form; nothing to rewrite.
    result = mapLiteral;
}
@Override
public void visit(BLangStructLiteral structLiteral) {
    // Already in desugared form; nothing to rewrite.
    result = structLiteral;
}
@Override
public void visit(BLangWaitForAllExpr.BLangWaitLiteral waitLiteral) {
    // Already in desugared form; nothing to rewrite.
    result = waitLiteral;
}
@Override
public void visit(BLangXMLElementAccess xmlElementAccess) {
    // x.<name> is replaced by an internal getElements(x, expandedNames...) call.
    xmlElementAccess.expr = rewriteExpr(xmlElementAccess.expr);
    ArrayList<BLangExpression> filters = expandFilters(xmlElementAccess.filters);
    BLangInvocation invocationNode = createLanglibXMLInvocation(xmlElementAccess.pos, XML_INTERNAL_GET_ELEMENTS,
            xmlElementAccess.expr, new ArrayList<>(), filters);
    result = rewriteExpr(invocationNode);
}
// Expands each XML element filter name into a fully-qualified name literal,
// resolving declared namespace prefixes and the default namespace where possible.
private ArrayList<BLangExpression> expandFilters(List<BLangXMLElementFilter> filters) {
    Map<Name, BXMLNSSymbol> nameBXMLNSSymbolMap = symResolver.resolveAllNamespaces(env);
    BXMLNSSymbol defaultNSSymbol = nameBXMLNSSymbolMap.get(names.fromString(XMLConstants.DEFAULT_NS_PREFIX));
    String defaultNS = defaultNSSymbol == null ? null : defaultNSSymbol.namespaceURI;
    ArrayList<BLangExpression> expandedNames = new ArrayList<>();
    for (BLangXMLElementFilter filter : filters) {
        BSymbol nsSymbol = symResolver.lookupSymbolInPrefixSpace(env, names.fromString(filter.namespace));
        if (nsSymbol != symTable.notFoundSymbol) {
            // Explicit prefix resolved: expand against its namespace URI.
            String expandedName = createExpandedQName(((BXMLNSSymbol) nsSymbol).namespaceURI, filter.name);
            expandedNames.add(createStringLiteral(filter.elemNamePos, expandedName));
        } else if (defaultNS != null && !filter.name.equals("*")) {
            // No prefix, but a default namespace applies; wildcards stay untouched.
            String expandedName = createExpandedQName(defaultNS, filter.name);
            expandedNames.add(createStringLiteral(filter.elemNamePos, expandedName));
        } else {
            expandedNames.add(createStringLiteral(filter.elemNamePos, filter.name));
        }
    }
    return expandedNames;
}
// Builds an invocation of an xml lang-lib method on the given expression, passing
// the receiver as the first required argument plus the supplied args/rest args.
private BLangInvocation createLanglibXMLInvocation(Location pos, String functionName,
                                                   BLangExpression invokeOnExpr,
                                                   ArrayList<BLangExpression> args,
                                                   ArrayList<BLangExpression> restArgs) {
    invokeOnExpr = rewriteExpr(invokeOnExpr);
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = invokeOnExpr;
    // Resolve the lang-lib function symbol against the xml type.
    invocationNode.symbol = symResolver.lookupLangLibMethod(symTable.xmlType, names.fromString(functionName));
    // Receiver goes first in the required-args list, followed by the explicit args.
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(invokeOnExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.restArgs = rewriteExprs(restArgs);
    invocationNode.setBType(((BInvokableType) invocationNode.symbol.type).getReturnType());
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
@Override
public void visit(BLangXMLNavigationAccess xmlNavigation) {
    xmlNavigation.expr = rewriteExpr(xmlNavigation.expr);
    xmlNavigation.childIndex = rewriteExpr(xmlNavigation.childIndex);
    ArrayList<BLangExpression> filters = expandFilters(xmlNavigation.filters);
    switch (xmlNavigation.navAccessType) {
        case DESCENDANTS:
            // x/**/<name> -> selectDescendants(x, filters...)
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_SELECT_DESCENDANTS, xmlNavigation.expr, new ArrayList<>(), filters));
            break;
        case CHILDREN:
            // x/* -> children(x); filters do not apply here.
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos, XML_INTERNAL_CHILDREN,
                    xmlNavigation.expr, new ArrayList<>(), new ArrayList<>()));
            break;
        default:
            // Filtered child access; -1 stands for "no child index given".
            BLangExpression childIndexExpr = xmlNavigation.childIndex == null
                    ? new BLangLiteral(Long.valueOf(-1), symTable.intType)
                    : xmlNavigation.childIndex;
            ArrayList<BLangExpression> args = new ArrayList<>();
            args.add(rewriteExpr(childIndexExpr));
            result = rewriteExpr(createLanglibXMLInvocation(xmlNavigation.pos,
                    XML_INTERNAL_GET_FILTERED_CHILDREN_FLAT, xmlNavigation.expr, args, filters));
    }
}
@Override
public void visit(BLangIsAssignableExpr assignableExpr) {
    // Only the lhs expression needs desugaring.
    assignableExpr.lhsExpr = rewriteExpr(assignableExpr.lhsExpr);
    result = assignableExpr;
}
@Override
public void visit(BFunctionPointerInvocation fpInvocation) {
    // Already in desugared form; nothing to rewrite.
    result = fpInvocation;
}
@Override
public void visit(BLangTypedescExpr typedescExpr) {
    // Only the referenced type node needs desugaring.
    typedescExpr.typeNode = rewrite(typedescExpr.typeNode, env);
    result = typedescExpr;
}
@Override
public void visit(BLangIntRangeExpression intRangeExpression) {
    // Exclusive bounds are adjusted so the range is always treated as inclusive.
    if (!intRangeExpression.includeStart) {
        intRangeExpression.startExpr = getModifiedIntRangeStartExpr(intRangeExpression.startExpr);
    }
    intRangeExpression.startExpr = rewriteExpr(intRangeExpression.startExpr);
    if (!intRangeExpression.includeEnd) {
        intRangeExpression.endExpr = getModifiedIntRangeEndExpr(intRangeExpression.endExpr);
    }
    intRangeExpression.endExpr = rewriteExpr(intRangeExpression.endExpr);
    result = intRangeExpression;
}
@Override
public void visit(BLangRestArgsExpression bLangVarArgsExpression) {
    // The rest-args wrapper is replaced by its desugared inner expression.
    result = rewriteExpr(bLangVarArgsExpression.expr);
}
@Override
public void visit(BLangNamedArgsExpression bLangNamedArgsExpression) {
    // The named-arg wrapper is replaced by its desugared inner expression.
    bLangNamedArgsExpression.expr = rewriteExpr(bLangNamedArgsExpression.expr);
    result = bLangNamedArgsExpression.expr;
}
// Desugars a match expression into a block: a temp result variable, a match
// statement that assigns into it per pattern, and the temp var as the value.
@Override
public void visit(BLangMatchExpression bLangMatchExpression) {
    // Ensure there is a fall-through clause before lowering.
    addMatchExprDefaultCase(bLangMatchExpression);
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(bLangMatchExpression.pos, matchTempResultVarName,
                                          bLangMatchExpression.getBType(), null,
                                          new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                                         this.env.scope.owner.pkgID,
                                                         bLangMatchExpression.getBType(),
                                                         this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef =
            ASTBuilderUtil.createVariableDef(bLangMatchExpression.pos, tempResultVar);
    tempResultVarDef.desugared = true;
    BLangBlockStmt stmts = ASTBuilderUtil.createBlockStmt(bLangMatchExpression.pos, Lists.of(tempResultVarDef));
    List<BLangMatchTypedBindingPatternClause> patternClauses = new ArrayList<>();
    // Each expression clause becomes a statement clause assigning into the temp var.
    for (int i = 0; i < bLangMatchExpression.patternClauses.size(); i++) {
        BLangMatchExprPatternClause pattern = bLangMatchExpression.patternClauses.get(i);
        pattern.expr = rewriteExpr(pattern.expr);
        BLangVariableReference tempResultVarRef =
                ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
        pattern.expr = addConversionExprIfRequired(pattern.expr, tempResultVarRef.getBType());
        BLangAssignment assignmentStmt =
                ASTBuilderUtil.createAssignmentStmt(pattern.pos, tempResultVarRef, pattern.expr);
        BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(pattern.pos, Lists.of(assignmentStmt));
        patternClauses.add(ASTBuilderUtil.createMatchStatementPattern(pattern.pos, pattern.variable, patternBody));
    }
    stmts.addStatement(ASTBuilderUtil.createMatchStatement(bLangMatchExpression.pos, bLangMatchExpression.expr,
                                                           patternClauses));
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, tempResultVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(stmts, tempResultVarRef);
    statementExpr.setBType(bLangMatchExpression.getBType());
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangCheckedExpr checkedExpr) {
    // `check` — errors propagate to the caller (not a panic).
    visitCheckAndCheckPanicExpr(checkedExpr, false);
}
@Override
public void visit(BLangCheckPanickedExpr checkedExpr) {
    // `checkpanic` — errors cause a panic.
    visitCheckAndCheckPanicExpr(checkedExpr, true);
}
// Shared lowering for `check` and `checkpanic`: the expression is matched against
// a success pattern that captures the value into a temp var and an error pattern
// that either returns/propagates or panics, then the temp var is the result.
private void visitCheckAndCheckPanicExpr(BLangCheckedExpr checkedExpr, boolean isCheckPanic) {
    String checkedExprVarName = GEN_VAR_PREFIX.value;
    BLangSimpleVariable checkedExprVar =
            ASTBuilderUtil.createVariable(checkedExpr.pos, checkedExprVarName, checkedExpr.getBType(), null,
                                          new BVarSymbol(0, names.fromString(checkedExprVarName),
                                                         this.env.scope.owner.pkgID, checkedExpr.getBType(),
                                                         this.env.scope.owner, checkedExpr.pos, VIRTUAL));
    BLangSimpleVariableDef checkedExprVarDef = ASTBuilderUtil.createVariableDef(checkedExpr.pos, checkedExprVar);
    checkedExprVarDef.desugared = true;
    // Success: bind the non-error value to the temp variable.
    BLangMatchTypedBindingPatternClause patternSuccessCase =
            getSafeAssignSuccessPattern(checkedExprVar.pos, checkedExprVar.symbol.type, true,
                                        checkedExprVar.symbol, null);
    // Error: return the error (check) or panic (checkpanic).
    BLangMatchTypedBindingPatternClause patternErrorCase =
            getSafeAssignErrorPattern(checkedExpr.pos, this.env.enclInvokable.symbol,
                                      checkedExpr.equivalentErrorTypeList, isCheckPanic);
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(checkedExpr.pos, checkedExpr.expr,
                                                               new ArrayList<BLangMatchTypedBindingPatternClause>() {{
                                                                   add(patternSuccessCase);
                                                                   add(patternErrorCase);
                                                               }});
    BLangBlockStmt generatedStmtBlock = ASTBuilderUtil.createBlockStmt(checkedExpr.pos,
                                                                       new ArrayList<BLangStatement>() {{
                                                                           add(checkedExprVarDef);
                                                                           add(matchStmt);
                                                                       }});
    BLangSimpleVarRef tempCheckedExprVarRef = ASTBuilderUtil.createVariableRef(
            checkedExpr.pos, checkedExprVar.symbol);
    BLangStatementExpression statementExpr = createStatementExpression(
            generatedStmtBlock, tempCheckedExprVarRef);
    statementExpr.setBType(checkedExpr.getBType());
    result = rewriteExpr(statementExpr);
}
@Override
public void visit(BLangServiceConstructorExpr serviceConstructorExpr) {
    // A service constructor becomes a `new` expression over the generated service class.
    final BLangTypeInit typeInit = ASTBuilderUtil.createEmptyTypeInit(serviceConstructorExpr.pos,
            serviceConstructorExpr.serviceNode.serviceClass.symbol.type);
    serviceConstructorExpr.serviceNode.annAttachments.forEach(attachment ->  rewrite(attachment, env));
    result = rewriteExpr(typeInit);
}
@Override
public void visit(BLangTypeTestExpr typeTestExpr) {
    BLangExpression expr = typeTestExpr.expr;
    if (types.isValueType(expr.getBType())) {
        // NOTE(review): the converted expression returned here is discarded; if the
        // conversion is meant to take effect, the result should be assigned — verify.
        addConversionExprIfRequired(expr, symTable.anyType);
    }
    if (typeTestExpr.isNegation) {
        // `x !is T` is lowered to `!(x is T)`.
        BLangTypeTestExpr bLangTypeTestExpr = ASTBuilderUtil.createTypeTestExpr(typeTestExpr.pos,
                typeTestExpr.expr, typeTestExpr.typeNode);
        BLangGroupExpr bLangGroupExpr = (BLangGroupExpr) TreeBuilder.createGroupExpressionNode();
        bLangGroupExpr.expression = bLangTypeTestExpr;
        bLangGroupExpr.setBType(typeTestExpr.getBType());
        BLangUnaryExpr unaryExpr = ASTBuilderUtil.createUnaryExpr(typeTestExpr.pos, bLangGroupExpr,
                                                                  typeTestExpr.getBType(),
                                                                  OperatorKind.NOT, null);
        result = rewriteExpr(unaryExpr);
        return;
    }
    typeTestExpr.expr = rewriteExpr(expr);
    typeTestExpr.typeNode = rewrite(typeTestExpr.typeNode, env);
    result = typeTestExpr;
}
// Desugars an annotation access (expr.@annot) into a synthetic binary expression
// with the ANNOT_ACCESS operator, carrying the annotation's alias as the rhs.
@Override
public void visit(BLangAnnotAccessExpr annotAccessExpr) {
    BLangBinaryExpr binaryExpr = (BLangBinaryExpr) TreeBuilder.createBinaryExpressionNode();
    binaryExpr.pos = annotAccessExpr.pos;
    binaryExpr.opKind = OperatorKind.ANNOT_ACCESS;
    binaryExpr.lhsExpr = annotAccessExpr.expr;
    binaryExpr.rhsExpr = ASTBuilderUtil.createLiteral(annotAccessExpr.pkgAlias.pos, symTable.stringType,
                                                      annotAccessExpr.annotationSymbol.bvmAlias());
    binaryExpr.setBType(annotAccessExpr.getBType());
    // Synthetic operator symbol — ANNOT_ACCESS has no user-visible operator.
    binaryExpr.opSymbol = new BOperatorSymbol(names.fromString(OperatorKind.ANNOT_ACCESS.value()), null,
                                              new BInvokableType(Lists.of(binaryExpr.lhsExpr.getBType(),
                                                                          binaryExpr.rhsExpr.getBType()),
                                                                 annotAccessExpr.getBType(), null), null,
                                              symTable.builtinPos, VIRTUAL);
    result = rewriteExpr(binaryExpr);
}
@Override
public void visit(BLangIsLikeExpr isLikeExpr) {
    // Only the tested expression needs desugaring.
    isLikeExpr.expr = rewriteExpr(isLikeExpr.expr);
    result = isLikeExpr;
}
@Override
public void visit(BLangStatementExpression bLangStatementExpression) {
    // Desugar both the value expression and the preceding statement.
    bLangStatementExpression.expr = rewriteExpr(bLangStatementExpression.expr);
    bLangStatementExpression.stmt = rewrite(bLangStatementExpression.stmt, env);
    result = bLangStatementExpression;
}
@Override
public void visit(BLangQueryExpr queryExpr) {
    // Query expressions are lowered by the dedicated query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryExpr, env);
    result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangQueryAction queryAction) {
    // Query actions are lowered by the dedicated query desugar.
    BLangStatementExpression stmtExpr = queryDesugar.desugar(queryAction, env);
    result = rewrite(stmtExpr, env);
}
@Override
public void visit(BLangJSONArrayLiteral jsonArrayLiteral) {
    // Desugar each element expression of the JSON array.
    jsonArrayLiteral.exprs = rewriteExprs(jsonArrayLiteral.exprs);
    result = jsonArrayLiteral;
}
@Override
public void visit(BLangConstant constant) {
    BConstantSymbol constSymbol = constant.symbol;
    // Simple literal types (tags up to BOOLEAN, plus NIL) can be replaced by a plain literal
    // holding the already-resolved constant value; other types keep their original expression.
    if (constSymbol.literalType.tag <= TypeTags.BOOLEAN || constSymbol.literalType.tag == TypeTags.NIL) {
        // A non-nil simple constant must have a resolved value by this phase; anything else
        // indicates a compiler bug upstream.
        if (constSymbol.literalType.tag != TypeTags.NIL && (constSymbol.value == null ||
                constSymbol.value.value == null)) {
            throw new IllegalStateException();
        }
        BLangLiteral literal = ASTBuilderUtil.createLiteral(constant.expr.pos, constSymbol.literalType,
                constSymbol.value.value);
        constant.expr = rewriteExpr(literal);
    } else {
        constant.expr = rewriteExpr(constant.expr);
    }
    // Annotation attachments on the constant are desugared in place.
    constant.annAttachments.forEach(attachment ->  rewrite(attachment, env));
    result = constant;
}
@Override
public void visit(BLangIgnoreExpr ignoreExpr) {
    // Nothing to desugar for an ignore expression ('_' placeholder); pass it through.
    result = ignoreExpr;
}
@Override
public void visit(BLangDynamicArgExpr dynamicParamExpr) {
    // Desugar both parts of the dynamic (conditionally supplied) argument:
    // the argument value itself and the condition that decides whether it is passed.
    dynamicParamExpr.conditionalArgument = rewriteExpr(dynamicParamExpr.conditionalArgument);
    dynamicParamExpr.condition = rewriteExpr(dynamicParamExpr.condition);
    result = dynamicParamExpr;
}
@Override
public void visit(BLangConstRef constantRef) {
    // A constant reference desugars to a literal carrying the resolved value.
    BLangLiteral literal = ASTBuilderUtil.createLiteral(constantRef.pos, constantRef.getBType(),
                                                        constantRef.value);
    result = literal;
}
/**
 * Builds {@code var $iterator$ = collection.iterator()} for foreach-style desugaring.
 *
 * @param pos source position to stamp on the generated nodes
 * @param collectionSymbol symbol of the collection being iterated
 * @param iteratorInvokableSymbol symbol of the iterator() function to invoke
 * @param isIteratorFuncFromLangLib whether iterator() is a lang-lib method (affects call lowering)
 * @return the variable definition for the generated iterator variable
 */
BLangSimpleVariableDef getIteratorVariableDefinition(Location pos, BVarSymbol collectionSymbol,
                                                     BInvokableSymbol iteratorInvokableSymbol,
                                                     boolean isIteratorFuncFromLangLib) {
    BLangSimpleVarRef dataReference = ASTBuilderUtil.createVariableRef(pos, collectionSymbol);
    BLangInvocation iteratorInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    iteratorInvocation.pos = pos;
    iteratorInvocation.expr = dataReference;
    iteratorInvocation.symbol = iteratorInvokableSymbol;
    iteratorInvocation.setBType(iteratorInvokableSymbol.retType);
    // The receiver is also passed as the single argument of the call.
    iteratorInvocation.argExprs = Lists.of(dataReference);
    iteratorInvocation.requiredArgs = iteratorInvocation.argExprs;
    iteratorInvocation.langLibInvocation = isIteratorFuncFromLangLib;
    BVarSymbol iteratorSymbol = new BVarSymbol(0, names.fromString("$iterator$"), this.env.scope.owner.pkgID,
                                               iteratorInvokableSymbol.retType, this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable iteratorVariable = ASTBuilderUtil.createVariable(pos, "$iterator$",
            iteratorInvokableSymbol.retType, iteratorInvocation, iteratorSymbol);
    return ASTBuilderUtil.createVariableDef(pos, iteratorVariable);
}
/**
 * Builds {@code var $result$ = $iterator$.next()} — the variable definition that
 * captures the (nillable) record returned by one iterator step.
 */
BLangSimpleVariableDef getIteratorNextVariableDefinition(Location pos, BType nillableResultType,
                                                         BVarSymbol iteratorSymbol,
                                                         BVarSymbol resultSymbol) {
    BLangInvocation nextCall = createIteratorNextInvocation(pos, iteratorSymbol);
    BLangSimpleVariable resultVar =
            ASTBuilderUtil.createVariable(pos, "$result$", nillableResultType, nextCall, resultSymbol);
    return ASTBuilderUtil.createVariableDef(pos, resultVar);
}
/**
 * Builds {@code $result$ = $iterator$.next()} — the per-step assignment used inside a
 * desugared iteration loop.
 */
BLangAssignment getIteratorNextAssignment(Location pos,
                                          BVarSymbol iteratorSymbol, BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInAssignment = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangInvocation nextInvocation = createIteratorNextInvocation(pos, iteratorSymbol);
    // Inside the loop the iterator is known to be non-nil, so strip nil from the receiver's type.
    nextInvocation.expr.setBType(types.getSafeType(nextInvocation.expr.getBType(), true, false));
    return ASTBuilderUtil.createAssignmentStmt(pos, resultReferenceInAssignment, nextInvocation, false);
}
/**
 * Builds the invocation node {@code $iterator$.next()} against the iterator object's
 * attached {@code next} function.
 */
BLangInvocation createIteratorNextInvocation(Location pos, BVarSymbol iteratorSymbol) {
    BLangIdentifier nextIdentifier = ASTBuilderUtil.createIdentifier(pos, "next");
    BLangSimpleVarRef iteratorReferenceInNext = ASTBuilderUtil.createVariableRef(pos, iteratorSymbol);
    // The iterator's static type is an object; look up its attached next() function.
    BInvokableSymbol nextFuncSymbol = getNextFunc((BObjectType) iteratorSymbol.type).symbol;
    BLangInvocation nextInvocation = (BLangInvocation) TreeBuilder.createInvocationNode();
    nextInvocation.pos = pos;
    nextInvocation.name = nextIdentifier;
    nextInvocation.expr = iteratorReferenceInNext;
    // The receiver is also the single required argument of the lowered call.
    nextInvocation.requiredArgs = Lists.of(ASTBuilderUtil.createVariableRef(pos, iteratorSymbol));
    nextInvocation.argExprs = nextInvocation.requiredArgs;
    nextInvocation.symbol = nextFuncSymbol;
    nextInvocation.setBType(nextFuncSymbol.retType);
    return nextInvocation;
}
/**
 * Finds the attached function named "next" on the given iterator object type,
 * or returns null if the type has none.
 */
private BAttachedFunction getNextFunc(BObjectType iteratorType) {
    BObjectTypeSymbol objectSymbol = (BObjectTypeSymbol) iteratorType.tsymbol;
    return objectSymbol.attachedFuncs.stream()
            .filter(attachedFunc -> attachedFunc.funcName.value.equals("next"))
            .findFirst()
            .orElse(null);
}
/**
 * Builds {@code $result$.value} — shorthand for accessing the "value" field of an
 * iterator step result.
 */
BLangFieldBasedAccess getValueAccessExpression(Location location, BType varType,
                                               BVarSymbol resultSymbol) {
    return getFieldAccessExpression(location, "value", varType, resultSymbol);
}
/**
 * Builds a field access expression {@code <resultSymbol>.<fieldName>} with the given
 * static type.
 *
 * @param pos source position for the generated nodes
 * @param fieldName name of the field to access
 * @param varType static type to assign to the access expression
 * @param resultSymbol symbol of the variable being accessed
 */
BLangFieldBasedAccess getFieldAccessExpression(Location pos, String fieldName, BType varType,
                                               BVarSymbol resultSymbol) {
    BLangSimpleVarRef resultReferenceInVariableDef = ASTBuilderUtil.createVariableRef(pos, resultSymbol);
    BLangIdentifier valueIdentifier = ASTBuilderUtil.createIdentifier(pos, fieldName);
    BLangFieldBasedAccess fieldBasedAccessExpression =
            ASTBuilderUtil.createFieldAccessExpr(resultReferenceInVariableDef, valueIdentifier);
    fieldBasedAccessExpression.pos = pos;
    fieldBasedAccessExpression.setBType(varType);
    fieldBasedAccessExpression.originalType = fieldBasedAccessExpression.getBType();
    return fieldBasedAccessExpression;
}
/**
 * Converts an arrow function's expression body into a block body containing a single
 * return of that expression (arrow lambdas are lowered to ordinary functions).
 */
private BlockFunctionBodyNode populateArrowExprBodyBlock(BLangArrowFunction bLangArrowFunction) {
    BlockFunctionBodyNode blockNode = TreeBuilder.createBlockFunctionBodyNode();
    BLangReturn returnNode = (BLangReturn) TreeBuilder.createReturnNode();
    returnNode.pos = bLangArrowFunction.body.expr.pos;
    returnNode.setExpression(bLangArrowFunction.body.expr);
    blockNode.addStatement(returnNode);
    return blockNode;
}
/**
 * Builds an invocation of a function resolved from the root scope by name
 * (used for built-in/runtime helper calls generated during desugaring).
 *
 * @param functionName name to look up in the root scope
 * @param args required arguments of the call
 * @param retType static type of the invocation result
 */
private BLangInvocation createInvocationNode(String functionName, List<BLangExpression> args, BType retType) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    // Function is expected to exist in the root scope; no package qualification needed.
    invocationNode.symbol = symTable.rootScope.lookup(new Name(functionName)).symbol;
    invocationNode.setBType(retType);
    invocationNode.requiredArgs = args;
    return invocationNode;
}
/**
 * Builds a lang-lib method invocation {@code onExpr.functionName(args...)}, resolving the
 * method from the lang library of {@code onExpr}'s type. The receiver is prepended to the
 * required-argument list, matching lang-lib call lowering.
 *
 * @param functionName lang-lib method name
 * @param onExpr receiver expression (also first required arg)
 * @param args remaining arguments
 * @param retType result type; if null, the resolved symbol's return type is used
 * @param pos source position for the generated nodes
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    BLangExpression onExpr,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.expr = onExpr;
    invocationNode.symbol = symResolver.lookupLangLibMethod(onExpr.getBType(), names.fromString(functionName));
    ArrayList<BLangExpression> requiredArgs = new ArrayList<>();
    requiredArgs.add(onExpr);
    requiredArgs.addAll(args);
    invocationNode.requiredArgs = requiredArgs;
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an invocation of an internal lang-lib module function (no receiver): the symbol is
 * resolved from {@code lang.__internal} and all {@code args} become required arguments.
 *
 * @param functionName function name in the lang internal module
 * @param args required arguments of the call
 * @param retType result type; if null, the resolved symbol's return type is used
 * @param pos source position for the generated nodes
 */
private BLangInvocation createLangLibInvocationNode(String functionName,
                                                    List<BLangExpression> args,
                                                    BType retType,
                                                    Location pos) {
    BLangInvocation invocationNode = (BLangInvocation) TreeBuilder.createInvocationNode();
    invocationNode.pos = pos;
    BLangIdentifier name = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    name.setLiteral(false);
    name.setValue(functionName);
    name.pos = pos;
    invocationNode.name = name;
    invocationNode.pkgAlias = (BLangIdentifier) TreeBuilder.createIdentifierNode();
    invocationNode.symbol = symResolver.lookupMethodInModule(symTable.langInternalModuleSymbol,
                                                             names.fromString(functionName), env);
    // Idiom: copy-construct instead of creating an empty list and addAll-ing into it.
    invocationNode.requiredArgs = new ArrayList<>(args);
    invocationNode.setBType(retType != null ? retType : ((BInvokableSymbol) invocationNode.symbol).retType);
    invocationNode.langLibInvocation = true;
    return invocationNode;
}
/**
 * Builds an empty {@code any[]} array literal node, used as a scratch array during
 * rest-argument desugaring.
 */
private BLangArrayLiteral createArrayLiteralExprNode() {
    BLangArrayLiteral expr = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    expr.exprs = new ArrayList<>();
    expr.setBType(new BArrayType(symTable.anyType));
    return expr;
}
/**
 * Lowers an invocation through a function pointer: the callee is materialized either as a
 * simple var-ref (plain pointer) or a field access (pointer stored in a field), and the
 * result is wrapped in a BFunctionPointerInvocation.
 */
private void visitFunctionPointerInvocation(BLangInvocation iExpr) {
    BLangValueExpression expr;
    if (iExpr.expr == null) {
        // Plain function-pointer variable.
        expr = new BLangSimpleVarRef();
    } else {
        // Pointer held in a field of some expression: build <expr>.<name>.
        BLangFieldBasedAccess fieldBasedAccess = new BLangFieldBasedAccess();
        fieldBasedAccess.expr = iExpr.expr;
        fieldBasedAccess.field = iExpr.name;
        expr = fieldBasedAccess;
    }
    expr.symbol = iExpr.symbol;
    expr.setBType(iExpr.symbol.type);
    BLangExpression rewritten = rewriteExpr(expr);
    result = new BFunctionPointerInvocation(iExpr, rewritten);
}
/**
 * Wraps an expression in a {@code clone()} lang-lib call, converting the result to
 * {@code lhsType} if needed. Value types and errors are immutable-by-copy / by design,
 * so they are returned untouched.
 */
private BLangExpression visitCloneInvocation(BLangExpression expr, BType lhsType) {
    // No clone needed for value types or errors.
    if (types.isValueType(expr.getBType()) || expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneCall = createLangLibInvocationNode("clone", expr, new ArrayList<>(), null, expr.pos);
    return addConversionExprIfRequired(cloneCall, lhsType);
}
/**
 * Wraps an expression in a {@code cloneReadOnly()} lang-lib call, converting the result
 * to {@code lhsType} if needed. Value types and errors are returned untouched.
 */
private BLangExpression visitCloneReadonly(BLangExpression expr, BType lhsType) {
    // No readonly clone needed for value types or errors.
    if (types.isValueType(expr.getBType()) || expr.getBType().tag == TypeTags.ERROR) {
        return expr;
    }
    BLangInvocation cloneReadOnlyCall =
            createLangLibInvocationNode("cloneReadOnly", expr, new ArrayList<>(), expr.getBType(), expr.pos);
    return addConversionExprIfRequired(cloneReadOnlyCall, lhsType);
}
/**
 * Core rewrite driver: dispatches {@code node} to this visitor under {@code env},
 * marks the produced node as desugared (so it is never re-visited), and restores the
 * previous environment afterwards.
 */
@SuppressWarnings("unchecked")
<E extends BLangNode> E rewrite(E node, SymbolEnv env) {
    if (node == null) {
        return null;
    }
    // Already-lowered nodes are returned as-is to avoid double desugaring.
    if (node.desugared) {
        return node;
    }
    // Save/restore the environment around the visit so nested rewrites see the right scope.
    SymbolEnv previousEnv = this.env;
    this.env = env;
    node.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    this.env = previousEnv;
    return (E) resultNode;
}
/**
 * Expression rewrite driver. If the node carries an implicit conversion expression,
 * that wrapper (which contains the node) is rewritten instead, and the link is cleared
 * so the conversion is not applied twice.
 */
@SuppressWarnings("unchecked")
<E extends BLangExpression> E rewriteExpr(E node) {
    if (node == null) {
        return null;
    }
    // Already-lowered expressions are returned as-is.
    if (node.desugared) {
        return node;
    }
    BLangExpression expr = node;
    if (node.impConversionExpr != null) {
        // Visit the implicit-conversion wrapper; detach it to prevent re-application.
        expr = node.impConversionExpr;
        node.impConversionExpr = null;
    }
    expr.accept(this);
    BLangNode resultNode = this.result;
    this.result = null;
    resultNode.desugared = true;
    return (E) resultNode;
}
/**
 * Statement rewrite driver. Maintains the statement-link chain (used to splice
 * statements during desugaring) around the generic rewrite of the node.
 */
@SuppressWarnings("unchecked")
<E extends BLangStatement> E rewrite(E statement, SymbolEnv env) {
    if (statement == null) {
        return null;
    }
    // Push a new link for this statement onto the current chain.
    BLangStatementLink link = new BLangStatementLink();
    link.parent = currentLink;
    currentLink = link;
    BLangStatement stmt = (BLangStatement) rewrite((BLangNode) statement, env);
    // Connect the (possibly replaced) statement with its link, then pop the chain.
    link.statement = stmt;
    stmt.statementLink = link;
    currentLink = link.parent;
    return (E) stmt;
}
/**
 * Rewrites every statement of the list in place and returns the same list.
 */
private <E extends BLangStatement> List<E> rewriteStmt(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(stmt -> rewrite(stmt, env));
    return nodeList;
}
/**
 * Rewrites every node of the list in place and returns the same list.
 */
private <E extends BLangNode> List<E> rewrite(List<E> nodeList, SymbolEnv env) {
    nodeList.replaceAll(node -> rewrite(node, env));
    return nodeList;
}
/**
 * Rewrites every expression of the list in place and returns the same list.
 */
private <E extends BLangExpression> List<E> rewriteExprs(List<E> nodeList) {
    nodeList.replaceAll(this::rewriteExpr);
    return nodeList;
}
/**
 * Builds a string literal node with the given value at the given position.
 */
private BLangLiteral createStringLiteral(Location pos, String value) {
    BLangLiteral literal = new BLangLiteral(value, symTable.stringType);
    literal.pos = pos;
    return literal;
}
/**
 * Builds an int literal node (no position) with the given value.
 */
private BLangLiteral createIntLiteral(long value) {
    BLangLiteral intLiteral = (BLangLiteral) TreeBuilder.createLiteralExpression();
    intLiteral.setBType(symTable.intType);
    intLiteral.value = value;
    return intLiteral;
}
/**
 * Builds a byte literal node; the signed Java byte is widened to its unsigned int value.
 */
private BLangLiteral createByteLiteral(Location pos, Byte value) {
    BLangLiteral literal = new BLangLiteral(Byte.toUnsignedInt(value), symTable.byteType);
    literal.pos = pos;
    return literal;
}
/**
 * Wraps {@code expr} in a synthetic type-conversion to {@code targetType}; returns
 * {@code expr} unchanged when the types are already the same.
 */
private BLangExpression createTypeCastExpr(BLangExpression expr, BType targetType) {
    if (types.isSameType(expr.getBType(), targetType)) {
        return expr;
    }
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr) TreeBuilder.createTypeConversionNode();
    conversionExpr.pos = expr.pos;
    conversionExpr.expr = expr;
    conversionExpr.setBType(targetType);
    conversionExpr.targetType = targetType;
    // Mark as compiler-generated, not user-written.
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Strips array dimensions off a type: returns the innermost (non-array) element type,
 * or the type itself when it is not an array.
 */
private BType getElementType(BType type) {
    BType current = type;
    while (current.tag == TypeTags.ARRAY) {
        current = ((BArrayType) current).getElementType();
    }
    return current;
}
/**
 * Appends an implicit {@code return ();} to a block-bodied invokable whose return type is
 * nillable and whose body does not already end with a return. Native functions and
 * non-block bodies are skipped; workers disable the insertion as well.
 */
private void addReturnIfNotPresent(BLangInvokableNode invokableNode) {
    if (Symbols.isNative(invokableNode.symbol) ||
            (invokableNode.hasBody() && invokableNode.body.getKind() != NodeKind.BLOCK_FUNCTION_BODY)) {
        return;
    }
    BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) invokableNode.body;
    if (invokableNode.workers.size() == 0 && invokableNode.symbol.type.getReturnType().isNullable()
            && (funcBody.stmts.size() < 1 ||
            funcBody.stmts.get(funcBody.stmts.size() - 1).getKind() != NodeKind.RETURN)) {
        BLangReturn returnStmt;
        if (invokableNode.name.value.contains(GENERATED_INIT_SUFFIX.value)) {
            // Generated init functions get a position-less return.
            returnStmt = ASTBuilderUtil.createNilReturnStmt(null, symTable.nilType);
        } else {
            // Synthesize a zero-width position at the end line of the invokable so
            // diagnostics/debug info point somewhere sensible.
            Location invPos = invokableNode.pos;
            Location returnStmtPos = new BLangDiagnosticLocation(invPos.lineRange().filePath(),
                                                                 invPos.lineRange().endLine().line(),
                                                                 invPos.lineRange().endLine().line(),
                                                                 invPos.lineRange().startLine().offset(),
                                                                 invPos.lineRange().startLine().offset(), 0, 0);
            returnStmt = ASTBuilderUtil.createNilReturnStmt(returnStmtPos, symTable.nilType);
        }
        funcBody.addStatement(returnStmt);
    }
}
/**
 * Reorder the invocation arguments to match the original function signature.
 * <p>
 * Handles positional args, named args (via {@link #reorderNamedArgs}), and the three
 * rest-argument shapes: (1) individual rest args packed into an array literal,
 * (2) a single spread vararg (possibly also feeding required params, in which case the
 * surplus is copied element-by-element into a fresh rest array via a generated foreach),
 * and (3) a mix of individual rest args plus a trailing spread, merged with a generated
 * {@code push(...)} call.
 *
 * @param iExpr Function invocation expressions to reorder the arguments
 */
private void reorderArguments(BLangInvocation iExpr) {
    BSymbol symbol = iExpr.symbol;
    if (symbol == null || symbol.type.tag != TypeTags.INVOKABLE) {
        return;
    }
    BInvokableSymbol invokableSymbol = (BInvokableSymbol) symbol;
    List<BLangExpression> restArgs = iExpr.restArgs;
    int originalRequiredArgCount = iExpr.requiredArgs.size();
    // Temporary variable to hold the spread-vararg expression, so it is evaluated only once
    // even though it may supply multiple parameters.
    BLangSimpleVarRef varargRef = null;
    BLangBlockStmt blockStmt = null;
    BType varargVarType = null;
    int restArgCount = restArgs.size();
    if (restArgCount > 0 &&
            restArgs.get(restArgCount - 1).getKind() == NodeKind.REST_ARGS_EXPR &&
            originalRequiredArgCount < invokableSymbol.params.size()) {
        // A spread vararg exists and some required params are not filled positionally:
        // capture the vararg in a synthetic variable inside a block statement.
        BLangExpression expr = ((BLangRestArgsExpression) restArgs.get(restArgCount - 1)).expr;
        Location varargExpPos = expr.pos;
        varargVarType = expr.getBType();
        String varargVarName = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varargVarSymbol = new BVarSymbol(0, names.fromString(varargVarName), this.env.scope.owner.pkgID,
                                                    varargVarType, this.env.scope.owner, varargExpPos, VIRTUAL);
        varargRef = ASTBuilderUtil.createVariableRef(varargExpPos, varargVarSymbol);
        BLangSimpleVariable var = createVariable(varargExpPos, varargVarName, varargVarType, expr, varargVarSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(varargExpPos);
        varDef.var = var;
        varDef.setBType(varargVarType);
        blockStmt = createBlockStmt(varargExpPos);
        blockStmt.stmts.add(varDef);
    }
    if (!invokableSymbol.params.isEmpty()) {
        // Re-process the argument lists to match the required/defaultable parameters defined
        // at the function signature (named args, included records, vararg elements).
        reorderNamedArgs(iExpr, invokableSymbol, varargRef);
    }
    // Case (1): no trailing spread — pack any individual rest args into one array literal.
    if (restArgCount == 0 || restArgs.get(restArgCount - 1).getKind() != NodeKind.REST_ARGS_EXPR) {
        if (invokableSymbol.restParam == null) {
            return;
        }
        BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
        List<BLangExpression> exprs = new ArrayList<>();
        BArrayType arrayType = (BArrayType) invokableSymbol.restParam.type;
        BType elemType = arrayType.eType;
        for (BLangExpression restArg : restArgs) {
            exprs.add(addConversionExprIfRequired(restArg, elemType));
        }
        arrayLiteral.exprs = exprs;
        arrayLiteral.setBType(arrayType);
        if (restArgCount != 0) {
            iExpr.restArgs = new ArrayList<>();
        }
        iExpr.restArgs.add(arrayLiteral);
        return;
    }
    // Case (2): exactly one rest arg and it is a spread expression.
    if (restArgCount == 1 && restArgs.get(0).getKind() == NodeKind.REST_ARGS_EXPR) {
        if (iExpr.requiredArgs.size() == originalRequiredArgCount) {
            // The vararg feeds only the rest param; nothing further to desugar here.
            return;
        }
        // Args were added to the new array via the vararg. Hence the vararg-holding block
        // statement is attached to the first required arg evaluated from the vararg, so the
        // vararg variable is defined before any of its element accesses run.
        BLangExpression firstNonRestArg = iExpr.requiredArgs.remove(0);
        BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, firstNonRestArg);
        stmtExpression.setBType(firstNonRestArg.getBType());
        iExpr.requiredArgs.add(0, stmtExpression);
        // If there's no rest param, the vararg only provided for required/defaultable params.
        if (invokableSymbol.restParam == null) {
            restArgs.remove(0);
            return;
        }
        BLangRestArgsExpression restArgsExpression = (BLangRestArgsExpression) restArgs.remove(0);
        BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
        if (restArgsExpression.getBType().tag == TypeTags.RECORD) {
            // A record-typed spread can only fill named params; the rest param gets an empty array.
            BLangExpression expr = ASTBuilderUtil.createEmptyArrayLiteral(invokableSymbol.pos, restParamType);
            restArgs.add(expr);
            return;
        }
        // Build: var $newArray = []; foreach i in startIndex ..< vararg.length() {
        //            $newArray.push(vararg[i]); }  — copying the surplus vararg elements.
        Location pos = restArgsExpression.pos;
        BLangArrayLiteral newArrayLiteral = createArrayLiteralExprNode();
        newArrayLiteral.setBType(restParamType);
        String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
        BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID,
                                              restParamType, this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
        BLangSimpleVariable var = createVariable(pos, name, restParamType, newArrayLiteral, varSymbol);
        BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
        varDef.var = var;
        varDef.setBType(restParamType);
        // Skip the vararg elements that were consumed by required params.
        BLangLiteral startIndex = createIntLiteral(invokableSymbol.params.size() - originalRequiredArgCount);
        BLangInvocation lengthInvocation = createLengthInvocation(pos, varargRef);
        BLangInvocation intRangeInvocation = replaceWithIntRange(pos, startIndex,
                                                                 getModifiedIntRangeEndExpr(lengthInvocation));
        BLangForeach foreach = (BLangForeach) TreeBuilder.createForeachNode();
        foreach.pos = pos;
        foreach.collection = intRangeInvocation;
        types.setForeachTypedBindingPatternType(foreach);
        final BLangSimpleVariable foreachVariable = ASTBuilderUtil.createVariable(pos, "$foreach$i",
                                                                                  foreach.varType);
        foreachVariable.symbol = new BVarSymbol(0, names.fromIdNode(foreachVariable.name),
                                                this.env.scope.owner.pkgID, foreachVariable.getBType(),
                                                this.env.scope.owner, pos, VIRTUAL);
        BLangSimpleVarRef foreachVarRef = ASTBuilderUtil.createVariableRef(pos, foreachVariable.symbol);
        foreach.variableDefinitionNode = ASTBuilderUtil.createVariableDef(pos, foreachVariable);
        foreach.isDeclaredWithVar = true;
        BLangBlockStmt foreachBody = ASTBuilderUtil.createBlockStmt(pos);
        BLangIndexBasedAccess valueExpr = ASTBuilderUtil.createIndexAccessExpr(varargRef, foreachVarRef);
        if (varargVarType.tag == TypeTags.ARRAY) {
            BArrayType arrayType = (BArrayType) varargVarType;
            if (arrayType.state == BArrayState.CLOSED &&
                    arrayType.size == (iExpr.requiredArgs.size() - originalRequiredArgCount)) {
                // A closed array fully consumed by required params: the foreach never iterates,
                // so type the (dead) access by the rest param's element type.
                valueExpr.setBType(restParamType.eType);
            } else {
                valueExpr.setBType(arrayType.eType);
            }
        } else {
            // Tuple-typed vararg: individual member types aren't statically known per index.
            valueExpr.setBType(symTable.anyOrErrorType);
        }
        BLangExpression pushExpr = addConversionExprIfRequired(valueExpr, restParamType.eType);
        BLangExpressionStmt expressionStmt = createExpressionStmt(pos, foreachBody);
        BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                     List.of(pushExpr),
                                                                     restParamType, pos);
        // push() takes the value through its rest param, not as a required arg.
        pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
        expressionStmt.expr = pushInvocation;
        foreach.body = foreachBody;
        BLangBlockStmt newArrayBlockStmt = createBlockStmt(pos);
        newArrayBlockStmt.addStatement(varDef);
        newArrayBlockStmt.addStatement(foreach);
        BLangStatementExpression newArrayStmtExpression = createStatementExpression(newArrayBlockStmt, arrayVarRef);
        newArrayStmtExpression.setBType(restParamType);
        restArgs.add(addConversionExprIfRequired(newArrayStmtExpression, restParamType));
        return;
    }
    // Case (3): individual rest args followed by a trailing spread.
    // Create a new array of the individual args, then push the spread into it.
    BArrayType restParamType = (BArrayType) invokableSymbol.restParam.type;
    BLangArrayLiteral arrayLiteral = (BLangArrayLiteral) TreeBuilder.createArrayLiteralExpressionNode();
    arrayLiteral.setBType(restParamType);
    BType elemType = restParamType.eType;
    Location pos = restArgs.get(0).pos;
    List<BLangExpression> exprs = new ArrayList<>();
    for (int i = 0; i < restArgCount - 1; i++) {
        exprs.add(addConversionExprIfRequired(restArgs.get(i), elemType));
    }
    arrayLiteral.exprs = exprs;
    BLangRestArgsExpression pushRestArgsExpr = (BLangRestArgsExpression) TreeBuilder.createVarArgsNode();
    pushRestArgsExpr.pos = pos;
    pushRestArgsExpr.expr = restArgs.remove(restArgCount - 1);
    String name = DESUGARED_VARARG_KEY + UNDERSCORE + this.varargCount++;
    BVarSymbol varSymbol = new BVarSymbol(0, names.fromString(name), this.env.scope.owner.pkgID, restParamType,
                                          this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVarRef arrayVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    BLangSimpleVariable var = createVariable(pos, name, restParamType, arrayLiteral, varSymbol);
    BLangSimpleVariableDef varDef = ASTBuilderUtil.createVariableDef(pos);
    varDef.var = var;
    varDef.setBType(restParamType);
    BLangBlockStmt pushBlockStmt = createBlockStmt(pos);
    pushBlockStmt.stmts.add(varDef);
    BLangExpressionStmt expressionStmt = createExpressionStmt(pos, pushBlockStmt);
    BLangInvocation pushInvocation = createLangLibInvocationNode(PUSH_LANGLIB_METHOD, arrayVarRef,
                                                                 new ArrayList<BLangExpression>() {{
                                                                     add(pushRestArgsExpr);
                                                                 }}, restParamType, pos);
    pushInvocation.restArgs.add(pushInvocation.requiredArgs.remove(1));
    expressionStmt.expr = pushInvocation;
    BLangStatementExpression stmtExpression = createStatementExpression(pushBlockStmt, arrayVarRef);
    stmtExpression.setBType(restParamType);
    iExpr.restArgs = new ArrayList<BLangExpression>(1) {{ add(stmtExpression); }};
}
/**
 * Rebuilds {@code iExpr.requiredArgs} so each position matches the corresponding
 * declared parameter, filling each slot from (in order of preference): the positional
 * arg at that index, a matching named arg, a generated record literal for an included
 * record param, an ignore-expression when defaultable and no vararg, or an element/field
 * of the captured vararg. Leftover named args are folded into included-record literals.
 *
 * @param iExpr the invocation being desugared
 * @param invokableSymbol resolved callee symbol (provides declared params)
 * @param varargRef reference to the captured spread-vararg variable, or null
 */
private void reorderNamedArgs(BLangInvocation iExpr, BInvokableSymbol invokableSymbol, BLangExpression varargRef) {
    List<BLangExpression> args = new ArrayList<>();
    Map<String, BLangExpression> namedArgs = new LinkedHashMap<>();
    // Collect the named args by name; positional args keep their index.
    iExpr.requiredArgs.stream()
            .filter(expr -> expr.getKind() == NodeKind.NAMED_ARGS_EXPR)
            .forEach(expr -> namedArgs.put(((NamedArgNode) expr).getName().value, expr));
    List<BVarSymbol> params = invokableSymbol.params;
    List<BLangRecordLiteral> incRecordLiterals = new ArrayList<>();
    // The single included-record param (if any) whose record type allows additional fields;
    // surplus named args that match no declared field land here.
    BLangRecordLiteral incRecordParamAllowAdditionalFields = null;
    int varargIndex = 0;
    BType varargType = null;
    boolean tupleTypedVararg = false;
    if (varargRef != null) {
        varargType = varargRef.getBType();
        tupleTypedVararg = varargType.tag == TypeTags.TUPLE;
    }
    // Iterate over the required args.
    for (int i = 0; i < params.size(); i++) {
        BVarSymbol param = params.get(i);
        if (iExpr.requiredArgs.size() > i && iExpr.requiredArgs.get(i).getKind() != NodeKind.NAMED_ARGS_EXPR) {
            // If a positional arg is given in the same position, it will be used.
            args.add(iExpr.requiredArgs.get(i));
        } else if (namedArgs.containsKey(param.name.value)) {
            // Else check if named arg is given.
            args.add(namedArgs.remove(param.name.value));
        } else if (param.getFlags().contains(Flag.INCLUDED)) {
            // Included-record param: seed an empty record literal; fields are filled later
            // from leftover named args (see setFieldsForIncRecordLiterals).
            BLangRecordLiteral recordLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
            BType paramType = param.type;
            recordLiteral.setBType(paramType);
            args.add(recordLiteral);
            incRecordLiterals.add(recordLiteral);
            if (((BRecordType) paramType).restFieldType != symTable.noType) {
                incRecordParamAllowAdditionalFields = recordLiteral;
            }
        } else if (varargRef == null) {
            // Else create a dummy expression (the param's default applies at runtime).
            BLangExpression expr = new BLangIgnoreExpr();
            expr.setBType(param.type);
            args.add(expr);
        } else {
            // Fill the slot from the captured vararg.
            if (varargRef.getBType().tag == TypeTags.RECORD) {
                if (param.isDefaultable) {
                    // record.hasKey(name) ? record[name] : <ignore>  — only pass the value
                    // if the record actually supplies it.
                    BLangInvocation hasKeyInvocation = createLangLibInvocationNode(HAS_KEY, varargRef,
                            List.of(createStringLiteral(param.pos, param.name.value)), null, varargRef.pos);
                    BLangExpression indexExpr = rewriteExpr(createStringLiteral(param.pos, param.name.value));
                    BLangIndexBasedAccess memberAccessExpr =
                            ASTBuilderUtil.createMemberAccessExprNode(param.type, varargRef, indexExpr);
                    BLangExpression ignoreExpr = ASTBuilderUtil.createIgnoreExprNode(param.type);
                    BLangTernaryExpr ternaryExpr = ASTBuilderUtil.createTernaryExprNode(param.type,
                                                            hasKeyInvocation, memberAccessExpr, ignoreExpr);
                    args.add(ASTBuilderUtil.createDynamicParamExpression(hasKeyInvocation, ternaryExpr));
                } else {
                    BLangFieldBasedAccess fieldBasedAccessExpression =
                            ASTBuilderUtil.createFieldAccessExpr(varargRef,
                                    ASTBuilderUtil.createIdentifier(param.pos, param.name.value));
                    fieldBasedAccessExpression.setBType(param.type);
                    args.add(fieldBasedAccessExpression);
                }
            } else {
                // List-typed vararg: take the next element, converting to the param type.
                BLangExpression indexExpr = rewriteExpr(createIntLiteral(varargIndex));
                BType memberAccessExprType = tupleTypedVararg ?
                        ((BTupleType) varargType).tupleTypes.get(varargIndex) : ((BArrayType) varargType).eType;
                args.add(addConversionExprIfRequired(ASTBuilderUtil.createMemberAccessExprNode(memberAccessExprType,
                         varargRef, indexExpr), param.type));
                varargIndex++;
            }
        }
    }
    if (namedArgs.size() > 0) {
        // Route surplus named args into the included-record literals.
        setFieldsForIncRecordLiterals(namedArgs, incRecordLiterals, incRecordParamAllowAdditionalFields);
    }
    iExpr.requiredArgs = args;
}
/**
 * Distributes leftover named args into the included-record param literals: a named arg
 * goes into the first record literal that declares a (non-never) field of that name;
 * otherwise it becomes an additional field of the record literal that permits them.
 *
 * @param namedArgs named args not consumed by declared parameters
 * @param incRecordLiterals literals generated for included-record params
 * @param incRecordParamAllowAdditionalFields literal accepting additional (rest) fields, may be null
 */
private void setFieldsForIncRecordLiterals(Map<String, BLangExpression> namedArgs,
                                           List<BLangRecordLiteral> incRecordLiterals,
                                           BLangRecordLiteral incRecordParamAllowAdditionalFields) {
    for (String name : namedArgs.keySet()) {
        boolean isAdditionalField = true;
        BLangNamedArgsExpression expr = (BLangNamedArgsExpression) namedArgs.get(name);
        for (BLangRecordLiteral recordLiteral : incRecordLiterals) {
            LinkedHashMap<String, BField> fields = ((BRecordType) recordLiteral.getBType()).fields;
            // A 'never'-typed field cannot hold a value, so it doesn't count as a match.
            if (fields.containsKey(name) && fields.get(name).type.tag != TypeTags.NEVER) {
                isAdditionalField = false;
                createAndAddRecordFieldForIncRecordLiteral(recordLiteral, expr);
                break;
            }
        }
        if (isAdditionalField) {
            createAndAddRecordFieldForIncRecordLiteral(incRecordParamAllowAdditionalFields, expr);
        }
    }
}
/**
 * Appends a key-value field (key = the named arg's name, value = its expression) to
 * the given included-record literal.
 */
private void createAndAddRecordFieldForIncRecordLiteral(BLangRecordLiteral recordLiteral,
                                                        BLangNamedArgsExpression expr) {
    BLangSimpleVarRef varRef = new BLangSimpleVarRef();
    varRef.variableName = expr.name;
    BLangRecordLiteral.BLangRecordKeyValueField recordKeyValueField = ASTBuilderUtil.
            createBLangRecordKeyValue(varRef, expr.expr);
    recordLiteral.fields.add(recordKeyValueField);
}
/**
 * Builds the error-branch match pattern for a checked expression ({@code check}/
 * {@code checkpanic}): binds the error to a generated variable and either fails
 * (possibly with an early error return) or panics, depending on whether the enclosing
 * function can return the error and whether this is a checkpanic.
 *
 * @param location position for the generated nodes
 * @param invokableSymbol enclosing invokable (its return type decides return-on-error)
 * @param equivalentErrorTypes the error types the checked expression can produce
 * @param isCheckPanicExpr true for checkpanic (always panic)
 */
private BLangMatchTypedBindingPatternClause getSafeAssignErrorPattern(Location location,
                                                                      BSymbol invokableSymbol,
                                                                      List<BType> equivalentErrorTypes,
                                                                      boolean isCheckPanicExpr) {
    // From here onwards we assume that this function has only one return type.
    // Either the return type is T or T|error.
    BType enclosingFuncReturnType = ((BInvokableType) invokableSymbol.type).retType;
    Set<BType> returnTypeSet = enclosingFuncReturnType.tag == TypeTags.UNION ?
            ((BUnionType) enclosingFuncReturnType).getMemberTypes() :
            new LinkedHashSet<BType>() {{
                add(enclosingFuncReturnType);
            }};
    // Return-on-error only if every possible error is assignable to some return-type member.
    boolean returnOnError = equivalentErrorTypes.stream()
            .allMatch(errorType -> returnTypeSet.stream()
                    .anyMatch(retType -> types.isAssignable(errorType, retType)));
    // Generated variable that captures the matched error value.
    String patternFailureCaseVarName = GEN_VAR_PREFIX.value + "t_failure";
    BLangSimpleVariable patternFailureCaseVar =
            ASTBuilderUtil.createVariable(location, patternFailureCaseVarName, symTable.errorType, null,
                                          new BVarSymbol(0, names.fromString(patternFailureCaseVarName),
                                                         this.env.scope.owner.pkgID, symTable.errorType,
                                                         this.env.scope.owner, location, VIRTUAL));
    BLangVariableReference patternFailureCaseVarRef =
            ASTBuilderUtil.createVariableRef(location, patternFailureCaseVar.symbol);
    BLangBlockStmt patternBlockFailureCase = (BLangBlockStmt) TreeBuilder.createBlockNode();
    patternBlockFailureCase.pos = location;
    if (!isCheckPanicExpr && (returnOnError || this.onFailClause != null)) {
        // Fail the error; optionally attach an immediate error return when the function
        // can (and should) propagate it.
        BLangFail failStmt = (BLangFail) TreeBuilder.createFailNode();
        failStmt.pos = location;
        failStmt.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(failStmt);
        if (returnOnError && this.shouldReturnErrors) {
            BLangReturn errorReturn = ASTBuilderUtil.createReturnStmt(location,
                                                                      rewrite(patternFailureCaseVarRef, env));
            errorReturn.desugared = true;
            failStmt.exprStmt = errorReturn;
        }
    } else {
        // Panic with the error (checkpanic, or error not propagatable).
        BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
        panicNode.pos = location;
        panicNode.expr = patternFailureCaseVarRef;
        patternBlockFailureCase.stmts.add(panicNode);
    }
    return ASTBuilderUtil.createMatchStatementPattern(location, patternFailureCaseVar, patternBlockFailureCase);
}
/**
 * Builds the success-branch match pattern for a checked expression: binds the non-error
 * value to a generated variable and assigns it to the target (either a new variable's
 * symbol or an existing LHS expression).
 *
 * @param location position for the generated nodes
 * @param lhsType static type of the success value
 * @param isVarDef true when assigning into a freshly defined variable (use varSymbol)
 * @param varSymbol target variable symbol when isVarDef is true
 * @param lhsExpr target expression when isVarDef is false
 */
private BLangMatchTypedBindingPatternClause getSafeAssignSuccessPattern(Location location,
        BType lhsType, boolean isVarDef, BVarSymbol varSymbol, BLangExpression lhsExpr) {
    // Generated variable that captures the matched success value.
    String patternSuccessCaseVarName = GEN_VAR_PREFIX.value + "t_match";
    BLangSimpleVariable patternSuccessCaseVar =
            ASTBuilderUtil.createVariable(location, patternSuccessCaseVarName, lhsType, null,
                                          new BVarSymbol(0, names.fromString(patternSuccessCaseVarName),
                                                         this.env.scope.owner.pkgID, lhsType,
                                                         this.env.scope.owner, location, VIRTUAL));
    BLangExpression varRefExpr;
    if (isVarDef) {
        varRefExpr = ASTBuilderUtil.createVariableRef(location, varSymbol);
    } else {
        varRefExpr = lhsExpr;
    }
    // <target> = <t_match>;
    BLangVariableReference patternSuccessCaseVarRef = ASTBuilderUtil.createVariableRef(location,
                                                                                       patternSuccessCaseVar.symbol);
    BLangAssignment assignmentStmtSuccessCase = ASTBuilderUtil.createAssignmentStmt(location,
            varRefExpr, patternSuccessCaseVarRef, false);
    BLangBlockStmt patternBlockSuccessCase = ASTBuilderUtil.createBlockStmt(location,
            new ArrayList<BLangStatement>() {{
                add(assignmentStmtSuccessCase);
            }});
    return ASTBuilderUtil.createMatchStatementPattern(location,
            patternSuccessCaseVar, patternBlockSuccessCase);
}
/**
 * Lowers a whole match statement into a chain of if/else-if statements, one branch per
 * pattern clause; a trailing "last pattern" becomes the final else block.
 *
 * @param matchStmt the match statement being desugared
 * @param matchExprVar generated variable holding the matched expression's value
 * @return the head of the generated if/else chain
 */
private BLangStatement generateIfElseStmt(BLangMatch matchStmt, BLangSimpleVariable matchExprVar) {
    List<BLangMatchBindingPatternClause> patterns = matchStmt.patternClauses;
    BLangIf parentIfNode = generateIfElseStmt(patterns.get(0), matchExprVar);
    BLangIf currentIfNode = parentIfNode;
    for (int i = 1; i < patterns.size(); i++) {
        BLangMatchBindingPatternClause patternClause = patterns.get(i);
        if (i == patterns.size() - 1 && patternClause.isLastPattern) {
            // A guaranteed-match final pattern needs no condition: plain else body.
            currentIfNode.elseStmt = getMatchPatternElseBody(patternClause, matchExprVar);
        } else {
            // Chain the next pattern as an else-if.
            currentIfNode.elseStmt = generateIfElseStmt(patternClause, matchExprVar);
            currentIfNode = (BLangIf) currentIfNode.elseStmt;
        }
    }
    return parentIfNode;
}
/**
 * Generate an if-else statement from the given match statement.
 * <p>
 * For a typed pattern, the body simply binds the matched value. For a structured
 * (binding) pattern, the matched value is cast if needed, bound through the generated
 * binding-variable definitions, and any type-guard expression is folded into the if
 * condition via a statement-expression.
 *
 * @param pattern match pattern statement node
 * @param matchExprVar variable node of the match expression
 * @return if else statement node
 */
private BLangIf generateIfElseStmt(BLangMatchBindingPatternClause pattern, BLangSimpleVariable matchExprVar) {
    BLangExpression ifCondition = createPatternIfCondition(pattern, matchExprVar.symbol);
    if (NodeKind.MATCH_TYPED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangBlockStmt patternBody = getMatchPatternBody(pattern, matchExprVar);
        return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, patternBody, null);
    }
    // Cast matched expression into matched type.
    BType expectedType = matchExprVar.getBType();
    if (pattern.getKind() == NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE) {
        BLangMatchStructuredBindingPatternClause matchPattern = (BLangMatchStructuredBindingPatternClause) pattern;
        expectedType = getStructuredBindingPatternType(matchPattern.bindingPatternVariable);
    }
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE == pattern.getKind()) {
        BLangMatchStructuredBindingPatternClause structuredPattern =
                (BLangMatchStructuredBindingPatternClause) pattern;
        // Cast the matched value (if needed) into a fresh variable and destructure from it.
        BLangSimpleVariableDef varDef = forceCastIfApplicable(matchExprVar.symbol, pattern.pos, expectedType);
        BLangSimpleVarRef matchExprVarRef = ASTBuilderUtil.createVariableRef(pattern.pos, varDef.var.symbol);
        structuredPattern.bindingPatternVariable.expr = matchExprVarRef;
        BLangStatement varDefStmt;
        if (NodeKind.TUPLE_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.RECORD_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
        } else if (NodeKind.ERROR_VARIABLE == structuredPattern.bindingPatternVariable.getKind()) {
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
        } else {
            varDefStmt = ASTBuilderUtil
                    .createVariableDef(pattern.pos, (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
        }
        if (structuredPattern.typeGuardExpr != null) {
            // Evaluate the bindings inside a statement-expression whose value is the guard,
            // then AND it with the pattern condition so the guard sees the bound names.
            BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(structuredPattern.pos);
            blockStmt.addStatement(varDef);
            blockStmt.addStatement(varDefStmt);
            BLangStatementExpression stmtExpr = createStatementExpression(blockStmt,
                                                                          structuredPattern.typeGuardExpr);
            stmtExpr.setBType(symTable.booleanType);
            ifCondition = ASTBuilderUtil
                    .createBinaryExpr(pattern.pos, ifCondition, stmtExpr, symTable.booleanType, OperatorKind.AND,
                                      (BOperatorSymbol) symResolver
                                              .resolveBinaryOperator(OperatorKind.AND, symTable.booleanType,
                                                                     symTable.booleanType));
        } else {
            // No guard: prepend the binding definitions to the pattern body.
            structuredPattern.body.stmts.add(0, varDef);
            structuredPattern.body.stmts.add(1, varDefStmt);
        }
    }
    return ASTBuilderUtil.createIfElseStmt(pattern.pos, ifCondition, pattern.body, null);
}
/**
 * Prepends, to a typed-binding-pattern clause's body, a variable definition that
 * binds the matched value (converted to the pattern's type) to the pattern variable.
 * The '_' (ignore) pattern binds nothing, so its body is returned unchanged.
 */
private BLangBlockStmt getMatchPatternBody(BLangMatchBindingPatternClause pattern,
                                           BLangSimpleVariable matchExprVar) {
    BLangMatchTypedBindingPatternClause typedClause = (BLangMatchTypedBindingPatternClause) pattern;
    if (typedClause.variable.name.value.equals(Names.IGNORE.value)) {
        // Wildcard pattern: no binding variable to introduce.
        return typedClause.body;
    }
    BLangSimpleVarRef matchedValueRef = ASTBuilderUtil.createVariableRef(typedClause.pos, matchExprVar.symbol);
    BLangExpression convertedValue =
            addConversionExprIfRequired(matchedValueRef, typedClause.variable.getBType());
    BLangSimpleVariable boundVar =
            ASTBuilderUtil.createVariable(typedClause.pos, "", typedClause.variable.getBType(),
                                          convertedValue, typedClause.variable.symbol);
    typedClause.body.stmts.add(0, ASTBuilderUtil.createVariableDef(boundVar.pos, boundVar));
    return typedClause.body;
}
/**
 * Returns the body to use for the else/last pattern clause. For structured
 * binding patterns, first wires the matched value into the binding pattern and
 * prepends the corresponding variable definition to the clause body.
 */
private BLangBlockStmt getMatchPatternElseBody(BLangMatchBindingPatternClause pattern,
                                               BLangSimpleVariable matchExprVar) {
    if (NodeKind.MATCH_STRUCTURED_PATTERN_CLAUSE != pattern.getKind()) {
        // Non-structured clauses need no extra binding statement.
        return pattern.body;
    }
    BLangMatchStructuredBindingPatternClause structuredPattern =
            (BLangMatchStructuredBindingPatternClause) pattern;
    structuredPattern.bindingPatternVariable.expr =
            ASTBuilderUtil.createVariableRef(pattern.pos, matchExprVar.symbol);

    // Build the variable-definition statement matching the binding pattern's shape.
    BLangStatement varDefStmt;
    switch (structuredPattern.bindingPatternVariable.getKind()) {
        case TUPLE_VARIABLE:
            varDefStmt = ASTBuilderUtil.createTupleVariableDef(pattern.pos,
                    (BLangTupleVariable) structuredPattern.bindingPatternVariable);
            break;
        case RECORD_VARIABLE:
            varDefStmt = ASTBuilderUtil.createRecordVariableDef(pattern.pos,
                    (BLangRecordVariable) structuredPattern.bindingPatternVariable);
            break;
        case ERROR_VARIABLE:
            varDefStmt = ASTBuilderUtil.createErrorVariableDef(pattern.pos,
                    (BLangErrorVariable) structuredPattern.bindingPatternVariable);
            break;
        default:
            varDefStmt = ASTBuilderUtil.createVariableDef(pattern.pos,
                    (BLangSimpleVariable) structuredPattern.bindingPatternVariable);
            break;
    }
    structuredPattern.body.stmts.add(0, varDefStmt);
    return structuredPattern.body;
}
/**
 * Wraps {@code expr} in a compiler-internal type-conversion node targeting
 * {@code lhsType}, unless one of the early-return guards shows a conversion is
 * unnecessary. Guard order matters: setImplicitCastExpr may attach an implicit
 * conversion as a side effect before the remaining checks run.
 */
BLangExpression addConversionExprIfRequired(BLangExpression expr, BType lhsType) {
    // No expected type recorded — nothing to convert to.
    if (lhsType.tag == TypeTags.NONE) {
        return expr;
    }
    BType rhsType = expr.getBType();
    // Types already identical — no conversion needed.
    if (types.isSameType(rhsType, lhsType)) {
        return expr;
    }
    // May attach an implicit cast on the node itself; if it did, we are done.
    types.setImplicitCastExpr(expr, rhsType, lhsType);
    if (expr.impConversionExpr != null) {
        return expr;
    }
    // nil flowing into json needs no explicit conversion node.
    if (lhsType.tag == TypeTags.JSON && rhsType.tag == TypeTags.NIL) {
        return expr;
    }
    // Nullable rhs flowing into nil needs no explicit conversion node.
    if (lhsType.tag == TypeTags.NIL && rhsType.isNullable()) {
        return expr;
    }
    // Tuple flowing into array is handled without a conversion node here.
    if (lhsType.tag == TypeTags.ARRAY && rhsType.tag == TypeTags.TUPLE) {
        return expr;
    }
    // Otherwise wrap in an unchecked, internal conversion node.
    BLangTypeConversionExpr conversionExpr = (BLangTypeConversionExpr)
            TreeBuilder.createTypeConversionNode();
    conversionExpr.expr = expr;
    conversionExpr.targetType = lhsType;
    conversionExpr.setBType(lhsType);
    conversionExpr.pos = expr.pos;
    conversionExpr.checkTypes = false;  // skip re-checking; types were validated earlier
    conversionExpr.internal = true;
    return conversionExpr;
}
/**
 * Builds the boolean condition that tests whether the matched value (referred
 * to by {@code varSymbol}) matches the given pattern clause. For a union
 * pattern type, one match-test per member type is OR-ed together.
 *
 * Improvement: the original built the first OR pair with separate, duplicated
 * code (and with operand order inverted relative to the loop); this version
 * uses a single left-fold. The member tests are side-effect-free boolean
 * expressions over the same variable, so the OR result is unchanged.
 */
private BLangExpression createPatternIfCondition(BLangMatchBindingPatternClause patternClause,
                                                 BVarSymbol varSymbol) {
    // Derive the type the pattern matches against.
    BType patternType;
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            patternType = ((BLangMatchStaticBindingPatternClause) patternClause).literal.getBType();
            break;
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            patternType = getStructuredBindingPatternType(
                    ((BLangMatchStructuredBindingPatternClause) patternClause).bindingPatternVariable);
            break;
        default:
            patternType = ((BLangMatchTypedBindingPatternClause) patternClause).variable.getBType();
            break;
    }

    // Expand a union into its members; otherwise test the single type.
    BType[] memberTypes = patternType.tag == TypeTags.UNION
            ? ((BUnionType) patternType).getMemberTypes().toArray(new BType[0])
            : new BType[]{patternType};

    // Fold the per-member tests into one OR chain.
    BLangExpression condition = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[0]);
    for (int i = 1; i < memberTypes.length; i++) {
        BLangExpression memberTest = createPatternMatchBinaryExpr(patternClause, varSymbol, memberTypes[i]);
        condition = ASTBuilderUtil.createBinaryExpr(patternClause.pos, condition, memberTest,
                symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.OR,
                        condition.getBType(), memberTest.getBType()));
    }
    return condition;
}
/**
 * Computes (and, for record/error patterns, synthesizes and registers) the type
 * matched by a structured binding pattern, recursing into nested patterns.
 * Anonymous record and error types created here are added to the package's
 * type definitions so later phases can see them.
 */
private BType getStructuredBindingPatternType(BLangVariable bindingPatternVariable) {
    if (NodeKind.TUPLE_VARIABLE == bindingPatternVariable.getKind()) {
        // Tuple pattern: member types come from the nested patterns; a rest
        // pattern contributes its array's element type as the tuple rest type.
        BLangTupleVariable tupleVariable = (BLangTupleVariable) bindingPatternVariable;
        List<BType> memberTypes = new ArrayList<>();
        for (int i = 0; i < tupleVariable.memberVariables.size(); i++) {
            memberTypes.add(getStructuredBindingPatternType(tupleVariable.memberVariables.get(i)));
        }
        BTupleType tupleType = new BTupleType(memberTypes);
        if (tupleVariable.restVariable != null) {
            BArrayType restArrayType = (BArrayType) getStructuredBindingPatternType(tupleVariable.restVariable);
            tupleType.restType = restArrayType.eType;
        }
        return tupleType;
    }
    if (NodeKind.RECORD_VARIABLE == bindingPatternVariable.getKind()) {
        // Record pattern: synthesize an anonymous record type with one required
        // field per binding entry, plus an init function and a type definition.
        BLangRecordVariable recordVariable = (BLangRecordVariable) bindingPatternVariable;
        BRecordTypeSymbol recordSymbol =
                Symbols.createRecordSymbol(0, names.fromString("$anonRecordType$" + UNDERSCORE + recordCount++),
                                           env.enclPkg.symbol.pkgID, null, env.scope.owner, recordVariable.pos,
                                           VIRTUAL);
        recordSymbol.initializerFunc = createRecordInitFunc();
        recordSymbol.scope = new Scope(recordSymbol);
        recordSymbol.scope.define(
                names.fromString(recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value),
                recordSymbol.initializerFunc.symbol);
        LinkedHashMap<String, BField> fields = new LinkedHashMap<>();
        List<BLangSimpleVariable> typeDefFields = new ArrayList<>();
        for (int i = 0; i < recordVariable.variableList.size(); i++) {
            String fieldNameStr = recordVariable.variableList.get(i).key.value;
            Name fieldName = names.fromString(fieldNameStr);
            // Field type recurses into the value binding pattern.
            BType fieldType = getStructuredBindingPatternType(
                    recordVariable.variableList.get(i).valueBindingPattern);
            BVarSymbol fieldSymbol = new BVarSymbol(Flags.REQUIRED, fieldName, env.enclPkg.symbol.pkgID, fieldType,
                                                    recordSymbol, bindingPatternVariable.pos, VIRTUAL);
            fields.put(fieldName.value, new BField(fieldName, bindingPatternVariable.pos, fieldSymbol));
            typeDefFields.add(ASTBuilderUtil.createVariable(null, fieldNameStr, fieldType, null, fieldSymbol));
            recordSymbol.scope.define(fieldName, fieldSymbol);
        }
        BRecordType recordVarType = new BRecordType(recordSymbol);
        recordVarType.fields = fields;
        // Rest-field type comes from the rest pattern when present; otherwise
        // the pattern is open over anydata.
        recordVarType.restFieldType = recordVariable.restParam != null ?
                ((BRecordType) ((BLangSimpleVariable) recordVariable.restParam).getBType()).restFieldType :
                symTable.anydataType;
        recordSymbol.type = recordVarType;
        recordVarType.tsymbol = recordSymbol;
        BLangRecordTypeNode recordTypeNode = TypeDefBuilderHelper.createRecordTypeNode(typeDefFields,
                                                                                       recordVarType,
                                                                                       bindingPatternVariable.pos);
        recordTypeNode.initFunction =
                rewrite(TypeDefBuilderHelper.createInitFunctionForRecordType(recordTypeNode, env, names, symTable),
                        env);
        TypeDefBuilderHelper.addTypeDefinition(recordVarType, recordSymbol, recordTypeNode, env);
        return recordVarType;
    }
    if (NodeKind.ERROR_VARIABLE == bindingPatternVariable.getKind()) {
        // Error pattern: synthesize an anonymous error type whose detail type is
        // either the generic detail type (rest-only pattern) or a fresh record.
        BLangErrorVariable errorVariable = (BLangErrorVariable) bindingPatternVariable;
        BErrorTypeSymbol errorTypeSymbol = new BErrorTypeSymbol(
                SymTag.ERROR,
                Flags.PUBLIC,
                names.fromString("$anonErrorType$" + UNDERSCORE + errorCount++),
                env.enclPkg.symbol.pkgID,
                null, null, errorVariable.pos, VIRTUAL);
        BType detailType;
        if ((errorVariable.detail == null || errorVariable.detail.isEmpty()) && errorVariable.restDetail != null) {
            detailType = symTable.detailType;
        } else {
            detailType = createDetailType(errorVariable.detail, errorVariable.restDetail, errorCount++,
                                          errorVariable.pos);
            BLangRecordTypeNode recordTypeNode = createRecordTypeNode(errorVariable, (BRecordType) detailType);
            recordTypeNode.initFunction = TypeDefBuilderHelper
                    .createInitFunctionForRecordType(recordTypeNode, env, names, symTable);
            TypeDefBuilderHelper.addTypeDefinition(detailType, detailType.tsymbol, recordTypeNode, env);
        }
        BErrorType errorType = new BErrorType(errorTypeSymbol, detailType);
        errorTypeSymbol.type = errorType;
        TypeDefBuilderHelper.addTypeDefinition(errorType, errorTypeSymbol, createErrorTypeNode(errorType), env);
        return errorType;
    }
    // Simple variable pattern: its resolved type is the pattern type.
    return bindingPatternVariable.getBType();
}
/**
 * Builds an AST record-type node mirroring an error pattern's detail record,
 * with one field per detail binding entry. Entries whose binding pattern has no
 * symbol get a synthesized one typed as pure.
 */
private BLangRecordTypeNode createRecordTypeNode(BLangErrorVariable errorVariable, BRecordType detailType) {
    List<BLangSimpleVariable> detailFields = new ArrayList<>();
    for (BLangErrorVariable.BLangErrorDetailEntry entry : errorVariable.detail) {
        BVarSymbol fieldSymbol = entry.valueBindingPattern.symbol;
        if (fieldSymbol == null) {
            // No symbol bound yet — synthesize a public one named "<key>$".
            fieldSymbol = new BVarSymbol(Flags.PUBLIC, names.fromString(entry.key.value + "$"),
                                         this.env.enclPkg.packageID, symTable.pureType, null,
                                         entry.valueBindingPattern.pos, VIRTUAL);
        }
        detailFields.add(ASTBuilderUtil.createVariable(entry.valueBindingPattern.pos,
                                                       fieldSymbol.name.value,
                                                       entry.valueBindingPattern.getBType(),
                                                       entry.valueBindingPattern.expr,
                                                       fieldSymbol));
    }
    return TypeDefBuilderHelper.createRecordTypeNode(detailFields, detailType, errorVariable.pos);
}
/**
 * Creates the record type describing an error pattern's detail mapping. The
 * record is sealed when there is no rest binding (no additional detail fields
 * may appear); each entry becomes a public field whose type recurses into the
 * entry's value binding pattern.
 */
private BType createDetailType(List<BLangErrorVariable.BLangErrorDetailEntry> detail,
                               BLangSimpleVariable restDetail, int errorNo, Location pos) {
    BRecordType detailRecordType = createAnonRecordType(pos);
    if (restDetail == null) {
        // No rest binding: the detail shape is closed.
        detailRecordType.sealed = true;
    }
    for (BLangErrorVariable.BLangErrorDetailEntry entry : detail) {
        Name fieldName = names.fromIdNode(entry.key);
        BType fieldType = getStructuredBindingPatternType(entry.valueBindingPattern);
        BVarSymbol fieldSymbol = new BVarSymbol(Flags.PUBLIC, fieldName, detailRecordType.tsymbol.pkgID,
                                                fieldType, detailRecordType.tsymbol, entry.key.pos, VIRTUAL);
        detailRecordType.fields.put(fieldName.value, new BField(fieldName, entry.key.pos, fieldSymbol));
        detailRecordType.tsymbol.scope.define(fieldName, fieldSymbol);
    }
    return detailRecordType;
}
/**
 * Synthesizes an open anonymous record type together with its type symbol,
 * scope, and init function; rest fields default to anydata.
 */
private BRecordType createAnonRecordType(Location pos) {
    Name anonName = names.fromString(anonModelHelper.getNextRecordVarKey(env.enclPkg.packageID));
    BRecordTypeSymbol recordSymbol = new BRecordTypeSymbol(SymTag.RECORD, Flags.PUBLIC, anonName,
                                                           env.enclPkg.symbol.pkgID, null, null, pos, VIRTUAL);
    recordSymbol.initializerFunc = createRecordInitFunc();
    recordSymbol.scope = new Scope(recordSymbol);
    // Register the init function under "<typeName>.<initName>".
    Name initFuncName = names.fromString(
            recordSymbol.name.value + "." + recordSymbol.initializerFunc.funcName.value);
    recordSymbol.scope.define(initFuncName, recordSymbol.initializerFunc.symbol);

    BRecordType recordType = new BRecordType(recordSymbol);
    recordType.restFieldType = symTable.anydataType;
    return recordType;
}
/**
 * Creates a no-arg attached init function of type "function () returns ()"
 * for a synthesized record type.
 */
private BAttachedFunction createRecordInitFunc() {
    BInvokableType initFuncType = new BInvokableType(new ArrayList<>(), symTable.nilType, null);
    BInvokableSymbol initFuncSymbol = Symbols.createFunctionSymbol(Flags.PUBLIC, Names.EMPTY, Names.EMPTY,
            env.enclPkg.symbol.pkgID, initFuncType, env.scope.owner, false, symTable.builtinPos, VIRTUAL);
    initFuncSymbol.retType = symTable.nilType;
    return new BAttachedFunction(Names.INIT_FUNCTION_SUFFIX, initFuncSymbol, initFuncType,
                                 symTable.builtinPos);
}
/** Wraps a resolved error type in a fresh AST error-type node. */
BLangErrorType createErrorTypeNode(BErrorType errorType) {
    BLangErrorType node = (BLangErrorType) TreeBuilder.createErrorTypeNode();
    node.setBType(errorType);
    return node;
}
/**
 * Builds the boolean test for one pattern clause against one candidate type:
 * static patterns compare against the literal, structured patterns use an
 * "is like" shape test, and typed patterns use either a nil-equality check or
 * an is-assignable test.
 */
private BLangExpression createPatternMatchBinaryExpr(BLangMatchBindingPatternClause patternClause,
                                                     BVarSymbol varSymbol, BType patternType) {
    Location pos = patternClause.pos;
    BLangSimpleVarRef matchedVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    switch (patternClause.getKind()) {
        case MATCH_STATIC_PATTERN_CLAUSE:
            // Static value pattern: equality against the literal.
            return createBinaryExpression(pos, matchedVarRef,
                    ((BLangMatchStaticBindingPatternClause) patternClause).literal);
        case MATCH_STRUCTURED_PATTERN_CLAUSE:
            // Structured pattern: shape test.
            return createIsLikeExpression(pos, ASTBuilderUtil.createVariableRef(pos, varSymbol), patternType);
        default:
            break;
    }
    if (patternType == symTable.nilType) {
        // Typed pattern matching nil: "<var> == ()".
        BLangLiteral nilLiteral = ASTBuilderUtil.createLiteral(pos, symTable.nilType, null);
        return ASTBuilderUtil.createBinaryExpr(pos, matchedVarRef, nilLiteral, symTable.booleanType,
                OperatorKind.EQUAL, (BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.EQUAL,
                        symTable.anyType, symTable.nilType));
    }
    return createIsAssignableExpression(pos, varSymbol, patternType);
}
/**
 * Recursively builds the equality test for a static match pattern: group
 * expressions are unwrapped, binary patterns ("a | b") recurse into an OR of
 * both sides, the '_' variable reference becomes an "is any" type test, and
 * every other expression becomes "<var> == <expr>".
 */
private BLangExpression createBinaryExpression(Location pos, BLangSimpleVarRef varRef,
                                               BLangExpression expression) {
    BLangBinaryExpr binaryExpr;
    if (NodeKind.GROUP_EXPR == expression.getKind()) {
        // Parentheses carry no semantics here — unwrap and recurse.
        return createBinaryExpression(pos, varRef, ((BLangGroupExpr) expression).expression);
    }
    if (NodeKind.BINARY_EXPR == expression.getKind()) {
        // Alternative pattern: test each side and OR the results.
        binaryExpr = (BLangBinaryExpr) expression;
        BLangExpression lhsExpr = createBinaryExpression(pos, varRef, binaryExpr.lhsExpr);
        BLangExpression rhsExpr = createBinaryExpression(pos, varRef, binaryExpr.rhsExpr);
        binaryExpr = ASTBuilderUtil.createBinaryExpr(pos, lhsExpr, rhsExpr, symTable.booleanType, OperatorKind.OR,
                (BOperatorSymbol) symResolver
                        .resolveBinaryOperator(OperatorKind.OR, symTable.booleanType, symTable.booleanType));
    } else if (expression.getKind() == NodeKind.SIMPLE_VARIABLE_REF
            && ((BLangSimpleVarRef) expression).variableName.value.equals(IGNORE.value)) {
        // '_' matches anything: desugar to "<var> is any".
        BLangValueType anyType = (BLangValueType) TreeBuilder.createValueTypeNode();
        anyType.setBType(symTable.anyType);
        anyType.typeKind = TypeKind.ANY;
        return ASTBuilderUtil.createTypeTestExpr(pos, varRef, anyType);
    } else {
        // Plain value: "<var> == <expr>". Resolve the equality operator; fall
        // back to the type-set equality when no direct operator exists.
        binaryExpr = ASTBuilderUtil
                .createBinaryExpr(pos, varRef, expression, symTable.booleanType, OperatorKind.EQUAL, null);
        BSymbol opSymbol = symResolver.resolveBinaryOperator(OperatorKind.EQUAL, varRef.getBType(),
                expression.getBType());
        if (opSymbol == symTable.notFoundSymbol) {
            opSymbol = symResolver
                    .getBinaryEqualityForTypeSets(OperatorKind.EQUAL, symTable.anydataType, expression.getBType(),
                            binaryExpr, env);
        }
        binaryExpr.opSymbol = (BOperatorSymbol) opSymbol;
    }
    return binaryExpr;
}
/** Builds "<var> isassignable <patternType>" typed as boolean. */
private BLangIsAssignableExpr createIsAssignableExpression(Location pos,
                                                           BVarSymbol varSymbol,
                                                           BType patternType) {
    BLangSimpleVarRef testedVarRef = ASTBuilderUtil.createVariableRef(pos, varSymbol);
    return ASTBuilderUtil.createIsAssignableExpr(pos, testedVarRef, patternType,
                                                 symTable.booleanType, names, symTable.builtinPos);
}
/** Builds "<expr> is like <type>" typed as boolean. */
private BLangIsLikeExpr createIsLikeExpression(Location pos, BLangExpression expr, BType type) {
    return ASTBuilderUtil.createIsLikeExpr(pos, expr, ASTBuilderUtil.createTypeNode(type), symTable.booleanType);
}
/**
 * Converts a variable's initializer into a stand-alone assignment statement
 * "v = expr" (callers emit the declaration of v separately).
 */
private BLangAssignment createAssignmentStmt(BLangSimpleVariable variable) {
    BLangSimpleVarRef lhsRef = (BLangSimpleVarRef) TreeBuilder.createSimpleVariableReferenceNode();
    lhsRef.pos = variable.pos;
    lhsRef.variableName = variable.name;
    lhsRef.symbol = variable.symbol;
    lhsRef.setBType(variable.getBType());

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = variable.pos;
    assignment.setVariable(lhsRef);
    assignment.expr = variable.expr;
    return assignment;
}
/**
 * Convenience overload: emits "self.<field> = <initializer>" for the given
 * field variable inside the init function, delegating to the full overload.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangSimpleVariable variable,
                                                BVarSymbol selfSymbol) {
    return createStructFieldUpdate(function, variable.expr, variable.symbol, variable.getBType(), selfSymbol,
                                   variable.name);
}
/**
 * Emits and desugars "self.<fieldName> = <expr>" inside the given init
 * function. The field access is flagged as a store-on-creation write, and the
 * assignment is rewritten in the init function's own scope so nested
 * expressions desugar correctly.
 */
private BLangAssignment createStructFieldUpdate(BLangFunction function, BLangExpression expr,
                                                BVarSymbol fieldSymbol, BType fieldType, BVarSymbol selfSymbol,
                                                BLangIdentifier fieldName) {
    BLangSimpleVarRef selfRef = ASTBuilderUtil.createVariableRef(function.pos, selfSymbol);
    BLangFieldBasedAccess fieldAccess = ASTBuilderUtil.createFieldAccessExpr(selfRef, fieldName);
    fieldAccess.symbol = fieldSymbol;
    fieldAccess.setBType(fieldType);
    fieldAccess.isStoreOnCreation = true;

    BLangAssignment assignment = (BLangAssignment) TreeBuilder.createAssignmentNode();
    assignment.pos = function.pos;
    assignment.setVariable(fieldAccess);
    assignment.expr = expr;

    SymbolEnv initFuncEnv = SymbolEnv.createFunctionEnv(function, function.symbol.scope, env);
    return rewrite(assignment, initFuncEnv);
}
/**
 * Appends a pass-through default pattern to a match expression covering any
 * static types of the matched value that no existing pattern accepts, so the
 * desugared match is exhaustive.
 */
private void addMatchExprDefaultCase(BLangMatchExpression bLangMatchExpression) {
    List<BType> exprTypes;
    List<BType> unmatchedTypes = new ArrayList<>();
    if (bLangMatchExpression.expr.getBType().tag == TypeTags.UNION) {
        BUnionType unionType = (BUnionType) bLangMatchExpression.expr.getBType();
        exprTypes = new ArrayList<>(unionType.getMemberTypes());
    } else {
        // NOTE(review): the union branch above reads expr.getBType(), but this
        // branch reads the match expression's own result type — looks
        // inconsistent; confirm whether expr.getBType() was intended.
        exprTypes = Lists.of(bLangMatchExpression.getBType());
    }
    // Collect the types no existing pattern variable can accept.
    for (BType type : exprTypes) {
        boolean assignable = false;
        for (BLangMatchExprPatternClause pattern : bLangMatchExpression.patternClauses) {
            if (this.types.isAssignable(type, pattern.variable.getBType())) {
                assignable = true;
                break;
            }
        }
        if (!assignable) {
            unmatchedTypes.add(type);
        }
    }
    if (unmatchedTypes.isEmpty()) {
        // Patterns are already exhaustive — nothing to add.
        return;
    }
    // The default pattern's variable is typed as the union of unmatched types.
    BType defaultPatternType;
    if (unmatchedTypes.size() == 1) {
        defaultPatternType = unmatchedTypes.get(0);
    } else {
        defaultPatternType = BUnionType.create(null, new LinkedHashSet<>(unmatchedTypes));
    }
    String patternCaseVarName = GEN_VAR_PREFIX.value + "t_match_default";
    BLangSimpleVariable patternMatchCaseVar =
            ASTBuilderUtil.createVariable(bLangMatchExpression.pos, patternCaseVarName, defaultPatternType, null,
                                          new BVarSymbol(0, names.fromString(patternCaseVarName),
                                                         this.env.scope.owner.pkgID, defaultPatternType,
                                                         this.env.scope.owner, bLangMatchExpression.pos, VIRTUAL));
    // Default clause: "<T> v => v" — the unmatched value flows through unchanged.
    BLangMatchExprPatternClause defaultPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    defaultPattern.variable = patternMatchCaseVar;
    defaultPattern.expr = ASTBuilderUtil.createVariableRef(bLangMatchExpression.pos, patternMatchCaseVar.symbol);
    defaultPattern.pos = bLangMatchExpression.pos;
    bLangMatchExpression.patternClauses.add(defaultPattern);
}
/**
 * True when this access expression (used as an rvalue) needs safe-navigation
 * desugaring: either this link is explicitly safe-navigated, or some earlier
 * link in the access chain is.
 */
private boolean safeNavigate(BLangAccessExpression accessExpr) {
    // Lvalues and chain roots never need rvalue safe navigation.
    if (accessExpr.isLValue || accessExpr.expr == null) {
        return false;
    }
    if (accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation) {
        return true;
    }
    // Otherwise recurse down the access chain.
    NodeKind innerKind = accessExpr.expr.getKind();
    return (innerKind == NodeKind.FIELD_BASED_ACCESS_EXPR || innerKind == NodeKind.INDEX_BASED_ACCESS_EXPR)
            && safeNavigate((BLangAccessExpression) accessExpr.expr);
}
/**
 * Desugars a safe-navigated access expression into a statement expression:
 * a temp-result variable definition followed by the nested match statements
 * built by handleSafeNavigation, evaluating to the temp result. Resets the
 * instance-level match/access stacks used during the rewrite.
 */
private BLangExpression rewriteSafeNavigationExpr(BLangAccessExpression accessExpr) {
    BType originalExprType = accessExpr.getBType();
    // Temp variable holding the overall result of the navigation.
    String matchTempResultVarName = GEN_VAR_PREFIX.value + "temp_result";
    BLangSimpleVariable tempResultVar =
            ASTBuilderUtil.createVariable(accessExpr.pos, matchTempResultVarName, accessExpr.getBType(), null,
                                          new BVarSymbol(0, names.fromString(matchTempResultVarName),
                                                         this.env.scope.owner.pkgID, accessExpr.getBType(),
                                                         this.env.scope.owner, accessExpr.pos, VIRTUAL));
    BLangSimpleVariableDef tempResultVarDef = ASTBuilderUtil.createVariableDef(accessExpr.pos, tempResultVar);
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    // Populates this.matchStmtStack with the nested match statements.
    handleSafeNavigation(accessExpr, accessExpr.getBType(), tempResultVar);
    // The first (outermost) match statement wraps all the inner ones.
    BLangMatch matcEXpr = this.matchStmtStack.firstElement();
    BLangBlockStmt blockStmt =
            ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(tempResultVarDef, matcEXpr));
    BLangStatementExpression stmtExpression = createStatementExpression(blockStmt, tempResultVarRef);
    stmtExpression.setBType(originalExprType);
    // Reset per-rewrite state so the next safe-navigation rewrite starts clean.
    this.matchStmtStack = new Stack<>();
    this.accessExprStack = new Stack<>();
    this.successPattern = null;
    this.safeNavigationAssignment = null;
    return stmtExpression;
}
/**
 * Recursively desugars each link of a safe-navigated access chain into nested
 * match statements. Innermost links are processed first; each safe-navigated
 * link becomes a match statement whose error/nil clauses short-circuit into
 * the temp-result variable and whose success clause carries the next access.
 */
private void handleSafeNavigation(BLangAccessExpression accessExpr, BType type, BLangSimpleVariable tempResultVar) {
    if (accessExpr.expr == null) {
        // Chain root — nothing left to process.
        return;
    }
    // Depth-first: desugar the inner chain before this link.
    NodeKind kind = accessExpr.expr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        handleSafeNavigation((BLangAccessExpression) accessExpr.expr, type, tempResultVar);
    }
    if (!(accessExpr.errorSafeNavigation || accessExpr.nilSafeNavigation)) {
        // Not a safe-navigated link: just restore its original type (xml access
        // keeps an error member) and keep the pending assignment converted.
        BType originalType = accessExpr.originalType;
        if (TypeTags.isXMLTypeTag(originalType.tag)) {
            accessExpr.setBType(BUnionType.create(null, originalType, symTable.errorType));
        } else {
            accessExpr.setBType(originalType);
        }
        if (this.safeNavigationAssignment != null) {
            this.safeNavigationAssignment.expr = addConversionExprIfRequired(accessExpr, tempResultVar.getBType());
        }
        return;
    }
    /*
     * If the field access is a safe navigation, create a match expression.
     * Then chain the current expression as the success-pattern of the parent
     * match expr, if available.
     * eg:
     * x but {                  <--- parent match expr
     *   error e => e,
     *   T t => t.y but {      <--- current expr
     *            error e => e,
     *            R r => r.z
     *          }
     * }
     */
    BLangMatch matchStmt = ASTBuilderUtil.createMatchStatement(accessExpr.pos, accessExpr.expr, new ArrayList<>());
    boolean isAllTypesRecords = false;
    LinkedHashSet<BType> memTypes = new LinkedHashSet<>();
    if (accessExpr.expr.getBType().tag == TypeTags.UNION) {
        memTypes = new LinkedHashSet<>(((BUnionType) accessExpr.expr.getBType()).getMemberTypes());
        isAllTypesRecords = isAllTypesAreRecordsInUnion(memTypes);
    }
    // Peel nil/error off the member set; each gets its own short-circuit clause.
    if (accessExpr.nilSafeNavigation) {
        matchStmt.patternClauses.add(getMatchNullPattern(accessExpr, tempResultVar));
        matchStmt.setBType(type);
        memTypes.remove(symTable.nilType);
    }
    if (accessExpr.errorSafeNavigation) {
        matchStmt.patternClauses.add(getMatchErrorPattern(accessExpr, tempResultVar));
        matchStmt.setBType(type);
        matchStmt.pos = accessExpr.pos;
        memTypes.remove(symTable.errorType);
    }
    BLangMatchTypedBindingPatternClause successPattern = null;
    // Reference comparison is safe: getFieldName returns the Names.EMPTY
    // constant itself when no static field name is available.
    Name field = getFieldName(accessExpr);
    if (field == Names.EMPTY) {
        successPattern = getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
                accessExpr.errorSafeNavigation);
        matchStmt.patternClauses.add(successPattern);
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
        return;
    }
    if (isAllTypesRecords) {
        // One success clause per record member that can actually carry the
        // field; a catch-all clause yields nil for the rest.
        for (BType memberType : memTypes) {
            BRecordType recordType = (BRecordType) memberType;
            if (recordType.fields.containsKey(field.value) || !recordType.sealed) {
                successPattern = getSuccessPattern(memberType, accessExpr, tempResultVar,
                        accessExpr.errorSafeNavigation);
                matchStmt.patternClauses.add(successPattern);
            }
        }
        matchStmt.patternClauses.add(getMatchAllAndNilReturnPattern(accessExpr, tempResultVar));
        pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
        return;
    }
    // General case: a single success clause typed by the receiver expression.
    successPattern =
            getSuccessPattern(accessExpr.expr.getBType(), accessExpr, tempResultVar,
                    accessExpr.errorSafeNavigation);
    matchStmt.patternClauses.add(successPattern);
    pushToMatchStatementStack(matchStmt, accessExpr, successPattern);
}
/**
 * Pushes the new match statement onto the rewrite stack, nesting it inside the
 * previous success pattern's body (when one exists), and records the new
 * success pattern as the chaining point for the next link.
 */
private void pushToMatchStatementStack(BLangMatch matchStmt, BLangAccessExpression accessExpr,
                                       BLangMatchTypedBindingPatternClause successPattern) {
    this.matchStmtStack.push(matchStmt);
    BLangMatchTypedBindingPatternClause previousSuccess = this.successPattern;
    if (previousSuccess != null) {
        previousSuccess.body = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(matchStmt));
    }
    this.successPattern = successPattern;
}
/**
 * Extracts the statically-known field name of an access expression; returns
 * Names.EMPTY when it cannot be determined (e.g. a computed index).
 */
private Name getFieldName(BLangAccessExpression accessExpr) {
    NodeKind kind = accessExpr.getKind();
    if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR) {
        return new Name(((BLangFieldBasedAccess) accessExpr).field.value);
    }
    if (kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        BLangExpression indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
        if (indexExpr.getKind() == NodeKind.LITERAL) {
            // A literal index acts as a field name.
            return new Name(((BLangLiteral) indexExpr).value.toString());
        }
    }
    return Names.EMPTY;
}
/**
 * True when every member of the union is a record, error, or nil type (error
 * and nil are tolerated because safe navigation peels them off separately).
 */
private boolean isAllTypesAreRecordsInUnion(LinkedHashSet<BType> memTypes) {
    return memTypes.stream().allMatch(memType ->
            memType.tag == TypeTags.RECORD || memType.tag == TypeTags.ERROR || memType.tag == TypeTags.NIL);
}
/**
 * Builds the error short-circuit clause: "error <v> => tempResult = <v>;".
 */
private BLangMatchTypedBindingPatternClause getMatchErrorPattern(BLangExpression expr,
                                                                 BLangSimpleVariable tempResultVar) {
    String errorVarName = GEN_VAR_PREFIX.value + "t_match_error";
    BVarSymbol errorVarSymbol = new BVarSymbol(0, names.fromString(errorVarName),
                                               this.env.scope.owner.pkgID, symTable.errorType,
                                               this.env.scope.owner, expr.pos, VIRTUAL);
    BLangSimpleVariable errorPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, errorVarName, symTable.errorType, null, errorVarSymbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
            ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol),
            ASTBuilderUtil.createVariableRef(expr.pos, errorPatternVar.symbol), false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, errorPatternVar, patternBody);
}
/**
 * Builds a match-expression clause binding nil to the ignored variable and
 * evaluating to the given expression: "() _ => <expr>".
 */
private BLangMatchExprPatternClause getMatchNullPatternGivenExpression(Location pos,
                                                                       BLangExpression expr) {
    String ignoreVarName = IGNORE.toString();
    BVarSymbol ignoreSymbol = new BVarSymbol(0, names.fromString(ignoreVarName),
                                             this.env.scope.owner.pkgID, symTable.nilType,
                                             this.env.scope.owner, pos, VIRTUAL);
    BLangSimpleVariable nilPatternVar =
            ASTBuilderUtil.createVariable(pos, ignoreVarName, symTable.nilType, null, ignoreSymbol);
    BLangMatchExprPatternClause nilPattern =
            (BLangMatchExprPatternClause) TreeBuilder.createMatchExpressionPattern();
    nilPattern.pos = pos;
    nilPattern.variable = nilPatternVar;
    nilPattern.expr = expr;
    return nilPattern;
}
/**
 * Builds the nil short-circuit clause: "() <v> => tempResult = <v>;".
 */
private BLangMatchTypedBindingPatternClause getMatchNullPattern(BLangExpression expr,
                                                                BLangSimpleVariable tempResultVar) {
    String nullVarName = GEN_VAR_PREFIX.value + "t_match_null";
    BVarSymbol nullVarSymbol = new BVarSymbol(0, names.fromString(nullVarName),
                                              this.env.scope.owner.pkgID, symTable.nilType,
                                              this.env.scope.owner, expr.pos, VIRTUAL);
    BLangSimpleVariable nullPatternVar =
            ASTBuilderUtil.createVariable(expr.pos, nullVarName, symTable.nilType, null, nullVarSymbol);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
            ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol),
            ASTBuilderUtil.createVariableRef(expr.pos, nullPatternVar.symbol), false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignment));
    return ASTBuilderUtil.createMatchStatementPattern(expr.pos, nullPatternVar, patternBody);
}
/**
 * Builds the catch-all static clause "_ => tempResult = ();" used when no
 * record member of the union carries the accessed field.
 */
private BLangMatchStaticBindingPatternClause getMatchAllAndNilReturnPattern(BLangExpression expr,
                                                                            BLangSimpleVariable tempResultVar) {
    BLangVariableReference tempResultVarRef = ASTBuilderUtil.createVariableRef(expr.pos, tempResultVar.symbol);
    BLangAssignment assignNil = ASTBuilderUtil.createAssignmentStmt(expr.pos, tempResultVarRef,
            createLiteral(expr.pos, symTable.nilType, Names.NIL_VALUE), false);
    BLangMatchStaticBindingPatternClause matchAllPattern =
            (BLangMatchStaticBindingPatternClause) TreeBuilder.createMatchStatementStaticBindingPattern();
    BVarSymbol wildcardSymbol = new BVarSymbol(0, names.fromString("_"),
                                               this.env.scope.owner.pkgID, symTable.anyType,
                                               this.env.scope.owner, expr.pos, VIRTUAL);
    matchAllPattern.literal = ASTBuilderUtil.createVariableRef(expr.pos, wildcardSymbol);
    matchAllPattern.body = ASTBuilderUtil.createBlockStmt(expr.pos, Lists.of(assignNil));
    return matchAllPattern;
}
/**
 * Builds the success clause of a safe-navigation match: binds the non-error
 * (and, when liftError is set, non-nil) value to a fresh variable, performs the
 * original access on that variable without safe navigation, and assigns the
 * result (converted as needed) to the temp-result variable. Also records the
 * assignment so outer, non-safe links can re-target its expression.
 */
private BLangMatchTypedBindingPatternClause getSuccessPattern(BType type, BLangAccessExpression accessExpr,
                                                              BLangSimpleVariable tempResultVar,
                                                              boolean liftError) {
    // Strip nil (and error, when lifting) from the bound variable's type.
    type = types.getSafeType(type, true, liftError);
    String successPatternVarName = GEN_VAR_PREFIX.value + "t_match_success";
    // Invokable-typed values need an invokable symbol so later phases can call them.
    BVarSymbol successPatternSymbol;
    if (type.tag == TypeTags.INVOKABLE) {
        successPatternSymbol = new BInvokableSymbol(SymTag.VARIABLE, 0, names.fromString(successPatternVarName),
                                                    this.env.scope.owner.pkgID, type, this.env.scope.owner,
                                                    accessExpr.pos, VIRTUAL);
    } else {
        successPatternSymbol = new BVarSymbol(0, names.fromString(successPatternVarName),
                                              this.env.scope.owner.pkgID, type, this.env.scope.owner,
                                              accessExpr.pos, VIRTUAL);
    }
    BLangSimpleVariable successPatternVar = ASTBuilderUtil.createVariable(accessExpr.pos, successPatternVarName,
                                                                          type, null, successPatternSymbol);
    // Clone the access, but keep the original index expression / ns symbol,
    // which the cloner does not carry over usefully here.
    BLangAccessExpression tempAccessExpr = nodeCloner.cloneNode(accessExpr);
    if (accessExpr.getKind() == NodeKind.INDEX_BASED_ACCESS_EXPR) {
        ((BLangIndexBasedAccess) tempAccessExpr).indexExpr = ((BLangIndexBasedAccess) accessExpr).indexExpr;
    }
    if (accessExpr instanceof BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) {
        ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) tempAccessExpr).nsSymbol =
                ((BLangFieldBasedAccess.BLangNSPrefixedFieldBasedAccess) accessExpr).nsSymbol;
    }
    // The cloned access reads from the freshly-bound success variable, with
    // safe navigation disabled (the match clause already guarantees safety).
    tempAccessExpr.expr = ASTBuilderUtil.createVariableRef(accessExpr.pos, successPatternVar.symbol);
    tempAccessExpr.errorSafeNavigation = false;
    tempAccessExpr.nilSafeNavigation = false;
    accessExpr.cloneRef = null;
    // xml access results keep error/nil members; others get the original type back.
    if (TypeTags.isXMLTypeTag(tempAccessExpr.expr.getBType().tag)) {
        tempAccessExpr.setBType(BUnionType.create(null, accessExpr.originalType, symTable.errorType,
                                                  symTable.nilType));
    } else {
        tempAccessExpr.setBType(accessExpr.originalType);
    }
    tempAccessExpr.optionalFieldAccess = accessExpr.optionalFieldAccess;
    BLangVariableReference tempResultVarRef =
            ASTBuilderUtil.createVariableRef(accessExpr.pos, tempResultVar.symbol);
    BLangExpression assignmentRhsExpr = addConversionExprIfRequired(tempAccessExpr, tempResultVarRef.getBType());
    BLangAssignment assignmentStmt =
            ASTBuilderUtil.createAssignmentStmt(accessExpr.pos, tempResultVarRef, assignmentRhsExpr, false);
    BLangBlockStmt patternBody = ASTBuilderUtil.createBlockStmt(accessExpr.pos, Lists.of(assignmentStmt));
    BLangMatchTypedBindingPatternClause successPattern =
            ASTBuilderUtil.createMatchStatementPattern(accessExpr.pos, successPatternVar, patternBody);
    // Remember the assignment so outer non-safe links can update its rhs.
    this.safeNavigationAssignment = assignmentStmt;
    return successPattern;
}
/**
 * True when an assignment LHS access chain contains a nullable receiver
 * anywhere along the way, requiring LHS safe-navigation desugaring.
 */
private boolean safeNavigateLHS(BLangExpression expr) {
    NodeKind kind = expr.getKind();
    if (kind != NodeKind.FIELD_BASED_ACCESS_EXPR && kind != NodeKind.INDEX_BASED_ACCESS_EXPR) {
        return false;
    }
    BLangExpression receiver = ((BLangAccessExpression) expr).expr;
    return receiver.getBType().isNullable() || safeNavigateLHS(receiver);
}
/**
 * Desugars an assignment whose LHS contains nullable access links
 * (e.g. a.b.c = v) into a block that first guards/initializes each link via
 * createLHSSafeNavigation and then performs the final assignment on a clone
 * of the original access expression.
 */
private BLangStatement rewriteSafeNavigationAssignment(BLangAccessExpression accessExpr, BLangExpression rhsExpr,
                                                       boolean safeAssignment) {
    // NOTE(review): 'safeAssignment' is unused in this method — confirm
    // whether callers still need to pass it.
    this.accessExprStack = new Stack<>();
    List<BLangStatement> stmts = new ArrayList<>();
    createLHSSafeNavigation(stmts, accessExpr.expr);
    BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(accessExpr.pos,
            cloneExpression(accessExpr), rhsExpr);
    stmts.add(assignment);
    return ASTBuilderUtil.createBlockStmt(accessExpr.pos, stmts);
}
private void createLHSSafeNavigation(List<BLangStatement> stmts, BLangExpression expr) {
NodeKind kind = expr.getKind();
boolean root = false;
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR ||
kind == NodeKind.INVOCATION) {
BLangAccessExpression accessExpr = (BLangAccessExpression) expr;
createLHSSafeNavigation(stmts, accessExpr.expr);
accessExpr.expr = accessExprStack.pop();
} else {
root = true;
}
if (expr.getKind() == NodeKind.INVOCATION) {
BLangInvocation invocation = (BLangInvocation) expr;
BVarSymbol interMediateSymbol = new BVarSymbol(0,
names.fromString(GEN_VAR_PREFIX.value + "i_intermediate"),
this.env.scope.owner.pkgID, invocation.getBType(),
this.env.scope.owner, expr.pos, VIRTUAL);
BLangSimpleVariable intermediateVariable = ASTBuilderUtil.createVariable(expr.pos,
interMediateSymbol.name.value,
invocation.getBType(), invocation,
interMediateSymbol);
BLangSimpleVariableDef intermediateVariableDefinition = ASTBuilderUtil.createVariableDef(invocation.pos,
intermediateVariable);
stmts.add(intermediateVariableDefinition);
expr = ASTBuilderUtil.createVariableRef(invocation.pos, interMediateSymbol);
}
if (expr.getBType().isNullable()) {
BLangTypeTestExpr isNillTest = ASTBuilderUtil.createTypeTestExpr(expr.pos, expr, getNillTypeNode());
isNillTest.setBType(symTable.booleanType);
BLangBlockStmt thenStmt = ASTBuilderUtil.createBlockStmt(expr.pos);
expr = cloneExpression(expr);
expr.setBType(types.getSafeType(expr.getBType(), true, false));
if (isDefaultableMappingType(expr.getBType()) && !root) {
BLangRecordLiteral jsonLiteral = (BLangRecordLiteral) TreeBuilder.createRecordLiteralNode();
jsonLiteral.setBType(expr.getBType());
jsonLiteral.pos = expr.pos;
BLangAssignment assignment = ASTBuilderUtil.createAssignmentStmt(expr.pos,
expr, jsonLiteral);
thenStmt.addStatement(assignment);
} else {
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = ERROR_REASON_NULL_REFERENCE_ERROR;
literal.setBType(symTable.stringType);
BLangErrorConstructorExpr errorConstructorExpr =
(BLangErrorConstructorExpr) TreeBuilder.createErrorConstructorExpressionNode();
BSymbol symbol = symResolver.lookupMainSpaceSymbolInPackage(errorConstructorExpr.pos, env,
names.fromString(""), names.fromString("error"));
errorConstructorExpr.setBType(symbol.type);
errorConstructorExpr.pos = expr.pos;
List<BLangExpression> positionalArgs = new ArrayList<>();
positionalArgs.add(literal);
errorConstructorExpr.positionalArgs = positionalArgs;
BLangPanic panicNode = (BLangPanic) TreeBuilder.createPanicNode();
panicNode.expr = errorConstructorExpr;
panicNode.pos = expr.pos;
thenStmt.addStatement(panicNode);
}
BLangIf ifelse = ASTBuilderUtil.createIfElseStmt(expr.pos, isNillTest, thenStmt, null);
stmts.add(ifelse);
}
accessExprStack.push(expr);
}
BLangValueType getNillTypeNode() {
BLangValueType nillTypeNode = (BLangValueType) TreeBuilder.createValueTypeNode();
nillTypeNode.typeKind = TypeKind.NIL;
nillTypeNode.setBType(symTable.nilType);
return nillTypeNode;
}
private BLangValueExpression cloneExpression(BLangExpression expr) {
switch (expr.getKind()) {
case SIMPLE_VARIABLE_REF:
return ASTBuilderUtil.createVariableRef(expr.pos, ((BLangSimpleVarRef) expr).symbol);
case FIELD_BASED_ACCESS_EXPR:
case INDEX_BASED_ACCESS_EXPR:
return cloneAccessExpr((BLangAccessExpression) expr);
default:
throw new IllegalStateException();
}
}
private BLangAccessExpression cloneAccessExpr(BLangAccessExpression originalAccessExpr) {
if (originalAccessExpr.expr == null) {
return originalAccessExpr;
}
BLangExpression varRef;
NodeKind kind = originalAccessExpr.expr.getKind();
if (kind == NodeKind.FIELD_BASED_ACCESS_EXPR || kind == NodeKind.INDEX_BASED_ACCESS_EXPR) {
varRef = cloneAccessExpr((BLangAccessExpression) originalAccessExpr.expr);
} else {
varRef = cloneExpression(originalAccessExpr.expr);
}
varRef.setBType(types.getSafeType(originalAccessExpr.expr.getBType(), true, false));
BLangAccessExpression accessExpr;
switch (originalAccessExpr.getKind()) {
case FIELD_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createFieldAccessExpr(varRef,
((BLangFieldBasedAccess) originalAccessExpr).field);
break;
case INDEX_BASED_ACCESS_EXPR:
accessExpr = ASTBuilderUtil.createIndexAccessExpr(varRef,
((BLangIndexBasedAccess) originalAccessExpr).indexExpr);
break;
default:
throw new IllegalStateException();
}
accessExpr.originalType = originalAccessExpr.originalType;
accessExpr.pos = originalAccessExpr.pos;
accessExpr.isLValue = originalAccessExpr.isLValue;
accessExpr.symbol = originalAccessExpr.symbol;
accessExpr.errorSafeNavigation = false;
accessExpr.nilSafeNavigation = false;
accessExpr.setBType(originalAccessExpr.originalType);
return accessExpr;
}
private BLangBinaryExpr getModifiedIntRangeStartExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.ADD,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.ADD,
symTable.intType,
symTable.intType));
}
private BLangBinaryExpr getModifiedIntRangeEndExpr(BLangExpression expr) {
BLangLiteral constOneLiteral = ASTBuilderUtil.createLiteral(expr.pos, symTable.intType, 1L);
return ASTBuilderUtil.createBinaryExpr(expr.pos, expr, constOneLiteral, symTable.intType, OperatorKind.SUB,
(BOperatorSymbol) symResolver.resolveBinaryOperator(OperatorKind.SUB,
symTable.intType,
symTable.intType));
}
private BLangLiteral getBooleanLiteral(boolean value) {
BLangLiteral literal = (BLangLiteral) TreeBuilder.createLiteralExpression();
literal.value = value;
literal.setBType(symTable.booleanType);
literal.pos = symTable.builtinPos;
return literal;
}
private boolean isDefaultableMappingType(BType type) {
switch (types.getSafeType(type, true, false).tag) {
case TypeTags.JSON:
case TypeTags.MAP:
case TypeTags.RECORD:
return true;
default:
return false;
}
}
private BLangFunction createInitFunctionForClassDefn(BLangClassDefinition classDefinition, SymbolEnv env) {
BType returnType = symTable.nilType;
BLangFunction userDefinedInitMethod = classDefinition.initFunction;
if (userDefinedInitMethod != null) {
returnType = userDefinedInitMethod.getBType().getReturnType();
}
BLangFunction initFunction =
TypeDefBuilderHelper.createInitFunctionForStructureType(classDefinition.pos, classDefinition.symbol,
env, names, GENERATED_INIT_SUFFIX,
classDefinition.getBType(), returnType);
BObjectTypeSymbol typeSymbol = ((BObjectTypeSymbol) classDefinition.getBType().tsymbol);
typeSymbol.generatedInitializerFunc = new BAttachedFunction(GENERATED_INIT_SUFFIX, initFunction.symbol,
(BInvokableType) initFunction.getBType(),
classDefinition.pos);
classDefinition.generatedInitFunction = initFunction;
initFunction.returnTypeNode.setBType(returnType);
return rewrite(initFunction, env);
}
private void visitBinaryLogicalExpr(BLangBinaryExpr binaryExpr) {
/*
* Desugar (lhsExpr && rhsExpr) to following if-else:
*
* logical AND:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = rhsExpr;
* } else {
* $result$ = false;
* }
*
* logical OR:
* -------------
* T $result$;
* if (lhsExpr) {
* $result$ = true;
* } else {
* $result$ = rhsExpr;
* }
*
*/
BLangSimpleVariableDef resultVarDef = createVarDef("$result$", binaryExpr.getBType(), null,
symTable.builtinPos);
BLangBlockStmt thenBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangBlockStmt elseBody = ASTBuilderUtil.createBlockStmt(binaryExpr.pos);
BLangSimpleVarRef thenResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
BLangExpression thenResult;
if (binaryExpr.opKind == OperatorKind.AND) {
thenResult = binaryExpr.rhsExpr;
} else {
thenResult = getBooleanLiteral(true);
}
BLangAssignment thenAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, thenResultVarRef, thenResult);
thenBody.addStatement(thenAssignment);
BLangExpression elseResult;
BLangSimpleVarRef elseResultVarRef = ASTBuilderUtil.createVariableRef(symTable.builtinPos,
resultVarDef.var.symbol);
if (binaryExpr.opKind == OperatorKind.AND) {
elseResult = getBooleanLiteral(false);
} else {
elseResult = binaryExpr.rhsExpr;
}
BLangAssignment elseAssignment =
ASTBuilderUtil.createAssignmentStmt(binaryExpr.pos, elseResultVarRef, elseResult);
elseBody.addStatement(elseAssignment);
BLangSimpleVarRef resultVarRef = ASTBuilderUtil.createVariableRef(binaryExpr.pos, resultVarDef.var.symbol);
BLangIf ifElse = ASTBuilderUtil.createIfElseStmt(binaryExpr.pos, binaryExpr.lhsExpr, thenBody, elseBody);
BLangBlockStmt blockStmt = ASTBuilderUtil.createBlockStmt(binaryExpr.pos, Lists.of(resultVarDef, ifElse));
BLangStatementExpression stmtExpr = createStatementExpression(blockStmt, resultVarRef);
stmtExpr.setBType(binaryExpr.getBType());
result = rewriteExpr(stmtExpr);
}
/**
* Split packahe init function into several smaller functions.
*
* @param packageNode package node
* @param env symbol environment
* @return initial init function but trimmed in size
*/
private BLangFunction splitInitFunction(BLangPackage packageNode, SymbolEnv env) {
int methodSize = INIT_METHOD_SPLIT_SIZE;
BLangBlockFunctionBody funcBody = (BLangBlockFunctionBody) packageNode.initFunction.body;
if (!isJvmTarget) {
return packageNode.initFunction;
}
BLangFunction initFunction = packageNode.initFunction;
List<BLangFunction> generatedFunctions = new ArrayList<>();
List<BLangStatement> stmts = new ArrayList<>(funcBody.stmts);
funcBody.stmts.clear();
BLangFunction newFunc = initFunction;
BLangBlockFunctionBody newFuncBody = (BLangBlockFunctionBody) newFunc.body;
int varDefIndex = 0;
for (int i = 0; i < stmts.size(); i++) {
BLangStatement statement = stmts.get(i);
if (statement.getKind() == NodeKind.VARIABLE_DEF) {
break;
}
varDefIndex++;
if (i > 0 && (i % methodSize == 0 || isAssignmentWithInitOrRecordLiteralExpr(statement))) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
newFuncBody = (BLangBlockFunctionBody) newFunc.body;
symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
}
newFuncBody.stmts.add(stmts.get(i));
}
List<BLangStatement> chunkStmts = new ArrayList<>();
for (int i = varDefIndex; i < stmts.size(); i++) {
BLangStatement stmt = stmts.get(i);
chunkStmts.add(stmt);
varDefIndex++;
if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
(((BLangAssignment) stmt).expr.getKind() == NodeKind.SERVICE_CONSTRUCTOR) &&
(newFuncBody.stmts.size() + chunkStmts.size() > methodSize)) {
if (newFuncBody.stmts.size() + chunkStmts.size() > methodSize) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
newFuncBody = (BLangBlockFunctionBody) newFunc.body;
symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
}
newFuncBody.stmts.addAll(chunkStmts);
chunkStmts.clear();
} else if ((stmt.getKind() == NodeKind.ASSIGNMENT) &&
(((BLangAssignment) stmt).varRef instanceof BLangPackageVarRef) &&
Symbols.isFlagOn(((BLangPackageVarRef) ((BLangAssignment) stmt).varRef).varSymbol.flags,
Flags.LISTENER)
) {
break;
}
}
newFuncBody.stmts.addAll(chunkStmts);
for (int i = varDefIndex; i < stmts.size(); i++) {
if (i > 0 && i % methodSize == 0) {
generatedFunctions.add(newFunc);
newFunc = createIntermediateInitFunction(packageNode, env);
newFuncBody = (BLangBlockFunctionBody) newFunc.body;
symTable.rootScope.define(names.fromIdNode(newFunc.name), newFunc.symbol);
}
newFuncBody.stmts.add(stmts.get(i));
}
generatedFunctions.add(newFunc);
for (int j = 0; j < generatedFunctions.size() - 1; j++) {
BLangFunction thisFunction = generatedFunctions.get(j);
BLangCheckedExpr checkedExpr =
ASTBuilderUtil.createCheckExpr(initFunction.pos,
createInvocationNode(generatedFunctions.get(j + 1).name.value,
new ArrayList<>(), symTable.errorOrNilType),
symTable.nilType);
checkedExpr.equivalentErrorTypeList.add(symTable.errorType);
BLangExpressionStmt expressionStmt = ASTBuilderUtil
.createExpressionStmt(thisFunction.pos, (BLangBlockFunctionBody) thisFunction.body);
expressionStmt.expr = checkedExpr;
expressionStmt.expr.pos = initFunction.pos;
if (j > 0) {
thisFunction = rewrite(thisFunction, env);
packageNode.functions.add(thisFunction);
packageNode.topLevelNodes.add(thisFunction);
}
}
if (generatedFunctions.size() > 1) {
BLangFunction lastFunc = generatedFunctions.get(generatedFunctions.size() - 1);
lastFunc = rewrite(lastFunc, env);
packageNode.functions.add(lastFunc);
packageNode.topLevelNodes.add(lastFunc);
}
return generatedFunctions.get(0);
}
private boolean isAssignmentWithInitOrRecordLiteralExpr(BLangStatement statement) {
if (statement.getKind() == NodeKind.ASSIGNMENT) {
return isMappingOrObjectConstructorOrObjInit(((BLangAssignment) statement).getExpression());
}
return false;
}
protected boolean isMappingOrObjectConstructorOrObjInit(BLangExpression expression) {
switch (expression.getKind()) {
case TYPE_INIT_EXPR:
case RECORD_LITERAL_EXPR:
case OBJECT_CTOR_EXPRESSION:
return true;
case CHECK_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangCheckedExpr) expression).expr);
case TYPE_CONVERSION_EXPR:
return isMappingOrObjectConstructorOrObjInit(((BLangTypeConversionExpr) expression).expr);
default:
return false;
}
}
/**
* Create an intermediate package init function.
*
* @param pkgNode package node
* @param env symbol environment of package
*/
private BLangFunction createIntermediateInitFunction(BLangPackage pkgNode, SymbolEnv env) {
String alias = pkgNode.symbol.pkgID.toString();
BLangFunction initFunction = ASTBuilderUtil
.createInitFunctionWithErrorOrNilReturn(pkgNode.pos, alias,
new Name(Names.INIT_FUNCTION_SUFFIX.value
+ this.initFuncIndex++), symTable);
createInvokableSymbol(initFunction, env);
return initFunction;
}
private BType getRestType(BInvokableSymbol invokableSymbol) {
if (invokableSymbol != null && invokableSymbol.restParam != null) {
return invokableSymbol.restParam.type;
}
return null;
}
private BType getRestType(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.getBType();
}
return null;
}
private BVarSymbol getRestSymbol(BLangFunction function) {
if (function != null && function.restParam != null) {
return function.restParam.symbol;
}
return null;
}
private boolean isComputedKey(RecordLiteralNode.RecordField field) {
if (!field.isKeyValueField()) {
return false;
}
return ((BLangRecordLiteral.BLangRecordKeyValueField) field).key.computedKey;
}
private BLangRecordLiteral rewriteMappingConstructor(BLangRecordLiteral mappingConstructorExpr) {
List<RecordLiteralNode.RecordField> fields = mappingConstructorExpr.fields;
BType type = mappingConstructorExpr.getBType();
Location pos = mappingConstructorExpr.pos;
List<RecordLiteralNode.RecordField> rewrittenFields = new ArrayList<>(fields.size());
for (RecordLiteralNode.RecordField field : fields) {
if (field.isKeyValueField()) {
BLangRecordLiteral.BLangRecordKeyValueField keyValueField =
(BLangRecordLiteral.BLangRecordKeyValueField) field;
BLangRecordLiteral.BLangRecordKey key = keyValueField.key;
BLangExpression origKey = key.expr;
BLangExpression keyExpr;
if (key.computedKey) {
keyExpr = origKey;
} else {
keyExpr = origKey.getKind() == NodeKind.SIMPLE_VARIABLE_REF ? createStringLiteral(pos,
StringEscapeUtils.unescapeJava(((BLangSimpleVarRef) origKey).variableName.value)) :
((BLangLiteral) origKey);
}
BLangRecordLiteral.BLangRecordKeyValueField rewrittenField =
ASTBuilderUtil.createBLangRecordKeyValue(rewriteExpr(keyExpr),
rewriteExpr(keyValueField.valueExpr));
rewrittenField.pos = keyValueField.pos;
rewrittenField.key.pos = key.pos;
rewrittenFields.add(rewrittenField);
} else if (field.getKind() == NodeKind.SIMPLE_VARIABLE_REF) {
BLangSimpleVarRef varRefField = (BLangSimpleVarRef) field;
rewrittenFields.add(ASTBuilderUtil.createBLangRecordKeyValue(
rewriteExpr(createStringLiteral(pos,
StringEscapeUtils.unescapeJava(varRefField.variableName.value))),
rewriteExpr(varRefField)));
} else {
BLangRecordLiteral.BLangRecordSpreadOperatorField spreadOpField =
(BLangRecordLiteral.BLangRecordSpreadOperatorField) field;
spreadOpField.expr = rewriteExpr(spreadOpField.expr);
rewrittenFields.add(spreadOpField);
}
}
fields.clear();
return type.tag == TypeTags.RECORD ? new BLangStructLiteral(pos, type, rewrittenFields) :
new BLangMapLiteral(pos, type, rewrittenFields);
}
protected void addTransactionInternalModuleImport() {
if (!env.enclPkg.packageID.equals(PackageID.TRANSACTION_INTERNAL)) {
BLangImportPackage importDcl = (BLangImportPackage) TreeBuilder.createImportPackageNode();
List<BLangIdentifier> pkgNameComps = new ArrayList<>();
pkgNameComps.add(ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.TRANSACTION.value));
importDcl.pkgNameComps = pkgNameComps;
importDcl.pos = env.enclPkg.symbol.pos;
importDcl.orgName = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, Names.BALLERINA_INTERNAL_ORG.value);
importDcl.alias = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "trx");
importDcl.version = ASTBuilderUtil.createIdentifier(env.enclPkg.pos, "");
importDcl.symbol = symTable.internalTransactionModuleSymbol;
env.enclPkg.imports.add(importDcl);
env.enclPkg.symbol.imports.add(importDcl.symbol);
}
}
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} | class definition node for which the initializer is created
* @param env The env for the type node
* @return The generated initializer method
*/
private BLangFunction createGeneratedInitializerFunction(BLangClassDefinition classDefinition, SymbolEnv env) {
BLangFunction generatedInitFunc = createInitFunctionForClassDefn(classDefinition, env);
if (classDefinition.initFunction == null) {
return generatedInitFunc;
}
return wireUpGeneratedInitFunction(generatedInitFunc,
(BObjectTypeSymbol) classDefinition.symbol, classDefinition.initFunction);
} |
Got it. I thought either regex or glob when hear pattern. | public void canCRUDEnterpriseTierDeployment() throws Exception {
allowAllSSL();
File tarGzFile = downloadFile(PETCLINIC_TAR_GZ_URL);
File jarFile = downloadFile(PETCLINIC_GATEWAY_JAR_URL);
String serviceName = generateRandomResourceName("springsvc", 15);
Region region = Region.US_EAST;
List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service");
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withNewResourceGroup(rgName)
.withEnterpriseTierSku()
.withDefaultGitRepository(PETCLINIC_CONFIG_URL, "master", configFilePatterns)
.create();
String deploymentName = generateRandomResourceName("deploy", 15);
List<String> apiGatewayConfigFilePatterns = Arrays.asList("api-gateway");
String appName = "api-gateway";
SpringApp app = service.apps().define(appName)
.defineActiveDeployment(deploymentName)
.withJarFile(jarFile)
.withInstance(2)
.withCpu("500m")
.withMemory("512Mi")
.attach()
.withDefaultPublicEndpoint()
.withConfigurationServiceBinding()
.create();
SpringAppDeployment deployment = app.deployments().getByName(deploymentName);
Assertions.assertTrue(CoreUtils.isNullOrEmpty(deployment.configFilePatterns()));
deployment.update()
.withConfigFilePatterns(apiGatewayConfigFilePatterns)
.apply();
deployment.refresh();
Assertions.assertFalse(CoreUtils.isNullOrEmpty(deployment.configFilePatterns()));
Assertions.assertNotNull(app.url());
Assertions.assertNotNull(app.activeDeploymentName());
Assertions.assertEquals(1, app.deployments().list().stream().count());
String appName2 = "customers-service";
String module = "spring-petclinic-customers-service";
List<String> customerServiceConfigFilePatterns = Arrays.asList("customers-service");
SpringApp app2 = service.apps().define(appName2)
.defineActiveDeployment(deploymentName)
.withSourceCodeTarGzFile(tarGzFile, customerServiceConfigFilePatterns)
.withTargetModule(module)
.attach()
.withConfigurationServiceBinding()
.create();
Assertions.assertNull(app2.url());
SpringAppDeployment customersDeployment = app2.deployments().getByName(deploymentName);
Assertions.assertEquals(customerServiceConfigFilePatterns, customersDeployment.configFilePatterns());
} | List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service"); | public void canCRUDEnterpriseTierDeployment() throws Exception {
allowAllSSL();
File tarGzFile = downloadFile(PETCLINIC_TAR_GZ_URL);
File jarFile = downloadFile(PETCLINIC_GATEWAY_JAR_URL);
String serviceName = generateRandomResourceName("springsvc", 15);
Region region = Region.US_EAST;
List<String> configFilePatterns = Arrays.asList("api-gateway", "customers-service");
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withNewResourceGroup(rgName)
.withEnterpriseTierSku()
.withDefaultGitRepository(PETCLINIC_CONFIG_URL, "master", configFilePatterns)
.create();
String deploymentName = generateRandomResourceName("deploy", 15);
List<String> apiGatewayConfigFilePatterns = Arrays.asList("api-gateway");
String appName = "api-gateway";
SpringApp app = service.apps().define(appName)
.defineActiveDeployment(deploymentName)
.withJarFile(jarFile)
.withInstance(2)
.withCpu("500m")
.withMemory("512Mi")
.attach()
.withDefaultPublicEndpoint()
.withConfigurationServiceBinding()
.create();
SpringAppDeployment deployment = app.deployments().getByName(deploymentName);
Assertions.assertTrue(CoreUtils.isNullOrEmpty(deployment.configFilePatterns()));
deployment.update()
.withConfigFilePatterns(apiGatewayConfigFilePatterns)
.apply();
deployment.refresh();
Assertions.assertFalse(CoreUtils.isNullOrEmpty(deployment.configFilePatterns()));
Assertions.assertNotNull(app.url());
Assertions.assertNotNull(app.activeDeploymentName());
Assertions.assertEquals(1, app.deployments().list().stream().count());
String appName2 = "customers-service";
String module = "spring-petclinic-customers-service";
List<String> customerServiceConfigFilePatterns = Arrays.asList("customers-service");
SpringApp app2 = service.apps().define(appName2)
.defineActiveDeployment(deploymentName)
.withSourceCodeTarGzFile(tarGzFile, customerServiceConfigFilePatterns)
.withTargetModule(module)
.attach()
.withConfigurationServiceBinding()
.create();
Assertions.assertNull(app2.url());
SpringAppDeployment customersDeployment = app2.deployments().getByName(deploymentName);
Assertions.assertEquals(customerServiceConfigFilePatterns, customersDeployment.configFilePatterns());
} | class SpringCloudLiveOnlyTest extends AppPlatformTest {
private static final String PIGGYMETRICS_CONFIG_URL = "https:
private static final String GATEWAY_JAR_URL = "https:
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
private static final String PETCLINIC_CONFIG_URL = "https:
private static final String PETCLINIC_GATEWAY_JAR_URL = "https:
private static final String PETCLINIC_TAR_GZ_URL = "https:
private static final String SPRING_CLOUD_SERVICE_OBJECT_ID = "938df8e2-2b9d-40b1-940c-c75c33494239";
@Test
@DoNotRecord(skipInPlayback = true)
public void canCRUDDeployment() throws Exception {
allowAllSSL();
String serviceName = generateRandomResourceName("springsvc", 15);
String appName = "gateway";
String deploymentName = generateRandomResourceName("deploy", 15);
String deploymentName1 = generateRandomResourceName("deploy", 15);
Region region = Region.US_EAST;
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withNewResourceGroup(rgName)
.create();
File jarFile = downloadFile(GATEWAY_JAR_URL);
SpringApp app = service.apps().define(appName)
.defineActiveDeployment(deploymentName)
.withJarFile(jarFile)
.withInstance(2)
.withCpu(2)
.withMemory(4)
.withRuntime(RuntimeVersion.JAVA_11)
.attach()
.withDefaultPublicEndpoint()
.create();
Assertions.assertNotNull(app.url());
Assertions.assertNotNull(app.activeDeploymentName());
Assertions.assertEquals(1, app.deployments().list().stream().count());
Assertions.assertTrue(requestSuccess(app.url()));
SpringAppDeployment deployment = app.getActiveDeployment();
Assertions.assertEquals("2", deployment.settings().resourceRequests().cpu());
Assertions.assertEquals("4Gi", deployment.settings().resourceRequests().memory());
Assertions.assertEquals(2, deployment.instances().size());
File gzFile = downloadFile(PIGGYMETRICS_TAR_GZ_URL);
deployment = app.deployments().define(deploymentName1)
.withSourceCodeTarGzFile(gzFile)
.withTargetModule("gateway")
.withActivation()
.create();
app.refresh();
Assertions.assertEquals(deploymentName1, app.activeDeploymentName());
Assertions.assertEquals("1", deployment.settings().resourceRequests().cpu());
Assertions.assertNotNull(deployment.getLogFileUrl());
Assertions.assertTrue(requestSuccess(app.url()));
app.update()
.withoutDefaultPublicEndpoint()
.apply();
Assertions.assertFalse(app.isPublic());
app.deployments().deleteByName(deploymentName);
Assertions.assertEquals(1, app.deployments().list().stream().count());
}
@Test
@DoNotRecord(skipInPlayback = true)
public void canCreateCustomDomainWithSsl() throws Exception {
String domainName = generateRandomResourceName("jsdkdemo-", 20) + ".com";
String certOrderName = generateRandomResourceName("cert", 15);
String vaultName = generateRandomResourceName("vault", 15);
String certName = generateRandomResourceName("cert", 15);
String serviceName = generateRandomResourceName("springsvc", 15);
String appName = "gateway";
Region region = Region.US_EAST;
allowAllSSL();
String cerPassword = password();
String resourcePath = Paths.get(this.getClass().getResource("/session-records").toURI()).getParent().toString();
String cerPath = resourcePath + domainName + ".cer";
String pfxPath = resourcePath + domainName + ".pfx";
createCertificate(cerPath, pfxPath, domainName, cerPassword, "ssl." + domainName, "ssl." + domainName);
byte[] certificate = readAllBytes(new FileInputStream(pfxPath));
appPlatformManager.resourceManager().resourceGroups().define(rgName)
.withRegion(region)
.create();
DnsZone dnsZone = dnsZoneManager.zones().define(domainName)
.withExistingResourceGroup(rgName)
.create();
AppServiceDomain domain = appServiceManager.domains().define(domainName)
.withExistingResourceGroup(rgName)
.defineRegistrantContact()
.withFirstName("Jon")
.withLastName("Doe")
.withEmail("jondoe@contoso.com")
.withAddressLine1("123 4th Ave")
.withCity("Redmond")
.withStateOrProvince("WA")
.withCountry(CountryIsoCode.UNITED_STATES)
.withPostalCode("98052")
.withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
.withPhoneNumber("4258828080")
.attach()
.withDomainPrivacyEnabled(true)
.withAutoRenewEnabled(false)
.withExistingDnsZone(dnsZone)
.create();
Vault vault = keyVaultManager.vaults().define(vaultName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.defineAccessPolicy()
.forServicePrincipal(clientIdFromFile())
.allowSecretAllPermissions()
.allowCertificateAllPermissions()
.attach()
.defineAccessPolicy()
.forObjectId(SPRING_CLOUD_SERVICE_OBJECT_ID)
.allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
.allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
.attach()
.create();
CertificateClient certificateClient = new CertificateClientBuilder()
.vaultUrl(vault.vaultUri())
.pipeline(appPlatformManager.httpPipeline())
.buildClient();
certificateClient.importCertificate(
new ImportCertificateOptions(certName, certificate)
.setPassword(cerPassword)
.setEnabled(true)
);
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(new ByteArrayInputStream(certificate), cerPassword.toCharArray());
String alias = Collections.list(store.aliases()).get(0);
String thumbprint = printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withCertificate("test", vault.vaultUri(), certName)
.create();
service.apps().define(appName).withDefaultActiveDeployment().withDefaultPublicEndpoint().create();
SpringApp app = service.apps().getByName(appName);
dnsZone.update()
.withCNameRecordSet("www", app.fqdn())
.withCNameRecordSet("ssl", app.fqdn())
.apply();
app.update()
.withoutDefaultPublicEndpoint()
.withCustomDomain(String.format("www.%s", domainName))
.withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
.apply();
Assertions.assertTrue(app.customDomains().validate(String.format("www.%s", domainName)).isValid());
Assertions.assertTrue(requestSuccess(String.format("http:
Assertions.assertTrue(requestSuccess(String.format("https:
app.update()
.withHttpsOnly()
.withoutCustomDomain(String.format("www.%s", domainName))
.apply();
Assertions.assertTrue(checkRedirect(String.format("http:
}
@Test
@DoNotRecord(skipInPlayback = true)
private File downloadFile(String remoteFileUrl) throws Exception {
String[] split = remoteFileUrl.split("/");
String filename = split[split.length - 1];
File downloaded = new File(filename);
if (!downloaded.exists()) {
HttpURLConnection connection = (HttpURLConnection) new URL(remoteFileUrl).openConnection();
connection.connect();
try (InputStream inputStream = connection.getInputStream();
OutputStream outputStream = new FileOutputStream(downloaded)) {
IOUtils.copy(inputStream, outputStream);
} finally {
connection.disconnect();
}
}
return downloaded;
}
private void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (parent.exists() || parent.mkdirs()) {
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
} else {
throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath());
}
}
} finally {
connection.disconnect();
}
}
private byte[] readAllBytes(InputStream inputStream) throws IOException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
byte[] data = new byte[4096];
while (true) {
int size = inputStream.read(data);
if (size > 0) {
outputStream.write(data, 0, size);
} else {
return outputStream.toByteArray();
}
}
}
}
public static void createCertificate(String certPath, String pfxPath,
String alias, String password, String cnName, String dnsName) throws IOException {
if (new File(pfxPath).exists()) {
return;
}
String validityInDays = "3650";
String keyAlg = "RSA";
String sigAlg = "SHA1withRSA";
String keySize = "2048";
String storeType = "pkcs12";
String command = "keytool";
String jdkPath = System.getProperty("java.home");
if (jdkPath != null && !jdkPath.isEmpty()) {
jdkPath = jdkPath.concat("\\bin");
if (new File(jdkPath).isDirectory()) {
command = String.format("%s%s%s", jdkPath, File.separator, command);
}
} else {
return;
}
String[] commandArgs = {command, "-genkey", "-alias", alias,
"-keystore", pfxPath, "-storepass", password, "-validity",
validityInDays, "-keyalg", keyAlg, "-sigalg", sigAlg, "-keysize", keySize,
"-storetype", storeType, "-dname", "CN=" + cnName, "-ext", "EKU=1.3.6.1.5.5.7.3.1"};
if (dnsName != null) {
List<String> args = new ArrayList<>(Arrays.asList(commandArgs));
args.add("-ext");
args.add("san=dns:" + dnsName);
commandArgs = args.toArray(new String[0]);
}
cmdInvocation(commandArgs, true);
File pfxFile = new File(pfxPath);
if (pfxFile.exists()) {
String[] certCommandArgs = {command, "-export", "-alias", alias,
"-storetype", storeType, "-keystore", pfxPath,
"-storepass", password, "-rfc", "-file", certPath};
cmdInvocation(certCommandArgs, true);
File cerFile = new File(pfxPath);
if (!cerFile.exists()) {
throw new IOException(
"Error occurred while creating certificate"
+ String.join(" ", certCommandArgs));
}
} else {
throw new IOException("Error occurred while creating certificates"
+ String.join(" ", commandArgs));
}
}
public static String cmdInvocation(String[] command,
boolean ignoreErrorStream) throws IOException {
String result = "";
String error = "";
Process process = new ProcessBuilder(command).start();
try (
InputStream inputStream = process.getInputStream();
InputStream errorStream = process.getErrorStream();
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
BufferedReader ebr = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
) {
result = br.readLine();
process.waitFor();
error = ebr.readLine();
if (error != null && (!"".equals(error))) {
if (!ignoreErrorStream) {
throw new IOException(error, null);
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while invoking command", e);
}
return result;
}
private static final char[] HEX_CODE = "0123456789ABCDEF".toCharArray();
private static String printHexBinary(byte[] data) {
StringBuilder r = new StringBuilder(data.length * 2);
for (byte b : data) {
r.append(HEX_CODE[(b >> 4) & 0xF]);
r.append(HEX_CODE[(b & 0xF)]);
}
return r.toString();
}
} | class SpringCloudLiveOnlyTest extends AppPlatformTest {
private static final String PIGGYMETRICS_CONFIG_URL = "https:
private static final String GATEWAY_JAR_URL = "https:
private static final String PIGGYMETRICS_TAR_GZ_URL = "https:
private static final String PETCLINIC_CONFIG_URL = "https:
private static final String PETCLINIC_GATEWAY_JAR_URL = "https:
private static final String PETCLINIC_TAR_GZ_URL = "https:
private static final String SPRING_CLOUD_SERVICE_OBJECT_ID = "938df8e2-2b9d-40b1-940c-c75c33494239";
@Test
@DoNotRecord(skipInPlayback = true)
public void canCRUDDeployment() throws Exception {
allowAllSSL();
String serviceName = generateRandomResourceName("springsvc", 15);
String appName = "gateway";
String deploymentName = generateRandomResourceName("deploy", 15);
String deploymentName1 = generateRandomResourceName("deploy", 15);
Region region = Region.US_EAST;
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withNewResourceGroup(rgName)
.create();
File jarFile = downloadFile(GATEWAY_JAR_URL);
SpringApp app = service.apps().define(appName)
.defineActiveDeployment(deploymentName)
.withJarFile(jarFile)
.withInstance(2)
.withCpu(2)
.withMemory(4)
.withRuntime(RuntimeVersion.JAVA_11)
.attach()
.withDefaultPublicEndpoint()
.create();
Assertions.assertNotNull(app.url());
Assertions.assertNotNull(app.activeDeploymentName());
Assertions.assertEquals(1, app.deployments().list().stream().count());
Assertions.assertTrue(requestSuccess(app.url()));
SpringAppDeployment deployment = app.getActiveDeployment();
Assertions.assertEquals("2", deployment.settings().resourceRequests().cpu());
Assertions.assertEquals("4Gi", deployment.settings().resourceRequests().memory());
Assertions.assertEquals(2, deployment.instances().size());
File gzFile = downloadFile(PIGGYMETRICS_TAR_GZ_URL);
deployment = app.deployments().define(deploymentName1)
.withSourceCodeTarGzFile(gzFile)
.withTargetModule("gateway")
.withActivation()
.create();
app.refresh();
Assertions.assertEquals(deploymentName1, app.activeDeploymentName());
Assertions.assertEquals("1", deployment.settings().resourceRequests().cpu());
Assertions.assertNotNull(deployment.getLogFileUrl());
Assertions.assertTrue(requestSuccess(app.url()));
app.update()
.withoutDefaultPublicEndpoint()
.apply();
Assertions.assertFalse(app.isPublic());
app.deployments().deleteByName(deploymentName);
Assertions.assertEquals(1, app.deployments().list().stream().count());
}
@Test
@DoNotRecord(skipInPlayback = true)
public void canCreateCustomDomainWithSsl() throws Exception {
String domainName = generateRandomResourceName("jsdkdemo-", 20) + ".com";
String certOrderName = generateRandomResourceName("cert", 15);
String vaultName = generateRandomResourceName("vault", 15);
String certName = generateRandomResourceName("cert", 15);
String serviceName = generateRandomResourceName("springsvc", 15);
String appName = "gateway";
Region region = Region.US_EAST;
allowAllSSL();
String cerPassword = password();
String resourcePath = Paths.get(this.getClass().getResource("/session-records").toURI()).getParent().toString();
String cerPath = resourcePath + domainName + ".cer";
String pfxPath = resourcePath + domainName + ".pfx";
createCertificate(cerPath, pfxPath, domainName, cerPassword, "ssl." + domainName, "ssl." + domainName);
byte[] certificate = readAllBytes(new FileInputStream(pfxPath));
appPlatformManager.resourceManager().resourceGroups().define(rgName)
.withRegion(region)
.create();
DnsZone dnsZone = dnsZoneManager.zones().define(domainName)
.withExistingResourceGroup(rgName)
.create();
AppServiceDomain domain = appServiceManager.domains().define(domainName)
.withExistingResourceGroup(rgName)
.defineRegistrantContact()
.withFirstName("Jon")
.withLastName("Doe")
.withEmail("jondoe@contoso.com")
.withAddressLine1("123 4th Ave")
.withCity("Redmond")
.withStateOrProvince("WA")
.withCountry(CountryIsoCode.UNITED_STATES)
.withPostalCode("98052")
.withPhoneCountryCode(CountryPhoneCode.UNITED_STATES)
.withPhoneNumber("4258828080")
.attach()
.withDomainPrivacyEnabled(true)
.withAutoRenewEnabled(false)
.withExistingDnsZone(dnsZone)
.create();
Vault vault = keyVaultManager.vaults().define(vaultName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.defineAccessPolicy()
.forServicePrincipal(clientIdFromFile())
.allowSecretAllPermissions()
.allowCertificateAllPermissions()
.attach()
.defineAccessPolicy()
.forObjectId(SPRING_CLOUD_SERVICE_OBJECT_ID)
.allowCertificatePermissions(CertificatePermissions.GET, CertificatePermissions.LIST)
.allowSecretPermissions(SecretPermissions.GET, SecretPermissions.LIST)
.attach()
.create();
CertificateClient certificateClient = new CertificateClientBuilder()
.vaultUrl(vault.vaultUri())
.pipeline(appPlatformManager.httpPipeline())
.buildClient();
certificateClient.importCertificate(
new ImportCertificateOptions(certName, certificate)
.setPassword(cerPassword)
.setEnabled(true)
);
KeyStore store = KeyStore.getInstance("PKCS12");
store.load(new ByteArrayInputStream(certificate), cerPassword.toCharArray());
String alias = Collections.list(store.aliases()).get(0);
String thumbprint = printHexBinary(MessageDigest.getInstance("SHA-1").digest(store.getCertificate(alias).getEncoded()));
SpringService service = appPlatformManager.springServices().define(serviceName)
.withRegion(region)
.withExistingResourceGroup(rgName)
.withCertificate("test", vault.vaultUri(), certName)
.create();
service.apps().define(appName).withDefaultActiveDeployment().withDefaultPublicEndpoint().create();
SpringApp app = service.apps().getByName(appName);
dnsZone.update()
.withCNameRecordSet("www", app.fqdn())
.withCNameRecordSet("ssl", app.fqdn())
.apply();
app.update()
.withoutDefaultPublicEndpoint()
.withCustomDomain(String.format("www.%s", domainName))
.withCustomDomain(String.format("ssl.%s", domainName), thumbprint)
.apply();
Assertions.assertTrue(app.customDomains().validate(String.format("www.%s", domainName)).isValid());
Assertions.assertTrue(requestSuccess(String.format("http:
Assertions.assertTrue(requestSuccess(String.format("https:
app.update()
.withHttpsOnly()
.withoutCustomDomain(String.format("www.%s", domainName))
.apply();
Assertions.assertTrue(checkRedirect(String.format("http:
}
@Test
@DoNotRecord(skipInPlayback = true)
private File downloadFile(String remoteFileUrl) throws Exception {
String[] split = remoteFileUrl.split("/");
String filename = split[split.length - 1];
File downloaded = new File(filename);
if (!downloaded.exists()) {
HttpURLConnection connection = (HttpURLConnection) new URL(remoteFileUrl).openConnection();
connection.connect();
try (InputStream inputStream = connection.getInputStream();
OutputStream outputStream = new FileOutputStream(downloaded)) {
IOUtils.copy(inputStream, outputStream);
} finally {
connection.disconnect();
}
}
return downloaded;
}
private void extraTarGzSource(File folder, URL url) throws IOException {
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.connect();
try (TarArchiveInputStream inputStream = new TarArchiveInputStream(new GzipCompressorInputStream(connection.getInputStream()))) {
TarArchiveEntry entry;
while ((entry = inputStream.getNextTarEntry()) != null) {
if (entry.isDirectory()) {
continue;
}
File file = new File(folder, entry.getName());
File parent = file.getParentFile();
if (parent.exists() || parent.mkdirs()) {
try (OutputStream outputStream = new FileOutputStream(file)) {
IOUtils.copy(inputStream, outputStream);
}
} else {
throw new IllegalStateException("Cannot create directory: " + parent.getAbsolutePath());
}
}
} finally {
connection.disconnect();
}
}
private byte[] readAllBytes(InputStream inputStream) throws IOException {
try (ByteArrayOutputStream outputStream = new ByteArrayOutputStream()) {
byte[] data = new byte[4096];
while (true) {
int size = inputStream.read(data);
if (size > 0) {
outputStream.write(data, 0, size);
} else {
return outputStream.toByteArray();
}
}
}
}
public static void createCertificate(String certPath, String pfxPath,
String alias, String password, String cnName, String dnsName) throws IOException {
if (new File(pfxPath).exists()) {
return;
}
String validityInDays = "3650";
String keyAlg = "RSA";
String sigAlg = "SHA1withRSA";
String keySize = "2048";
String storeType = "pkcs12";
String command = "keytool";
String jdkPath = System.getProperty("java.home");
if (jdkPath != null && !jdkPath.isEmpty()) {
jdkPath = jdkPath.concat("\\bin");
if (new File(jdkPath).isDirectory()) {
command = String.format("%s%s%s", jdkPath, File.separator, command);
}
} else {
return;
}
String[] commandArgs = {command, "-genkey", "-alias", alias,
"-keystore", pfxPath, "-storepass", password, "-validity",
validityInDays, "-keyalg", keyAlg, "-sigalg", sigAlg, "-keysize", keySize,
"-storetype", storeType, "-dname", "CN=" + cnName, "-ext", "EKU=1.3.6.1.5.5.7.3.1"};
if (dnsName != null) {
List<String> args = new ArrayList<>(Arrays.asList(commandArgs));
args.add("-ext");
args.add("san=dns:" + dnsName);
commandArgs = args.toArray(new String[0]);
}
cmdInvocation(commandArgs, true);
File pfxFile = new File(pfxPath);
if (pfxFile.exists()) {
String[] certCommandArgs = {command, "-export", "-alias", alias,
"-storetype", storeType, "-keystore", pfxPath,
"-storepass", password, "-rfc", "-file", certPath};
cmdInvocation(certCommandArgs, true);
File cerFile = new File(pfxPath);
if (!cerFile.exists()) {
throw new IOException(
"Error occurred while creating certificate"
+ String.join(" ", certCommandArgs));
}
} else {
throw new IOException("Error occurred while creating certificates"
+ String.join(" ", commandArgs));
}
}
public static String cmdInvocation(String[] command,
boolean ignoreErrorStream) throws IOException {
String result = "";
String error = "";
Process process = new ProcessBuilder(command).start();
try (
InputStream inputStream = process.getInputStream();
InputStream errorStream = process.getErrorStream();
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));
BufferedReader ebr = new BufferedReader(new InputStreamReader(errorStream, StandardCharsets.UTF_8));
) {
result = br.readLine();
process.waitFor();
error = ebr.readLine();
if (error != null && (!"".equals(error))) {
if (!ignoreErrorStream) {
throw new IOException(error, null);
}
}
} catch (Exception e) {
throw new RuntimeException("Exception occurred while invoking command", e);
}
return result;
}
private static final char[] HEX_CODE = "0123456789ABCDEF".toCharArray();
private static String printHexBinary(byte[] data) {
StringBuilder r = new StringBuilder(data.length * 2);
for (byte b : data) {
r.append(HEX_CODE[(b >> 4) & 0xF]);
r.append(HEX_CODE[(b & 0xF)]);
}
return r.toString();
}
} |
You should also test that the exception is retryable or not, and if they have hit the maximum number of attempts. These are checks that are agnostic of the retry algorithm. | public Duration getNextRetryInterval(Exception lastException, Duration remainingTime) {
int baseWaitTime = 0;
if (lastException == null || !(lastException instanceof AmqpException)) {
return this.onGetNextRetryInterval(lastException, remainingTime, baseWaitTime, this.getRetryCount());
}
if (((AmqpException) lastException).getErrorCondition() == ErrorCondition.SERVER_BUSY_ERROR) {
baseWaitTime += ClientConstants.SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS;
}
return this.onGetNextRetryInterval(lastException, remainingTime, baseWaitTime, this.getRetryCount());
} | if (lastException == null || !(lastException instanceof AmqpException)) { | public Duration getNextRetryInterval(Exception lastException, Duration remainingTime) {
int baseWaitTime = 0;
if (!isRetriableException(lastException)) {
return null;
}
if (retryCount.get() >= maxRetryCount) {
return null;
}
if (((AmqpException) lastException).getErrorCondition() == ErrorCondition.SERVER_BUSY_ERROR) {
baseWaitTime += ClientConstants.SERVER_BUSY_BASE_SLEEP_TIME_IN_SECS;
}
return this.calculateNextRetryInterval(lastException, remainingTime, baseWaitTime, this.getRetryCount());
} | class Retry {
public static final Retry NO_RETRY = new RetryExponential(Duration.ofSeconds(0), Duration.ofSeconds(0), 0);
private AtomicInteger retryCount = new AtomicInteger(0);
/**
* Check if the existing exception is a retryable exception.
*
* @param exception A exception that was observed for the operation to be retried.
* @return true if the exception is a retryable exception, otherwise false.
* @throws IllegalArgumentException when the exception is null.
*/
public static boolean isRetryableException(Exception exception) {
if (exception == null) {
throw new IllegalArgumentException("exception cannot be null");
}
if (exception instanceof AmqpException) {
return ((AmqpException) exception).isTransient();
}
return false;
}
/**
* Get 'NO_RETRY' of current.
*
* @return Retry 'NO_RETRY'.
*/
public static Retry getNoRetry() {
return Retry.NO_RETRY;
}
/**
* Get default configured Retry.
*
* @return Retry which has all default property set up.
*/
public static Retry getDefault() {
return new RetryExponential(
ClientConstants.DEFAULT_RETRY_MIN_BACKOFF,
ClientConstants.DEFAULT_RETRY_MAX_BACKOFF,
ClientConstants.DEFAULT_MAX_RETRY_COUNT);
}
/**
* Increase one count to current count value.
*
* @return current AtomicInteger value.
*/
public int incrementRetryCount() {
return retryCount.incrementAndGet();
}
/**
* Get the current retried count.
*
* @return current AtomicInteger value.
*/
public int getRetryCount() {
return retryCount.get();
}
/**
* reset AtomicInteger to value zero.
*/
public void resetRetryInterval() {
retryCount.set(0);
}
/**
* Calculates the amount of time to delay before the next retry attempt
*
* @param lastException The last exception that was observed for the operation to be retried.
* @param remainingTime The amount of time remaining for the cumulative timeout across retry attempts.
* @return The amount of time to delay before retrying the associated operation; if {@code null}, then the operation is no longer eligible to be retried.
*/
/**
* Allows a concrete retry policy implementation to offer a base retry interval to be used in
* the calculations performed by 'Retry.GetNextRetryInterval'.
*
* @param lastException The last exception that was observed for the operation to be retried.
* @param remainingTime The amount of time remaining for the cumulative timeout across retry attempts.
* @param baseWaitSeconds The number of seconds to base the suggested retry interval on;
* this should be used as the minimum interval returned under normal circumstances.
* @param retryCount The number of retries that have already been attempted.
* @return The amount of time to delay before retrying the associated operation; if {@code null}, then the operation is no longer eligible to be retried.
*/
protected abstract Duration onGetNextRetryInterval(Exception lastException,
Duration remainingTime,
int baseWaitSeconds,
int retryCount);
} | class Retry {
public static final Duration DEFAULT_RETRY_MIN_BACKOFF = Duration.ofSeconds(0);
public static final Duration DEFAULT_RETRY_MAX_BACKOFF = Duration.ofSeconds(30);
public static final int DEFAULT_MAX_RETRY_COUNT = 10;
private final AtomicInteger retryCount = new AtomicInteger();
private final int maxRetryCount;
/**
* Creates a new instance of Retry with the maximum retry count of {@code maxRetryCount}
*
* @param maxRetryCount The maximum number of retries allowed.
*/
public Retry(int maxRetryCount) {
this.maxRetryCount = maxRetryCount;
}
/**
* Check if the existing exception is a retriable exception.
*
* @param exception An exception that was observed for the operation to be retried.
* @return true if the exception is a retriable exception, otherwise false.
*/
public static boolean isRetriableException(Exception exception) {
return (exception instanceof AmqpException) && ((AmqpException) exception).isTransient();
}
/**
* Get default configured Retry.
*
* @return Retry which has all default property set up.
*/
public static Retry getNoRetry() {
return new ExponentialRetry(Duration.ZERO, Duration.ZERO, 0);
}
/**
* Get default configured Retry.
*
* @return Retry which has all default property set up.
*/
public static Retry getDefaultRetry() {
return new ExponentialRetry(DEFAULT_RETRY_MIN_BACKOFF, DEFAULT_RETRY_MAX_BACKOFF, DEFAULT_MAX_RETRY_COUNT);
}
/**
* Increase one count to current count value.
*
* @return current AtomicInteger value.
*/
public int incrementRetryCount() {
return retryCount.incrementAndGet();
}
/**
* Get the current retried count.
*
* @return current AtomicInteger value.
*/
public int getRetryCount() {
return retryCount.get();
}
/**
* Reset AtomicInteger to value zero.
*/
public void resetRetryInterval() {
retryCount.set(0);
}
/**
* Get the maximum allowed retry count.
*
* @return maximum allowed retry count value.
*/
public int maxRetryCount() {
return maxRetryCount;
}
/**
* Calculates the amount of time to delay before the next retry attempt
*
* @param lastException The last exception that was observed for the operation to be retried.
* @param remainingTime The amount of time remaining for the cumulative timeout across retry attempts.
* @return The amount of time to delay before retrying the associated operation; if {@code null},
* then the operation is no longer eligible to be retried.
*/
/**
* Allows a concrete retry policy implementation to offer a base retry interval to be used in
* the calculations performed by 'Retry.GetNextRetryInterval'.
*
* @param lastException The last exception that was observed for the operation to be retried.
* @param remainingTime The amount of time remaining for the cumulative timeout across retry attempts.
* @param baseWaitSeconds The number of seconds to base the suggested retry interval on;
* this should be used as the minimum interval returned under normal circumstances.
* @param retryCount The number of retries that have already been attempted.
* @return The amount of time to delay before retrying the associated operation; if {@code null},
* then the operation is no longer eligible to be retried.
*/
protected abstract Duration calculateNextRetryInterval(Exception lastException, Duration remainingTime,
int baseWaitSeconds, int retryCount);
} |
The `SyntheticBean` constructor takes a `List<SomeBean>`, so the type argument here is inferred to be `List<SomeBean>`. | public SyntheticBean create(SyntheticCreationalContext<SyntheticBean> context) {
return new SyntheticBean(context.getInjectedReference(new TypeLiteral<>() {
}, All.Literal.INSTANCE));
} | return new SyntheticBean(context.getInjectedReference(new TypeLiteral<>() { | public SyntheticBean create(SyntheticCreationalContext<SyntheticBean> context) {
return new SyntheticBean(context.getInjectedReference(new TypeLiteral<List<SomeBean>>() {
}, All.Literal.INSTANCE));
} | class SynthBeanCreator implements BeanCreator<SyntheticBean> {
@Override
} | class SynthBeanCreator implements BeanCreator<SyntheticBean> {
@Override
} |
None of those functions is serializable therefore we cannot pass them directly to the sink. | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
final SerializationSchema<RowData> keySerialization =
createSerialization(context, keyEncodingFormat, keyProjection, keyPrefix);
final SerializationSchema<RowData> valueSerialization =
createSerialization(context, valueEncodingFormat, valueProjection, null);
final KafkaSinkBuilder<RowData> sinkBuilder = KafkaSink.builder();
final List<LogicalType> physicalChildren = physicalDataType.getLogicalType().getChildren();
if (transactionalIdPrefix != null) {
sinkBuilder.setTransactionalIdPrefix(transactionalIdPrefix);
}
final KafkaSink<RowData> kafkaSink =
sinkBuilder
.setDeliverGuarantee(deliveryGuarantee)
.setBootstrapServers(
properties.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG).toString())
.setKafkaProducerConfig(properties)
.setRecordSerializer(
new DynamicKafkaRecordSerializationSchema(
topic,
partitioner,
keySerialization,
valueSerialization,
getFieldGetters(physicalChildren, keyProjection),
getFieldGetters(physicalChildren, valueProjection),
hasMetadata(),
getMetadataPositions(physicalChildren),
upsertMode))
.build();
if (flushMode.isEnabled() && upsertMode) {
return (DataStreamSinkProvider)
dataStream -> {
final boolean objectReuse =
dataStream
.getExecutionEnvironment()
.getConfig()
.isObjectReuseEnabled();
final ReducingUpsertSink<?> sink =
new ReducingUpsertSink<>(
kafkaSink,
physicalDataType,
keyProjection,
flushMode,
objectReuse
? createRowDataTypeSerializer(
context, dataStream.getExecutionConfig())
: null);
final DataStreamSink<RowData> end = dataStream.sinkTo(sink);
if (parallelism != null) {
end.setParallelism(parallelism);
}
return end;
};
}
return SinkProvider.of(kafkaSink, parallelism);
} | end.setParallelism(parallelism); | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) {
final SerializationSchema<RowData> keySerialization =
createSerialization(context, keyEncodingFormat, keyProjection, keyPrefix);
final SerializationSchema<RowData> valueSerialization =
createSerialization(context, valueEncodingFormat, valueProjection, null);
final KafkaSinkBuilder<RowData> sinkBuilder = KafkaSink.builder();
final List<LogicalType> physicalChildren = physicalDataType.getLogicalType().getChildren();
if (transactionalIdPrefix != null) {
sinkBuilder.setTransactionalIdPrefix(transactionalIdPrefix);
}
final KafkaSink<RowData> kafkaSink =
sinkBuilder
.setDeliverGuarantee(deliveryGuarantee)
.setBootstrapServers(
properties.get(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG).toString())
.setKafkaProducerConfig(properties)
.setRecordSerializer(
new DynamicKafkaRecordSerializationSchema(
topic,
partitioner,
keySerialization,
valueSerialization,
getFieldGetters(physicalChildren, keyProjection),
getFieldGetters(physicalChildren, valueProjection),
hasMetadata(),
getMetadataPositions(physicalChildren),
upsertMode))
.build();
if (flushMode.isEnabled() && upsertMode) {
return (DataStreamSinkProvider)
dataStream -> {
final boolean objectReuse =
dataStream
.getExecutionEnvironment()
.getConfig()
.isObjectReuseEnabled();
final ReducingUpsertSink<?> sink =
new ReducingUpsertSink<>(
kafkaSink,
physicalDataType,
keyProjection,
flushMode,
objectReuse
? createRowDataTypeSerializer(
context,
dataStream.getExecutionConfig())
::copy
: rowData -> rowData);
final DataStreamSink<RowData> end = dataStream.sinkTo(sink);
if (parallelism != null) {
end.setParallelism(parallelism);
}
return end;
};
}
return SinkProvider.of(kafkaSink, parallelism);
} | class KafkaDynamicSink implements DynamicTableSink, SupportsWritingMetadata {
/** Metadata that is appended at the end of a physical sink row. */
protected List<String> metadataKeys;
/** Data type of consumed data type. */
protected DataType consumedDataType;
/** Data type to configure the formats. */
protected final DataType physicalDataType;
/** Optional format for encoding keys to Kafka. */
protected final @Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat;
/** Format for encoding values to Kafka. */
protected final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat;
/** Indices that determine the key fields and the source position in the consumed row. */
protected final int[] keyProjection;
/** Indices that determine the value fields and the source position in the consumed row. */
protected final int[] valueProjection;
/** Prefix that needs to be removed from fields when constructing the physical data type. */
protected final @Nullable String keyPrefix;
/** The defined delivery guarantee. */
private final DeliveryGuarantee deliveryGuarantee;
/**
* If the {@link
* prefix for all ids of opened Kafka transactions.
*/
@Nullable private final String transactionalIdPrefix;
/** The Kafka topic to write to. */
protected final String topic;
/** Properties for the Kafka producer. */
protected final Properties properties;
/** Partitioner to select Kafka partition for each item. */
protected final @Nullable FlinkKafkaPartitioner<RowData> partitioner;
/**
* Flag to determine sink mode. In upsert mode sink transforms the delete/update-before message
* to tombstone message.
*/
protected final boolean upsertMode;
/** Sink buffer flush config which only supported in upsert mode now. */
protected final SinkBufferFlushMode flushMode;
/** Parallelism of the physical Kafka producer. * */
protected final @Nullable Integer parallelism;
public KafkaDynamicSink(
DataType consumedDataType,
DataType physicalDataType,
@Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat,
EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat,
int[] keyProjection,
int[] valueProjection,
@Nullable String keyPrefix,
String topic,
Properties properties,
@Nullable FlinkKafkaPartitioner<RowData> partitioner,
DeliveryGuarantee deliveryGuarantee,
boolean upsertMode,
SinkBufferFlushMode flushMode,
@Nullable Integer parallelism,
@Nullable String transactionalIdPrefix) {
this.consumedDataType =
checkNotNull(consumedDataType, "Consumed data type must not be null.");
this.physicalDataType =
checkNotNull(physicalDataType, "Physical data type must not be null.");
this.keyEncodingFormat = keyEncodingFormat;
this.valueEncodingFormat =
checkNotNull(valueEncodingFormat, "Value encoding format must not be null.");
this.keyProjection = checkNotNull(keyProjection, "Key projection must not be null.");
this.valueProjection = checkNotNull(valueProjection, "Value projection must not be null.");
this.keyPrefix = keyPrefix;
this.transactionalIdPrefix = transactionalIdPrefix;
this.metadataKeys = Collections.emptyList();
this.topic = checkNotNull(topic, "Topic must not be null.");
this.properties = checkNotNull(properties, "Properties must not be null.");
this.partitioner = partitioner;
this.deliveryGuarantee =
checkNotNull(deliveryGuarantee, "DeliveryGuarantee must not be null.");
this.upsertMode = upsertMode;
this.flushMode = checkNotNull(flushMode);
if (flushMode.isEnabled() && !upsertMode) {
throw new IllegalArgumentException(
"Sink buffer flush is only supported in upsert-kafka.");
}
this.parallelism = parallelism;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
return valueEncodingFormat.getChangelogMode();
}
@Override
@Override
public Map<String, DataType> listWritableMetadata() {
final Map<String, DataType> metadataMap = new LinkedHashMap<>();
Stream.of(WritableMetadata.values())
.forEachOrdered(m -> metadataMap.put(m.key, m.dataType));
return metadataMap;
}
@Override
public void applyWritableMetadata(List<String> metadataKeys, DataType consumedDataType) {
this.metadataKeys = metadataKeys;
this.consumedDataType = consumedDataType;
}
@Override
public DynamicTableSink copy() {
final KafkaDynamicSink copy =
new KafkaDynamicSink(
consumedDataType,
physicalDataType,
keyEncodingFormat,
valueEncodingFormat,
keyProjection,
valueProjection,
keyPrefix,
topic,
properties,
partitioner,
deliveryGuarantee,
upsertMode,
flushMode,
parallelism,
transactionalIdPrefix);
copy.metadataKeys = metadataKeys;
return copy;
}
@Override
public String asSummaryString() {
return "Kafka table sink";
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final KafkaDynamicSink that = (KafkaDynamicSink) o;
return Objects.equals(metadataKeys, that.metadataKeys)
&& Objects.equals(consumedDataType, that.consumedDataType)
&& Objects.equals(physicalDataType, that.physicalDataType)
&& Objects.equals(keyEncodingFormat, that.keyEncodingFormat)
&& Objects.equals(valueEncodingFormat, that.valueEncodingFormat)
&& Arrays.equals(keyProjection, that.keyProjection)
&& Arrays.equals(valueProjection, that.valueProjection)
&& Objects.equals(keyPrefix, that.keyPrefix)
&& Objects.equals(topic, that.topic)
&& Objects.equals(properties, that.properties)
&& Objects.equals(partitioner, that.partitioner)
&& Objects.equals(deliveryGuarantee, that.deliveryGuarantee)
&& Objects.equals(upsertMode, that.upsertMode)
&& Objects.equals(flushMode, that.flushMode)
&& Objects.equals(transactionalIdPrefix, that.transactionalIdPrefix)
&& Objects.equals(parallelism, that.parallelism);
}
@Override
public int hashCode() {
return Objects.hash(
metadataKeys,
consumedDataType,
physicalDataType,
keyEncodingFormat,
valueEncodingFormat,
keyProjection,
valueProjection,
keyPrefix,
topic,
properties,
partitioner,
deliveryGuarantee,
upsertMode,
flushMode,
transactionalIdPrefix,
parallelism);
}
private TypeSerializer<RowData> createRowDataTypeSerializer(
Context context, ExecutionConfig executionConfig) {
final TypeInformation<RowData> typeInformation =
context.createTypeInformation(consumedDataType);
return typeInformation.createSerializer(executionConfig);
}
private int[] getMetadataPositions(List<LogicalType> physicalChildren) {
return Stream.of(WritableMetadata.values())
.mapToInt(
m -> {
final int pos = metadataKeys.indexOf(m.key);
if (pos < 0) {
return -1;
}
return physicalChildren.size() + pos;
})
.toArray();
}
private boolean hasMetadata() {
return metadataKeys.size() > 0;
}
private RowData.FieldGetter[] getFieldGetters(
List<LogicalType> physicalChildren, int[] keyProjection) {
return Arrays.stream(keyProjection)
.mapToObj(
targetField ->
RowData.createFieldGetter(
physicalChildren.get(targetField), targetField))
.toArray(RowData.FieldGetter[]::new);
}
private @Nullable SerializationSchema<RowData> createSerialization(
DynamicTableSink.Context context,
@Nullable EncodingFormat<SerializationSchema<RowData>> format,
int[] projection,
@Nullable String prefix) {
if (format == null) {
return null;
}
DataType physicalFormatDataType =
DataTypeUtils.projectRow(this.physicalDataType, projection);
if (prefix != null) {
physicalFormatDataType = DataTypeUtils.stripRowPrefix(physicalFormatDataType, prefix);
}
return format.createRuntimeEncoder(context, physicalFormatDataType);
}
/**
 * Writable metadata columns this Kafka sink can persist, keyed by the name used in the
 * DDL METADATA clause. Each constant bundles the metadata key, its SQL data type, and a
 * converter that extracts the value from the consumed row at runtime.
 */
enum WritableMetadata {
    // Kafka record headers, given as a nullable map of header name -> raw bytes.
    HEADERS(
            "headers",
            DataTypes.MAP(DataTypes.STRING().nullable(), DataTypes.BYTES().nullable())
                    .nullable(),
            new MetadataConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object read(RowData row, int pos) {
                    if (row.isNullAt(pos)) {
                        return null;
                    }
                    final MapData map = row.getMap(pos);
                    final ArrayData keyArray = map.keyArray();
                    final ArrayData valueArray = map.valueArray();
                    final List<Header> headers = new ArrayList<>();
                    for (int i = 0; i < keyArray.size(); i++) {
                        // Entries with a null key or a null value are silently skipped.
                        if (!keyArray.isNullAt(i) && !valueArray.isNullAt(i)) {
                            final String key = keyArray.getString(i).toString();
                            final byte[] value = valueArray.getBinary(i);
                            headers.add(new KafkaHeader(key, value));
                        }
                    }
                    return headers;
                }
            }),
    // Kafka record timestamp, read with millisecond precision (3).
    TIMESTAMP(
            "timestamp",
            DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable(),
            new MetadataConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object read(RowData row, int pos) {
                    if (row.isNullAt(pos)) {
                        return null;
                    }
                    return row.getTimestamp(pos, 3).getMillisecond();
                }
            });
    // Metadata column name as declared in SQL.
    final String key;
    // SQL type of the metadata column.
    final DataType dataType;
    // Reads the metadata value out of the consumed row at runtime.
    final MetadataConverter converter;
    WritableMetadata(String key, DataType dataType, MetadataConverter converter) {
        this.key = key;
        this.dataType = dataType;
        this.converter = converter;
    }
}
/**
 * Converts a field of the consumed row into the object written to the Kafka record
 * (header list, timestamp, ...). Serializable because instances are shipped with the
 * sink to the runtime.
 */
interface MetadataConverter extends Serializable {
    // Reads the metadata value at position pos of the consumed row; may return null.
    Object read(RowData consumedRow, int pos);
}
/** Simple immutable {@link Header} implementation pairing a header key with its raw bytes. */
private static class KafkaHeader implements Header {
    private final String key;
    private final byte[] value;
    KafkaHeader(String key, byte[] value) {
        this.key = key;
        this.value = value;
    }
    @Override
    public String key() {
        return key;
    }
    @Override
    public byte[] value() {
        return value;
    }
}
} | class KafkaDynamicSink implements DynamicTableSink, SupportsWritingMetadata {
/** Metadata that is appended at the end of a physical sink row. */
protected List<String> metadataKeys;
/** Data type of consumed data type. */
protected DataType consumedDataType;
/** Data type to configure the formats. */
protected final DataType physicalDataType;
/** Optional format for encoding keys to Kafka. */
protected final @Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat;
/** Format for encoding values to Kafka. */
protected final EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat;
/** Indices that determine the key fields and the source position in the consumed row. */
protected final int[] keyProjection;
/** Indices that determine the value fields and the source position in the consumed row. */
protected final int[] valueProjection;
/** Prefix that needs to be removed from fields when constructing the physical data type. */
protected final @Nullable String keyPrefix;
/** The defined delivery guarantee. */
private final DeliveryGuarantee deliveryGuarantee;
/**
* If the {@link
* prefix for all ids of opened Kafka transactions.
*/
@Nullable private final String transactionalIdPrefix;
/** The Kafka topic to write to. */
protected final String topic;
/** Properties for the Kafka producer. */
protected final Properties properties;
/** Partitioner to select Kafka partition for each item. */
protected final @Nullable FlinkKafkaPartitioner<RowData> partitioner;
/**
* Flag to determine sink mode. In upsert mode sink transforms the delete/update-before message
* to tombstone message.
*/
protected final boolean upsertMode;
/** Sink buffer flush config which only supported in upsert mode now. */
protected final SinkBufferFlushMode flushMode;
/** Parallelism of the physical Kafka producer. * */
protected final @Nullable Integer parallelism;
/**
 * Creates a Kafka table sink.
 *
 * <p>Mandatory arguments are null-checked; the key format, key prefix, partitioner,
 * parallelism and transactional id prefix may be null.
 *
 * @throws IllegalArgumentException if buffer flush is enabled outside upsert mode
 */
public KafkaDynamicSink(
        DataType consumedDataType,
        DataType physicalDataType,
        @Nullable EncodingFormat<SerializationSchema<RowData>> keyEncodingFormat,
        EncodingFormat<SerializationSchema<RowData>> valueEncodingFormat,
        int[] keyProjection,
        int[] valueProjection,
        @Nullable String keyPrefix,
        String topic,
        Properties properties,
        @Nullable FlinkKafkaPartitioner<RowData> partitioner,
        DeliveryGuarantee deliveryGuarantee,
        boolean upsertMode,
        SinkBufferFlushMode flushMode,
        @Nullable Integer parallelism,
        @Nullable String transactionalIdPrefix) {
    this.consumedDataType =
            checkNotNull(consumedDataType, "Consumed data type must not be null.");
    this.physicalDataType =
            checkNotNull(physicalDataType, "Physical data type must not be null.");
    this.keyEncodingFormat = keyEncodingFormat;
    this.valueEncodingFormat =
            checkNotNull(valueEncodingFormat, "Value encoding format must not be null.");
    this.keyProjection = checkNotNull(keyProjection, "Key projection must not be null.");
    this.valueProjection = checkNotNull(valueProjection, "Value projection must not be null.");
    this.keyPrefix = keyPrefix;
    this.transactionalIdPrefix = transactionalIdPrefix;
    // Metadata keys are applied later via applyWritableMetadata; start empty.
    this.metadataKeys = Collections.emptyList();
    this.topic = checkNotNull(topic, "Topic must not be null.");
    this.properties = checkNotNull(properties, "Properties must not be null.");
    this.partitioner = partitioner;
    this.deliveryGuarantee =
            checkNotNull(deliveryGuarantee, "DeliveryGuarantee must not be null.");
    this.upsertMode = upsertMode;
    this.flushMode = checkNotNull(flushMode);
    // Buffered flushing only makes sense when upsert semantics deduplicate by key.
    if (flushMode.isEnabled() && !upsertMode) {
        throw new IllegalArgumentException(
                "Sink buffer flush is only supported in upsert-kafka.");
    }
    this.parallelism = parallelism;
}
@Override
public ChangelogMode getChangelogMode(ChangelogMode requestedMode) {
    // Delegate to the value format: the sink accepts whatever changes it can encode.
    return valueEncodingFormat.getChangelogMode();
}
// Fix: a stray duplicate @Override annotation preceded this method; @Override is not
// a repeatable annotation, so stacking it twice does not compile.
@Override
public Map<String, DataType> listWritableMetadata() {
    // Expose writable metadata in declaration order so the key order matches
    // WritableMetadata.values(); LinkedHashMap preserves insertion order.
    final Map<String, DataType> metadataMap = new LinkedHashMap<>();
    Stream.of(WritableMetadata.values())
            .forEachOrdered(m -> metadataMap.put(m.key, m.dataType));
    return metadataMap;
}
@Override
public void applyWritableMetadata(List<String> metadataKeys, DataType consumedDataType) {
    // Called by the planner once metadata columns are resolved; the consumed data
    // type now includes the metadata columns appended after the physical ones.
    this.metadataKeys = metadataKeys;
    this.consumedDataType = consumedDataType;
}
@Override
public DynamicTableSink copy() {
    // Planner-mandated copy: a new sink with identical configuration. metadataKeys
    // must be carried over explicitly because it is set after construction via
    // applyWritableMetadata.
    final KafkaDynamicSink copy =
            new KafkaDynamicSink(
                    consumedDataType,
                    physicalDataType,
                    keyEncodingFormat,
                    valueEncodingFormat,
                    keyProjection,
                    valueProjection,
                    keyPrefix,
                    topic,
                    properties,
                    partitioner,
                    deliveryGuarantee,
                    upsertMode,
                    flushMode,
                    parallelism,
                    transactionalIdPrefix);
    copy.metadataKeys = metadataKeys;
    return copy;
}
@Override
public String asSummaryString() {
    // Short human-readable description used in plan/explain output.
    return "Kafka table sink";
}
@Override
public boolean equals(Object o) {
    // Value equality over the full configuration; the int[] projections are
    // compared by content via Arrays.equals.
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    final KafkaDynamicSink that = (KafkaDynamicSink) o;
    return Objects.equals(metadataKeys, that.metadataKeys)
            && Objects.equals(consumedDataType, that.consumedDataType)
            && Objects.equals(physicalDataType, that.physicalDataType)
            && Objects.equals(keyEncodingFormat, that.keyEncodingFormat)
            && Objects.equals(valueEncodingFormat, that.valueEncodingFormat)
            && Arrays.equals(keyProjection, that.keyProjection)
            && Arrays.equals(valueProjection, that.valueProjection)
            && Objects.equals(keyPrefix, that.keyPrefix)
            && Objects.equals(topic, that.topic)
            && Objects.equals(properties, that.properties)
            && Objects.equals(partitioner, that.partitioner)
            && Objects.equals(deliveryGuarantee, that.deliveryGuarantee)
            && Objects.equals(upsertMode, that.upsertMode)
            && Objects.equals(flushMode, that.flushMode)
            && Objects.equals(transactionalIdPrefix, that.transactionalIdPrefix)
            && Objects.equals(parallelism, that.parallelism);
}
@Override
public int hashCode() {
    // Fix: equals() compares keyProjection/valueProjection by content via
    // Arrays.equals, but Objects.hash(...) would use the arrays' identity hash
    // codes — two equal sinks built from distinct array instances would hash
    // differently, violating the equals/hashCode contract. Hash the arrays by
    // content with Arrays.hashCode instead.
    int result =
            Objects.hash(
                    metadataKeys,
                    consumedDataType,
                    physicalDataType,
                    keyEncodingFormat,
                    valueEncodingFormat,
                    keyPrefix,
                    topic,
                    properties,
                    partitioner,
                    deliveryGuarantee,
                    upsertMode,
                    flushMode,
                    transactionalIdPrefix,
                    parallelism);
    result = 31 * result + Arrays.hashCode(keyProjection);
    result = 31 * result + Arrays.hashCode(valueProjection);
    return result;
}
private TypeSerializer<RowData> createRowDataTypeSerializer(
        Context context, ExecutionConfig executionConfig) {
    // Derives a runtime serializer for the consumed row type from the planner context.
    final TypeInformation<RowData> typeInformation =
            context.createTypeInformation(consumedDataType);
    return typeInformation.createSerializer(executionConfig);
}
private int[] getMetadataPositions(List<LogicalType> physicalChildren) {
    // Metadata columns are appended after all physical columns, in the order of
    // the requested metadata keys; keys that were not requested map to -1.
    final WritableMetadata[] metadata = WritableMetadata.values();
    final int[] positions = new int[metadata.length];
    final int physicalArity = physicalChildren.size();
    for (int i = 0; i < metadata.length; i++) {
        final int keyIndex = metadataKeys.indexOf(metadata[i].key);
        positions[i] = keyIndex < 0 ? -1 : physicalArity + keyIndex;
    }
    return positions;
}
private boolean hasMetadata() {
    // True when the planner requested at least one writable metadata column.
    return !metadataKeys.isEmpty();
}
private RowData.FieldGetter[] getFieldGetters(
        List<LogicalType> physicalChildren, int[] keyProjection) {
    // One getter per projected field, reading from its original position in the
    // consumed row with the field's logical type.
    final RowData.FieldGetter[] fieldGetters = new RowData.FieldGetter[keyProjection.length];
    for (int i = 0; i < keyProjection.length; i++) {
        final int targetField = keyProjection[i];
        fieldGetters[i] =
                RowData.createFieldGetter(physicalChildren.get(targetField), targetField);
    }
    return fieldGetters;
}
private @Nullable SerializationSchema<RowData> createSerialization(
        DynamicTableSink.Context context,
        @Nullable EncodingFormat<SerializationSchema<RowData>> format,
        int[] projection,
        @Nullable String prefix) {
    // No format configured (the key format is optional) -> no serializer.
    if (format == null) {
        return null;
    }
    // Narrow the physical row to the projected fields and strip the optional
    // field-name prefix before handing the type to the format.
    final DataType projectedDataType =
            DataTypeUtils.projectRow(this.physicalDataType, projection);
    final DataType formatDataType =
            prefix == null
                    ? projectedDataType
                    : DataTypeUtils.stripRowPrefix(projectedDataType, prefix);
    return format.createRuntimeEncoder(context, formatDataType);
}
/**
 * Writable metadata columns this Kafka sink can persist, keyed by the name used in the
 * DDL METADATA clause. Each constant bundles the metadata key, its SQL data type, and a
 * converter that extracts the value from the consumed row at runtime.
 */
enum WritableMetadata {
    // Kafka record headers, given as a nullable map of header name -> raw bytes.
    HEADERS(
            "headers",
            DataTypes.MAP(DataTypes.STRING().nullable(), DataTypes.BYTES().nullable())
                    .nullable(),
            new MetadataConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object read(RowData row, int pos) {
                    if (row.isNullAt(pos)) {
                        return null;
                    }
                    final MapData map = row.getMap(pos);
                    final ArrayData keyArray = map.keyArray();
                    final ArrayData valueArray = map.valueArray();
                    final List<Header> headers = new ArrayList<>();
                    for (int i = 0; i < keyArray.size(); i++) {
                        // Entries with a null key or a null value are silently skipped.
                        if (!keyArray.isNullAt(i) && !valueArray.isNullAt(i)) {
                            final String key = keyArray.getString(i).toString();
                            final byte[] value = valueArray.getBinary(i);
                            headers.add(new KafkaHeader(key, value));
                        }
                    }
                    return headers;
                }
            }),
    // Kafka record timestamp, read with millisecond precision (3).
    TIMESTAMP(
            "timestamp",
            DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).nullable(),
            new MetadataConverter() {
                private static final long serialVersionUID = 1L;
                @Override
                public Object read(RowData row, int pos) {
                    if (row.isNullAt(pos)) {
                        return null;
                    }
                    return row.getTimestamp(pos, 3).getMillisecond();
                }
            });
    // Metadata column name as declared in SQL.
    final String key;
    // SQL type of the metadata column.
    final DataType dataType;
    // Reads the metadata value out of the consumed row at runtime.
    final MetadataConverter converter;
    WritableMetadata(String key, DataType dataType, MetadataConverter converter) {
        this.key = key;
        this.dataType = dataType;
        this.converter = converter;
    }
}
/**
 * Converts a field of the consumed row into the object written to the Kafka record
 * (header list, timestamp, ...). Serializable because instances are shipped with the
 * sink to the runtime.
 */
interface MetadataConverter extends Serializable {
    // Reads the metadata value at position pos of the consumed row; may return null.
    Object read(RowData consumedRow, int pos);
}
/** Simple immutable {@link Header} implementation pairing a header key with its raw bytes. */
private static class KafkaHeader implements Header {
    private final String key;
    private final byte[] value;
    KafkaHeader(String key, byte[] value) {
        this.key = key;
        this.value = value;
    }
    @Override
    public String key() {
        return key;
    }
    @Override
    public byte[] value() {
        return value;
    }
}
} |
```suggestion assertThat(m).containsKeys(String.class, Boolean.class); ``` | void unwrapOptionalsPreservesOrder() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("a", String.class, "aaa");
map.put("b", Boolean.class, "bbb");
LinkedHashMap<Class<?>, String> m = map.unwrapOptionals();
assertThat(m).containsKey(String.class);
assertThat(m).containsValues("aaa", "bbb");
} | assertThat(m).containsKey(String.class); | void unwrapOptionalsPreservesOrder() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("a", String.class, "aaa");
map.put("b", Boolean.class, "bbb");
LinkedHashMap<Class<?>, String> m = map.unwrapOptionals();
assertThat(m).containsKeys(String.class, Boolean.class);
assertThat(m).containsValues("aaa", "bbb");
} | class LinkedOptionalMapTest {
@Test
void usageExample() {
    // Entries may be added with a missing key class or a missing value; the map
    // tracks which names are only partially present.
    LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
    map.put("java.lang.String", String.class, "a string class");
    map.put("scala.Option", null, "a scala Option");
    map.put("java.lang.Boolean", Boolean.class, null);
    assertThat(map.keyNames()).contains("java.lang.String", "scala.Option");
    assertThat(map.absentKeysOrValues()).contains("scala.Option", "java.lang.Boolean");
}
@Test
void overridingKeyWithTheSameKeyName() {
    // Re-putting under the same name fills in the previously missing key class,
    // so nothing remains absent afterwards.
    LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
    map.put("java.lang.String", null, "a string class");
    map.put("java.lang.String", String.class, "a string class");
    assertThat(map.absentKeysOrValues()).isEmpty();
}
@Test
void overridingKeysAndValuesWithTheSameKeyName() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", null, null);
map.put("java.lang.String", String.class, "a string class");
assertThat(map.absentKeysOrValues()).isEmpty();
}
@Test
void overridingAValueWithMissingKeyShouldBeConsideredAsAbsent() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", null, null);
map.put("java.lang.String", null, "a string class");
assertThat(map.absentKeysOrValues()).contains("java.lang.String");
}
@Test
void mergingMapsWithPresentEntriesLeavesNoAbsentKeyNames() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
first.put("b", null, null);
first.put("c", String.class, null);
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.absentKeysOrValues()).isEmpty();
}
@Test
void mergingMapsPreserversTheOrderOfTheOriginalMap() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
first.put("b", null, null);
first.put("c", String.class, null);
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.keyNames()).contains("b", "c", "a", "d");
}
@Test
void mergingToEmpty() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.keyNames()).contains("a", "b", "c", "d");
}
@Test
void unwrapOptionalsWithMissingValueThrows() {
    // Unwrapping requires every entry to be fully present; a missing value fails fast.
    assertThatThrownBy(
                    () -> {
                        LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
                        map.put("a", String.class, null);
                        map.unwrapOptionals();
                    })
            .isInstanceOf(IllegalStateException.class);
}
@Test
void unwrapOptionalsWithMissingKeyThrows() {
assertThatThrownBy(
() -> {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("a", null, "blabla");
map.unwrapOptionals();
})
.isInstanceOf(IllegalStateException.class);
}
// Fix: the @Test annotation was duplicated on this method; @Test is not a
// repeatable annotation, so stacking it twice does not compile.
@Test
void testPrefix() {
    // A map copy-constructed from `left` and then extended keeps `left` as an
    // ordered prefix.
    LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
    left.put("a", String.class, "aaa");
    left.put("b", String.class, "aaa");
    LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>(left);
    right.put("c", Boolean.class, "bbb");
    assertThat(LinkedOptionalMap.isLeftPrefixOfRight(left, right)).isTrue();
}
@Test
void testNonPrefix() {
LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
left.put("a", String.class, "aaa");
left.put("c", String.class, "aaa");
LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>();
right.put("b", Boolean.class, "bbb");
right.put("c", Boolean.class, "bbb");
assertThat(LinkedOptionalMap.isLeftPrefixOfRight(left, right)).isFalse();
}
@Test
void demoMergeResult() {
    // Merging right into left keeps left's ordering for shared names and appends
    // right-only names afterwards.
    LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
    left.put("b", null, null);
    left.put("c", String.class, null);
    LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>();
    right.put("b", String.class, "bbb");
    right.put("c", Void.class, "ccc");
    right.put("a", Boolean.class, "aaa");
    right.put("d", Long.class, "ddd");
    MergeResult<Class<?>, String> result = LinkedOptionalMap.mergeRightIntoLeft(left, right);
    assertThat(result.hasMissingKeys()).isFalse();
    assertThat(result.isOrderedSubset()).isTrue();
    assertThat(result.missingKeys()).isEmpty();
    LinkedHashMap<Class<?>, String> merged = result.getMerged();
    assertThat(merged.keySet())
            .containsExactly(String.class, Void.class, Boolean.class, Long.class);
}
} | class LinkedOptionalMapTest {
@Test
void usageExample() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", String.class, "a string class");
map.put("scala.Option", null, "a scala Option");
map.put("java.lang.Boolean", Boolean.class, null);
assertThat(map.keyNames()).contains("java.lang.String", "scala.Option");
assertThat(map.absentKeysOrValues()).contains("scala.Option", "java.lang.Boolean");
}
@Test
void overridingKeyWithTheSameKeyName() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", null, "a string class");
map.put("java.lang.String", String.class, "a string class");
assertThat(map.absentKeysOrValues()).isEmpty();
}
@Test
void overridingKeysAndValuesWithTheSameKeyName() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", null, null);
map.put("java.lang.String", String.class, "a string class");
assertThat(map.absentKeysOrValues()).isEmpty();
}
@Test
void overridingAValueWithMissingKeyShouldBeConsideredAsAbsent() {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("java.lang.String", null, null);
map.put("java.lang.String", null, "a string class");
assertThat(map.absentKeysOrValues()).contains("java.lang.String");
}
@Test
void mergingMapsWithPresentEntriesLeavesNoAbsentKeyNames() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
first.put("b", null, null);
first.put("c", String.class, null);
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.absentKeysOrValues()).isEmpty();
}
@Test
void mergingMapsPreserversTheOrderOfTheOriginalMap() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
first.put("b", null, null);
first.put("c", String.class, null);
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.keyNames()).contains("b", "c", "a", "d");
}
@Test
void mergingToEmpty() {
LinkedOptionalMap<Class<?>, String> first = new LinkedOptionalMap<>();
LinkedOptionalMap<Class<?>, String> second = new LinkedOptionalMap<>();
second.put("a", String.class, "aaa");
second.put("b", String.class, "bbb");
second.put("c", Void.class, "ccc");
second.put("d", String.class, "ddd");
first.putAll(second);
assertThat(first.keyNames()).contains("a", "b", "c", "d");
}
@Test
void unwrapOptionalsWithMissingValueThrows() {
assertThatThrownBy(
() -> {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("a", String.class, null);
map.unwrapOptionals();
})
.isInstanceOf(IllegalStateException.class);
}
@Test
void unwrapOptionalsWithMissingKeyThrows() {
assertThatThrownBy(
() -> {
LinkedOptionalMap<Class<?>, String> map = new LinkedOptionalMap<>();
map.put("a", null, "blabla");
map.unwrapOptionals();
})
.isInstanceOf(IllegalStateException.class);
}
// Fix: the @Test annotation was duplicated on this method; @Test is not a
// repeatable annotation, so stacking it twice does not compile.
@Test
void testPrefix() {
    // A map copy-constructed from `left` and then extended keeps `left` as an
    // ordered prefix.
    LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
    left.put("a", String.class, "aaa");
    left.put("b", String.class, "aaa");
    LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>(left);
    right.put("c", Boolean.class, "bbb");
    assertThat(LinkedOptionalMap.isLeftPrefixOfRight(left, right)).isTrue();
}
@Test
void testNonPrefix() {
LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
left.put("a", String.class, "aaa");
left.put("c", String.class, "aaa");
LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>();
right.put("b", Boolean.class, "bbb");
right.put("c", Boolean.class, "bbb");
assertThat(LinkedOptionalMap.isLeftPrefixOfRight(left, right)).isFalse();
}
@Test
void demoMergeResult() {
LinkedOptionalMap<Class<?>, String> left = new LinkedOptionalMap<>();
left.put("b", null, null);
left.put("c", String.class, null);
LinkedOptionalMap<Class<?>, String> right = new LinkedOptionalMap<>();
right.put("b", String.class, "bbb");
right.put("c", Void.class, "ccc");
right.put("a", Boolean.class, "aaa");
right.put("d", Long.class, "ddd");
MergeResult<Class<?>, String> result = LinkedOptionalMap.mergeRightIntoLeft(left, right);
assertThat(result.hasMissingKeys()).isFalse();
assertThat(result.isOrderedSubset()).isTrue();
assertThat(result.missingKeys()).isEmpty();
LinkedHashMap<Class<?>, String> merged = result.getMerged();
assertThat(merged.keySet())
.containsExactly(String.class, Void.class, Boolean.class, Long.class);
}
} |
Let's at least drop a TODO to add a more native ParDo.withBadRecordHandler() to reduce this boilerplate. | public PCollection<Integer> expand(PCollection<Integer> input) {
PCollectionTuple pCollectionTuple =
input.apply(
"NoOpDoFn",
ParDo.of(new OddIsBad(badRecordRouter))
.withOutputTags(RECORDS, TupleTagList.of(BadRecordRouter.BAD_RECORD_TAG)));
Coder<BadRecord> badRecordCoder;
try {
SchemaRegistry schemaRegistry = input.getPipeline().getSchemaRegistry();
badRecordCoder =
SchemaCoder.of(
schemaRegistry.getSchema(BadRecord.class),
TypeDescriptor.of(BadRecord.class),
schemaRegistry.getToRowFunction(BadRecord.class),
schemaRegistry.getFromRowFunction(BadRecord.class));
} catch (NoSuchSchemaException e) {
throw new RuntimeException(e);
}
errorHandler.addErrorCollection(
pCollectionTuple.get(BadRecordRouter.BAD_RECORD_TAG).setCoder(badRecordCoder));
return pCollectionTuple.get(RECORDS).setCoder(BigEndianIntegerCoder.of());
} | errorHandler.addErrorCollection( | public PCollection<Integer> expand(PCollection<Integer> input) {
PCollectionTuple pCollectionTuple =
input.apply(
"NoOpDoFn",
ParDo.of(new OddIsBad(badRecordRouter))
.withOutputTags(RECORDS, TupleTagList.of(BadRecordRouter.BAD_RECORD_TAG)));
errorHandler.addErrorCollection(
pCollectionTuple
.get(BadRecordRouter.BAD_RECORD_TAG)
.setCoder(BadRecord.getCoder(input.getPipeline())));
return pCollectionTuple.get(RECORDS).setCoder(BigEndianIntegerCoder.of());
} | class BRHEnabledPTransform extends PTransform<PCollection<Integer>, PCollection<Integer>> {
private ErrorHandler<BadRecord, ?> errorHandler = new NoOpErrorHandler<>();
private BadRecordRouter badRecordRouter = BadRecordRouter.THROWING_ROUTER;
private static final TupleTag<Integer> RECORDS = new TupleTag<>();
public BRHEnabledPTransform() {}
// Enables bad-record collection: elements routed as "bad" are delivered to the
// given handler instead of failing the pipeline (the default throwing router).
public BRHEnabledPTransform withBadRecordHandler(ErrorHandler<BadRecord, ?> errorHandler) {
    this.errorHandler = errorHandler;
    this.badRecordRouter = BadRecordRouter.RECORDING_ROUTER;
    return this;
}
/**
 * Test {@link DoFn} that outputs even integers and routes odd ones to the bad-record
 * output via the configured {@link BadRecordRouter}.
 *
 * <p>Fix: removed the stray {@code @Override} annotation that preceded this class —
 * {@code @Override} is not applicable to type declarations and does not compile.
 */
public static class OddIsBad extends DoFn<Integer, Integer> {
    // Router deciding whether bad elements throw or are recorded.
    private final BadRecordRouter badRecordRouter;

    public OddIsBad(BadRecordRouter badRecordRouter) {
        this.badRecordRouter = badRecordRouter;
    }

    @ProcessElement
    public void processElement(@Element Integer element, MultiOutputReceiver receiver)
            throws Exception {
        if (element % 2 == 0) {
            receiver.get(RECORDS).output(element);
        } else {
            // Odd elements are treated as bad records with a descriptive reason.
            badRecordRouter.route(
                    receiver,
                    element,
                    BigEndianIntegerCoder.of(),
                    new RuntimeException(),
                    "Integer was odd",
                    "NoOpDoFn");
        }
    }
}
} | class BRHEnabledPTransform extends PTransform<PCollection<Integer>, PCollection<Integer>> {
private ErrorHandler<BadRecord, ?> errorHandler = new DefaultErrorHandler<>();
private BadRecordRouter badRecordRouter = BadRecordRouter.THROWING_ROUTER;
private static final TupleTag<Integer> RECORDS = new TupleTag<>();
public BRHEnabledPTransform() {}
// Enables bad-record collection: elements routed as "bad" are delivered to the
// given handler instead of failing the pipeline (the default throwing router).
public BRHEnabledPTransform withBadRecordHandler(ErrorHandler<BadRecord, ?> errorHandler) {
    this.errorHandler = errorHandler;
    this.badRecordRouter = BadRecordRouter.RECORDING_ROUTER;
    return this;
}
/**
 * Test {@link DoFn} that outputs even integers and routes odd ones to the bad-record
 * output via the configured {@link BadRecordRouter}.
 *
 * <p>Fix: removed the stray {@code @Override} annotation that preceded this class —
 * {@code @Override} is not applicable to type declarations and does not compile.
 */
public static class OddIsBad extends DoFn<Integer, Integer> {
    // Router deciding whether bad elements throw or are recorded.
    private final BadRecordRouter badRecordRouter;

    public OddIsBad(BadRecordRouter badRecordRouter) {
        this.badRecordRouter = badRecordRouter;
    }

    @ProcessElement
    public void processElement(@Element Integer element, MultiOutputReceiver receiver)
            throws Exception {
        if (element % 2 == 0) {
            receiver.get(RECORDS).output(element);
        } else {
            // Odd elements are treated as bad records with a descriptive reason.
            badRecordRouter.route(
                    receiver,
                    element,
                    BigEndianIntegerCoder.of(),
                    new RuntimeException("Integer was odd"),
                    "Integer was odd");
        }
    }
}
} |
this looks like a workaround to ignore the order? | public void testTypeConversions() throws Exception {
List<Row> data =
Arrays.asList(
Row.of(
1,
"ABC",
java.sql.Timestamp.valueOf("2000-12-12 12:30:57.12"),
Row.of(1, new byte[] {1, 2}, "ABC", Arrays.asList(1, 2, 3))),
Row.of(
Math.PI,
"ABC",
LocalDateTime.parse("2000-12-12T12:30:57.123456"),
Row.of(
Math.PI,
new byte[] {2, 3},
"ABC",
Arrays.asList(1L, 2L, 3L))),
Row.of(
3.1f,
"DEF",
LocalDateTime.parse("2000-12-12T12:30:57.1234567"),
Row.of(3.1f, new byte[] {3}, "DEF", Arrays.asList(1D, 2D, 3D))),
Row.of(
99L,
"DEFG",
LocalDateTime.parse("2000-12-12T12:30:57.12345678"),
Row.of(99L, new byte[] {3, 4}, "DEFG", Arrays.asList(1f, 2f, 3f))),
Row.of(
0d,
"D",
LocalDateTime.parse("2000-12-12T12:30:57.123"),
Row.of(0d, new byte[] {4}, "D", Arrays.asList(1, 2, 3))));
UnresolvedDataType rowType =
DataTypes.ROW(
DataTypes.FIELD(
"a", DataTypes.of("DECIMAL(10, 2) NOT NULL")),
DataTypes.FIELD("b", DataTypes.CHAR(4).notNull()),
DataTypes.FIELD("c", DataTypes.TIMESTAMP(4).notNull()),
DataTypes.FIELD(
"row",
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.DECIMAL(10, 3)),
DataTypes.FIELD("b", DataTypes.BINARY(2)),
DataTypes.FIELD("c", DataTypes.CHAR(5).notNull()),
DataTypes.FIELD(
"d", DataTypes.ARRAY(DataTypes.DECIMAL(10, 2))))));
Table t = tEnv().fromValues(rowType, data);
TestCollectionTableFactory.reset();
tEnv().executeSql(
"CREATE TABLE SinkTable("
+ "a DECIMAL(10, 2) NOT NULL, "
+ "b CHAR(4) NOT NULL,"
+ "c TIMESTAMP(4) NOT NULL,"
+ "`row` ROW<a DECIMAL(10, 3) NOT NULL, b BINARY(2), c CHAR(5) NOT NULL, d ARRAY<DECIMAL(10, 2)>>) "
+ "WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> expected =
Arrays.asList(
Row.of(
new BigDecimal("1.00"),
"ABC ",
LocalDateTime.parse("2000-12-12T12:30:57.120"),
Row.of(
new BigDecimal("1.000"),
new byte[] {1, 2},
"ABC ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("3.14"),
"ABC ",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("3.142"),
new byte[] {2, 3},
"ABC ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("3.10"),
"DEF ",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("3.100"),
new byte[] {3, 0},
"DEF ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("99.00"),
"DEFG",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("99.000"),
new byte[] {3, 4},
"DEFG ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("0.00"),
"D ",
LocalDateTime.parse("2000-12-12T12:30:57.123"),
Row.of(
new BigDecimal("0.000"),
new byte[] {4, 0},
"D ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})));
List<Row> actual = TestCollectionTableFactory.getResult();
assertThat(new HashSet<>(actual)).isEqualTo(new HashSet<>(expected));
} | assertThat(new HashSet<>(actual)).isEqualTo(new HashSet<>(expected)); | public void testTypeConversions() throws Exception {
List<Row> data =
Arrays.asList(
Row.of(
1,
"ABC",
java.sql.Timestamp.valueOf("2000-12-12 12:30:57.12"),
Row.of(1, new byte[] {1, 2}, "ABC", Arrays.asList(1, 2, 3))),
Row.of(
Math.PI,
"ABC",
LocalDateTime.parse("2000-12-12T12:30:57.123456"),
Row.of(
Math.PI,
new byte[] {2, 3},
"ABC",
Arrays.asList(1L, 2L, 3L))),
Row.of(
3.1f,
"DEF",
LocalDateTime.parse("2000-12-12T12:30:57.1234567"),
Row.of(3.1f, new byte[] {3}, "DEF", Arrays.asList(1D, 2D, 3D))),
Row.of(
99L,
"DEFG",
LocalDateTime.parse("2000-12-12T12:30:57.12345678"),
Row.of(99L, new byte[] {3, 4}, "DEFG", Arrays.asList(1f, 2f, 3f))),
Row.of(
0d,
"D",
LocalDateTime.parse("2000-12-12T12:30:57.123"),
Row.of(0d, new byte[] {4}, "D", Arrays.asList(1, 2, 3))));
UnresolvedDataType rowType =
DataTypes.ROW(
DataTypes.FIELD(
"a", DataTypes.of("DECIMAL(10, 2) NOT NULL")),
DataTypes.FIELD("b", DataTypes.CHAR(4).notNull()),
DataTypes.FIELD("c", DataTypes.TIMESTAMP(4).notNull()),
DataTypes.FIELD(
"row",
DataTypes.ROW(
DataTypes.FIELD("a", DataTypes.DECIMAL(10, 3)),
DataTypes.FIELD("b", DataTypes.BINARY(2)),
DataTypes.FIELD("c", DataTypes.CHAR(5).notNull()),
DataTypes.FIELD(
"d", DataTypes.ARRAY(DataTypes.DECIMAL(10, 2))))));
Table t = tEnv().fromValues(rowType, data);
TestCollectionTableFactory.reset();
tEnv().executeSql(
"CREATE TABLE SinkTable("
+ "a DECIMAL(10, 2) NOT NULL, "
+ "b CHAR(4) NOT NULL,"
+ "c TIMESTAMP(4) NOT NULL,"
+ "`row` ROW<a DECIMAL(10, 3) NOT NULL, b BINARY(2), c CHAR(5) NOT NULL, d ARRAY<DECIMAL(10, 2)>>) "
+ "WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> expected =
Arrays.asList(
Row.of(
new BigDecimal("1.00"),
"ABC ",
LocalDateTime.parse("2000-12-12T12:30:57.120"),
Row.of(
new BigDecimal("1.000"),
new byte[] {1, 2},
"ABC ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("3.14"),
"ABC ",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("3.142"),
new byte[] {2, 3},
"ABC ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("3.10"),
"DEF ",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("3.100"),
new byte[] {3, 0},
"DEF ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("99.00"),
"DEFG",
LocalDateTime.parse("2000-12-12T12:30:57.123400"),
Row.of(
new BigDecimal("99.000"),
new byte[] {3, 4},
"DEFG ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})),
Row.of(
new BigDecimal("0.00"),
"D ",
LocalDateTime.parse("2000-12-12T12:30:57.123"),
Row.of(
new BigDecimal("0.000"),
new byte[] {4, 0},
"D ",
new BigDecimal[] {
new BigDecimal("1.00"),
new BigDecimal("2.00"),
new BigDecimal("3.00")
})));
List<Row> actual = TestCollectionTableFactory.getResult();
assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);
} | class ValuesITCase extends StreamingTestBase {
@Test
@Test
public void testAllTypes() throws Exception {
List<Row> data =
Arrays.asList(
rowWithNestedRow(
(byte) 1,
(short) 1,
1,
1L,
1.1f,
1.1,
new BigDecimal("1.1"),
true,
LocalTime.of(1, 1, 1),
LocalDate.of(1, 1, 1),
LocalDateTime.of(1, 1, 1, 1, 1, 1, 1),
Instant.ofEpochMilli(1),
"1",
new byte[] {1},
new BigDecimal[] {new BigDecimal("1.1")},
createMap("1", new BigDecimal("1.1"))),
rowWithNestedRow(
(byte) 2,
(short) 2,
2,
2L,
2.2f,
2.2,
new BigDecimal("2.2"),
false,
LocalTime.of(2, 2, 2),
LocalDate.of(2, 2, 2),
LocalDateTime.of(2, 2, 2, 2, 2, 2, 2),
Instant.ofEpochMilli(2),
"2",
new byte[] {2},
new BigDecimal[] {new BigDecimal("2.2")},
createMap("2", new BigDecimal("2.2"))));
Table t = tEnv().fromValues(data);
TestCollectionTableFactory.reset();
tEnv().executeSql(
"CREATE TABLE SinkTable("
+ "f0 TINYINT, "
+ "f1 SMALLINT, "
+ "f2 INT, "
+ "f3 BIGINT, "
+ "f4 FLOAT, "
+ "f5 DOUBLE, "
+ "f6 DECIMAL(2, 1), "
+ "f7 BOOLEAN, "
+ "f8 TIME(0), "
+ "f9 DATE, "
+ "f12 TIMESTAMP(9), "
+ "f13 TIMESTAMP(3) WITH LOCAL TIME ZONE, "
+ "f14 CHAR(1), "
+ "f15 BINARY(1), "
+ "f16 ARRAY<DECIMAL(2, 1)>, "
+ "f17 MAP<CHAR(1), DECIMAL(2, 1)>, "
+ "f18 ROW<"
+ " `f0` TINYINT, "
+ " `f1` SMALLINT, "
+ " `f2` INT, "
+ " `f3` BIGINT, "
+ " `f4` FLOAT, "
+ " `f5` DOUBLE, "
+ " `f6` DECIMAL(2, 1), "
+ " `f7` BOOLEAN, "
+ " `f8` TIME(0), "
+ " `f9` DATE, "
+ " `f12` TIMESTAMP(9), "
+ " `f13` TIMESTAMP(3) WITH LOCAL TIME ZONE, "
+ " `f14` CHAR(1), "
+ " `f15` BINARY(1), "
+ " `f16` ARRAY<DECIMAL(2, 1)>, "
+ " `f17` MAP<CHAR(1), DECIMAL(2, 1)>>) "
+ "WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> actual = TestCollectionTableFactory.getResult();
assertThat(new HashSet<>(actual)).isEqualTo(new HashSet<>(data));
}
@Test
public void testProjectionWithValues() throws Exception {
List<Row> data =
Arrays.asList(
Row.of(
(byte) 1,
(short) 1,
1,
1L,
1.1f,
1.1,
new BigDecimal("1.1"),
true,
LocalTime.of(1, 1, 1),
LocalDate.of(1, 1, 1),
LocalDateTime.of(1, 1, 1, 1, 1, 1, 1),
Instant.ofEpochMilli(1),
"1",
new byte[] {1},
new BigDecimal[] {new BigDecimal("1.1")},
createMap("1", new BigDecimal("1.1"))),
Row.of(
(byte) 2,
(short) 2,
2,
2L,
2.2f,
2.2,
new BigDecimal("2.2"),
false,
LocalTime.of(2, 2, 2),
LocalDate.of(2, 2, 2),
LocalDateTime.of(2, 2, 2, 2, 2, 2, 2),
Instant.ofEpochMilli(2),
"2",
new byte[] {2},
new BigDecimal[] {new BigDecimal("2.2")},
createMap("2", new BigDecimal("2.2"))));
tEnv().createTemporaryFunction("func", new CustomScalarFunction());
Table t = tEnv().fromValues(data).select(call("func", withColumns(range("f0", "f15"))));
TestCollectionTableFactory.reset();
tEnv().executeSql("CREATE TABLE SinkTable(str STRING) WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> actual = TestCollectionTableFactory.getResult();
List<Row> expected =
Arrays.asList(
Row.of(
"1,1,1,1,1.1,1.1,1.1,true,01:01:01,0001-01-01,0001-01-01T01:01:01.000000001,"
+ "1970-01-01T00:00:00.001Z,1,[1],[1.1],{1=1.1}"),
Row.of(
"2,2,2,2,2.2,2.2,2.2,false,02:02:02,0002-02-02,0002-02-02T02:02:02.000000002,"
+ "1970-01-01T00:00:00.002Z,2,[2],[2.2],{2=2.2}"));
assertThat(new HashSet<>(actual)).isEqualTo(new HashSet<>(expected));
}
@Test
public void testRegisteringValuesWithComplexTypes() {
Map<Integer, Integer> mapData = new HashMap<>();
mapData.put(1, 1);
mapData.put(2, 2);
Row row = Row.of(mapData, Row.of(1, 2, 3), new Integer[] {1, 2});
Table values = tEnv().fromValues(Collections.singletonList(row));
tEnv().createTemporaryView("values_t", values);
List<Row> results =
CollectionUtil.iteratorToList(
tEnv().executeSql("select * from values_t").collect());
assertThat(results).isEqualTo(Collections.singletonList(row));
}
/**
* A {@link ScalarFunction} that takes all supported types as parameters and converts them to
* String.
*/
@FunctionHint(
output = @DataTypeHint("STRING"),
input = {
@DataTypeHint("TINYINT"),
@DataTypeHint("SMALLINT"),
@DataTypeHint("INT"),
@DataTypeHint("BIGINT"),
@DataTypeHint("FLOAT"),
@DataTypeHint("DOUBLE"),
@DataTypeHint("DECIMAL(2, 1)"),
@DataTypeHint("BOOLEAN"),
@DataTypeHint("TIME(0)"),
@DataTypeHint("DATE"),
@DataTypeHint("TIMESTAMP(9)"),
@DataTypeHint("TIMESTAMP(3) WITH LOCAL TIME ZONE"),
@DataTypeHint("CHAR(1)"),
@DataTypeHint("BINARY(1)"),
@DataTypeHint("ARRAY<DECIMAL(2, 1)>"),
@DataTypeHint("MAP<CHAR(1), DECIMAL(2, 1)>")
})
public static class CustomScalarFunction extends ScalarFunction {
public String eval(
byte tinyint,
short smallInt,
int integer,
long bigint,
float floating,
double doublePrecision,
BigDecimal decimal,
boolean bool,
LocalTime time,
LocalDate date,
LocalDateTime timestamp,
Instant localZonedTimestamp,
String character,
byte[] binary,
BigDecimal[] array,
Map<String, BigDecimal> map) {
return Stream.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
Arrays.toString(binary),
Arrays.toString(array),
map)
.map(Object::toString)
.collect(Collectors.joining(","));
}
}
private static Map<String, BigDecimal> createMap(String key, BigDecimal value) {
Map<String, BigDecimal> map = new HashMap<>();
map.put(key, value);
return map;
}
private static Row rowWithNestedRow(
byte tinyint,
short smallInt,
int integer,
long bigint,
float floating,
double doublePrecision,
BigDecimal decimal,
boolean bool,
LocalTime time,
LocalDate date,
LocalDateTime timestamp,
Instant localZonedTimestamp,
String character,
byte[] binary,
BigDecimal[] array,
Map<String, BigDecimal> map) {
return Row.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
binary,
array,
map,
Row.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
binary,
array,
map));
}
} | class ValuesITCase extends StreamingTestBase {
@Test
@Test
public void testAllTypes() throws Exception {
List<Row> data =
Arrays.asList(
rowWithNestedRow(
(byte) 1,
(short) 1,
1,
1L,
1.1f,
1.1,
new BigDecimal("1.1"),
true,
LocalTime.of(1, 1, 1),
LocalDate.of(1, 1, 1),
LocalDateTime.of(1, 1, 1, 1, 1, 1, 1),
Instant.ofEpochMilli(1),
"1",
new byte[] {1},
new BigDecimal[] {new BigDecimal("1.1")},
createMap("1", new BigDecimal("1.1"))),
rowWithNestedRow(
(byte) 2,
(short) 2,
2,
2L,
2.2f,
2.2,
new BigDecimal("2.2"),
false,
LocalTime.of(2, 2, 2),
LocalDate.of(2, 2, 2),
LocalDateTime.of(2, 2, 2, 2, 2, 2, 2),
Instant.ofEpochMilli(2),
"2",
new byte[] {2},
new BigDecimal[] {new BigDecimal("2.2")},
createMap("2", new BigDecimal("2.2"))));
Table t = tEnv().fromValues(data);
TestCollectionTableFactory.reset();
tEnv().executeSql(
"CREATE TABLE SinkTable("
+ "f0 TINYINT, "
+ "f1 SMALLINT, "
+ "f2 INT, "
+ "f3 BIGINT, "
+ "f4 FLOAT, "
+ "f5 DOUBLE, "
+ "f6 DECIMAL(2, 1), "
+ "f7 BOOLEAN, "
+ "f8 TIME(0), "
+ "f9 DATE, "
+ "f12 TIMESTAMP(9), "
+ "f13 TIMESTAMP(3) WITH LOCAL TIME ZONE, "
+ "f14 CHAR(1), "
+ "f15 BINARY(1), "
+ "f16 ARRAY<DECIMAL(2, 1)>, "
+ "f17 MAP<CHAR(1), DECIMAL(2, 1)>, "
+ "f18 ROW<"
+ " `f0` TINYINT, "
+ " `f1` SMALLINT, "
+ " `f2` INT, "
+ " `f3` BIGINT, "
+ " `f4` FLOAT, "
+ " `f5` DOUBLE, "
+ " `f6` DECIMAL(2, 1), "
+ " `f7` BOOLEAN, "
+ " `f8` TIME(0), "
+ " `f9` DATE, "
+ " `f12` TIMESTAMP(9), "
+ " `f13` TIMESTAMP(3) WITH LOCAL TIME ZONE, "
+ " `f14` CHAR(1), "
+ " `f15` BINARY(1), "
+ " `f16` ARRAY<DECIMAL(2, 1)>, "
+ " `f17` MAP<CHAR(1), DECIMAL(2, 1)>>) "
+ "WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> actual = TestCollectionTableFactory.getResult();
assertThat(actual).containsExactlyInAnyOrderElementsOf(data);
}
@Test
public void testProjectionWithValues() throws Exception {
List<Row> data =
Arrays.asList(
Row.of(
(byte) 1,
(short) 1,
1,
1L,
1.1f,
1.1,
new BigDecimal("1.1"),
true,
LocalTime.of(1, 1, 1),
LocalDate.of(1, 1, 1),
LocalDateTime.of(1, 1, 1, 1, 1, 1, 1),
Instant.ofEpochMilli(1),
"1",
new byte[] {1},
new BigDecimal[] {new BigDecimal("1.1")},
createMap("1", new BigDecimal("1.1"))),
Row.of(
(byte) 2,
(short) 2,
2,
2L,
2.2f,
2.2,
new BigDecimal("2.2"),
false,
LocalTime.of(2, 2, 2),
LocalDate.of(2, 2, 2),
LocalDateTime.of(2, 2, 2, 2, 2, 2, 2),
Instant.ofEpochMilli(2),
"2",
new byte[] {2},
new BigDecimal[] {new BigDecimal("2.2")},
createMap("2", new BigDecimal("2.2"))));
tEnv().createTemporaryFunction("func", new CustomScalarFunction());
Table t = tEnv().fromValues(data).select(call("func", withColumns(range("f0", "f15"))));
TestCollectionTableFactory.reset();
tEnv().executeSql("CREATE TABLE SinkTable(str STRING) WITH ('connector' = 'COLLECTION')");
t.executeInsert("SinkTable").await();
List<Row> actual = TestCollectionTableFactory.getResult();
List<Row> expected =
Arrays.asList(
Row.of(
"1,1,1,1,1.1,1.1,1.1,true,01:01:01,0001-01-01,0001-01-01T01:01:01.000000001,"
+ "1970-01-01T00:00:00.001Z,1,[1],[1.1],{1=1.1}"),
Row.of(
"2,2,2,2,2.2,2.2,2.2,false,02:02:02,0002-02-02,0002-02-02T02:02:02.000000002,"
+ "1970-01-01T00:00:00.002Z,2,[2],[2.2],{2=2.2}"));
assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);
}
@Test
public void testRegisteringValuesWithComplexTypes() {
Map<Integer, Integer> mapData = new HashMap<>();
mapData.put(1, 1);
mapData.put(2, 2);
Row row = Row.of(mapData, Row.of(1, 2, 3), new Integer[] {1, 2});
Table values = tEnv().fromValues(Collections.singletonList(row));
tEnv().createTemporaryView("values_t", values);
List<Row> results =
CollectionUtil.iteratorToList(
tEnv().executeSql("select * from values_t").collect());
assertThat(results).containsExactly(row);
}
/**
* A {@link ScalarFunction} that takes all supported types as parameters and converts them to
* String.
*/
@FunctionHint(
output = @DataTypeHint("STRING"),
input = {
@DataTypeHint("TINYINT"),
@DataTypeHint("SMALLINT"),
@DataTypeHint("INT"),
@DataTypeHint("BIGINT"),
@DataTypeHint("FLOAT"),
@DataTypeHint("DOUBLE"),
@DataTypeHint("DECIMAL(2, 1)"),
@DataTypeHint("BOOLEAN"),
@DataTypeHint("TIME(0)"),
@DataTypeHint("DATE"),
@DataTypeHint("TIMESTAMP(9)"),
@DataTypeHint("TIMESTAMP(3) WITH LOCAL TIME ZONE"),
@DataTypeHint("CHAR(1)"),
@DataTypeHint("BINARY(1)"),
@DataTypeHint("ARRAY<DECIMAL(2, 1)>"),
@DataTypeHint("MAP<CHAR(1), DECIMAL(2, 1)>")
})
public static class CustomScalarFunction extends ScalarFunction {
public String eval(
byte tinyint,
short smallInt,
int integer,
long bigint,
float floating,
double doublePrecision,
BigDecimal decimal,
boolean bool,
LocalTime time,
LocalDate date,
LocalDateTime timestamp,
Instant localZonedTimestamp,
String character,
byte[] binary,
BigDecimal[] array,
Map<String, BigDecimal> map) {
return Stream.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
Arrays.toString(binary),
Arrays.toString(array),
map)
.map(Object::toString)
.collect(Collectors.joining(","));
}
}
private static Map<String, BigDecimal> createMap(String key, BigDecimal value) {
Map<String, BigDecimal> map = new HashMap<>();
map.put(key, value);
return map;
}
private static Row rowWithNestedRow(
byte tinyint,
short smallInt,
int integer,
long bigint,
float floating,
double doublePrecision,
BigDecimal decimal,
boolean bool,
LocalTime time,
LocalDate date,
LocalDateTime timestamp,
Instant localZonedTimestamp,
String character,
byte[] binary,
BigDecimal[] array,
Map<String, BigDecimal> map) {
return Row.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
binary,
array,
map,
Row.of(
tinyint,
smallInt,
integer,
bigint,
floating,
doublePrecision,
decimal,
bool,
time,
date,
timestamp,
localZonedTimestamp,
character,
binary,
array,
map));
}
} |
Please do not use `Map.Entry`, just use `Entry` | private Collection<EncryptColumnMetaData> getTableEncryptColumnMetaDatas() {
Collection<EncryptColumnMetaData> result = new LinkedList<>();
for (Map.Entry<String, ColumnMetaData> entry : schemaMetaData.get(tableName).getColumns().entrySet()) {
if (!(entry.getValue() instanceof EncryptColumnMetaData)) {
continue;
}
result.add((EncryptColumnMetaData) entry.getValue());
}
return result;
} | for (Map.Entry<String, ColumnMetaData> entry : schemaMetaData.get(tableName).getColumns().entrySet()) { | private Collection<EncryptColumnMetaData> getTableEncryptColumnMetaDatas() {
Collection<EncryptColumnMetaData> result = new LinkedList<>();
for (Entry<String, ColumnMetaData> entry : schemaMetaData.get(tableName).getColumns().entrySet()) {
if (entry.getValue() instanceof EncryptColumnMetaData) {
result.add((EncryptColumnMetaData) entry.getValue());
}
}
return result;
} | class EncryptColumnsMergedResult implements MergedResult {
private final SchemaMetaData schemaMetaData;
private final String tableName;
protected EncryptColumnsMergedResult(final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
this.schemaMetaData = schemaMetaData;
Preconditions.checkState(sqlStatementContext instanceof TableAvailable && 1 == ((TableAvailable) sqlStatementContext).getAllTables().size());
tableName = ((TableAvailable) sqlStatementContext).getAllTables().iterator().next().getTableName().getIdentifier().getValue();
}
@Override
public final boolean next() throws SQLException {
boolean hasNext = nextValue();
if (hasNext && getTableEncryptColumnMetaDatas().isEmpty()) {
return true;
}
if (!hasNext) {
return false;
}
String columnName = getOriginalValue(1, String.class).toString();
while (getAssistedQueryColumns().contains(columnName) || getPlainColumns().contains(columnName)) {
hasNext = nextValue();
if (!hasNext) {
return false;
}
columnName = getOriginalValue(1, String.class).toString();
}
return true;
}
private Collection<String> getAssistedQueryColumns() {
return getTableEncryptColumnMetaDatas().stream().map(EncryptColumnMetaData::getAssistedQueryColumnName)
.collect(Collectors.toList());
}
private Collection<String> getPlainColumns() {
return getTableEncryptColumnMetaDatas().stream().map(EncryptColumnMetaData::getPlainColumnName)
.collect(Collectors.toList());
}
@Override
public final Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
if (1 == columnIndex) {
String columnName = getOriginalValue(columnIndex, type).toString();
Optional<String> logicColumn = getLogicColumnOfCipher(columnName);
return logicColumn.isPresent() ? logicColumn.get() : columnName;
}
return getOriginalValue(columnIndex, type);
}
private Optional<String> getLogicColumnOfCipher(final String cipherColumn) {
for (Map.Entry<String, ColumnMetaData> entry : schemaMetaData.get(tableName).getColumns().entrySet()) {
if (!(entry.getValue() instanceof EncryptColumnMetaData)) {
continue;
}
EncryptColumnMetaData encryptColumnMetaData = (EncryptColumnMetaData) entry.getValue();
if (encryptColumnMetaData.getCipherColumnName().equalsIgnoreCase(cipherColumn)) {
return Optional.of(entry.getKey());
}
}
return Optional.empty();
}
@Override
public final Object getCalendarValue(final int columnIndex, final Class<?> type, final Calendar calendar) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@SuppressWarnings("deprecation")
@Override
public final InputStream getInputStream(final int columnIndex, final String type) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
protected abstract boolean nextValue() throws SQLException;
protected abstract Object getOriginalValue(int columnIndex, Class<?> type) throws SQLException;
} | class EncryptColumnsMergedResult implements MergedResult {
private final SchemaMetaData schemaMetaData;
private final String tableName;
protected EncryptColumnsMergedResult(final SQLStatementContext sqlStatementContext, final SchemaMetaData schemaMetaData) {
this.schemaMetaData = schemaMetaData;
Preconditions.checkState(sqlStatementContext instanceof TableAvailable && 1 == ((TableAvailable) sqlStatementContext).getAllTables().size());
tableName = ((TableAvailable) sqlStatementContext).getAllTables().iterator().next().getTableName().getIdentifier().getValue();
}
@Override
public final boolean next() throws SQLException {
boolean hasNext = nextValue();
if (hasNext && getTableEncryptColumnMetaDatas().isEmpty()) {
return true;
}
if (!hasNext) {
return false;
}
String columnName = getOriginalValue(1, String.class).toString();
while (getAssistedQueryColumns().contains(columnName) || getPlainColumns().contains(columnName)) {
hasNext = nextValue();
if (!hasNext) {
return false;
}
columnName = getOriginalValue(1, String.class).toString();
}
return true;
}
private Collection<String> getAssistedQueryColumns() {
return getTableEncryptColumnMetaDatas().stream().map(EncryptColumnMetaData::getAssistedQueryColumnName)
.collect(Collectors.toList());
}
private Collection<String> getPlainColumns() {
return getTableEncryptColumnMetaDatas().stream().map(EncryptColumnMetaData::getPlainColumnName)
.collect(Collectors.toList());
}
@Override
public final Object getValue(final int columnIndex, final Class<?> type) throws SQLException {
if (1 == columnIndex) {
String columnName = getOriginalValue(columnIndex, type).toString();
Optional<String> logicColumn = getLogicColumnOfCipher(columnName);
return logicColumn.isPresent() ? logicColumn.get() : columnName;
}
return getOriginalValue(columnIndex, type);
}
private Optional<String> getLogicColumnOfCipher(final String cipherColumn) {
for (Entry<String, ColumnMetaData> entry : schemaMetaData.get(tableName).getColumns().entrySet()) {
if (entry.getValue() instanceof EncryptColumnMetaData) {
EncryptColumnMetaData encryptColumnMetaData = (EncryptColumnMetaData) entry.getValue();
if (encryptColumnMetaData.getCipherColumnName().equalsIgnoreCase(cipherColumn)) {
return Optional.of(entry.getKey());
}
}
}
return Optional.empty();
}
@Override
public final Object getCalendarValue(final int columnIndex, final Class<?> type, final Calendar calendar) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
@SuppressWarnings("deprecation")
@Override
public final InputStream getInputStream(final int columnIndex, final String type) throws SQLException {
throw new SQLFeatureNotSupportedException();
}
protected abstract boolean nextValue() throws SQLException;
protected abstract Object getOriginalValue(int columnIndex, Class<?> type) throws SQLException;
} |
How do we know when data distribution has to completed if we add new nodes? Since we know from the event what the target is, wouldn't it be better to identify the nodes that we are scaling to (i.e. the `count` nodes that are `active`, not retired and have the same resources as the `cluster.targetResources()`), then for each of those check with cluster controller if the node is in sync? | private Cluster updateCompletion(Cluster cluster, NodeList clusterNodes) {
if (cluster.lastScalingEvent().isEmpty()) return cluster;
var event = cluster.lastScalingEvent().get();
if (event.completion().isPresent()) return cluster;
if (clusterNodes.retired().stream()
.anyMatch(node -> node.history().hasEventAt(History.Event.Type.retired, event.at())))
return cluster;
for (NodeTimeseries nodeTimeseries : metricsDb.getNodeTimeseries(event.at(), clusterNodes)) {
Optional<MetricSnapshot> firstOnNewGeneration =
nodeTimeseries.asList().stream()
.filter(snapshot -> snapshot.generation() >= event.generation()).findFirst();
if (firstOnNewGeneration.isEmpty()) return cluster;
}
Instant completionTime = nodeRepository().clock().instant();
return cluster.with(event.withCompletion(completionTime));
} | private Cluster updateCompletion(Cluster cluster, NodeList clusterNodes) {
if (cluster.lastScalingEvent().isEmpty()) return cluster;
var event = cluster.lastScalingEvent().get();
if (event.completion().isPresent()) return cluster;
if (clusterNodes.retired().stream()
.anyMatch(node -> node.history().hasEventAt(History.Event.Type.retired, event.at())))
return cluster;
for (NodeTimeseries nodeTimeseries : metricsDb.getNodeTimeseries(event.at(), clusterNodes)) {
Optional<MetricSnapshot> firstOnNewGeneration =
nodeTimeseries.asList().stream()
.filter(snapshot -> snapshot.generation() >= event.generation()).findFirst();
if (firstOnNewGeneration.isEmpty()) return cluster;
}
Instant completionTime = nodeRepository().clock().instant();
return cluster.with(event.withCompletion(completionTime));
} | class AutoscalingMaintainer extends NodeRepositoryMaintainer {
private final Autoscaler autoscaler;
private final MetricsDb metricsDb;
private final Deployer deployer;
private final Metric metric;
public AutoscalingMaintainer(NodeRepository nodeRepository,
MetricsDb metricsDb,
Deployer deployer,
Metric metric,
Duration interval) {
super(nodeRepository, interval, metric);
this.autoscaler = new Autoscaler(metricsDb, nodeRepository);
this.metricsDb = metricsDb;
this.deployer = deployer;
this.metric = metric;
}
@Override
protected boolean maintain() {
if ( ! nodeRepository().isWorking()) return false;
boolean success = true;
if ( ! nodeRepository().zone().environment().isProduction()) return success;
activeNodesByApplication().forEach((applicationId, nodes) -> autoscale(applicationId, nodes));
return success;
}
private void autoscale(ApplicationId application, List<Node> applicationNodes) {
try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, metric, nodeRepository())) {
if ( ! deployment.isValid()) return;
nodesByCluster(applicationNodes).forEach((clusterId, clusterNodes) -> autoscale(application, clusterId, NodeList.copyOf(clusterNodes), deployment));
}
}
private void autoscale(ApplicationId applicationId,
ClusterSpec.Id clusterId,
NodeList clusterNodes,
MaintenanceDeployment deployment) {
Application application = nodeRepository().applications().get(applicationId).orElse(new Application(applicationId));
if (application.cluster(clusterId).isEmpty()) return;
Cluster cluster = application.cluster(clusterId).get();
cluster = updateCompletion(cluster, clusterNodes);
var advice = autoscaler.autoscale(cluster, clusterNodes);
cluster = cluster.withAutoscalingStatus(advice.reason());
if (advice.isEmpty()) {
applications().put(application.with(cluster), deployment.applicationLock().get());
} else if (!cluster.targetResources().equals(advice.target())) {
applications().put(application.with(cluster.withTarget(advice.target())), deployment.applicationLock().get());
if (advice.target().isPresent()) {
logAutoscaling(advice.target().get(), applicationId, cluster, clusterNodes);
deployment.activate();
}
}
}
private Applications applications() {
return nodeRepository().applications();
}
/** Check if the last scaling event for this cluster has completed and if so record it in the returned instance */
private void logAutoscaling(ClusterResources target,
ApplicationId application,
Cluster cluster,
NodeList clusterNodes) {
ClusterResources current = new AllocatableClusterResources(clusterNodes.asList(), nodeRepository(), cluster.exclusive()).toAdvertisedClusterResources();
log.info("Autoscaling " + application + " " + clusterNodes.clusterSpec() + ":" +
"\nfrom " + toString(current) + "\nto " + toString(target));
}
static String toString(ClusterResources r) {
return r + " (total: " + r.totalResources() + ")";
}
private Map<ClusterSpec.Id, List<Node>> nodesByCluster(List<Node> applicationNodes) {
return applicationNodes.stream().collect(Collectors.groupingBy(n -> n.allocation().get().membership().cluster().id()));
}
} | class AutoscalingMaintainer extends NodeRepositoryMaintainer {
private final Autoscaler autoscaler;
private final MetricsDb metricsDb;
private final Deployer deployer;
private final Metric metric;
public AutoscalingMaintainer(NodeRepository nodeRepository,
MetricsDb metricsDb,
Deployer deployer,
Metric metric,
Duration interval) {
super(nodeRepository, interval, metric);
this.autoscaler = new Autoscaler(metricsDb, nodeRepository);
this.metricsDb = metricsDb;
this.deployer = deployer;
this.metric = metric;
}
@Override
protected boolean maintain() {
if ( ! nodeRepository().isWorking()) return false;
boolean success = true;
if ( ! nodeRepository().zone().environment().isProduction()) return success;
activeNodesByApplication().forEach((applicationId, nodes) -> autoscale(applicationId, nodes));
return success;
}
private void autoscale(ApplicationId application, List<Node> applicationNodes) {
try (MaintenanceDeployment deployment = new MaintenanceDeployment(application, deployer, metric, nodeRepository())) {
if ( ! deployment.isValid()) return;
nodesByCluster(applicationNodes).forEach((clusterId, clusterNodes) -> autoscale(application, clusterId, NodeList.copyOf(clusterNodes), deployment));
}
}
private void autoscale(ApplicationId applicationId,
ClusterSpec.Id clusterId,
NodeList clusterNodes,
MaintenanceDeployment deployment) {
Application application = nodeRepository().applications().get(applicationId).orElse(new Application(applicationId));
if (application.cluster(clusterId).isEmpty()) return;
Cluster cluster = application.cluster(clusterId).get();
cluster = updateCompletion(cluster, clusterNodes);
var advice = autoscaler.autoscale(cluster, clusterNodes);
cluster = cluster.withAutoscalingStatus(advice.reason());
if (advice.isEmpty()) {
applications().put(application.with(cluster), deployment.applicationLock().get());
} else if (!cluster.targetResources().equals(advice.target())) {
applications().put(application.with(cluster.withTarget(advice.target())), deployment.applicationLock().get());
if (advice.target().isPresent()) {
logAutoscaling(advice.target().get(), applicationId, cluster, clusterNodes);
deployment.activate();
}
}
}
private Applications applications() {
return nodeRepository().applications();
}
/** Check if the last scaling event for this cluster has completed and if so record it in the returned instance */
private void logAutoscaling(ClusterResources target,
ApplicationId application,
Cluster cluster,
NodeList clusterNodes) {
ClusterResources current = new AllocatableClusterResources(clusterNodes.asList(), nodeRepository(), cluster.exclusive()).toAdvertisedClusterResources();
log.info("Autoscaling " + application + " " + clusterNodes.clusterSpec() + ":" +
"\nfrom " + toString(current) + "\nto " + toString(target));
}
static String toString(ClusterResources r) {
return r + " (total: " + r.totalResources() + ")";
}
private Map<ClusterSpec.Id, List<Node>> nodesByCluster(List<Node> applicationNodes) {
return applicationNodes.stream().collect(Collectors.groupingBy(n -> n.allocation().get().membership().cluster().id()));
}
} | |
I believe so. But it's a good point that we could be more defensive here... | BeanArchivePredicateBuildItem additionalBeanArchives() {
return new BeanArchivePredicateBuildItem(new Predicate<ApplicationArchive>() {
@Override
public boolean test(ApplicationArchive archive) {
return !archive.getIndex().getKnownDirectImplementors(GrpcDotNames.MUTINY_BEAN).isEmpty();
}
});
} | return !archive.getIndex().getKnownDirectImplementors(GrpcDotNames.MUTINY_BEAN).isEmpty(); | BeanArchivePredicateBuildItem additionalBeanArchives() {
return new BeanArchivePredicateBuildItem(new Predicate<ApplicationArchive>() {
@Override
public boolean test(ApplicationArchive archive) {
return !archive.getIndex().getKnownDirectImplementors(GrpcDotNames.MUTINY_BEAN).isEmpty();
}
});
} | class extends the impl base
continue;
}
boolean excluded = false;
for (String excludedPackage : excludedPackages) {
if (mutinyImplBaseName.startsWith(excludedPackage)) {
excluded = true;
break;
}
} | class extends the impl base
continue;
}
boolean excluded = false;
for (String excludedPackage : excludedPackages) {
if (mutinyImplBaseName.startsWith(excludedPackage)) {
excluded = true;
break;
}
} |
This is a good question. Flink will restart the pipeline every time we scale up/down. `addSplitsback` is called only when we enable [Restart Pipelined Region Failover Strategy](https://nightlies.apache.org/flink/flink-docs-master/docs/ops/state/task_failure_recovery/#restart-pipelined-region-failover-strategy) and some readers failed. We will put these splits back to Enumerator and reassign them after the failed readers started. | public void addSplitsBack(List<PulsarPartitionSplit> splits, int subtaskId) {
splitAssigner.addSplitsBack(splits, subtaskId);
if (context.registeredReaders().containsKey(subtaskId)) {
LOG.debug(
"Reader {} has been restarted after crashing, we will put splits back to it.",
subtaskId);
List<Integer> readers = new ArrayList<>(context.registeredReaders().keySet());
assignPendingPartitionSplits(readers);
}
} | assignPendingPartitionSplits(readers); | public void addSplitsBack(List<PulsarPartitionSplit> splits, int subtaskId) {
splitAssigner.addSplitsBack(splits, subtaskId);
if (context.registeredReaders().containsKey(subtaskId)) {
LOG.debug(
"Reader {} has been restarted after crashing, we will put splits back to it.",
subtaskId);
List<Integer> readers = new ArrayList<>(context.registeredReaders().keySet());
assignPendingPartitionSplits(readers);
}
} | class PulsarSourceEnumerator
implements SplitEnumerator<PulsarPartitionSplit, PulsarSourceEnumState> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarSourceEnumerator.class);
private final PulsarAdmin pulsarAdmin;
private final PulsarSubscriber subscriber;
private final StartCursor startCursor;
private final RangeGenerator rangeGenerator;
private final SourceConfiguration sourceConfiguration;
private final SplitEnumeratorContext<PulsarPartitionSplit> context;
private final SplitAssigner splitAssigner;
public PulsarSourceEnumerator(
PulsarSubscriber subscriber,
StartCursor startCursor,
StopCursor stopCursor,
RangeGenerator rangeGenerator,
SourceConfiguration sourceConfiguration,
SplitEnumeratorContext<PulsarPartitionSplit> context) {
this(
subscriber,
startCursor,
stopCursor,
rangeGenerator,
sourceConfiguration,
context,
initialState());
}
public PulsarSourceEnumerator(
PulsarSubscriber subscriber,
StartCursor startCursor,
StopCursor stopCursor,
RangeGenerator rangeGenerator,
SourceConfiguration sourceConfiguration,
SplitEnumeratorContext<PulsarPartitionSplit> context,
PulsarSourceEnumState enumState) {
this.pulsarAdmin = createAdmin(sourceConfiguration);
this.subscriber = subscriber;
this.startCursor = startCursor;
this.rangeGenerator = rangeGenerator;
this.sourceConfiguration = sourceConfiguration;
this.context = context;
this.splitAssigner = createAssigner(stopCursor, sourceConfiguration, context, enumState);
}
@Override
public void start() {
rangeGenerator.open(sourceConfiguration);
if (sourceConfiguration.isEnablePartitionDiscovery()) {
LOG.info(
"Starting the PulsarSourceEnumerator for subscription {} "
+ "with partition discovery interval of {} ms.",
sourceConfiguration.getSubscriptionDesc(),
sourceConfiguration.getPartitionDiscoveryIntervalMs());
context.callAsync(
this::getSubscribedTopicPartitions,
this::checkPartitionChanges,
0,
sourceConfiguration.getPartitionDiscoveryIntervalMs());
} else {
LOG.info(
"Starting the PulsarSourceEnumerator for subscription {} "
+ "without periodic partition discovery.",
sourceConfiguration.getSubscriptionDesc());
context.callAsync(this::getSubscribedTopicPartitions, this::checkPartitionChanges);
}
}
@Override
public void handleSplitRequest(int subtaskId, @Nullable String requesterHostname) {
}
@Override
@Override
public void addReader(int subtaskId) {
LOG.debug(
"Adding reader {} to PulsarSourceEnumerator for subscription {}.",
subtaskId,
sourceConfiguration.getSubscriptionDesc());
assignPendingPartitionSplits(singletonList(subtaskId));
}
@Override
public PulsarSourceEnumState snapshotState(long checkpointId) {
return splitAssigner.snapshotState();
}
@Override
public void close() {
if (pulsarAdmin != null) {
pulsarAdmin.close();
}
}
/**
* List subscribed topic partitions on Pulsar cluster.
*
* <p>NOTE: This method should only be invoked in the worker executor thread, because it
* requires network I/O with Pulsar cluster.
*
* @return Set of subscribed {@link TopicPartition}s
*/
private Set<TopicPartition> getSubscribedTopicPartitions() {
int parallelism = context.currentParallelism();
return subscriber.getSubscribedTopicPartitions(pulsarAdmin, rangeGenerator, parallelism);
}
/**
* Check if there are any partition changes within subscribed topic partitions fetched by worker
* thread, and convert them to splits, then assign them to pulsar readers.
*
* <p>NOTE: This method should only be invoked in the coordinator executor thread.
*
* @param fetchedPartitions Map from topic name to its description
* @param throwable Exception in worker thread
*/
private void checkPartitionChanges(Set<TopicPartition> fetchedPartitions, Throwable throwable) {
if (throwable != null) {
throw new FlinkRuntimeException(
"Failed to list subscribed topic partitions due to ", throwable);
}
List<TopicPartition> newPartitions =
splitAssigner.registerTopicPartitions(fetchedPartitions);
createSubscription(newPartitions);
List<Integer> registeredReaders = new ArrayList<>(context.registeredReaders().keySet());
assignPendingPartitionSplits(registeredReaders);
}
/** Create subscription on topic partition if it doesn't exist. */
private void createSubscription(List<TopicPartition> newPartitions) {
for (TopicPartition partition : newPartitions) {
String topicName = partition.getFullTopicName();
String subscriptionName = sourceConfiguration.getSubscriptionName();
List<String> subscriptions =
sneakyAdmin(() -> pulsarAdmin.topics().getSubscriptions(topicName));
if (!subscriptions.contains(subscriptionName)) {
CursorPosition position =
startCursor.position(partition.getTopic(), partition.getPartitionId());
MessageId initialPosition = queryInitialPosition(topicName, position);
sneakyAdmin(
() ->
pulsarAdmin
.topics()
.createSubscription(
topicName, subscriptionName, initialPosition));
}
}
}
/** Query the available message id from Pulsar. */
private MessageId queryInitialPosition(String topicName, CursorPosition position) {
CursorPosition.Type type = position.getType();
if (type == CursorPosition.Type.TIMESTAMP) {
return sneakyAdmin(
() ->
pulsarAdmin
.topics()
.getMessageIdByTimestamp(topicName, position.getTimestamp()));
} else if (type == CursorPosition.Type.MESSAGE_ID) {
return position.getMessageId();
} else {
throw new UnsupportedOperationException("We don't support this seek type " + type);
}
}
/** Query the unassigned splits and assign them to the available readers. */
private void assignPendingPartitionSplits(List<Integer> pendingReaders) {
pendingReaders.forEach(
reader -> {
if (!context.registeredReaders().containsKey(reader)) {
throw new IllegalStateException(
"Reader " + reader + " is not registered to source coordinator");
}
});
splitAssigner
.createAssignment(pendingReaders)
.ifPresent(
assignments -> {
LOG.info(
"The split assignment results are: {}",
assignments.assignment());
context.assignSplits(assignments);
});
for (Integer reader : pendingReaders) {
if (splitAssigner.noMoreSplits(reader)) {
LOG.debug(
"No more PulsarPartitionSplits to assign."
+ " Sending NoMoreSplitsEvent to reader {} in subscription {}.",
reader,
sourceConfiguration.getSubscriptionDesc());
context.signalNoMoreSplits(reader);
}
}
}
} | class PulsarSourceEnumerator
implements SplitEnumerator<PulsarPartitionSplit, PulsarSourceEnumState> {
private static final Logger LOG = LoggerFactory.getLogger(PulsarSourceEnumerator.class);
private final PulsarAdmin pulsarAdmin;
private final PulsarSubscriber subscriber;
private final StartCursor startCursor;
private final RangeGenerator rangeGenerator;
private final SourceConfiguration sourceConfiguration;
private final SplitEnumeratorContext<PulsarPartitionSplit> context;
private final SplitAssigner splitAssigner;
public PulsarSourceEnumerator(
PulsarSubscriber subscriber,
StartCursor startCursor,
StopCursor stopCursor,
RangeGenerator rangeGenerator,
SourceConfiguration sourceConfiguration,
SplitEnumeratorContext<PulsarPartitionSplit> context) {
this(
subscriber,
startCursor,
stopCursor,
rangeGenerator,
sourceConfiguration,
context,
initialState());
}
public PulsarSourceEnumerator(
PulsarSubscriber subscriber,
StartCursor startCursor,
StopCursor stopCursor,
RangeGenerator rangeGenerator,
SourceConfiguration sourceConfiguration,
SplitEnumeratorContext<PulsarPartitionSplit> context,
PulsarSourceEnumState enumState) {
this.pulsarAdmin = createAdmin(sourceConfiguration);
this.subscriber = subscriber;
this.startCursor = startCursor;
this.rangeGenerator = rangeGenerator;
this.sourceConfiguration = sourceConfiguration;
this.context = context;
this.splitAssigner = createAssigner(stopCursor, sourceConfiguration, context, enumState);
}
@Override
public void start() {
rangeGenerator.open(sourceConfiguration);
if (sourceConfiguration.isEnablePartitionDiscovery()) {
LOG.info(
"Starting the PulsarSourceEnumerator for subscription {} "
+ "with partition discovery interval of {} ms.",
sourceConfiguration.getSubscriptionDesc(),
sourceConfiguration.getPartitionDiscoveryIntervalMs());
context.callAsync(
this::getSubscribedTopicPartitions,
this::checkPartitionChanges,
0,
sourceConfiguration.getPartitionDiscoveryIntervalMs());
} else {
LOG.info(
"Starting the PulsarSourceEnumerator for subscription {} "
+ "without periodic partition discovery.",
sourceConfiguration.getSubscriptionDesc());
context.callAsync(this::getSubscribedTopicPartitions, this::checkPartitionChanges);
}
}
@Override
public void handleSplitRequest(int subtaskId, @Nullable String requesterHostname) {
}
@Override
@Override
public void addReader(int subtaskId) {
LOG.debug(
"Adding reader {} to PulsarSourceEnumerator for subscription {}.",
subtaskId,
sourceConfiguration.getSubscriptionDesc());
assignPendingPartitionSplits(singletonList(subtaskId));
}
@Override
public PulsarSourceEnumState snapshotState(long checkpointId) {
return splitAssigner.snapshotState();
}
@Override
public void close() {
if (pulsarAdmin != null) {
pulsarAdmin.close();
}
}
/**
* List subscribed topic partitions on Pulsar cluster.
*
* <p>NOTE: This method should only be invoked in the worker executor thread, because it
* requires network I/O with Pulsar cluster.
*
* @return Set of subscribed {@link TopicPartition}s
*/
private Set<TopicPartition> getSubscribedTopicPartitions() {
int parallelism = context.currentParallelism();
return subscriber.getSubscribedTopicPartitions(pulsarAdmin, rangeGenerator, parallelism);
}
/**
* Check if there are any partition changes within subscribed topic partitions fetched by worker
* thread, and convert them to splits, then assign them to pulsar readers.
*
* <p>NOTE: This method should only be invoked in the coordinator executor thread.
*
* @param fetchedPartitions Map from topic name to its description
* @param throwable Exception in worker thread
*/
private void checkPartitionChanges(Set<TopicPartition> fetchedPartitions, Throwable throwable) {
if (throwable != null) {
throw new FlinkRuntimeException(
"Failed to list subscribed topic partitions due to ", throwable);
}
List<TopicPartition> newPartitions =
splitAssigner.registerTopicPartitions(fetchedPartitions);
createSubscription(newPartitions);
List<Integer> registeredReaders = new ArrayList<>(context.registeredReaders().keySet());
assignPendingPartitionSplits(registeredReaders);
}
/** Create subscription on topic partition if it doesn't exist. */
private void createSubscription(List<TopicPartition> newPartitions) {
for (TopicPartition partition : newPartitions) {
String topicName = partition.getFullTopicName();
String subscriptionName = sourceConfiguration.getSubscriptionName();
List<String> subscriptions =
sneakyAdmin(() -> pulsarAdmin.topics().getSubscriptions(topicName));
if (!subscriptions.contains(subscriptionName)) {
CursorPosition position =
startCursor.position(partition.getTopic(), partition.getPartitionId());
MessageId initialPosition = queryInitialPosition(topicName, position);
sneakyAdmin(
() ->
pulsarAdmin
.topics()
.createSubscription(
topicName, subscriptionName, initialPosition));
}
}
}
/** Query the available message id from Pulsar. */
private MessageId queryInitialPosition(String topicName, CursorPosition position) {
CursorPosition.Type type = position.getType();
if (type == CursorPosition.Type.TIMESTAMP) {
return sneakyAdmin(
() ->
pulsarAdmin
.topics()
.getMessageIdByTimestamp(topicName, position.getTimestamp()));
} else if (type == CursorPosition.Type.MESSAGE_ID) {
return position.getMessageId();
} else {
throw new UnsupportedOperationException("We don't support this seek type " + type);
}
}
/** Query the unassigned splits and assign them to the available readers. */
private void assignPendingPartitionSplits(List<Integer> pendingReaders) {
pendingReaders.forEach(
reader -> {
if (!context.registeredReaders().containsKey(reader)) {
throw new IllegalStateException(
"Reader " + reader + " is not registered to source coordinator");
}
});
splitAssigner
.createAssignment(pendingReaders)
.ifPresent(
assignments -> {
LOG.info(
"The split assignment results are: {}",
assignments.assignment());
context.assignSplits(assignments);
});
for (Integer reader : pendingReaders) {
if (splitAssigner.noMoreSplits(reader)) {
LOG.debug(
"No more PulsarPartitionSplits to assign."
+ " Sending NoMoreSplitsEvent to reader {} in subscription {}.",
reader,
sourceConfiguration.getSubscriptionDesc());
context.signalNoMoreSplits(reader);
}
}
}
} |
Addressed in https://github.com/ballerina-platform/ballerina-lang/pull/38093/commits/f6fc6deae6ab6005be97a5ccea07619863dcab63. | public void testSymbolLookupInModuleAlias() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_module_alias_test.bal");
SemanticModel model = getDefaultModulesSemanticModel(project);
Document srcFile = getDocumentForSingleSource(project);
List<Symbol> visibleSymbols = model.visibleSymbols(srcFile, LinePosition.from(19, 25));
List<String> expectedModuleSymbols = List.of("obj", "sys", "regexp", "'array");
ArrayList<Object> symbols = new ArrayList<>();
for (Symbol visibleSymbol : visibleSymbols) {
if (visibleSymbol.kind() == MODULE
&& visibleSymbol.getName().isPresent()
&& expectedModuleSymbols.contains(visibleSymbol.getName().get())) {
symbols.add(visibleSymbol.getName().get());
}
}
assertEquals(symbols.size(), 2);
} | assertEquals(symbols.size(), 2); | public void testSymbolLookupInModuleAlias() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_module_alias_test.bal");
SemanticModel model = getDefaultModulesSemanticModel(project);
Document srcFile = getDocumentForSingleSource(project);
List<Symbol> visibleSymbols = model.visibleSymbols(srcFile, LinePosition.from(23, 23));
List<String> expectedModuleSymbols = List.of("obj", "tbl", "regexp", "arr", "foo");
int moduleSymbolsCount = 0;
for (Symbol visibleSymbol : visibleSymbols) {
if (visibleSymbol.kind() == MODULE
&& visibleSymbol.getName().isPresent()
&& expectedModuleSymbols.contains(visibleSymbol.getName().get())) {
moduleSymbolsCount++;
}
}
assertEquals(moduleSymbolsCount, expectedModuleSymbols.size());
} | class SymbolLookupTest {
@Test(dataProvider = "PositionProvider3")
public void testVarSymbolLookupInTypedefs(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_typedefs_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "PositionProvider3")
public Object[][] getPositionsForTypedefs() {
List<String> moduleLevelSymbols = asList("aString", "anInt", "HELLO", "testAnonTypes", "Person", "PersonObj",
"Colour", "RED", "GREEN", "BLUE");
return new Object[][]{
{18, 0, 10, moduleLevelSymbols},
{30, 65, 20, getSymbolNames(moduleLevelSymbols, "parent", "pParent", "name", "pName", "age", "pAge",
"self", "init", "getName", "getAge")},
{39, 8, 17, getSymbolNames(moduleLevelSymbols, "parent", "name", "age", "self", "getAge", "getName",
"init")},
{46, 9, 11, getSymbolNames(moduleLevelSymbols, "x")},
{48, 19, 11, getSymbolNames(moduleLevelSymbols, "x")},
{50, 15, 12, getSymbolNames(moduleLevelSymbols, "x", "person")},
{51, 0, 12, getSymbolNames(moduleLevelSymbols, "x", "person")},
};
}
@Test
public void testSymbolLookupInFollowingLine() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_assignment.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 18, 9, moduleID);
assertList(symbolsInFile, Arrays.asList("test", "v1"));
}
@Test
public void testMissingNodeFiltering() {
Project project = BCompileUtil.loadProject("test-src/missing_node_filtering_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 19, 4, moduleID);
assertList(symbolsInFile, Arrays.asList("test", "x"));
}
@Test
public void testSymbolLookupWithAnnotationOnFunction() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_annotation_on_function.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> symbolList = model.visibleSymbols(srcFile, LinePosition.from(24, 5)).stream()
.filter(s -> s.getModule().get().id().equals(moduleID)).collect(Collectors.toList());
List<String> symbolStringList = symbolList.stream().map(this::createSymbolString).collect(Collectors.toList());
List<String> expectedNameList = asList("SimpleRecordTYPE_DEFINITION", "func1ANNOTATION", "func1FUNCTION");
assert symbolStringList.containsAll(expectedNameList);
}
@Test
public void testRedeclaredSymbolLookup() {
Project project = BCompileUtil.loadProject("test-src/errored_symbol_lookup_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> allInScopeSymbols = model.visibleSymbols(srcFile, LinePosition.from(23, 8));
List<Symbol> symbols = new ArrayList<>();
for (Symbol symbol : allInScopeSymbols) {
if (symbol.getModule().get().id().equals(moduleID)) {
symbols.add(symbol);
switch (symbol.kind()) {
case VARIABLE:
assertErroredSymbols(symbol);
break;
case PARAMETER:
assertEquals(symbol.getName().get(), "b");
assertEquals(((ParameterSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.INT);
break;
case FUNCTION:
assertEquals(symbol.getName().get(), "test");
break;
default:
throw new AssertionError("Unexpected symbol kind: " + symbol.kind());
}
}
}
}
private void assertErroredSymbols(Symbol symbol) {
assertEquals(symbol.kind(), SymbolKind.VARIABLE);
assertEquals(symbol.getName().get(), "b");
assertEquals(((VariableSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.COMPILATION_ERROR);
assertEquals(((VariableSymbol) symbol).diagnosticState(), REDECLARED);
}
@Test
public void testRedeclaredSymbolLookup2() {
Project project = BCompileUtil.loadProject("test-src/errored_symbol_lookup_test_2.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> allInScopeSymbols = model.visibleSymbols(srcFile, LinePosition.from(25, 8), VALID, REDECLARED,
UNKNOWN_TYPE);
List<Symbol> symbols = new ArrayList<>();
for (Symbol symbol : allInScopeSymbols) {
if (symbol.getModule().get().id().equals(moduleID) && symbol.kind() == SymbolKind.VARIABLE) {
symbols.add(symbol);
}
}
assertEquals(symbols.size(), 4);
for (Symbol symbol : symbols) {
switch (symbol.getName().get()) {
case "b":
assertVarSymbol(symbol, "b", REDECLARED);
break;
case "p":
assertVarSymbol(symbol, "p", UNKNOWN_TYPE);
break;
case "x":
assertVarSymbol(symbol, "x", UNKNOWN_TYPE);
break;
default:
throw new AssertionError("Unexpected symbol: " + symbol.getName().get());
}
}
}
private void assertVarSymbol(Symbol symbol, String name, DiagnosticState state) {
assertEquals(symbol.kind(), SymbolKind.VARIABLE);
assertEquals(symbol.getName().get(), name);
assertEquals(((VariableSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.COMPILATION_ERROR);
assertEquals(((VariableSymbol) symbol).diagnosticState(), state);
}
@Test
public void testDestructureStmts() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_destructure_var_exclusion_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
List<String> expSymbolNames = List.of("personName", "personAge", "BasicErrorDetail", "rest", "s", "test", "f",
"detail1", "i", "Person", "message1", "UserDefinedError");
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 22, 4, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@Test
public void testObjectConstructorExpr() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_object_constructor.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
List<String> expSymbolNames = List.of("test", "f1", "foo", "self", "a", "helloVar");
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 21, 20, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@Test(dataProvider = "PositionProvider6")
public void testTypeTest(int line, int col, List<String> expSymbolNames, TypeDescKind expVarType) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_type_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, col, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
Symbol getResultSym = symbolsInFile.get("getResult");
assertEquals(getResultSym.kind(), SymbolKind.VARIABLE);
assertEquals(((VariableSymbol) getResultSym).typeDescriptor().typeKind(), expVarType);
}
@DataProvider(name = "PositionProvider6")
public Object[][] getPosForTypeTest() {
List<String> expSymbolNames = List.of("getValue", "testTypeTest", "getResult");
return new Object[][]{
{18, 20, expSymbolNames, TypeDescKind.INT},
{19, 8, expSymbolNames, TypeDescKind.COMPILATION_ERROR},
{22, 24, expSymbolNames, TypeDescKind.INT},
{23, 8, expSymbolNames, TypeDescKind.COMPILATION_ERROR},
};
}
@Test(dataProvider = "FieldSymbolPosProvider")
public void testSymbolLookupInFields(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_fields.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "FieldSymbolPosProvider")
public Object[][] getFieldSymbolPositions() {
List<String> moduleSymbols = List.of("Foo", "Bar", "Person", "PersonObj");
return new Object[][]{
{18, 4, 4, moduleSymbols},
{24, 4, 4, moduleSymbols},
{32, 8, 10, concatSymbols(moduleSymbols, "init", "inc", "self", "x")},
{38, 4, 4, moduleSymbols},
{43, 4, 4, moduleSymbols},
};
}
@Test(dataProvider = "OnFailSymbolPosProvider")
public void testOnFailClauseSymbolLookup(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/on_fail_symbol_lookup_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "OnFailSymbolPosProvider")
public Object[][] getOnFailSymbolPositions() {
List<String> expSymbolNames = List.of("testMatchOnFail", "testWhileOnFail", "testForEachOnFail",
"testLockOnFail", "testRetryOnFail", "testTransactionOnFail", "testDoOnFail");
return new Object[][]{
{25, 23, 10, concatSymbols(expSymbolNames, "err", "val", "errRef")},
{34, 20, 10, concatSymbols(expSymbolNames, "iter", "err", "ref")},
{43, 20, 10, concatSymbols(expSymbolNames, "arr", "err", "ref")},
{51, 20, 9, concatSymbols(expSymbolNames, "ref", "err")},
{67, 20, 12, concatSymbols(expSymbolNames, "str", "count", "err", "e", "ref")},
{79, 33, 9, concatSymbols(expSymbolNames, "e", "s")},
{88, 33, 10, concatSymbols(expSymbolNames, "x", "e", "s")}
};
}
@Test
private String createSymbolString(Symbol symbol) {
return (symbol.getName().isPresent() ? symbol.getName().get() : "") + symbol.kind();
}
private List<String> concatSymbols(List<String> moduleSymbols, String... symbols) {
return Stream.concat(moduleSymbols.stream(), Arrays.stream(symbols)).collect(Collectors.toList());
}
} | class SymbolLookupTest {
@Test(dataProvider = "PositionProvider3")
public void testVarSymbolLookupInTypedefs(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_typedefs_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "PositionProvider3")
public Object[][] getPositionsForTypedefs() {
List<String> moduleLevelSymbols = asList("aString", "anInt", "HELLO", "testAnonTypes", "Person", "PersonObj",
"Colour", "RED", "GREEN", "BLUE");
return new Object[][]{
{18, 0, 10, moduleLevelSymbols},
{30, 65, 20, getSymbolNames(moduleLevelSymbols, "parent", "pParent", "name", "pName", "age", "pAge",
"self", "init", "getName", "getAge")},
{39, 8, 17, getSymbolNames(moduleLevelSymbols, "parent", "name", "age", "self", "getAge", "getName",
"init")},
{46, 9, 11, getSymbolNames(moduleLevelSymbols, "x")},
{48, 19, 11, getSymbolNames(moduleLevelSymbols, "x")},
{50, 15, 12, getSymbolNames(moduleLevelSymbols, "x", "person")},
{51, 0, 12, getSymbolNames(moduleLevelSymbols, "x", "person")},
};
}
@Test
public void testSymbolLookupInFollowingLine() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_assignment.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 18, 9, moduleID);
assertList(symbolsInFile, Arrays.asList("test", "v1"));
}
@Test
public void testMissingNodeFiltering() {
Project project = BCompileUtil.loadProject("test-src/missing_node_filtering_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 19, 4, moduleID);
assertList(symbolsInFile, Arrays.asList("test", "x"));
}
@Test
public void testSymbolLookupWithAnnotationOnFunction() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_annotation_on_function.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> symbolList = model.visibleSymbols(srcFile, LinePosition.from(24, 5)).stream()
.filter(s -> s.getModule().get().id().equals(moduleID)).collect(Collectors.toList());
List<String> symbolStringList = symbolList.stream().map(this::createSymbolString).collect(Collectors.toList());
List<String> expectedNameList = asList("SimpleRecordTYPE_DEFINITION", "func1ANNOTATION", "func1FUNCTION");
assert symbolStringList.containsAll(expectedNameList);
}
@Test
public void testRedeclaredSymbolLookup() {
Project project = BCompileUtil.loadProject("test-src/errored_symbol_lookup_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> allInScopeSymbols = model.visibleSymbols(srcFile, LinePosition.from(23, 8));
List<Symbol> symbols = new ArrayList<>();
for (Symbol symbol : allInScopeSymbols) {
if (symbol.getModule().get().id().equals(moduleID)) {
symbols.add(symbol);
switch (symbol.kind()) {
case VARIABLE:
assertErroredSymbols(symbol);
break;
case PARAMETER:
assertEquals(symbol.getName().get(), "b");
assertEquals(((ParameterSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.INT);
break;
case FUNCTION:
assertEquals(symbol.getName().get(), "test");
break;
default:
throw new AssertionError("Unexpected symbol kind: " + symbol.kind());
}
}
}
}
private void assertErroredSymbols(Symbol symbol) {
assertEquals(symbol.kind(), SymbolKind.VARIABLE);
assertEquals(symbol.getName().get(), "b");
assertEquals(((VariableSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.COMPILATION_ERROR);
assertEquals(((VariableSymbol) symbol).diagnosticState(), REDECLARED);
}
@Test
public void testRedeclaredSymbolLookup2() {
Project project = BCompileUtil.loadProject("test-src/errored_symbol_lookup_test_2.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
List<Symbol> allInScopeSymbols = model.visibleSymbols(srcFile, LinePosition.from(25, 8), VALID, REDECLARED,
UNKNOWN_TYPE);
List<Symbol> symbols = new ArrayList<>();
for (Symbol symbol : allInScopeSymbols) {
if (symbol.getModule().get().id().equals(moduleID) && symbol.kind() == SymbolKind.VARIABLE) {
symbols.add(symbol);
}
}
assertEquals(symbols.size(), 4);
for (Symbol symbol : symbols) {
switch (symbol.getName().get()) {
case "b":
assertVarSymbol(symbol, "b", REDECLARED);
break;
case "p":
assertVarSymbol(symbol, "p", UNKNOWN_TYPE);
break;
case "x":
assertVarSymbol(symbol, "x", UNKNOWN_TYPE);
break;
default:
throw new AssertionError("Unexpected symbol: " + symbol.getName().get());
}
}
}
private void assertVarSymbol(Symbol symbol, String name, DiagnosticState state) {
assertEquals(symbol.kind(), SymbolKind.VARIABLE);
assertEquals(symbol.getName().get(), name);
assertEquals(((VariableSymbol) symbol).typeDescriptor().typeKind(), TypeDescKind.COMPILATION_ERROR);
assertEquals(((VariableSymbol) symbol).diagnosticState(), state);
}
@Test
public void testDestructureStmts() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_destructure_var_exclusion_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
List<String> expSymbolNames = List.of("personName", "personAge", "BasicErrorDetail", "rest", "s", "test", "f",
"detail1", "i", "Person", "message1", "UserDefinedError");
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 22, 4, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@Test
public void testObjectConstructorExpr() {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_object_constructor.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
List<String> expSymbolNames = List.of("test", "f1", "foo", "self", "a", "helloVar");
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, 21, 20, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@Test(dataProvider = "PositionProvider6")
public void testTypeTest(int line, int col, List<String> expSymbolNames, TypeDescKind expVarType) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_with_type_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, col, moduleID);
assertEquals(symbolsInFile.size(), expSymbolNames.size());
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
Symbol getResultSym = symbolsInFile.get("getResult");
assertEquals(getResultSym.kind(), SymbolKind.VARIABLE);
assertEquals(((VariableSymbol) getResultSym).typeDescriptor().typeKind(), expVarType);
}
@DataProvider(name = "PositionProvider6")
public Object[][] getPosForTypeTest() {
List<String> expSymbolNames = List.of("getValue", "testTypeTest", "getResult");
return new Object[][]{
{18, 20, expSymbolNames, TypeDescKind.INT},
{19, 8, expSymbolNames, TypeDescKind.COMPILATION_ERROR},
{22, 24, expSymbolNames, TypeDescKind.INT},
{23, 8, expSymbolNames, TypeDescKind.COMPILATION_ERROR},
};
}
@Test(dataProvider = "FieldSymbolPosProvider")
public void testSymbolLookupInFields(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/symbol_lookup_in_fields.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "FieldSymbolPosProvider")
public Object[][] getFieldSymbolPositions() {
List<String> moduleSymbols = List.of("Foo", "Bar", "Person", "PersonObj");
return new Object[][]{
{18, 4, 4, moduleSymbols},
{24, 4, 4, moduleSymbols},
{32, 8, 10, concatSymbols(moduleSymbols, "init", "inc", "self", "x")},
{38, 4, 4, moduleSymbols},
{43, 4, 4, moduleSymbols},
};
}
@Test(dataProvider = "OnFailSymbolPosProvider")
public void testOnFailClauseSymbolLookup(int line, int column, int expSymbols, List<String> expSymbolNames) {
Project project = BCompileUtil.loadProject("test-src/on_fail_symbol_lookup_test.bal");
Package currentPackage = project.currentPackage();
ModuleId defaultModuleId = currentPackage.getDefaultModule().moduleId();
PackageCompilation packageCompilation = currentPackage.getCompilation();
SemanticModel model = packageCompilation.getSemanticModel(defaultModuleId);
Document srcFile = getDocumentForSingleSource(project);
BLangPackage pkg = packageCompilation.defaultModuleBLangPackage();
ModuleID moduleID = new BallerinaModuleID(pkg.packageID);
Map<String, Symbol> symbolsInFile = getSymbolsInFile(model, srcFile, line, column, moduleID);
assertEquals(symbolsInFile.size(), expSymbols);
for (String symName : expSymbolNames) {
assertTrue(symbolsInFile.containsKey(symName), "Symbol not found: " + symName);
}
}
@DataProvider(name = "OnFailSymbolPosProvider")
public Object[][] getOnFailSymbolPositions() {
List<String> expSymbolNames = List.of("testMatchOnFail", "testWhileOnFail", "testForEachOnFail",
"testLockOnFail", "testRetryOnFail", "testTransactionOnFail", "testDoOnFail");
return new Object[][]{
{25, 23, 10, concatSymbols(expSymbolNames, "err", "val", "errRef")},
{34, 20, 10, concatSymbols(expSymbolNames, "iter", "err", "ref")},
{43, 20, 10, concatSymbols(expSymbolNames, "arr", "err", "ref")},
{51, 20, 9, concatSymbols(expSymbolNames, "ref", "err")},
{67, 20, 12, concatSymbols(expSymbolNames, "str", "count", "err", "e", "ref")},
{79, 33, 9, concatSymbols(expSymbolNames, "e", "s")},
{88, 33, 10, concatSymbols(expSymbolNames, "x", "e", "s")}
};
}
@Test
private String createSymbolString(Symbol symbol) {
return (symbol.getName().isPresent() ? symbol.getName().get() : "") + symbol.kind();
}
private List<String> concatSymbols(List<String> moduleSymbols, String... symbols) {
return Stream.concat(moduleSymbols.stream(), Arrays.stream(symbols)).collect(Collectors.toList());
}
} |
I'd move this to line 118, you don't need to query the typeToken if the classNamestack is empty. | private void checkMethodNamePrefix(DetailAST methodDefToken) {
if (!TokenUtil.findFirstTokenByPredicate(methodDefToken,
c -> c.getType() == TokenTypes.PARAMETERS && c.getChildCount() == 1).isPresent()) {
return;
}
final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE);
if (classNameStack.isEmpty()) {
return;
}
if (!TokenUtil.findFirstTokenByPredicate(
typeToken, c -> c.getType() == TokenTypes.IDENT && c.getText().equals(classNameStack.peekLast())).isPresent()) {
return;
}
final String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();
avoidStartWords.forEach(avoidStartWord -> {
if (methodName.length() >= avoidStartWord.length() && methodName.startsWith(avoidStartWord)) {
log(methodDefToken, String.format(FLUENT_METHOD_ERR, methodName, avoidStartWord));
}
});
} | final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE); | private void checkMethodNamePrefix(DetailAST methodDefToken) {
if (TokenUtil.findFirstTokenByPredicate(methodDefToken, parameters ->
parameters.getType() == TokenTypes.PARAMETERS && parameters.getChildCount() != 1).isPresent()) {
log(methodDefToken, "A fluent method should only have one parameter.");
}
final DetailAST typeToken = methodDefToken.findFirstToken(TokenTypes.TYPE);
if (TokenUtil.findFirstTokenByPredicate(typeToken, ident -> ident.getType() == TokenTypes.IDENT
&& !ident.getText().equals(classNameStack.peekLast())).isPresent()) {
log(methodDefToken, "Return type of fluent method should be the class itself");
}
final String methodName = methodDefToken.findFirstToken(TokenTypes.IDENT).getText();
avoidStartWords.forEach(avoidStartWord -> {
if (methodName.length() >= avoidStartWord.length() && methodName.startsWith(avoidStartWord)) {
log(methodDefToken, String.format("''%s'' fluent method name should not start with keyword ''%s''.",
methodName, avoidStartWord));
}
});
} | class names when traversals the AST tree.
*/
private Deque<String> classNameStack = new ArrayDeque<>();
/**
* Setter to specifies valid identifiers
* @param avoidStartWords the starting strings that should not start with in fluent method
*/
public final void setAvoidStartWords(String... avoidStartWords) {
Collections.addAll(this.avoidStartWords, avoidStartWords);
} | class names when traversals the AST tree.
*/
private final Deque<String> classNameStack = new ArrayDeque<>();
/**
* Adds words that methods in fluent classes should not be prefixed with.
* @param avoidStartWords the starting strings that should not start with in fluent method
*/
public final void setAvoidStartWords(String... avoidStartWords) {
Collections.addAll(this.avoidStartWords, avoidStartWords);
} |
does it mean that in shared nothing mode, create db/table statement is not allowed to specify 'storage volume' property? I would say, we should respect what the db storageVolumeId is no matter it is a shared data or shared nothing mode. If the db is not allowed to have a storage volume property, just block it when creating/altering sql. | private void handleShowCreateDb() throws AnalysisException {
ShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;
String catalogName = showStmt.getCatalogName();
String dbName = showStmt.getDb();
List<List<String>> rows = Lists.newArrayList();
Database db;
if (Strings.isNullOrEmpty(catalogName) || CatalogMgr.isInternalCatalog(catalogName)) {
db = connectContext.getGlobalStateMgr().getDb(dbName);
} else {
db = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName);
}
MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
StringBuilder createSqlBuilder = new StringBuilder();
createSqlBuilder.append("CREATE DATABASE `").append(showStmt.getDb()).append("`");
if (!Strings.isNullOrEmpty(db.getLocation())) {
createSqlBuilder.append("\nPROPERTIES (\"location\" = \"").append(db.getLocation()).append("\")");
}
if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA && !Strings.isNullOrEmpty(db.getStorageVolumeId())) {
StorageVolume sv = GlobalStateMgr.getCurrentState().getStorageVolumeMgr().getStorageVolume(db.getStorageVolumeId());
createSqlBuilder.append("\nPROPERTIES (\"storage_volume\" = \"").append(sv.getName()).append("\")");
}
rows.add(Lists.newArrayList(showStmt.getDb(), createSqlBuilder.toString()));
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
} | if (RunMode.getCurrentRunMode() == RunMode.SHARED_DATA && !Strings.isNullOrEmpty(db.getStorageVolumeId())) { | private void handleShowCreateDb() throws AnalysisException {
ShowCreateDbStmt showStmt = (ShowCreateDbStmt) stmt;
String catalogName = showStmt.getCatalogName();
String dbName = showStmt.getDb();
List<List<String>> rows = Lists.newArrayList();
Database db;
if (Strings.isNullOrEmpty(catalogName) || CatalogMgr.isInternalCatalog(catalogName)) {
db = connectContext.getGlobalStateMgr().getDb(dbName);
} else {
db = GlobalStateMgr.getCurrentState().getMetadataMgr().getDb(catalogName, dbName);
}
MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
StringBuilder createSqlBuilder = new StringBuilder();
createSqlBuilder.append("CREATE DATABASE `").append(showStmt.getDb()).append("`");
if (!Strings.isNullOrEmpty(db.getLocation())) {
createSqlBuilder.append("\nPROPERTIES (\"location\" = \"").append(db.getLocation()).append("\")");
}
if (!Strings.isNullOrEmpty(db.getStorageVolumeId())) {
StorageVolume sv = GlobalStateMgr.getCurrentState().getStorageVolumeMgr().getStorageVolume(db.getStorageVolumeId());
createSqlBuilder.append("\nPROPERTIES (\"storage_volume\" = \"").append(sv.getName()).append("\")");
}
rows.add(Lists.newArrayList(showStmt.getDb(), createSqlBuilder.toString()));
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
} | class ShowExecutor {
private static final Logger LOG = LogManager.getLogger(ShowExecutor.class);
private static final List<List<String>> EMPTY_SET = Lists.newArrayList();
private final ConnectContext connectContext;
private final ShowStmt stmt;
private ShowResultSet resultSet;
private final MetadataMgr metadataMgr;
public ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {
this.connectContext = connectContext;
this.stmt = stmt;
resultSet = null;
metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
}
public ShowResultSet execute() throws AnalysisException, DdlException {
if (stmt instanceof ShowMaterializedViewsStmt) {
handleShowMaterializedView();
} else if (stmt instanceof ShowAuthorStmt) {
handleShowAuthor();
} else if (stmt instanceof ShowProcStmt) {
handleShowProc();
} else if (stmt instanceof HelpStmt) {
handleHelp();
} else if (stmt instanceof ShowWarehousesStmt) {
handleShowWarehouses();
} else if (stmt instanceof ShowClustersStmt) {
handleShowClusters();
} else if (stmt instanceof ShowDbStmt) {
handleShowDb();
} else if (stmt instanceof ShowTableStmt) {
handleShowTable();
} else if (stmt instanceof ShowTableStatusStmt) {
handleShowTableStatus();
} else if (stmt instanceof DescribeStmt) {
handleDescribe();
} else if (stmt instanceof ShowCreateTableStmt) {
handleShowCreateTable();
} else if (stmt instanceof ShowCreateDbStmt) {
handleShowCreateDb();
} else if (stmt instanceof ShowProcesslistStmt) {
handleShowProcesslist();
} else if (stmt instanceof ShowEnginesStmt) {
handleShowEngines();
} else if (stmt instanceof ShowFunctionsStmt) {
handleShowFunctions();
} else if (stmt instanceof ShowVariablesStmt) {
handleShowVariables();
} else if (stmt instanceof ShowColumnStmt) {
handleShowColumn();
} else if (stmt instanceof ShowLoadStmt) {
handleShowLoad();
} else if (stmt instanceof ShowRoutineLoadStmt) {
handleShowRoutineLoad();
} else if (stmt instanceof ShowRoutineLoadTaskStmt) {
handleShowRoutineLoadTask();
} else if (stmt instanceof ShowStreamLoadStmt) {
handleShowStreamLoad();
} else if (stmt instanceof ShowDeleteStmt) {
handleShowDelete();
} else if (stmt instanceof ShowAlterStmt) {
handleShowAlter();
} else if (stmt instanceof ShowUserPropertyStmt) {
handleShowUserProperty();
} else if (stmt instanceof ShowDataStmt) {
handleShowData();
} else if (stmt instanceof ShowCollationStmt) {
handleShowCollation();
} else if (stmt instanceof ShowPartitionsStmt) {
handleShowPartitions();
} else if (stmt instanceof ShowTabletStmt) {
handleShowTablet();
} else if (stmt instanceof ShowBackupStmt) {
handleShowBackup();
} else if (stmt instanceof ShowRestoreStmt) {
handleShowRestore();
} else if (stmt instanceof ShowBrokerStmt) {
handleShowBroker();
} else if (stmt instanceof ShowResourcesStmt) {
handleShowResources();
} else if (stmt instanceof ShowExportStmt) {
handleShowExport();
} else if (stmt instanceof ShowBackendsStmt) {
handleShowBackends();
} else if (stmt instanceof ShowFrontendsStmt) {
handleShowFrontends();
} else if (stmt instanceof ShowRepositoriesStmt) {
handleShowRepositories();
} else if (stmt instanceof ShowSnapshotStmt) {
handleShowSnapshot();
} else if (stmt instanceof ShowGrantsStmt) {
handleShowGrants();
} else if (stmt instanceof ShowRolesStmt) {
handleShowRoles();
} else if (stmt instanceof AdminShowReplicaStatusStmt) {
handleAdminShowTabletStatus();
} else if (stmt instanceof AdminShowReplicaDistributionStmt) {
handleAdminShowTabletDistribution();
} else if (stmt instanceof AdminShowConfigStmt) {
handleAdminShowConfig();
} else if (stmt instanceof ShowSmallFilesStmt) {
handleShowSmallFiles();
} else if (stmt instanceof ShowDynamicPartitionStmt) {
handleShowDynamicPartition();
} else if (stmt instanceof ShowIndexStmt) {
handleShowIndex();
} else if (stmt instanceof ShowTransactionStmt) {
handleShowTransaction();
} else if (stmt instanceof ShowPluginsStmt) {
handleShowPlugins();
} else if (stmt instanceof ShowSqlBlackListStmt) {
handleShowSqlBlackListStmt();
} else if (stmt instanceof ShowAnalyzeJobStmt) {
handleShowAnalyzeJob();
} else if (stmt instanceof ShowAnalyzeStatusStmt) {
handleShowAnalyzeStatus();
} else if (stmt instanceof ShowBasicStatsMetaStmt) {
handleShowBasicStatsMeta();
} else if (stmt instanceof ShowHistogramStatsMetaStmt) {
handleShowHistogramStatsMeta();
} else if (stmt instanceof ShowResourceGroupStmt) {
handleShowResourceGroup();
} else if (stmt instanceof ShowUserStmt) {
handleShowUser();
} else if (stmt instanceof ShowCatalogsStmt) {
handleShowCatalogs();
} else if (stmt instanceof ShowComputeNodesStmt) {
handleShowComputeNodes();
} else if (stmt instanceof ShowAuthenticationStmt) {
handleShowAuthentication();
} else if (stmt instanceof ShowCreateExternalCatalogStmt) {
handleShowCreateExternalCatalog();
} else if (stmt instanceof ShowCharsetStmt) {
handleShowCharset();
} else if (stmt instanceof ShowStorageVolumesStmt) {
handleShowStorageVolumes();
} else if (stmt instanceof DescStorageVolumeStmt) {
handleDescStorageVolume();
} else {
handleEmpty();
}
List<List<String>> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());
return new ShowResultSet(resultSet.getMetaData(), rows);
}
private void handleShowAuthentication() {
final ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;
AuthenticationMgr authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationMgr();
List<List<String>> userAuthInfos = Lists.newArrayList();
Map<UserIdentity, UserAuthenticationInfo> authenticationInfoMap = new HashMap<>();
if (showAuthenticationStmt.isAll()) {
authenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());
} else {
UserAuthenticationInfo userAuthenticationInfo;
if (showAuthenticationStmt.getUserIdent() == null) {
userAuthenticationInfo = authenticationManager
.getUserAuthenticationInfoByUserIdentity(connectContext.getCurrentUserIdentity());
} else {
userAuthenticationInfo =
authenticationManager.getUserAuthenticationInfoByUserIdentity(showAuthenticationStmt.getUserIdent());
}
authenticationInfoMap.put(showAuthenticationStmt.getUserIdent(), userAuthenticationInfo);
}
for (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : authenticationInfoMap.entrySet()) {
UserAuthenticationInfo userAuthenticationInfo = entry.getValue();
userAuthInfos.add(Lists.newArrayList(
entry.getKey().toString(),
userAuthenticationInfo.getPassword().length == 0 ? "No" : "Yes",
userAuthenticationInfo.getAuthPlugin(),
userAuthenticationInfo.getTextForAuthPlugin()));
}
resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);
}
private void handleShowComputeNodes() {
final ShowComputeNodesStmt showStmt = (ShowComputeNodesStmt) stmt;
List<List<String>> computeNodesInfos = ComputeNodeProcDir.getClusterComputeNodesInfos();
resultSet = new ShowResultSet(showStmt.getMetaData(), computeNodesInfos);
}
private void handleShowMaterializedView() throws AnalysisException {
ShowMaterializedViewsStmt showMaterializedViewsStmt = (ShowMaterializedViewsStmt) stmt;
String dbName = showMaterializedViewsStmt.getDb();
Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
MetaUtils.checkDbNullAndReport(db, dbName);
List<MaterializedView> materializedViews = Lists.newArrayList();
List<Pair<OlapTable, MaterializedIndexMeta>> singleTableMVs = Lists.newArrayList();
db.readLock();
try {
PatternMatcher matcher = null;
if (showMaterializedViewsStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showMaterializedViewsStmt.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
}
for (Table table : db.getTables()) {
if (table.isMaterializedView()) {
MaterializedView mvTable = (MaterializedView) table;
if (matcher != null && !matcher.match(mvTable.getName())) {
continue;
}
AtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);
mvTable.getBaseTableInfos().forEach(baseTableInfo -> {
Table baseTable = baseTableInfo.getTable();
if (baseTable != null && baseTable.isNativeTableOrMaterializedView() && !PrivilegeActions.
checkTableAction(connectContext, baseTableInfo.getDbName(),
baseTableInfo.getTableName(),
PrivilegeType.SELECT)) {
baseTableHasPrivilege.set(false);
}
});
if (!baseTableHasPrivilege.get()) {
continue;
}
if (!PrivilegeActions.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),
mvTable.getName())) {
continue;
}
materializedViews.add(mvTable);
} else if (Table.TableType.OLAP == table.getType()) {
OlapTable olapTable = (OlapTable) table;
List<MaterializedIndexMeta> visibleMaterializedViews = olapTable.getVisibleIndexMetas();
long baseIdx = olapTable.getBaseIndexId();
for (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {
if (baseIdx == mvMeta.getIndexId()) {
continue;
}
if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvMeta.getIndexId()))) {
continue;
}
singleTableMVs.add(Pair.create(olapTable, mvMeta));
}
}
}
List<List<String>> rowSets = listMaterializedViewStatus(dbName, materializedViews, singleTableMVs);
resultSet = new ShowResultSet(stmt.getMetaData(), rowSets);
} catch (Exception e) {
LOG.warn("listMaterializedViews failed:", e);
throw e;
} finally {
db.readUnlock();
}
}
public static String buildCreateMVSql(OlapTable olapTable, String mv, MaterializedIndexMeta mvMeta) {
StringBuilder originStmtBuilder = new StringBuilder(
"create materialized view " + mv +
" as select ");
String groupByString = "";
for (Column column : mvMeta.getSchema()) {
if (column.isKey()) {
groupByString += column.getName() + ",";
}
}
originStmtBuilder.append(groupByString);
for (Column column : mvMeta.getSchema()) {
if (!column.isKey()) {
originStmtBuilder.append(column.getAggregationType().toString()).append("(")
.append(column.getName()).append(")").append(",");
}
}
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
originStmtBuilder.append(" from ").append(olapTable.getName()).append(" group by ")
.append(groupByString);
originStmtBuilder.delete(originStmtBuilder.length() - 1, originStmtBuilder.length());
return originStmtBuilder.toString();
}
public static List<List<String>> listMaterializedViewStatus(
String dbName,
List<MaterializedView> materializedViews,
List<Pair<OlapTable, MaterializedIndexMeta>> singleTableMVs) {
List<List<String>> rowSets = Lists.newArrayList();
Map<String, TaskRunStatus> mvNameTaskMap = Maps.newHashMap();
if (!materializedViews.isEmpty()) {
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
TaskManager taskManager = globalStateMgr.getTaskManager();
mvNameTaskMap = taskManager.showMVLastRefreshTaskRunStatus(dbName);
}
for (MaterializedView mvTable : materializedViews) {
long mvId = mvTable.getId();
TaskRunStatus taskStatus = mvNameTaskMap.get(TaskBuilder.getMvTaskName(mvId));
ArrayList<String> resultRow = new ArrayList<>();
resultRow.add(String.valueOf(mvId));
resultRow.add(dbName);
resultRow.add(mvTable.getName());
MaterializedView.MvRefreshScheme refreshScheme = mvTable.getRefreshScheme();
if (refreshScheme == null) {
resultRow.add("UNKNOWN");
} else {
resultRow.add(String.valueOf(mvTable.getRefreshScheme().getType()));
}
resultRow.add(String.valueOf(mvTable.isActive()));
resultRow.add(String.valueOf(mvTable.getInactiveReason()));
if (mvTable.getPartitionInfo() != null && mvTable.getPartitionInfo().getType() != null) {
resultRow.add(mvTable.getPartitionInfo().getType().toString());
} else {
resultRow.add("");
}
setTaskRunStatus(resultRow, taskStatus);
resultRow.add(String.valueOf(mvTable.getRowCount()));
resultRow.add(mvTable.getMaterializedViewDdlStmt(true));
rowSets.add(resultRow);
}
for (Pair<OlapTable, MaterializedIndexMeta> singleTableMV : singleTableMVs) {
OlapTable olapTable = singleTableMV.first;
MaterializedIndexMeta mvMeta = singleTableMV.second;
long mvId = mvMeta.getIndexId();
ArrayList<String> resultRow = new ArrayList<>();
resultRow.add(String.valueOf(mvId));
resultRow.add(dbName);
resultRow.add(olapTable.getIndexNameById(mvId));
resultRow.add("ROLLUP");
resultRow.add(String.valueOf(true));
resultRow.add("");
if (olapTable.getPartitionInfo() != null && olapTable.getPartitionInfo().getType() != null) {
resultRow.add(olapTable.getPartitionInfo().getType().toString());
} else {
resultRow.add("");
}
setTaskRunStatus(resultRow, null);
if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
Partition partition = olapTable.getPartitions().iterator().next();
MaterializedIndex index = partition.getIndex(mvId);
resultRow.add(String.valueOf(index.getRowCount()));
} else {
resultRow.add(String.valueOf(0L));
}
if (mvMeta.getOriginStmt() == null) {
String mvName = olapTable.getIndexNameById(mvId);
resultRow.add(buildCreateMVSql(olapTable, mvName, mvMeta));
} else {
resultRow.add(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
.replaceAll("[ ]+", " "));
}
rowSets.add(resultRow);
}
return rowSets;
}
private static void setTaskRunStatus(List<String> resultRow, TaskRunStatus taskStatus) {
if (taskStatus != null) {
resultRow.add(String.valueOf(taskStatus.getTaskId()));
resultRow.add(Strings.nullToEmpty(taskStatus.getTaskName()));
resultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getCreateTime())));
resultRow.add(String.valueOf(TimeUtils.longToTimeString(taskStatus.getFinishTime())));
if (taskStatus.getFinishTime() > taskStatus.getCreateTime()) {
resultRow.add(DebugUtil.DECIMAL_FORMAT_SCALE_3
.format((taskStatus.getFinishTime() - taskStatus.getCreateTime()) / 1000D));
} else {
resultRow.add("0.000");
}
resultRow.add(String.valueOf(taskStatus.getState()));
MVTaskRunExtraMessage extraMessage = taskStatus.getMvTaskRunExtraMessage();
resultRow.add(extraMessage.isForceRefresh() ? "true" : "false");
resultRow.add(Strings.nullToEmpty(extraMessage.getPartitionStart()));
resultRow.add(Strings.nullToEmpty(extraMessage.getPartitionEnd()));
resultRow.add(Strings.nullToEmpty(extraMessage.getBasePartitionsToRefreshMapString()));
resultRow.add(Strings.nullToEmpty(extraMessage.getMvPartitionsToRefreshString()));
resultRow.add(String.valueOf(taskStatus.getErrorCode()));
resultRow.add(Strings.nullToEmpty(taskStatus.getErrorMessage()));
} else {
resultRow.addAll(Collections.nCopies(13, ""));
}
}
private void handleShowProcesslist() {
ShowProcesslistStmt showStmt = (ShowProcesslistStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
List<ConnectContext.ThreadInfo> threadInfos = connectContext.getConnectScheduler()
.listConnection(connectContext.getQualifiedUser());
long nowMs = System.currentTimeMillis();
for (ConnectContext.ThreadInfo info : threadInfos) {
List<String> row = info.toRow(nowMs, showStmt.showFull());
if (row != null) {
rowSet.add(row);
}
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleEmpty() {
resultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);
}
private void handleShowAuthor() {
ShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
resultSet = new ShowResultSet(showAuthorStmt.getMetaData(), rowSet);
}
private void handleShowEngines() {
ShowEnginesStmt showStmt = (ShowEnginesStmt) stmt;
List<List<String>> rowSet = Lists.newArrayList();
rowSet.add(Lists.newArrayList("OLAP", "YES", "Default storage engine of StarRocks", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("MySQL", "YES", "MySQL server which data is in it", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("ELASTICSEARCH", "YES", "ELASTICSEARCH cluster which data is in it", "NO", "NO",
"NO"));
rowSet.add(Lists.newArrayList("HIVE", "YES", "HIVE database which data is in it", "NO", "NO", "NO"));
rowSet.add(Lists.newArrayList("ICEBERG", "YES", "ICEBERG data lake which data is in it", "NO", "NO", "NO"));
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleShowFunctions() throws AnalysisException {
ShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;
List<Function> functions;
if (showStmt.getIsBuiltin()) {
functions = connectContext.getGlobalStateMgr().getBuiltinFunctions();
} else if (showStmt.getIsGlobal()) {
functions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();
} else {
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
functions = db.getFunctions();
}
List<List<Comparable>> rowSet = Lists.newArrayList();
for (Function function : functions) {
List<Comparable> row = function.getInfo(showStmt.getIsVerbose());
if (showStmt.getWild() == null || showStmt.like(function.functionName())) {
if (showStmt.getIsGlobal()) {
if (!PrivilegeActions.checkAnyActionOnGlobalFunction(connectContext, function.getFunctionId())) {
continue;
}
} else if (!showStmt.getIsBuiltin()) {
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
if (!PrivilegeActions.checkAnyActionOnFunction(
connectContext.getCurrentUserIdentity(), connectContext.getCurrentRoleIds(),
db.getId(), function.getFunctionId())) {
continue;
}
}
rowSet.add(row);
}
}
ListComparator<List<Comparable>> comparator;
OrderByPair orderByPair = new OrderByPair(0, false);
comparator = new ListComparator<>(orderByPair);
rowSet.sort(comparator);
List<List<String>> resultRowSet = Lists.newArrayList();
Set<String> functionNameSet = new HashSet<>();
for (List<Comparable> row : rowSet) {
List<String> resultRow = Lists.newArrayList();
if (functionNameSet.contains(row.get(0).toString())) {
continue;
}
for (Comparable column : row) {
resultRow.add(column.toString());
}
resultRowSet.add(resultRow);
functionNameSet.add(resultRow.get(0));
}
ShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :
ShowResultSetMetaData.builder()
.addColumn(new Column("Function Name", ScalarType.createVarchar(256))).build();
resultSet = new ShowResultSet(showMetaData, resultRowSet);
}
private void handleShowProc() throws AnalysisException {
ShowProcStmt showProcStmt = (ShowProcStmt) stmt;
ShowResultSetMetaData metaData = showProcStmt.getMetaData();
ProcNodeInterface procNode = showProcStmt.getNode();
List<List<String>> finalRows = procNode.fetchResult().getRows();
resultSet = new ShowResultSet(metaData, finalRows);
}
private void handleShowDb() {
ShowDbStmt showDbStmt = (ShowDbStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<String> dbNames;
String catalogName;
if (showDbStmt.getCatalogName() == null) {
catalogName = connectContext.getCurrentCatalog();
} else {
catalogName = showDbStmt.getCatalogName();
}
dbNames = metadataMgr.listDbNames(catalogName);
PatternMatcher matcher = null;
if (showDbStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),
CaseSensibility.DATABASE.getCaseSensibility());
}
Set<String> dbNameSet = Sets.newTreeSet();
for (String dbName : dbNames) {
if (matcher != null && !matcher.match(dbName)) {
continue;
}
if (!PrivilegeActions.checkAnyActionOnOrInDb(connectContext, catalogName, dbName)) {
continue;
}
dbNameSet.add(dbName);
}
for (String dbName : dbNameSet) {
rows.add(Lists.newArrayList(dbName));
}
resultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);
}
private void handleShowTable() throws AnalysisException {
ShowTableStmt showTableStmt = (ShowTableStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
String catalogName = showTableStmt.getCatalogName();
if (catalogName == null) {
catalogName = connectContext.getCurrentCatalog();
}
String dbName = showTableStmt.getDb();
Database db = metadataMgr.getDb(catalogName, dbName);
PatternMatcher matcher = null;
if (showTableStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
}
Map<String, String> tableMap = Maps.newTreeMap();
MetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());
if (CatalogMgr.isInternalCatalog(catalogName)) {
db.readLock();
try {
for (Table tbl : db.getTables()) {
if (matcher != null && !matcher.match(tbl.getName())) {
continue;
}
if (tbl.isView()) {
if (!PrivilegeActions.checkAnyActionOnView(
connectContext, db.getFullName(), tbl.getName())) {
continue;
}
} else if (tbl.isMaterializedView()) {
if (!PrivilegeActions.checkAnyActionOnMaterializedView(
connectContext, db.getFullName(), tbl.getName())) {
continue;
}
} else if (!PrivilegeActions.checkAnyActionOnTable(
connectContext, db.getFullName(), tbl.getName())) {
continue;
}
tableMap.put(tbl.getName(), tbl.getMysqlType());
}
} finally {
db.readUnlock();
}
} else {
List<String> tableNames = metadataMgr.listTableNames(catalogName, dbName);
for (String tableName : tableNames) {
if (matcher != null && !matcher.match(tableName)) {
continue;
}
Table table = metadataMgr.getTable(catalogName, dbName, tableName);
if (table == null) {
LOG.warn("table {}.{}.{} does not exist", catalogName, dbName, tableName);
continue;
}
if (table.isView()) {
if (!PrivilegeActions.checkAnyActionOnView(
connectContext, catalogName, db.getFullName(), table.getName())) {
continue;
}
} else if (!PrivilegeActions.checkAnyActionOnTable(connectContext,
catalogName, dbName, tableName)) {
continue;
}
tableMap.put(tableName, table.getMysqlType());
}
}
for (Map.Entry<String, String> entry : tableMap.entrySet()) {
if (showTableStmt.isVerbose()) {
rows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));
} else {
rows.add(Lists.newArrayList(entry.getKey()));
}
}
resultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);
}
// Handles SHOW TABLE STATUS: emits one MySQL-compatible status row per table in the
// target database that the current user has at least one privilege on.
// A missing database produces an empty result set rather than an error.
private void handleShowTableStatus() {
    ShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
    // Timestamps below are rendered in the session's configured time zone.
    ZoneId currentTimeZoneId = TimeUtils.getTimeZone().toZoneId();
    if (db != null) {
        db.readLock();
        try {
            // Optional LIKE pattern filter on table names.
            PatternMatcher matcher = null;
            if (showStmt.getPattern() != null) {
                matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                        CaseSensibility.TABLE.getCaseSensibility());
            }
            for (Table table : db.getTables()) {
                if (matcher != null && !matcher.match(table.getName())) {
                    continue;
                }
                // Skip tables the user has no privilege on at all.
                if (!PrivilegeActions.checkAnyActionOnTable(connectContext, db.getFullName(), table.getName())) {
                    continue;
                }
                TTableInfo info = new TTableInfo();
                // Native/OLAP-external tables have real stats; other table types get defaults.
                if (table.isNativeTableOrMaterializedView() || table.getType() == Table.TableType.OLAP_EXTERNAL) {
                    InformationSchemaDataSource.genNormalTableInfo(table, info);
                } else {
                    InformationSchemaDataSource.genDefaultConfigInfo(info);
                }
                // Columns follow MySQL's SHOW TABLE STATUS layout; values StarRocks does
                // not track are filled with null (or "") positionally.
                List<String> row = Lists.newArrayList();
                row.add(table.getName());
                row.add(table.getEngine());
                row.add(null);
                row.add("");
                row.add(String.valueOf(info.getTable_rows()));
                row.add(String.valueOf(info.getAvg_row_length()));
                row.add(String.valueOf(info.getData_length()));
                row.add(null);
                row.add(null);
                row.add(null);
                row.add(null);
                row.add(DateUtils.formatTimeStampInSeconds(table.getCreateTime(), currentTimeZoneId));
                row.add(DateUtils.formatTimeStampInSeconds(info.getUpdate_time(), currentTimeZoneId));
                row.add(null);
                row.add(InformationSchemaDataSource.UTF8_GENERAL_CI);
                row.add(null);
                row.add("");
                row.add(table.getDisplayComment());
                rows.add(row);
            }
        } finally {
            db.readUnlock();
        }
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW [GLOBAL|SESSION] VARIABLES, optionally filtered by a LIKE pattern.
private void handleShowVariables() {
    ShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;
    PatternMatcher matcher = showStmt.getPattern() == null
            ? null
            : PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                    CaseSensibility.VARIABLES.getCaseSensibility());
    List<List<String>> rows =
            VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher);
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW CREATE TABLE by dispatching on the table's catalog:
// internal (default) catalog tables vs. external catalog tables.
private void handleShowCreateTable() throws AnalysisException {
    ShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;
    TableName tbl = showStmt.getTbl();
    // Fall back to the session's current catalog when none was specified.
    String catalogName = tbl.getCatalog() != null
            ? tbl.getCatalog()
            : connectContext.getCurrentCatalog();
    if (CatalogMgr.isInternalCatalog(catalogName)) {
        showCreateInternalCatalogTable(showStmt);
    } else {
        showCreateExternalCatalogTable(tbl, catalogName);
    }
}
// Synthesizes a MySQL-style CREATE TABLE statement for a table in an external
// catalog (Hive/Hudi/Iceberg/DeltaLake/JDBC...), since external tables have no
// stored DDL. The ErrorReport calls below throw, so execution only continues
// when db and table were resolved.
private void showCreateExternalCatalogTable(TableName tbl, String catalogName) {
    String dbName = tbl.getDb();
    String tableName = tbl.getTbl();
    MetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
    Database db = metadataMgr.getDb(catalogName, dbName);
    if (db == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
    }
    Table table = metadataMgr.getTable(catalogName, dbName, tableName);
    if (table == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
    }
    StringBuilder createTableSql = new StringBuilder();
    createTableSql.append("CREATE TABLE ")
            .append("`").append(tableName).append("`")
            .append(" (\n");
    // One DDL line per column, joined with commas.
    List<String> columns = table.getFullSchema().stream().map(
            this::toMysqlDDL).collect(Collectors.toList());
    createTableSql.append(String.join(",\n", columns))
            .append("\n)");
    // JDBC tables never get a PARTITION BY clause; others only when partitioned.
    if (table.getType() != JDBC && !table.isUnPartitioned()) {
        createTableSql.append("\nPARTITION BY ( ")
                .append(String.join(", ", table.getPartitionColumnNames()))
                .append(" )");
    }
    // Surface the storage location for lake formats that expose one.
    String location = null;
    if (table.isHiveTable() || table.isHudiTable()) {
        location = ((HiveMetaStoreTable) table).getTableLocation();
    } else if (table.isIcebergTable()) {
        location = ((IcebergTable) table).getTableLocation();
    } else if (table.isDeltalakeTable()) {
        location = ((DeltaLakeTable) table).getTableLocation();
    }
    if (!Strings.isNullOrEmpty(location)) {
        createTableSql.append("\nPROPERTIES (\"location\" = \"").append(location).append("\");");
    }
    List<List<String>> rows = Lists.newArrayList();
    rows.add(Lists.newArrayList(tableName, createTableSql.toString()));
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
// Renders a single column as one line of MySQL-flavored DDL:
// `name` type DEFAULT NULL [COMMENT "..."].
private String toMysqlDDL(Column column) {
    StringBuilder ddl = new StringBuilder()
            .append(" `").append(column.getName()).append("` ")
            .append(column.getType().toSql())
            .append(" DEFAULT NULL");
    if (!Strings.isNullOrEmpty(column.getComment())) {
        ddl.append(" COMMENT \"").append(column.getDisplayComment()).append("\"");
    }
    return ddl.toString();
}
// Handles SHOW CREATE TABLE/VIEW/MATERIALIZED VIEW for the internal catalog.
// Control flow is subtle: when the name does not resolve to a table but the
// statement asks for a materialized view, the synchronous-MV indexes of every
// OLAP table in the db are searched before giving up.
private void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
    List<List<String>> rows = Lists.newArrayList();
    db.readLock();
    try {
        Table table = db.getTable(showStmt.getTable());
        if (table == null) {
            if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
                // Not an MV request: plain "table not found".
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
            } else {
                // MV request: the name may be a synchronous MV (rollup index) of some OLAP table.
                for (Table tbl : db.getTables()) {
                    if (tbl.getType() == Table.TableType.OLAP) {
                        OlapTable olapTable = (OlapTable) tbl;
                        List<MaterializedIndexMeta> visibleMaterializedViews =
                                olapTable.getVisibleIndexMetas();
                        for (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {
                            if (olapTable.getIndexNameById(mvMeta.getIndexId()).equals(showStmt.getTable())) {
                                if (mvMeta.getOriginStmt() == null) {
                                    // Old MVs may lack the original statement; reconstruct one.
                                    String mvName = olapTable.getIndexNameById(mvMeta.getIndexId());
                                    rows.add(Lists.newArrayList(showStmt.getTable(), buildCreateMVSql(olapTable,
                                            mvName, mvMeta), "utf8", "utf8_general_ci"));
                                } else {
                                    rows.add(Lists.newArrayList(showStmt.getTable(), mvMeta.getOriginStmt(),
                                            "utf8", "utf8_general_ci"));
                                }
                                resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
                                return;
                            }
                        }
                    }
                }
            }
            // No table and no matching sync MV: report not found (throws).
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
        }
        // table is non-null here; the error paths above all throw.
        List<String> createTableStmt = Lists.newArrayList();
        GlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);
        if (createTableStmt.isEmpty()) {
            resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
            return;
        }
        if (table instanceof View) {
            if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
                // A logical view cannot be shown as a materialized view.
                ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                        showStmt.getTable(), "MATERIALIZED VIEW");
            }
            rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), "utf8", "utf8_general_ci"));
            resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
        } else if (table instanceof MaterializedView) {
            // SHOW CREATE VIEW on an async MV renders its defining query as a view.
            if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {
                MaterializedView mv = (MaterializedView) table;
                String sb = "CREATE VIEW `" + table.getName() + "` AS " + mv.getViewDefineSql();
                rows.add(Lists.newArrayList(table.getName(), sb, "utf8", "utf8_general_ci"));
                resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
            } else {
                rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
                resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
            }
        } else {
            if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                        showStmt.getTable(), showStmt.getType().getValue());
            }
            rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
            resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
        }
    } finally {
        db.readUnlock();
    }
}
// Handles DESCRIBE: the rows were already computed during analysis,
// so this only wraps them in a result set.
private void handleDescribe() throws AnalysisException {
    DescribeStmt descStmt = (DescribeStmt) stmt;
    resultSet = new ShowResultSet(descStmt.getMetaData(), descStmt.getResultRows());
}
// Handles SHOW [FULL] COLUMNS: lists the base schema of a table, optionally
// filtered by a LIKE pattern, in MySQL-compatible column order.
private void handleShowColumn() throws AnalysisException {
    ShowColumnStmt showStmt = (ShowColumnStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    String catalogName = showStmt.getCatalog();
    if (catalogName == null) {
        catalogName = connectContext.getCurrentCatalog();
    }
    String dbName = showStmt.getDb();
    Database db = metadataMgr.getDb(catalogName, dbName);
    MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
    db.readLock();
    try {
        Table table = metadataMgr.getTable(catalogName, dbName, showStmt.getTable());
        if (table == null) {
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                    showStmt.getDb() + "." + showStmt.getTable());
        }
        PatternMatcher matcher = null;
        if (showStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                    CaseSensibility.COLUMN.getCaseSensibility());
        }
        for (Column col : table.getBaseSchema()) {
            if (matcher != null && !matcher.match(col.getName())) {
                continue;
            }
            final String name = col.getName();
            final String type = col.getType().canonicalName().toLowerCase();
            final String nullable = col.isAllowNull() ? "YES" : "NO";
            final String key = col.isKey() ? "YES" : "NO";
            final String defaultVal = col.getMetaDefaultValue(Lists.newArrayList());
            final String aggType = col.getAggregationType() == null
                    || col.isAggregationTypeImplicit() ? "" : col.getAggregationType().toSql();
            if (showStmt.isVerbose()) {
                // FULL adds collation, privileges placeholder and comment columns.
                rows.add(Lists.newArrayList(name, type, "", nullable, key, defaultVal, aggType,
                        "", col.getDisplayComment()));
            } else {
                rows.add(Lists.newArrayList(name, type, nullable, key, defaultVal, aggType));
            }
        }
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW INDEX: lists the indexes of an OLAP table in MySQL's SHOW INDEX
// column layout (unsupported columns are left empty).
// Non-OLAP tables simply produce an empty result set.
private void handleShowIndex() throws AnalysisException {
    ShowIndexStmt showStmt = (ShowIndexStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
    db.readLock();
    try {
        Table table = db.getTable(showStmt.getTableName().getTbl());
        if (table == null) {
            // Throws: unknown table.
            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                    db.getOriginName() + "." + showStmt.getTableName().toString());
        } else if (table instanceof OlapTable) {
            List<Index> indexes = ((OlapTable) table).getIndexes();
            for (Index index : indexes) {
                rows.add(Lists.newArrayList(showStmt.getTableName().toString(), "", index.getIndexName(),
                        "", String.join(",", index.getColumns()), "", "", "", "",
                        "", index.getIndexType().name(), index.getComment()));
            }
        }
        // else: non-OLAP tables carry no indexes; fall through with empty rows
        // (the previous empty else-block was dead code and has been removed).
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles HELP <mark>: resolves a help topic by exact name, then by keyword,
// then by category, returning topic details or lists of candidates.
private void handleHelp() {
    HelpStmt helpStmt = (HelpStmt) stmt;
    String mark = helpStmt.getMask();
    HelpModule module = HelpModule.getInstance();
    HelpTopic topic = module.getTopic(mark);
    if (topic == null) {
        List<String> keywordTopics = module.listTopicByKeyword(mark);
        if (keywordTopics.size() == 1) {
            // Unique keyword match: treat it as the topic.
            topic = module.getTopic(keywordTopics.get(0));
        } else if (keywordTopics.size() > 1) {
            // Ambiguous: list matching topics (N) and categories (Y) and stop.
            List<List<String>> rows = Lists.newArrayList();
            for (String name : keywordTopics) {
                rows.add(Lists.newArrayList(name, "N"));
            }
            for (String name : module.listCategoryByName(mark)) {
                rows.add(Lists.newArrayList(name, "Y"));
            }
            resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
            return;
        }
        // No keyword match: topic stays null and we fall through to categories.
    }
    if (topic != null) {
        resultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.<List<String>>newArrayList(
                Lists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));
        return;
    }
    List<String> categories = module.listCategoryByName(mark);
    if (categories.isEmpty()) {
        resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);
    } else if (categories.size() > 1) {
        resultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),
                Lists.<List<String>>newArrayList(categories));
    } else {
        // Single category: list its topics (N) and sub-categories (Y).
        List<List<String>> rows = Lists.newArrayList();
        for (String name : module.listTopicByCategory(categories.get(0))) {
            rows.add(Lists.newArrayList(name, "N"));
        }
        for (String name : module.listCategoryByCategory(categories.get(0))) {
            rows.add(Lists.newArrayList(name, "Y"));
        }
        resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
    }
}
/**
 * Handles SHOW LOAD: lists load jobs, optionally restricted to one database,
 * filtered by label/state, sorted by ORDER BY pairs, and paginated.
 */
private void handleShowLoad() throws AnalysisException {
    ShowLoadStmt showStmt = (ShowLoadStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    // -1 means "across all databases" (the old code assigned -1 twice).
    long dbId = -1;
    if (!showStmt.isAll()) {
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        dbId = db.getId();
    }
    Set<String> statesValue = showStmt.getStates() == null ? null : showStmt.getStates().stream()
            .map(Enum::name)
            .collect(Collectors.toSet());
    List<List<Comparable>> loadInfos =
            globalStateMgr.getLoadMgr().getLoadJobInfosByDb(dbId, showStmt.getLabelValue(),
                    showStmt.isAccurateMatch(),
                    statesValue);
    List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
    ListComparator<List<Comparable>> comparator;
    if (orderByPairs != null) {
        OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
        comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
    } else {
        // No ORDER BY: default ordering on column 0.
        comparator = new ListComparator<>(0);
    }
    loadInfos.sort(comparator);
    List<List<String>> rows = Lists.newArrayList();
    for (List<Comparable> loadInfo : loadInfos) {
        List<String> oneInfo = new ArrayList<>(loadInfo.size());
        for (Comparable element : loadInfo) {
            oneInfo.add(element.toString());
        }
        rows.add(oneInfo);
    }
    // Pagination. NOTE: as in the original logic, OFFSET without LIMIT is
    // only honored when it exceeds the row count (which empties the result).
    long limit = showStmt.getLimit();
    long offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();
    if (offset >= rows.size()) {
        rows = Lists.newArrayList();
    } else if (limit != -1L) {
        // Clamp the end index instead of duplicating the subList call.
        rows = rows.subList((int) offset, (int) Math.min(limit + offset, rows.size()));
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
/**
 * Handles SHOW ROUTINE LOAD: fetches jobs, removes those the user cannot see,
 * then filters/sorts/paginates through the statement's expression provider.
 * Throws AnalysisException when a specific job name was requested but nothing matched.
 */
private void handleShowRoutineLoad() throws AnalysisException {
    ShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    List<RoutineLoadJob> routineLoadJobList;
    try {
        routineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadMgr()
                .getJob(showRoutineLoadStmt.getDbFullName(),
                        showRoutineLoadStmt.getName(),
                        showRoutineLoadStmt.isIncludeHistory());
    } catch (MetaNotFoundException e) {
        LOG.warn(e.getMessage(), e);
        throw new AnalysisException(e.getMessage());
    }
    if (routineLoadJobList != null) {
        // Drop jobs whose table the current user has no privilege on.
        Iterator<RoutineLoadJob> iterator = routineLoadJobList.iterator();
        while (iterator.hasNext()) {
            RoutineLoadJob routineLoadJob = iterator.next();
            try {
                if (!PrivilegeActions.checkAnyActionOnTable(connectContext,
                        routineLoadJob.getDbFullName(),
                        routineLoadJob.getTableName())) {
                    iterator.remove();
                }
            } catch (MetaNotFoundException e) {
                // Best effort: the job's db/table may have been dropped concurrently.
                // Keep the job in the list, but record why the privilege check was
                // skipped (this exception used to be silently swallowed).
                LOG.warn("skip privilege check for routine load job, meta not found", e);
            }
        }
        RoutineLoadFunctionalExprProvider fProvider = showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);
        rows = routineLoadJobList.parallelStream()
                .filter(fProvider.getPredicateChain())
                .sorted(fProvider.getOrderComparator())
                .skip(fProvider.getSkipCount())
                .limit(fProvider.getLimitCount())
                .map(RoutineLoadJob::getShowInfo)
                .collect(Collectors.toList());
    }
    if (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {
        throw new AnalysisException("There is no running job named " + showRoutineLoadStmt.getName()
                + " in db " + showRoutineLoadStmt.getDbFullName()
                + ". Include history? " + showRoutineLoadStmt.isIncludeHistory()
                + ", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs");
    }
    resultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);
}
/**
 * Handles SHOW ROUTINE LOAD TASK: shows the task list of one routine load job.
 * Returns an empty set when the user lacks privilege on the job's target table.
 */
private void handleShowRoutineLoadTask() throws AnalysisException {
    ShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    RoutineLoadJob routineLoadJob;
    try {
        routineLoadJob =
                GlobalStateMgr.getCurrentState().getRoutineLoadMgr()
                        .getJob(showRoutineLoadTaskStmt.getDbFullName(),
                                showRoutineLoadTaskStmt.getJobName());
    } catch (MetaNotFoundException e) {
        LOG.warn(e.getMessage(), e);
        throw new AnalysisException(e.getMessage());
    }
    if (routineLoadJob == null) {
        // Fixed message: previously read "<name>does not exists ..." (missing
        // space and wrong verb form).
        throw new AnalysisException("The job named " + showRoutineLoadTaskStmt.getJobName()
                + " does not exist or job state is stopped or cancelled");
    }
    String dbFullName = showRoutineLoadTaskStmt.getDbFullName();
    String tableName;
    try {
        tableName = routineLoadJob.getTableName();
    } catch (MetaNotFoundException e) {
        throw new AnalysisException(
                "The table metadata of job has been changed. The job will be cancelled automatically", e);
    }
    if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {
        // No privilege: return an empty result rather than an error.
        resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
        return;
    }
    rows.addAll(routineLoadJob.getTasksShowInfo());
    resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
}
// Handles SHOW STREAM LOAD: fetches stream load tasks and applies the
// statement's filter/sort/limit expression provider.
private void handleShowStreamLoad() throws AnalysisException {
    ShowStreamLoadStmt showStreamLoadStmt = (ShowStreamLoadStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    List<StreamLoadTask> tasks;
    try {
        tasks = GlobalStateMgr.getCurrentState().getStreamLoadMgr()
                .getTask(showStreamLoadStmt.getDbFullName(),
                        showStreamLoadStmt.getName(),
                        showStreamLoadStmt.isIncludeHistory());
    } catch (MetaNotFoundException e) {
        LOG.warn(e.getMessage(), e);
        throw new AnalysisException(e.getMessage());
    }
    if (tasks != null) {
        StreamLoadFunctionalExprProvider fProvider = showStreamLoadStmt.getFunctionalExprProvider(this.connectContext);
        rows = tasks.parallelStream()
                .filter(fProvider.getPredicateChain())
                .sorted(fProvider.getOrderComparator())
                .skip(fProvider.getSkipCount())
                .limit(fProvider.getLimitCount())
                .map(StreamLoadTask::getShowInfo)
                .collect(Collectors.toList());
    }
    // A named lookup that matched nothing is an error, not an empty set.
    if (!Strings.isNullOrEmpty(showStreamLoadStmt.getName()) && rows.isEmpty()) {
        throw new AnalysisException("There is no label named " + showStreamLoadStmt.getName()
                + " in db " + showStreamLoadStmt.getDbFullName()
                + ". Include history? " + showStreamLoadStmt.isIncludeHistory());
    }
    resultSet = new ShowResultSet(showStreamLoadStmt.getMetaData(), rows);
}
// Handles SHOW PROPERTY: the statement computes its own rows for the session user.
private void handleShowUserProperty() throws AnalysisException {
    ShowUserPropertyStmt propertyStmt = (ShowUserPropertyStmt) stmt;
    resultSet = new ShowResultSet(propertyStmt.getMetaData(), propertyStmt.getRows(connectContext));
}
// Handles SHOW DELETE: stringifies the delete job infos of one database.
private void handleShowDelete() throws AnalysisException {
    ShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
    DeleteMgr deleteHandler = globalStateMgr.getDeleteMgr();
    List<List<String>> rows = Lists.newArrayList();
    for (List<Comparable> deleteInfo : deleteHandler.getDeleteInfosByDb(db.getId())) {
        List<String> row = new ArrayList<>(deleteInfo.size());
        for (Comparable cell : deleteInfo) {
            row.add(cell.toString());
        }
        rows.add(row);
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW ALTER: delegates to the proc node; schema-change nodes support
// server-side filtering/ordering/limits, other nodes return everything.
private void handleShowAlter() throws AnalysisException {
    ShowAlterStmt showStmt = (ShowAlterStmt) stmt;
    ProcNodeInterface procNode = showStmt.getNode();
    Preconditions.checkNotNull(procNode);
    List<List<String>> rows;
    if (procNode instanceof SchemaChangeProcDir) {
        SchemaChangeProcDir schemaChangeDir = (SchemaChangeProcDir) procNode;
        rows = schemaChangeDir.fetchResultByFilter(showStmt.getFilterMap(),
                showStmt.getOrderPairs(), showStmt.getLimitElement()).getRows();
    } else {
        rows = procNode.fetchResult().getRows();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW COLLATION: returns a fixed, hard-coded set of collations
// (utf8_general_ci, binary, gbk_chinese_ci) for MySQL-client compatibility.
private void handleShowCollation() {
    ShowCollationStmt showStmt = (ShowCollationStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    // Columns: Collation, Charset, Id, Default, Compiled, Sortlen.
    rows.add(Lists.newArrayList("utf8_general_ci", "utf8", "33", "Yes", "Yes", "1"));
    rows.add(Lists.newArrayList("binary", "binary", "63", "Yes", "Yes", "1"));
    rows.add(Lists.newArrayList("gbk_chinese_ci", "gbk", "28", "Yes", "Yes", "1"));
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW DATA in two modes:
//  * no table name: per-table size/replica summary for the whole db plus
//    Total/Quota/Left rows;
//  * with table name: per-materialized-index breakdown for one OLAP table.
// Rows are accumulated into the statement's own result-row list.
private void handleShowData() {
    ShowDataStmt showStmt = (ShowDataStmt) stmt;
    String dbName = showStmt.getDbName();
    Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
    if (db == null) {
        ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
    }
    db.readLock();
    try {
        String tableName = showStmt.getTableName();
        List<List<String>> totalRows = showStmt.getResultRows();
        if (tableName == null) {
            // Database-level summary.
            long totalSize = 0;
            long totalReplicaCount = 0;
            List<Table> tables = db.getTables();
            // Sort by table name for stable output; tables without privilege are skipped.
            SortedSet<Table> sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));
            for (Table table : tables) {
                if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, table.getName())) {
                    continue;
                }
                sortedTables.add(table);
            }
            for (Table table : sortedTables) {
                // Only native (OLAP) tables / MVs carry size statistics.
                if (!table.isNativeTableOrMaterializedView()) {
                    continue;
                }
                OlapTable olapTable = (OlapTable) table;
                long tableSize = olapTable.getDataSize();
                long replicaCount = olapTable.getReplicaCount();
                Pair<Double, String> tableSizePair = DebugUtil.getByteUint(tableSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + " "
                        + tableSizePair.second;
                List<String> row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));
                totalRows.add(row);
                totalSize += tableSize;
                totalReplicaCount += replicaCount;
            }
            // Summary row: total consumption across all listed tables.
            Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
            String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                    + totalSizePair.second;
            List<String> total = Arrays.asList("Total", readableSize, String.valueOf(totalReplicaCount));
            totalRows.add(total);
            // Quota row: configured db data/replica quotas.
            long quota = db.getDataQuota();
            long replicaQuota = db.getReplicaQuota();
            Pair<Double, String> quotaPair = DebugUtil.getByteUint(quota);
            String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + " "
                    + quotaPair.second;
            List<String> quotaRow = Arrays.asList("Quota", readableQuota, String.valueOf(replicaQuota));
            totalRows.add(quotaRow);
            // Left row: remaining quota, clamped at zero.
            long left = Math.max(0, quota - totalSize);
            long replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);
            Pair<Double, String> leftPair = DebugUtil.getByteUint(left);
            String readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + " "
                    + leftPair.second;
            List<String> leftRow = Arrays.asList("Left", readableLeft, String.valueOf(replicaCountLeft));
            totalRows.add(leftRow);
        } else {
            // Table-level breakdown; privilege is mandatory here (error, not skip).
            if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, tableName)) {
                ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                        connectContext.getQualifiedUser(),
                        connectContext.getRemoteIP(),
                        tableName);
            }
            Table table = db.getTable(tableName);
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
            }
            if (!table.isNativeTableOrMaterializedView()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);
            }
            OlapTable olapTable = (OlapTable) table;
            int i = 0;
            long totalSize = 0;
            long totalReplicaCount = 0;
            // One output row per materialized index (base + rollups), sorted by name.
            Map<String, Long> indexNames = olapTable.getIndexNameToId();
            Map<String, Long> sortedIndexNames = new TreeMap<>(indexNames);
            for (Long indexId : sortedIndexNames.values()) {
                long indexSize = 0;
                long indexReplicaCount = 0;
                long indexRowCount = 0;
                // Aggregate the index across all partitions.
                for (Partition partition : olapTable.getAllPartitions()) {
                    MaterializedIndex mIndex = partition.getIndex(indexId);
                    indexSize += mIndex.getDataSize();
                    indexReplicaCount += mIndex.getReplicaCount();
                    indexRowCount += mIndex.getRowCount();
                }
                Pair<Double, String> indexSizePair = DebugUtil.getByteUint(indexSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + " "
                        + indexSizePair.second;
                List<String> row = null;
                // Only the first row repeats the table name; the rest leave it blank.
                if (i == 0) {
                    row = Arrays.asList(tableName,
                            olapTable.getIndexNameById(indexId),
                            readableSize, String.valueOf(indexReplicaCount),
                            String.valueOf(indexRowCount));
                } else {
                    row = Arrays.asList("",
                            olapTable.getIndexNameById(indexId),
                            readableSize, String.valueOf(indexReplicaCount),
                            String.valueOf(indexRowCount));
                }
                totalSize += indexSize;
                totalReplicaCount += indexReplicaCount;
                totalRows.add(row);
                i++;
            }
            Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
            String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                    + totalSizePair.second;
            List<String> row = Arrays.asList("", "Total", readableSize, String.valueOf(totalReplicaCount), "");
            totalRows.add(row);
        }
    } catch (AnalysisException e) {
        // Surface analysis failures as semantic errors for this code path.
        throw new SemanticException(e.getMessage());
    } finally {
        db.readUnlock();
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());
}
// Handles SHOW PARTITIONS: the partitions proc dir applies the statement's
// filters, ordering and limit server-side.
private void handleShowPartitions() throws AnalysisException {
    ShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;
    ProcNodeInterface procNode = showStmt.getNode();
    Preconditions.checkNotNull(procNode);
    PartitionsProcDir partitionsDir = (PartitionsProcDir) procNode;
    List<List<String>> rows = partitionsDir.fetchResultByFilter(showStmt.getFilterMap(),
            showStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW TABLET in two modes:
//  * single tablet id: resolves the tablet's db/table/partition/index via the
//    inverted index and reports whether that metadata is still in sync;
//  * table mode: dumps tablet info for the table's partitions/indexes with
//    optional partition/index/version/backend/state filters plus limit/offset.
private void handleShowTablet() throws AnalysisException {
    ShowTabletStmt showStmt = (ShowTabletStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    if (showStmt.isShowSingleTablet()) {
        long tabletId = showStmt.getTabletId();
        TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();
        TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
        // Each id falls back to NOT_EXIST_VALUE when the tablet meta is missing.
        Long dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String dbName = null;
        Long tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String tableName = null;
        Long partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String partitionName = null;
        Long indexId = tabletMeta != null ? tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;
        String indexName = null;
        Boolean isSync = true;
        // do-while(false) acts as a labeled block: any missing level of metadata
        // marks isSync=false and breaks out; the finally still releases the lock.
        do {
            Database db = globalStateMgr.getDb(dbId);
            if (db == null) {
                isSync = false;
                break;
            }
            dbName = db.getFullName();
            db.readLock();
            try {
                Table table = db.getTable(tableId);
                if (!(table instanceof OlapTable)) {
                    isSync = false;
                    break;
                }
                tableName = table.getName();
                OlapTable olapTable = (OlapTable) table;
                Partition partition = olapTable.getPartition(partitionId);
                if (partition == null) {
                    isSync = false;
                    break;
                }
                partitionName = partition.getName();
                MaterializedIndex index = partition.getIndex(indexId);
                if (index == null) {
                    isSync = false;
                    break;
                }
                indexName = olapTable.getIndexNameById(indexId);
                // Cloud-native tablets have no local replicas to cross-check.
                if (table.isCloudNativeTableOrMaterializedView()) {
                    break;
                }
                LocalTablet tablet = (LocalTablet) index.getTablet(tabletId);
                if (tablet == null) {
                    isSync = false;
                    break;
                }
                // In sync only if every replica object in the tablet is the very
                // same instance the inverted index holds.
                List<Replica> replicas = tablet.getImmutableReplicas();
                for (Replica replica : replicas) {
                    Replica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());
                    if (tmp == null) {
                        isSync = false;
                        break;
                    }
                    // Identity comparison on purpose, not equals().
                    if (tmp != replica) {
                        isSync = false;
                        break;
                    }
                }
            } finally {
                db.readUnlock();
            }
        } while (false);
        String detailCmd = String.format("SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';",
                dbId, tableId, partitionId, indexId, tabletId);
        rows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,
                dbId.toString(), tableId.toString(),
                partitionId.toString(), indexId.toString(),
                isSync.toString(), detailCmd));
    } else {
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTableName());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());
            }
            if (!table.isNativeTableOrMaterializedView()) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());
            }
            OlapTable olapTable = (OlapTable) table;
            // sizeLimit = offset + limit: how many rows we need before truncating.
            long sizeLimit = -1;
            if (showStmt.hasOffset() && showStmt.hasLimit()) {
                sizeLimit = showStmt.getOffset() + showStmt.getLimit();
            } else if (showStmt.hasLimit()) {
                sizeLimit = showStmt.getLimit();
            }
            boolean stop = false;
            Collection<Partition> partitions = new ArrayList<>();
            if (showStmt.hasPartition()) {
                PartitionNames partitionNames = showStmt.getPartitionNames();
                for (String partName : partitionNames.getPartitionNames()) {
                    Partition partition = olapTable.getPartition(partName, partitionNames.isTemp());
                    if (partition == null) {
                        throw new AnalysisException("Unknown partition: " + partName);
                    }
                    partitions.add(partition);
                }
            } else {
                partitions = olapTable.getPartitions();
            }
            List<List<Comparable>> tabletInfos = new ArrayList<>();
            // Optional single-index filter resolved by name.
            String indexName = showStmt.getIndexName();
            long indexId = -1;
            if (indexName != null) {
                Long id = olapTable.getIndexIdByName(indexName);
                if (id == null) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());
                }
                indexId = id;
            }
            for (Partition partition : partitions) {
                if (stop) {
                    break;
                }
                for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                    if (indexId > -1 && index.getId() != indexId) {
                        continue;
                    }
                    if (olapTable.isCloudNativeTableOrMaterializedView()) {
                        LakeTabletsProcNode procNode = new LakeTabletsProcNode(db, olapTable, index);
                        tabletInfos.addAll(procNode.fetchComparableResult());
                    } else {
                        LocalTabletsProcDir procDir = new LocalTabletsProcDir(db, olapTable, index);
                        tabletInfos.addAll(procDir.fetchComparableResult(
                                showStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));
                    }
                    // Early stop only without ORDER BY: sorting would need all rows.
                    if (sizeLimit > -1 && CollectionUtils.isEmpty(showStmt.getOrderByPairs())
                            && tabletInfos.size() >= sizeLimit) {
                        stop = true;
                        break;
                    }
                }
            }
            List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
            ListComparator<List<Comparable>> comparator;
            if (orderByPairs != null) {
                OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
                comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
            } else {
                // Default sort on the first two columns.
                comparator = new ListComparator<>(0, 1);
            }
            tabletInfos.sort(comparator);
            if (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {
                tabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);
            }
            for (List<Comparable> tabletInfo : tabletInfos) {
                List<String> oneTablet = new ArrayList<>(tabletInfo.size());
                for (Comparable column : tabletInfo) {
                    oneTablet.add(column.toString());
                }
                rows.add(oneTablet);
            }
        } finally {
            db.readUnlock();
        }
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
// Handles SHOW BROKER: the broker manager already renders its info rows.
private void handleShowBroker() {
    ShowBrokerStmt brokerStmt = (ShowBrokerStmt) stmt;
    List<List<String>> brokerRows = GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo();
    resultSet = new ShowResultSet(brokerStmt.getMetaData(), brokerRows);
}
// Handles SHOW RESOURCES: the resource manager already renders its info rows.
private void handleShowResources() {
    ShowResourcesStmt resourcesStmt = (ShowResourcesStmt) stmt;
    List<List<String>> resourceRows = GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo();
    resultSet = new ShowResultSet(resourcesStmt.getMetaData(), resourceRows);
}
// Handles SHOW EXPORT: lists export jobs of one database, optionally filtered
// by job id, state or query id, with ordering and limit.
private void handleShowExport() throws AnalysisException {
    ShowExportStmt showExportStmt = (ShowExportStmt) stmt;
    GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
    Database db = globalStateMgr.getDb(showExportStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showExportStmt.getDbName());
    ExportMgr exportMgr = globalStateMgr.getExportMgr();
    // A single requested state becomes a singleton filter set; null means "any".
    ExportJob.JobState state = showExportStmt.getJobState();
    Set<ExportJob.JobState> states = state == null ? null : Sets.newHashSet(state);
    List<List<String>> infos = exportMgr.getExportJobInfosByIdOrState(
            db.getId(), showExportStmt.getJobId(), states, showExportStmt.getQueryId(),
            showExportStmt.getOrderByPairs(), showExportStmt.getLimit());
    resultSet = new ShowResultSet(showExportStmt.getMetaData(), infos);
}
/** SHOW BACKENDS: cluster-wide backend status rows from the proc dir. */
private void handleShowBackends() {
    final ShowBackendsStmt showBackendsStmt = (ShowBackendsStmt) stmt;
    resultSet = new ShowResultSet(showBackendsStmt.getMetaData(),
            BackendsProcDir.getClusterBackendInfos());
}
/** SHOW FRONTENDS: rows are filled in-place by FrontendsProcNode. */
private void handleShowFrontends() {
    final ShowFrontendsStmt showFrontendsStmt = (ShowFrontendsStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    FrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), rows);
    resultSet = new ShowResultSet(showFrontendsStmt.getMetaData(), rows);
}
/** SHOW REPOSITORIES: backup repositories registered with the backup handler. */
private void handleShowRepositories() {
    final ShowRepositoriesStmt showRepositoriesStmt = (ShowRepositoriesStmt) stmt;
    resultSet = new ShowResultSet(showRepositoriesStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo());
}
/** SHOW SNAPSHOT: snapshot listing from a named backup repository. */
private void handleShowSnapshot() throws AnalysisException {
    final ShowSnapshotStmt showSnapshotStmt = (ShowSnapshotStmt) stmt;
    Repository repo = GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr()
            .getRepo(showSnapshotStmt.getRepoName());
    if (repo == null) {
        throw new AnalysisException("Repository " + showSnapshotStmt.getRepoName() + " does not exist");
    }
    List<List<String>> rows = repo.getSnapshotInfos(showSnapshotStmt.getSnapshotName(),
            showSnapshotStmt.getTimestamp(), showSnapshotStmt.getSnapshotNames());
    resultSet = new ShowResultSet(showSnapshotStmt.getMetaData(), rows);
}
/**
 * SHOW BACKUP: collects the info row of the backup job registered for each
 * scanned database (the named one, or all databases when none/unknown is given).
 */
private void handleShowBackup() {
    ShowBackupStmt showStmt = (ShowBackupStmt) stmt;
    Database filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());
    List<List<String>> infos = Lists.newArrayList();
    List<Database> dbs = Lists.newArrayList();
    // No matching db filter: scan every database.
    if (filterDb == null) {
        for (Map.Entry<Long, Database> entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {
            dbs.add(entry.getValue());
        }
    } else {
        dbs.add(filterDb);
    }
    for (Database db : dbs) {
        AbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());
        // Not a backup job (e.g. restore, or none): nothing to show for this db.
        // NOTE(review): this resultSet assignment is overwritten by the final one
        // below unless a later iteration hits the privilege-deny early return.
        if (!(jobI instanceof BackupJob)) {
            resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
            continue;
        }
        BackupJob backupJob = (BackupJob) jobI;
        List<TableRef> tableRefs = backupJob.getTableRef();
        // The caller needs EXPORT on every table involved in the job; one missing
        // privilege hides the whole result (early return, not a per-db skip).
        AtomicBoolean privilegeDeny = new AtomicBoolean(false);
        tableRefs.forEach(tableRef -> {
            TableName tableName = tableRef.getName();
            if (!PrivilegeActions.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),
                    PrivilegeType.EXPORT)) {
                privilegeDeny.set(true);
            }
        });
        if (privilegeDeny.get()) {
            resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
            return;
        }
        List<String> info = backupJob.getInfo();
        infos.add(info);
    }
    resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
/** SHOW RESTORE: one info row per scanned database that currently has a RestoreJob. */
private void handleShowRestore() {
    ShowRestoreStmt showRestoreStmt = (ShowRestoreStmt) stmt;
    GlobalStateMgr stateMgr = GlobalStateMgr.getCurrentState();
    // Restrict to the named database when it exists, otherwise scan them all.
    List<Database> candidateDbs = Lists.newArrayList();
    Database namedDb = stateMgr.getDb(showRestoreStmt.getDbName());
    if (namedDb != null) {
        candidateDbs.add(namedDb);
    } else {
        candidateDbs.addAll(stateMgr.getIdToDb().values());
    }
    List<List<String>> restoreInfos = Lists.newArrayList();
    for (Database database : candidateDbs) {
        AbstractJob job = stateMgr.getBackupHandler().getJob(database.getId());
        if (job instanceof RestoreJob) {
            restoreInfos.add(((RestoreJob) job).getInfo());
        } else {
            // Preserved from the original: publish an empty set while scanning;
            // the final assignment below wins.
            resultSet = new ShowResultSet(showRestoreStmt.getMetaData(), EMPTY_SET);
        }
    }
    resultSet = new ShowResultSet(showRestoreStmt.getMetaData(), restoreInfos);
}
/**
 * Resolves a catalog id to its name; the internal catalog id maps to the
 * default internal catalog name.
 *
 * @throws MetaNotFoundException if no catalog with that id exists
 */
private String getCatalogNameById(long catalogId) throws MetaNotFoundException {
    if (CatalogMgr.isInternalCatalog(catalogId)) {
        return InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;
    }
    return GlobalStateMgr.getCurrentState().getCatalogMgr().getCatalogById(catalogId)
            .map(Catalog::getName)
            .orElseThrow(() -> new MetaNotFoundException("cannot find catalog"));
}
/**
 * Derives the catalog name a privilege entry belongs to.
 * Returns null when the entry covers all catalogs; non-catalog-scoped object
 * types are attributed to the default internal catalog.
 */
private String getCatalogNameFromPEntry(ObjectType objectType, PrivilegeCollection.PrivilegeEntry privilegeEntry)
        throws MetaNotFoundException {
    long catalogId;
    if (objectType.equals(ObjectType.CATALOG)) {
        catalogId = ((CatalogPEntryObject) privilegeEntry.getObject()).getId();
    } else if (objectType.equals(ObjectType.DATABASE)) {
        catalogId = ((DbPEntryObject) privilegeEntry.getObject()).getCatalogId();
    } else if (objectType.equals(ObjectType.TABLE)) {
        catalogId = ((TablePEntryObject) privilegeEntry.getObject()).getCatalogId();
    } else {
        return InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;
    }
    // "All catalogs" entries have no single catalog name.
    return (catalogId == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) ? null : getCatalogNameById(catalogId);
}
/**
 * Renders privilege entries as SHOW GRANTS rows: (grantee, catalog, GRANT sql).
 * Entries whose object metadata can no longer be resolved are silently skipped.
 */
private List<List<String>> privilegeToRowString(AuthorizationMgr authorizationManager, GrantRevokeClause userOrRoleName,
                                        Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>>
                                                typeToPrivilegeEntryList) throws PrivilegeException {
    List<List<String>> infos = new ArrayList<>();
    for (Map.Entry<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
            : typeToPrivilegeEntryList.entrySet()) {
        for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
            ObjectType objectType = typeToPrivilegeEntry.getKey();
            String catalogName;
            try {
                catalogName = getCatalogNameFromPEntry(objectType, privilegeEntry);
            } catch (MetaNotFoundException e) {
                // Catalog referenced by the entry no longer exists: drop the row.
                continue;
            }
            List<String> info = new ArrayList<>();
            // Grantee column: role name when present, user identity otherwise.
            info.add(userOrRoleName.getRoleName() != null ?
                    userOrRoleName.getRoleName() : userOrRoleName.getUserIdentity().toString());
            info.add(catalogName);
            // Rebuild a synthetic GRANT statement and print it back as SQL.
            GrantPrivilegeStmt grantPrivilegeStmt = new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(),
                    userOrRoleName, null, privilegeEntry.isWithGrantOption());
            grantPrivilegeStmt.setObjectType(objectType);
            ActionSet actionSet = privilegeEntry.getActionSet();
            List<PrivilegeType> privList = authorizationManager.analyzeActionSet(objectType, actionSet);
            grantPrivilegeStmt.setPrivilegeTypes(privList);
            grantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));
            try {
                info.add(AstToSQLBuilder.toSQL(grantPrivilegeStmt));
                infos.add(info);
            } catch (com.starrocks.sql.common.MetaNotFoundException e) {
                // Intentionally ignored: objects dropped mid-render are skipped.
            }
        }
    }
    return infos;
}
/**
 * SHOW GRANTS: inherited-role details followed by one row per privilege entry,
 * either for an explicit role or for a user identity.
 */
private void handleShowGrants() {
    ShowGrantsStmt showGrantsStmt = (ShowGrantsStmt) stmt;
    AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
    try {
        List<List<String>> rows = new ArrayList<>();
        if (showGrantsStmt.getRole() != null) {
            String role = showGrantsStmt.getRole();
            List<String> granteeRole = authorizationManager.getGranteeRoleDetailsForRole(role);
            if (granteeRole != null) {
                rows.add(granteeRole);
            }
            rows.addAll(privilegeToRowString(authorizationManager, new GrantRevokeClause(null, role),
                    authorizationManager.getTypeToPrivilegeEntryListByRole(role)));
        } else {
            UserIdentity userIdent = showGrantsStmt.getUserIdent();
            List<String> granteeRole = authorizationManager.getGranteeRoleDetailsForUser(userIdent);
            if (granteeRole != null) {
                rows.add(granteeRole);
            }
            rows.addAll(privilegeToRowString(authorizationManager, new GrantRevokeClause(userIdent, null),
                    authorizationManager.getTypeToPrivilegeEntryListByUser(userIdent)));
        }
        resultSet = new ShowResultSet(showGrantsStmt.getMetaData(), rows);
    } catch (PrivilegeException e) {
        throw new SemanticException(e.getMessage());
    }
}
/** SHOW ROLES: (name, is-builtin, comment) for every role. */
private void handleShowRoles() {
    ShowRolesStmt showRolesStmt = (ShowRolesStmt) stmt;
    AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
    List<List<String>> rows = new ArrayList<>();
    for (String role : authorizationManager.getAllRoles()) {
        rows.add(Lists.newArrayList(
                role,
                String.valueOf(authorizationManager.isBuiltinRole(role)),
                authorizationManager.getRoleComment(role)));
    }
    resultSet = new ShowResultSet(showRolesStmt.getMetaData(), rows);
}
/** SHOW [ALL] USERS: every known user, or just the session's current identity. */
private void handleShowUser() {
    ShowUserStmt showUserStmt = (ShowUserStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    if (showUserStmt.isAll()) {
        AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
        for (String user : authorizationManager.getAllUsers()) {
            rows.add(Lists.newArrayList(user));
        }
    } else {
        rows.add(Lists.newArrayList(connectContext.getCurrentUserIdentity().toString()));
    }
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/** ADMIN SHOW REPLICA STATUS: delegates to MetadataViewer; DdlException surfaces as AnalysisException. */
private void handleAdminShowTabletStatus() throws AnalysisException {
    AdminShowReplicaStatusStmt showStmt = (AdminShowReplicaStatusStmt) stmt;
    try {
        resultSet = new ShowResultSet(showStmt.getMetaData(), MetadataViewer.getTabletStatus(showStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/** ADMIN SHOW REPLICA DISTRIBUTION: delegates to MetadataViewer; DdlException surfaces as AnalysisException. */
private void handleAdminShowTabletDistribution() throws AnalysisException {
    AdminShowReplicaDistributionStmt showStmt = (AdminShowReplicaDistributionStmt) stmt;
    try {
        resultSet = new ShowResultSet(showStmt.getMetaData(), MetadataViewer.getTabletDistribution(showStmt));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/** ADMIN SHOW FRONTEND CONFIG: FE config items, optionally LIKE-filtered, sorted by name. */
private void handleAdminShowConfig() throws AnalysisException {
    AdminShowConfigStmt showConfigStmt = (AdminShowConfigStmt) stmt;
    List<List<String>> rows;
    try {
        PatternMatcher matcher = (showConfigStmt.getPattern() == null) ? null :
                PatternMatcher.createMysqlPattern(showConfigStmt.getPattern(),
                        CaseSensibility.CONFIG.getCaseSensibility());
        rows = ConfigBase.getConfigInfo(matcher);
        // Deterministic presentation order by config name.
        rows.sort(Comparator.comparing(row -> row.get(0)));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
    resultSet = new ShowResultSet(showConfigStmt.getMetaData(), rows);
}
/** SHOW FILE: small files registered for one database; DdlException surfaces as AnalysisException. */
private void handleShowSmallFiles() throws AnalysisException {
    ShowSmallFilesStmt showSmallFilesStmt = (ShowSmallFilesStmt) stmt;
    try {
        resultSet = new ShowResultSet(showSmallFilesStmt.getMetaData(),
                GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(showSmallFilesStmt.getDbName()));
    } catch (DdlException e) {
        throw new AnalysisException(e.getMessage());
    }
}
/**
 * SHOW DYNAMIC PARTITION TABLES: one row per OLAP table with a dynamic
 * partition property that the caller can see, read under the db read lock.
 */
private void handleShowDynamicPartition() {
    ShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    Database db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());
    if (db != null) {
        db.readLock();
        try {
            for (Table tbl : db.getTables()) {
                // Only OLAP tables can carry dynamic partition properties.
                if (!(tbl instanceof OlapTable)) {
                    continue;
                }
                DynamicPartitionScheduler dynamicPartitionScheduler =
                        GlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();
                OlapTable olapTable = (OlapTable) tbl;
                // Drop stale scheduler info for tables that no longer use the feature.
                if (!olapTable.dynamicPartitionExists()) {
                    dynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());
                    continue;
                }
                // Hide tables the caller has no privilege on.
                if (!PrivilegeActions.checkAnyActionOnTable(ConnectContext.get(),
                        db.getFullName(), olapTable.getName())) {
                    continue;
                }
                DynamicPartitionProperty dynamicPartitionProperty =
                        olapTable.getTableProperty().getDynamicPartitionProperty();
                String tableName = olapTable.getName();
                int replicationNum = dynamicPartitionProperty.getReplicationNum();
                // NOTE(review): when the property DOES set a replication number, this
                // reports RunMode.defaultReplicationNum() instead of the property's
                // value — confirm the ternary branches are not swapped.
                replicationNum = (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) ?
                        olapTable.getDefaultReplicationNum() : RunMode.defaultReplicationNum();
                rows.add(Lists.newArrayList(
                        tableName,
                        String.valueOf(dynamicPartitionProperty.getEnable()),
                        dynamicPartitionProperty.getTimeUnit().toUpperCase(),
                        String.valueOf(dynamicPartitionProperty.getStart()),
                        String.valueOf(dynamicPartitionProperty.getEnd()),
                        dynamicPartitionProperty.getPrefix(),
                        String.valueOf(dynamicPartitionProperty.getBuckets()),
                        String.valueOf(replicationNum),
                        dynamicPartitionProperty.getStartOfInfo(),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),
                        dynamicPartitionScheduler
                                .getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));
            }
        } finally {
            db.readUnlock();
        }
        // NOTE(review): resultSet is left untouched when the db does not exist.
        resultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);
    }
}
/** SHOW TRANSACTION: single-transaction detail looked up by database and txn id. */
private void handleShowTransaction() throws AnalysisException {
    ShowTransactionStmt showTxnStmt = (ShowTransactionStmt) stmt;
    Database db = connectContext.getGlobalStateMgr().getDb(showTxnStmt.getDbName());
    MetaUtils.checkDbNullAndReport(db, showTxnStmt.getDbName());
    GlobalTransactionMgr transactionMgr = GlobalStateMgr.getCurrentGlobalTransactionMgr();
    resultSet = new ShowResultSet(showTxnStmt.getMetaData(),
            transactionMgr.getSingleTranInfo(db.getId(), showTxnStmt.getTxnId()));
}
/** SHOW PLUGINS: one row per installed plugin. */
private void handleShowPlugins() {
    ShowPluginsStmt showPluginsStmt = (ShowPluginsStmt) stmt;
    resultSet = new ShowResultSet(showPluginsStmt.getMetaData(),
            GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos());
}
/** SHOW CHARSET: only utf8 is supported, so the answer is one static row. */
private void handleShowCharset() {
    ShowCharsetStmt showCharsetStmt = (ShowCharsetStmt) stmt;
    List<List<String>> rows = Lists.newArrayList();
    // charset, description, default collation, max bytes per character
    rows.add(Lists.newArrayList("utf8", "UTF-8 Unicode", "utf8_general_ci", "3"));
    resultSet = new ShowResultSet(showCharsetStmt.getMetaData(), rows);
}
/** SHOW SQLBLACKLIST: one (id, pattern) row per registered blacklist entry. */
private void handleShowSqlBlackListStmt() {
    ShowSqlBlackListStmt showBlackListStmt = (ShowSqlBlackListStmt) stmt;
    List<List<String>> rows = new ArrayList<>();
    SqlBlackList.getInstance().sqlBlackListMap.forEach((sqlPattern, blackListSql) ->
            rows.add(Lists.newArrayList(String.valueOf(blackListSql.id), sqlPattern)));
    resultSet = new ShowResultSet(showBlackListStmt.getMetaData(), rows);
}
/**
 * SHOW ANALYZE JOB: analyze jobs sorted by id; jobs whose target metadata has
 * been dropped are silently skipped, then the WHERE predicate is applied.
 */
private void handleShowAnalyzeJob() {
    List<AnalyzeJob> jobs = connectContext.getGlobalStateMgr().getAnalyzeMgr().getAllAnalyzeJobList();
    List<List<String>> rows = Lists.newArrayList();
    jobs.sort(Comparator.comparing(AnalyzeJob::getId));
    for (AnalyzeJob job : jobs) {
        try {
            List<String> result = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);
            if (result != null) {
                rows.add(result);
            }
        } catch (MetaNotFoundException e) {
            // Intentionally ignored: the job references dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * SHOW ANALYZE STATUS: analyze run statuses sorted by id; rows whose target
 * metadata is gone are skipped, then the WHERE predicate is applied.
 */
private void handleShowAnalyzeStatus() {
    List<AnalyzeStatus> statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
            .getAnalyzeStatusMap().values());
    List<List<String>> rows = Lists.newArrayList();
    statuses.sort(Comparator.comparing(AnalyzeStatus::getId));
    for (AnalyzeStatus status : statuses) {
        try {
            List<String> result = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);
            if (result != null) {
                rows.add(result);
            }
        } catch (MetaNotFoundException e) {
            // Intentionally ignored: the status references dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * SHOW STATS META: basic-statistics metadata rows; entries whose table is gone
 * are skipped, then the WHERE predicate is applied.
 */
private void handleShowBasicStatsMeta() {
    List<BasicStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
            .getBasicStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (BasicStatsMeta meta : metas) {
        try {
            List<String> result = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);
            if (result != null) {
                rows.add(result);
            }
        } catch (MetaNotFoundException e) {
            // Intentionally ignored: the meta references dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * SHOW HISTOGRAM META: histogram-statistics metadata rows; entries whose table
 * is gone are skipped, then the WHERE predicate is applied.
 */
private void handleShowHistogramStatsMeta() {
    List<HistogramStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
            .getHistogramStatsMetaMap().values());
    List<List<String>> rows = Lists.newArrayList();
    for (HistogramStatsMeta meta : metas) {
        try {
            List<String> result = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);
            if (result != null) {
                rows.add(result);
            }
        } catch (MetaNotFoundException e) {
            // Intentionally ignored: the meta references dropped metadata.
        }
    }
    rows = doPredicate(stmt, stmt.getMetaData(), rows);
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/** SHOW RESOURCE GROUP(S): delegated entirely to the resource group manager. */
private void handleShowResourceGroup() throws AnalysisException {
    ShowResourceGroupStmt showResourceGroupStmt = (ShowResourceGroupStmt) stmt;
    resultSet = new ShowResultSet(showResourceGroupStmt.getMetaData(),
            GlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(showResourceGroupStmt));
}
/**
 * SHOW CATALOGS: the internal catalog is always visible; external catalogs are
 * filtered by the caller's any-action privilege. Sorted by catalog name.
 */
private void handleShowCatalogs() {
    ShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;
    CatalogMgr catalogMgr = GlobalStateMgr.getCurrentState().getCatalogMgr();
    List<List<String>> rows = catalogMgr.getCatalogsInfo().stream()
            .filter(row -> InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equals(row.get(0))
                    || PrivilegeActions.checkAnyActionOnOrInCatalog(
                            connectContext.getCurrentUserIdentity(),
                            connectContext.getCurrentRoleIds(), row.get(0)))
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rows);
}
/** SHOW WAREHOUSES: warehouse info rows sorted by name. */
private void handleShowWarehouses() {
    ShowWarehousesStmt showWarehousesStmt = (ShowWarehousesStmt) stmt;
    WarehouseManager warehouseMgr = GlobalStateMgr.getCurrentState().getWarehouseMgr();
    List<List<String>> rows = warehouseMgr.getWarehousesInfo().stream()
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(showWarehousesStmt.getMetaData(), rows);
}
/** SHOW CLUSTERS: cluster info of one warehouse, sorted by the first column. */
private void handleShowClusters() {
    ShowClustersStmt showClustersStmt = (ShowClustersStmt) stmt;
    Warehouse warehouse = GlobalStateMgr.getCurrentWarehouseMgr()
            .getWarehouse(showClustersStmt.getWarehouseName());
    List<List<String>> rows = warehouse.getClusterInfo().stream()
            .sorted(Comparator.comparing(row -> row.get(0)))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(showClustersStmt.getMetaData(), rows);
}
/**
 * Applies the statement's WHERE predicate to already-materialized rows.
 * Only a single `col = 'literal'` equality is supported: any other operator
 * yields an empty result. The casts below assume the analyzer produced a
 * BinaryPredicate over a SlotRef and a StringLiteral.
 */
private List<List<String>> doPredicate(ShowStmt showStmt,
                                       ShowResultSetMetaData showResultSetMetaData,
                                       List<List<String>> rows) {
    Predicate predicate = showStmt.getPredicate();
    // No WHERE clause: pass everything through unchanged.
    if (predicate == null) {
        return rows;
    }
    SlotRef slotRef = (SlotRef) predicate.getChild(0);
    StringLiteral stringLiteral = (StringLiteral) predicate.getChild(1);
    List<List<String>> returnRows = new ArrayList<>();
    BinaryPredicate binaryPredicate = (BinaryPredicate) predicate;
    // Column position resolved by name against the result-set metadata.
    int idx = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());
    if (binaryPredicate.getOp().isEquivalence()) {
        for (List<String> row : rows) {
            if (row.get(idx).equals(stringLiteral.getStringValue())) {
                returnRows.add(row);
            }
        }
    }
    return returnRows;
}
/**
 * SHOW CREATE EXTERNAL CATALOG: reconstructs a CREATE EXTERNAL CATALOG
 * statement from the catalog's stored config, masking cloud credentials.
 * The internal catalog has no CREATE statement and returns an empty set.
 */
private void handleShowCreateExternalCatalog() throws AnalysisException {
    ShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;
    String catalogName = showStmt.getCatalogName();
    List<List<String>> rows = Lists.newArrayList();
    if (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {
        resultSet = new ShowResultSet(stmt.getMetaData(), rows);
        return;
    }
    Catalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);
    if (catalog == null) {
        // Throws AnalysisException, so catalog is non-null below.
        ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName);
    }
    StringBuilder createCatalogSql = new StringBuilder();
    createCatalogSql.append("CREATE EXTERNAL CATALOG ")
            .append("`").append(catalogName).append("`")
            .append("\n");
    String comment = catalog.getComment();
    if (comment != null) {
        createCatalogSql.append("comment \"").append(catalog.getDisplayComment()).append("\"\n");
    }
    // Copy the config before masking so the stored properties stay intact.
    Map<String, String> clonedConfig = new HashMap<>(catalog.getConfig());
    CloudCredentialUtil.maskCloudCredential(clonedConfig);
    createCatalogSql.append("PROPERTIES (")
            .append(new PrintableMap<>(clonedConfig, " = ", true, true))
            .append("\n)");
    rows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));
    resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
/**
 * SHOW STORAGE VOLUMES: volume names matching the optional LIKE pattern,
 * restricted to volumes the caller has any privilege on.
 */
private void handleShowStorageVolumes() throws DdlException {
    ShowStorageVolumesStmt showStorageVolumesStmt = (ShowStorageVolumesStmt) stmt;
    StorageVolumeMgr storageVolumeMgr = GlobalStateMgr.getCurrentState().getStorageVolumeMgr();
    // NOTE(review): as in the original, the pattern is assumed non-null and an
    // empty string means "no pattern" — confirm against the statement analyzer.
    final PatternMatcher matcher = showStorageVolumesStmt.getPattern().isEmpty() ? null :
            PatternMatcher.createMysqlPattern(showStorageVolumesStmt.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
    List<List<String>> rows = storageVolumeMgr.listStorageVolumeNames().stream()
            .filter(name -> matcher == null || matcher.match(name))
            .filter(name -> PrivilegeActions.checkAnyActionOnStorageVolume(connectContext, name))
            .map(name -> Lists.<String>newArrayList(name))
            .collect(Collectors.toList());
    resultSet = new ShowResultSet(showStorageVolumesStmt.getMetaData(), rows);
}
/** DESC STORAGE VOLUME: the statement object computes its own result rows. */
private void handleDescStorageVolume() throws AnalysisException {
    DescStorageVolumeStmt descStmt = (DescStorageVolumeStmt) stmt;
    resultSet = new ShowResultSet(descStmt.getMetaData(), descStmt.getResultRows());
}
} | class ShowExecutor {
private static final Logger LOG = LogManager.getLogger(ShowExecutor.class);
// Shared empty result used when a SHOW produces no visible rows.
private static final List<List<String>> EMPTY_SET = Lists.newArrayList();
// Session issuing the SHOW; used for privilege checks and current-user lookups.
private final ConnectContext connectContext;
// The statement being executed; each handler downcasts it to its concrete type.
private final ShowStmt stmt;
// Written by the handler chosen in execute(), read back by execute() afterwards.
private ShowResultSet resultSet;
private final MetadataMgr metadataMgr;
/**
 * Creates an executor bound to one session and one SHOW statement.
 */
public ShowExecutor(ConnectContext connectContext, ShowStmt stmt) {
    this.connectContext = connectContext;
    this.stmt = stmt;
    resultSet = null;
    metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
}
/**
 * Dispatches the SHOW statement to its type-specific handler, which stores its
 * result into {@code resultSet}; the statement's WHERE predicate is then
 * applied to the handler's rows and a fresh result set is returned.
 *
 * @throws AnalysisException if the target object (db, repo, catalog, ...) is invalid
 * @throws DdlException      propagated from storage-volume listing
 */
public ShowResultSet execute() throws AnalysisException, DdlException {
    if (stmt instanceof ShowMaterializedViewsStmt) {
        handleShowMaterializedView();
    } else if (stmt instanceof ShowAuthorStmt) {
        handleShowAuthor();
    } else if (stmt instanceof ShowProcStmt) {
        handleShowProc();
    } else if (stmt instanceof HelpStmt) {
        handleHelp();
    } else if (stmt instanceof ShowWarehousesStmt) {
        handleShowWarehouses();
    } else if (stmt instanceof ShowClustersStmt) {
        handleShowClusters();
    } else if (stmt instanceof ShowDbStmt) {
        handleShowDb();
    } else if (stmt instanceof ShowTableStmt) {
        handleShowTable();
    } else if (stmt instanceof ShowTableStatusStmt) {
        handleShowTableStatus();
    } else if (stmt instanceof DescribeStmt) {
        handleDescribe();
    } else if (stmt instanceof ShowCreateTableStmt) {
        handleShowCreateTable();
    } else if (stmt instanceof ShowCreateDbStmt) {
        handleShowCreateDb();
    } else if (stmt instanceof ShowProcesslistStmt) {
        handleShowProcesslist();
    } else if (stmt instanceof ShowEnginesStmt) {
        handleShowEngines();
    } else if (stmt instanceof ShowFunctionsStmt) {
        handleShowFunctions();
    } else if (stmt instanceof ShowVariablesStmt) {
        handleShowVariables();
    } else if (stmt instanceof ShowColumnStmt) {
        handleShowColumn();
    } else if (stmt instanceof ShowLoadStmt) {
        handleShowLoad();
    } else if (stmt instanceof ShowRoutineLoadStmt) {
        handleShowRoutineLoad();
    } else if (stmt instanceof ShowRoutineLoadTaskStmt) {
        handleShowRoutineLoadTask();
    } else if (stmt instanceof ShowStreamLoadStmt) {
        handleShowStreamLoad();
    } else if (stmt instanceof ShowDeleteStmt) {
        handleShowDelete();
    } else if (stmt instanceof ShowAlterStmt) {
        handleShowAlter();
    } else if (stmt instanceof ShowUserPropertyStmt) {
        handleShowUserProperty();
    } else if (stmt instanceof ShowDataStmt) {
        handleShowData();
    } else if (stmt instanceof ShowCollationStmt) {
        handleShowCollation();
    } else if (stmt instanceof ShowPartitionsStmt) {
        handleShowPartitions();
    } else if (stmt instanceof ShowTabletStmt) {
        handleShowTablet();
    } else if (stmt instanceof ShowBackupStmt) {
        handleShowBackup();
    } else if (stmt instanceof ShowRestoreStmt) {
        handleShowRestore();
    } else if (stmt instanceof ShowBrokerStmt) {
        handleShowBroker();
    } else if (stmt instanceof ShowResourcesStmt) {
        handleShowResources();
    } else if (stmt instanceof ShowExportStmt) {
        handleShowExport();
    } else if (stmt instanceof ShowBackendsStmt) {
        handleShowBackends();
    } else if (stmt instanceof ShowFrontendsStmt) {
        handleShowFrontends();
    } else if (stmt instanceof ShowRepositoriesStmt) {
        handleShowRepositories();
    } else if (stmt instanceof ShowSnapshotStmt) {
        handleShowSnapshot();
    } else if (stmt instanceof ShowGrantsStmt) {
        handleShowGrants();
    } else if (stmt instanceof ShowRolesStmt) {
        handleShowRoles();
    } else if (stmt instanceof AdminShowReplicaStatusStmt) {
        handleAdminShowTabletStatus();
    } else if (stmt instanceof AdminShowReplicaDistributionStmt) {
        handleAdminShowTabletDistribution();
    } else if (stmt instanceof AdminShowConfigStmt) {
        handleAdminShowConfig();
    } else if (stmt instanceof ShowSmallFilesStmt) {
        handleShowSmallFiles();
    } else if (stmt instanceof ShowDynamicPartitionStmt) {
        handleShowDynamicPartition();
    } else if (stmt instanceof ShowIndexStmt) {
        handleShowIndex();
    } else if (stmt instanceof ShowTransactionStmt) {
        handleShowTransaction();
    } else if (stmt instanceof ShowPluginsStmt) {
        handleShowPlugins();
    } else if (stmt instanceof ShowSqlBlackListStmt) {
        handleShowSqlBlackListStmt();
    } else if (stmt instanceof ShowAnalyzeJobStmt) {
        handleShowAnalyzeJob();
    } else if (stmt instanceof ShowAnalyzeStatusStmt) {
        handleShowAnalyzeStatus();
    } else if (stmt instanceof ShowBasicStatsMetaStmt) {
        handleShowBasicStatsMeta();
    } else if (stmt instanceof ShowHistogramStatsMetaStmt) {
        handleShowHistogramStatsMeta();
    } else if (stmt instanceof ShowResourceGroupStmt) {
        handleShowResourceGroup();
    } else if (stmt instanceof ShowUserStmt) {
        handleShowUser();
    } else if (stmt instanceof ShowCatalogsStmt) {
        handleShowCatalogs();
    } else if (stmt instanceof ShowComputeNodesStmt) {
        handleShowComputeNodes();
    } else if (stmt instanceof ShowAuthenticationStmt) {
        handleShowAuthentication();
    } else if (stmt instanceof ShowCreateExternalCatalogStmt) {
        handleShowCreateExternalCatalog();
    } else if (stmt instanceof ShowCharsetStmt) {
        handleShowCharset();
    } else if (stmt instanceof ShowStorageVolumesStmt) {
        handleShowStorageVolumes();
    } else if (stmt instanceof DescStorageVolumeStmt) {
        handleDescStorageVolume();
    } else {
        handleEmpty();
    }
    // Post-filter the handler's rows with the statement's WHERE predicate.
    List<List<String>> rows = doPredicate(stmt, stmt.getMetaData(), resultSet.getResultRows());
    return new ShowResultSet(resultSet.getMetaData(), rows);
}
/**
 * SHOW AUTHENTICATION [ALL / FOR user]: one row per user with whether a
 * password is set, the auth plugin, and its plugin-specific text.
 *
 * Fix: when no user is named (SHOW AUTHENTICATION for the current session),
 * the map used to be keyed by {@code getUserIdent()} even though it is null
 * in that case, which made {@code entry.getKey().toString()} throw an NPE.
 * The map is now keyed by the identity actually looked up.
 */
private void handleShowAuthentication() {
    final ShowAuthenticationStmt showAuthenticationStmt = (ShowAuthenticationStmt) stmt;
    AuthenticationMgr authenticationManager = GlobalStateMgr.getCurrentState().getAuthenticationMgr();
    List<List<String>> userAuthInfos = Lists.newArrayList();
    Map<UserIdentity, UserAuthenticationInfo> authenticationInfoMap = new HashMap<>();
    if (showAuthenticationStmt.isAll()) {
        authenticationInfoMap.putAll(authenticationManager.getUserToAuthenticationInfo());
    } else {
        // Resolve the target identity: explicit user if named, otherwise the
        // session's current user.
        UserIdentity userIdentity = showAuthenticationStmt.getUserIdent() != null
                ? showAuthenticationStmt.getUserIdent()
                : connectContext.getCurrentUserIdentity();
        UserAuthenticationInfo userAuthenticationInfo =
                authenticationManager.getUserAuthenticationInfoByUserIdentity(userIdentity);
        authenticationInfoMap.put(userIdentity, userAuthenticationInfo);
    }
    for (Map.Entry<UserIdentity, UserAuthenticationInfo> entry : authenticationInfoMap.entrySet()) {
        UserAuthenticationInfo userAuthenticationInfo = entry.getValue();
        userAuthInfos.add(Lists.newArrayList(
                entry.getKey().toString(),
                userAuthenticationInfo.getPassword().length == 0 ? "No" : "Yes",
                userAuthenticationInfo.getAuthPlugin(),
                userAuthenticationInfo.getTextForAuthPlugin()));
    }
    resultSet = new ShowResultSet(showAuthenticationStmt.getMetaData(), userAuthInfos);
}
/** SHOW COMPUTE NODES: cluster-wide compute node rows from the proc dir. */
private void handleShowComputeNodes() {
    final ShowComputeNodesStmt showComputeNodesStmt = (ShowComputeNodesStmt) stmt;
    resultSet = new ShowResultSet(showComputeNodesStmt.getMetaData(),
            ComputeNodeProcDir.getClusterComputeNodesInfos());
}
/**
 * SHOW MATERIALIZED VIEWS: gathers, under the db read lock, both async MVs
 * (privilege-checked on the MV and all its base tables) and synchronous
 * single-table rollup MVs, then renders them via listMaterializedViewStatus.
 */
private void handleShowMaterializedView() throws AnalysisException {
    ShowMaterializedViewsStmt showMaterializedViewsStmt = (ShowMaterializedViewsStmt) stmt;
    String dbName = showMaterializedViewsStmt.getDb();
    Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
    MetaUtils.checkDbNullAndReport(db, dbName);
    List<MaterializedView> materializedViews = Lists.newArrayList();
    List<Pair<OlapTable, MaterializedIndexMeta>> singleTableMVs = Lists.newArrayList();
    db.readLock();
    try {
        PatternMatcher matcher = null;
        if (showMaterializedViewsStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showMaterializedViewsStmt.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
        }
        for (Table table : db.getTables()) {
            if (table.isMaterializedView()) {
                MaterializedView mvTable = (MaterializedView) table;
                if (matcher != null && !matcher.match(mvTable.getName())) {
                    continue;
                }
                // The MV is hidden unless the caller can SELECT every native
                // base table it is built on ...
                AtomicBoolean baseTableHasPrivilege = new AtomicBoolean(true);
                mvTable.getBaseTableInfos().forEach(baseTableInfo -> {
                    Table baseTable = baseTableInfo.getTable();
                    if (baseTable != null && baseTable.isNativeTableOrMaterializedView() && !PrivilegeActions.
                            checkTableAction(connectContext, baseTableInfo.getDbName(),
                                    baseTableInfo.getTableName(),
                                    PrivilegeType.SELECT)) {
                        baseTableHasPrivilege.set(false);
                    }
                });
                if (!baseTableHasPrivilege.get()) {
                    continue;
                }
                // ... and has some privilege on the MV itself.
                if (!PrivilegeActions.checkAnyActionOnMaterializedView(connectContext, db.getFullName(),
                        mvTable.getName())) {
                    continue;
                }
                materializedViews.add(mvTable);
            } else if (Table.TableType.OLAP == table.getType()) {
                // Synchronous rollup MVs: every visible index except the base one.
                OlapTable olapTable = (OlapTable) table;
                List<MaterializedIndexMeta> visibleMaterializedViews = olapTable.getVisibleIndexMetas();
                long baseIdx = olapTable.getBaseIndexId();
                for (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {
                    if (baseIdx == mvMeta.getIndexId()) {
                        continue;
                    }
                    if (matcher != null && !matcher.match(olapTable.getIndexNameById(mvMeta.getIndexId()))) {
                        continue;
                    }
                    singleTableMVs.add(Pair.create(olapTable, mvMeta));
                }
            }
        }
        List<List<String>> rowSets = listMaterializedViewStatus(dbName, materializedViews, singleTableMVs);
        resultSet = new ShowResultSet(stmt.getMetaData(), rowSets);
    } catch (Exception e) {
        LOG.warn("listMaterializedViews failed:", e);
        throw e;
    } finally {
        db.readUnlock();
    }
}
/**
 * Reconstructs the CREATE MATERIALIZED VIEW statement of a synchronous rollup
 * MV from its index meta: key columns become both select items and the GROUP BY
 * list; non-key columns become {@code AGG(col)} select items.
 *
 * Fix: the previous implementation stitched comma-separated lists by appending
 * trailing commas and then deleting the builder's last character; when one of
 * the column classes was empty that deleted a character of the table name or
 * of "group by". {@link String#join} produces the same output for the normal
 * case without that failure mode.
 *
 * @param olapTable the base table the rollup belongs to
 * @param mv        the materialized view (index) name
 * @param mvMeta    the rollup's index meta, supplying the schema
 * @return the reconstructed CREATE MATERIALIZED VIEW SQL text
 */
public static String buildCreateMVSql(OlapTable olapTable, String mv, MaterializedIndexMeta mvMeta) {
    List<String> keyColumns = new ArrayList<>();
    List<String> aggSelectItems = new ArrayList<>();
    for (Column column : mvMeta.getSchema()) {
        if (column.isKey()) {
            keyColumns.add(column.getName());
        } else {
            aggSelectItems.add(column.getAggregationType().toString() + "(" + column.getName() + ")");
        }
    }
    // Keys first, then aggregates — same column order as the original output.
    List<String> selectItems = new ArrayList<>(keyColumns);
    selectItems.addAll(aggSelectItems);
    return "create materialized view " + mv
            + " as select " + String.join(",", selectItems)
            + " from " + olapTable.getName()
            + " group by " + String.join(",", keyColumns);
}
/**
 * Builds the SHOW MATERIALIZED VIEWS result rows for one database.
 * Covers both asynchronous MVs ({@code materializedViews}) and synchronous
 * single-table rollup MVs ({@code singleTableMVs}); column order of each row
 * must match the statement's result-set metadata.
 */
public static List<List<String>> listMaterializedViewStatus(
        String dbName,
        List<MaterializedView> materializedViews,
        List<Pair<OlapTable, MaterializedIndexMeta>> singleTableMVs) {
    List<List<String>> rowSets = Lists.newArrayList();
    // Last refresh task-run status per MV, keyed by the MV task name.
    // Only fetched when there is at least one async MV to report.
    Map<String, TaskRunStatus> mvNameTaskMap = Maps.newHashMap();
    if (!materializedViews.isEmpty()) {
        GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
        TaskManager taskManager = globalStateMgr.getTaskManager();
        mvNameTaskMap = taskManager.showMVLastRefreshTaskRunStatus(dbName);
    }
    // Async (multi-table) materialized views.
    for (MaterializedView mvTable : materializedViews) {
        long mvId = mvTable.getId();
        TaskRunStatus taskStatus = mvNameTaskMap.get(TaskBuilder.getMvTaskName(mvId));
        ArrayList<String> resultRow = new ArrayList<>();
        resultRow.add(String.valueOf(mvId));
        resultRow.add(dbName);
        resultRow.add(mvTable.getName());
        MaterializedView.MvRefreshScheme refreshScheme = mvTable.getRefreshScheme();
        if (refreshScheme == null) {
            resultRow.add("UNKNOWN");
        } else {
            resultRow.add(String.valueOf(mvTable.getRefreshScheme().getType()));
        }
        resultRow.add(String.valueOf(mvTable.isActive()));
        resultRow.add(String.valueOf(mvTable.getInactiveReason()));
        // Partition type may be absent; emit an empty cell rather than null.
        if (mvTable.getPartitionInfo() != null && mvTable.getPartitionInfo().getType() != null) {
            resultRow.add(mvTable.getPartitionInfo().getType().toString());
        } else {
            resultRow.add("");
        }
        // Appends the 13 task-run columns (empty when taskStatus is null).
        setTaskRunStatus(resultRow, taskStatus);
        resultRow.add(String.valueOf(mvTable.getRowCount()));
        resultRow.add(mvTable.getMaterializedViewDdlStmt(true));
        rowSets.add(resultRow);
    }
    // Synchronous rollup MVs attached to OLAP tables.
    for (Pair<OlapTable, MaterializedIndexMeta> singleTableMV : singleTableMVs) {
        OlapTable olapTable = singleTableMV.first;
        MaterializedIndexMeta mvMeta = singleTableMV.second;
        long mvId = mvMeta.getIndexId();
        ArrayList<String> resultRow = new ArrayList<>();
        resultRow.add(String.valueOf(mvId));
        resultRow.add(dbName);
        resultRow.add(olapTable.getIndexNameById(mvId));
        resultRow.add("ROLLUP");
        resultRow.add(String.valueOf(true));
        resultRow.add("");
        if (olapTable.getPartitionInfo() != null && olapTable.getPartitionInfo().getType() != null) {
            resultRow.add(olapTable.getPartitionInfo().getType().toString());
        } else {
            resultRow.add("");
        }
        // Rollups have no refresh task; pads the task columns with empties.
        setTaskRunStatus(resultRow, null);
        // Row count is only cheap to read for unpartitioned tables; otherwise 0.
        if (olapTable.getPartitionInfo().getType() == PartitionType.UNPARTITIONED) {
            Partition partition = olapTable.getPartitions().iterator().next();
            MaterializedIndex index = partition.getIndex(mvId);
            resultRow.add(String.valueOf(index.getRowCount()));
        } else {
            resultRow.add(String.valueOf(0L));
        }
        // Fall back to reconstructing the DDL when the original stmt wasn't stored;
        // otherwise normalize whitespace in the stored statement.
        if (mvMeta.getOriginStmt() == null) {
            String mvName = olapTable.getIndexNameById(mvId);
            resultRow.add(buildCreateMVSql(olapTable, mvName, mvMeta));
        } else {
            resultRow.add(mvMeta.getOriginStmt().replace("\n", "").replace("\t", "")
                    .replaceAll("[ ]+", " "));
        }
        rowSets.add(resultRow);
    }
    return rowSets;
}
/**
 * Appends the 13 refresh-task-run columns to a SHOW MATERIALIZED VIEWS row.
 * When {@code taskStatus} is null the columns are padded with empty strings.
 */
private static void setTaskRunStatus(List<String> resultRow, TaskRunStatus taskStatus) {
        if (taskStatus == null) {
            // No refresh task recorded for this MV: pad all 13 columns.
            resultRow.addAll(Collections.nCopies(13, ""));
            return;
        }
        resultRow.add(String.valueOf(taskStatus.getTaskId()));
        resultRow.add(Strings.nullToEmpty(taskStatus.getTaskName()));
        long createTime = taskStatus.getCreateTime();
        long finishTime = taskStatus.getFinishTime();
        resultRow.add(String.valueOf(TimeUtils.longToTimeString(createTime)));
        resultRow.add(String.valueOf(TimeUtils.longToTimeString(finishTime)));
        // Duration in seconds with three decimals; "0.000" when not yet finished.
        String duration = finishTime > createTime
                ? DebugUtil.DECIMAL_FORMAT_SCALE_3.format((finishTime - createTime) / 1000D)
                : "0.000";
        resultRow.add(duration);
        resultRow.add(String.valueOf(taskStatus.getState()));
        MVTaskRunExtraMessage extraMessage = taskStatus.getMvTaskRunExtraMessage();
        resultRow.add(extraMessage.isForceRefresh() ? "true" : "false");
        resultRow.add(Strings.nullToEmpty(extraMessage.getPartitionStart()));
        resultRow.add(Strings.nullToEmpty(extraMessage.getPartitionEnd()));
        resultRow.add(Strings.nullToEmpty(extraMessage.getBasePartitionsToRefreshMapString()));
        resultRow.add(Strings.nullToEmpty(extraMessage.getMvPartitionsToRefreshString()));
        resultRow.add(String.valueOf(taskStatus.getErrorCode()));
        resultRow.add(Strings.nullToEmpty(taskStatus.getErrorMessage()));
    }
/** Handles SHOW [FULL] PROCESSLIST: one row per live connection visible to the user. */
private void handleShowProcesslist() {
        ShowProcesslistStmt showStmt = (ShowProcesslistStmt) stmt;
        long nowMs = System.currentTimeMillis();
        List<List<String>> rowSet = Lists.newArrayList();
        List<ConnectContext.ThreadInfo> threadInfos = connectContext.getConnectScheduler()
                .listConnection(connectContext.getQualifiedUser());
        for (ConnectContext.ThreadInfo threadInfo : threadInfos) {
            // toRow returns null for connections that should be hidden from this user.
            List<String> row = threadInfo.toRow(nowMs, showStmt.showFull());
            if (row == null) {
                continue;
            }
            rowSet.add(row);
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
    }
/** Produces an empty result set for statements that return no rows. */
private void handleEmpty() {
        resultSet = new ShowResultSet(stmt.getMetaData(), EMPTY_SET);
    }
/** Handles SHOW AUTHORS: intentionally returns an empty set, kept for MySQL compatibility. */
private void handleShowAuthor() {
        ShowAuthorStmt showAuthorStmt = (ShowAuthorStmt) stmt;
        resultSet = new ShowResultSet(showAuthorStmt.getMetaData(), Lists.newArrayList());
    }
/** Handles SHOW ENGINES: a fixed table of supported storage engines. */
private void handleShowEngines() {
        ShowEnginesStmt showStmt = (ShowEnginesStmt) stmt;
        // {engine name, comment}; the remaining columns are constant.
        String[][] engines = {
                {"OLAP", "Default storage engine of StarRocks"},
                {"MySQL", "MySQL server which data is in it"},
                {"ELASTICSEARCH", "ELASTICSEARCH cluster which data is in it"},
                {"HIVE", "HIVE database which data is in it"},
                {"ICEBERG", "ICEBERG data lake which data is in it"},
        };
        List<List<String>> rowSet = Lists.newArrayList();
        for (String[] engine : engines) {
            rowSet.add(Lists.newArrayList(engine[0], "YES", engine[1], "NO", "NO", "NO"));
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
    }
/**
 * Handles SHOW [BUILTIN|GLOBAL] FUNCTIONS.
 * Collects the candidate functions, filters by LIKE pattern and privilege,
 * sorts by name, and de-duplicates overloads so each name appears once.
 */
private void handleShowFunctions() throws AnalysisException {
        ShowFunctionsStmt showStmt = (ShowFunctionsStmt) stmt;
        // Source of functions depends on the statement variant:
        // builtin -> global builtins, global -> global UDFs, otherwise the db's UDFs.
        List<Function> functions;
        if (showStmt.getIsBuiltin()) {
            functions = connectContext.getGlobalStateMgr().getBuiltinFunctions();
        } else if (showStmt.getIsGlobal()) {
            functions = connectContext.getGlobalStateMgr().getGlobalFunctionMgr().getFunctions();
        } else {
            Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
            MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
            functions = db.getFunctions();
        }
        List<List<Comparable>> rowSet = Lists.newArrayList();
        for (Function function : functions) {
            List<Comparable> row = function.getInfo(showStmt.getIsVerbose());
            // Apply the LIKE pattern first, then the privilege check.
            if (showStmt.getWild() == null || showStmt.like(function.functionName())) {
                if (showStmt.getIsGlobal()) {
                    if (!PrivilegeActions.checkAnyActionOnGlobalFunction(connectContext, function.getFunctionId())) {
                        continue;
                    }
                } else if (!showStmt.getIsBuiltin()) {
                    // Builtins are visible to everyone; db-scoped UDFs need a privilege check.
                    Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
                    if (!PrivilegeActions.checkAnyActionOnFunction(
                            connectContext.getCurrentUserIdentity(), connectContext.getCurrentRoleIds(),
                            db.getId(), function.getFunctionId())) {
                        continue;
                    }
                }
                rowSet.add(row);
            }
        }
        // Sort by the first column (function name) ascending.
        ListComparator<List<Comparable>> comparator;
        OrderByPair orderByPair = new OrderByPair(0, false);
        comparator = new ListComparator<>(orderByPair);
        rowSet.sort(comparator);
        List<List<String>> resultRowSet = Lists.newArrayList();
        // De-duplicate by function name: only the first overload per name is kept.
        Set<String> functionNameSet = new HashSet<>();
        for (List<Comparable> row : rowSet) {
            List<String> resultRow = Lists.newArrayList();
            if (functionNameSet.contains(row.get(0).toString())) {
                continue;
            }
            for (Comparable column : row) {
                resultRow.add(column.toString());
            }
            resultRowSet.add(resultRow);
            functionNameSet.add(resultRow.get(0));
        }
        // Non-verbose output exposes a single "Function Name" column.
        ShowResultSetMetaData showMetaData = showStmt.getIsVerbose() ? showStmt.getMetaData() :
                ShowResultSetMetaData.builder()
                        .addColumn(new Column("Function Name", ScalarType.createVarchar(256))).build();
        resultSet = new ShowResultSet(showMetaData, resultRowSet);
    }
/** Handles SHOW PROC: delegates to the proc-tree node resolved during analysis. */
private void handleShowProc() throws AnalysisException {
        ShowProcStmt showProcStmt = (ShowProcStmt) stmt;
        ProcNodeInterface procNode = showProcStmt.getNode();
        resultSet = new ShowResultSet(showProcStmt.getMetaData(), procNode.fetchResult().getRows());
    }
/**
 * Handles SHOW DATABASES: lists databases of the target catalog the current
 * user can see, optionally filtered by a LIKE pattern, sorted by name.
 */
private void handleShowDb() {
        ShowDbStmt showDbStmt = (ShowDbStmt) stmt;
        // Fall back to the session's current catalog when none is given.
        String catalogName = showDbStmt.getCatalogName() == null
                ? connectContext.getCurrentCatalog()
                : showDbStmt.getCatalogName();
        PatternMatcher matcher = null;
        if (showDbStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showDbStmt.getPattern(),
                    CaseSensibility.DATABASE.getCaseSensibility());
        }
        // TreeSet keeps the output sorted by database name.
        Set<String> visibleDbs = Sets.newTreeSet();
        for (String dbName : metadataMgr.listDbNames(catalogName)) {
            if (matcher != null && !matcher.match(dbName)) {
                continue;
            }
            if (!PrivilegeActions.checkAnyActionOnOrInDb(connectContext, catalogName, dbName)) {
                continue;
            }
            visibleDbs.add(dbName);
        }
        List<List<String>> rows = Lists.newArrayList();
        for (String dbName : visibleDbs) {
            rows.add(Lists.newArrayList(dbName));
        }
        resultSet = new ShowResultSet(showDbStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW TABLES [FULL]: lists tables of the target database the user can
 * see. Internal-catalog tables are read under the db lock; external catalogs go
 * through the metadata manager. Output is sorted by table name (TreeMap).
 */
private void handleShowTable() throws AnalysisException {
        ShowTableStmt showTableStmt = (ShowTableStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        String catalogName = showTableStmt.getCatalogName();
        if (catalogName == null) {
            catalogName = connectContext.getCurrentCatalog();
        }
        String dbName = showTableStmt.getDb();
        Database db = metadataMgr.getDb(catalogName, dbName);
        PatternMatcher matcher = null;
        if (showTableStmt.getPattern() != null) {
            matcher = PatternMatcher.createMysqlPattern(showTableStmt.getPattern(),
                    CaseSensibility.TABLE.getCaseSensibility());
        }
        // TreeMap keeps output sorted by table name; value is the MySQL table type.
        Map<String, String> tableMap = Maps.newTreeMap();
        MetaUtils.checkDbNullAndReport(db, showTableStmt.getDb());
        if (CatalogMgr.isInternalCatalog(catalogName)) {
            db.readLock();
            try {
                for (Table tbl : db.getTables()) {
                    if (matcher != null && !matcher.match(tbl.getName())) {
                        continue;
                    }
                    // Privilege check differs by object kind: view, MV, or plain table.
                    if (tbl.isView()) {
                        if (!PrivilegeActions.checkAnyActionOnView(
                                connectContext, db.getFullName(), tbl.getName())) {
                            continue;
                        }
                    } else if (tbl.isMaterializedView()) {
                        if (!PrivilegeActions.checkAnyActionOnMaterializedView(
                                connectContext, db.getFullName(), tbl.getName())) {
                            continue;
                        }
                    } else if (!PrivilegeActions.checkAnyActionOnTable(
                            connectContext, db.getFullName(), tbl.getName())) {
                        continue;
                    }
                    tableMap.put(tbl.getName(), tbl.getMysqlType());
                }
            } finally {
                db.readUnlock();
            }
        } else {
            // External catalog: list names first, then resolve each table lazily.
            List<String> tableNames = metadataMgr.listTableNames(catalogName, dbName);
            for (String tableName : tableNames) {
                if (matcher != null && !matcher.match(tableName)) {
                    continue;
                }
                Table table = metadataMgr.getTable(catalogName, dbName, tableName);
                if (table == null) {
                    // Table may disappear between listing and lookup; skip with a warning.
                    LOG.warn("table {}.{}.{} does not exist", catalogName, dbName, tableName);
                    continue;
                }
                if (table.isView()) {
                    if (!PrivilegeActions.checkAnyActionOnView(
                            connectContext, catalogName, db.getFullName(), table.getName())) {
                        continue;
                    }
                } else if (!PrivilegeActions.checkAnyActionOnTable(connectContext,
                        catalogName, dbName, tableName)) {
                    continue;
                }
                tableMap.put(tableName, table.getMysqlType());
            }
        }
        // Verbose mode (SHOW FULL TABLES) adds the table-type column.
        for (Map.Entry<String, String> entry : tableMap.entrySet()) {
            if (showTableStmt.isVerbose()) {
                rows.add(Lists.newArrayList(entry.getKey(), entry.getValue()));
            } else {
                rows.add(Lists.newArrayList(entry.getKey()));
            }
        }
        resultSet = new ShowResultSet(showTableStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW TABLE STATUS: one MySQL-compatible status row per visible table.
 * Unknown/inapplicable MySQL columns are emitted as null or empty strings to
 * preserve the expected column positions.
 */
private void handleShowTableStatus() {
        ShowTableStatusStmt showStmt = (ShowTableStatusStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
        ZoneId currentTimeZoneId = TimeUtils.getTimeZone().toZoneId();
        // A missing database yields an empty result set rather than an error.
        if (db != null) {
            db.readLock();
            try {
                PatternMatcher matcher = null;
                if (showStmt.getPattern() != null) {
                    matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                            CaseSensibility.TABLE.getCaseSensibility());
                }
                for (Table table : db.getTables()) {
                    if (matcher != null && !matcher.match(table.getName())) {
                        continue;
                    }
                    if (!PrivilegeActions.checkAnyActionOnTable(connectContext, db.getFullName(), table.getName())) {
                        continue;
                    }
                    // Size/row statistics are only computed for native tables;
                    // everything else gets default config values.
                    TTableInfo info = new TTableInfo();
                    if (table.isNativeTableOrMaterializedView() || table.getType() == Table.TableType.OLAP_EXTERNAL) {
                        InformationSchemaDataSource.genNormalTableInfo(table, info);
                    } else {
                        InformationSchemaDataSource.genDefaultConfigInfo(info);
                    }
                    // Column order follows MySQL's SHOW TABLE STATUS layout:
                    // Name, Engine, Version, Row_format, Rows, Avg_row_length,
                    // Data_length, Max_data_length, Index_length, Data_free,
                    // Auto_increment, Create_time, Update_time, Check_time,
                    // Collation, Checksum, Create_options, Comment.
                    List<String> row = Lists.newArrayList();
                    row.add(table.getName());
                    row.add(table.getEngine());
                    row.add(null);
                    row.add("");
                    row.add(String.valueOf(info.getTable_rows()));
                    row.add(String.valueOf(info.getAvg_row_length()));
                    row.add(String.valueOf(info.getData_length()));
                    row.add(null);
                    row.add(null);
                    row.add(null);
                    row.add(null);
                    row.add(DateUtils.formatTimeStampInSeconds(table.getCreateTime(), currentTimeZoneId));
                    row.add(DateUtils.formatTimeStampInSeconds(info.getUpdate_time(), currentTimeZoneId));
                    row.add(null);
                    row.add(InformationSchemaDataSource.UTF8_GENERAL_CI);
                    row.add(null);
                    row.add("");
                    row.add(table.getDisplayComment());
                    rows.add(row);
                }
            } finally {
                db.readUnlock();
            }
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/** Handles SHOW [GLOBAL|SESSION] VARIABLES, optionally filtered by a LIKE pattern. */
private void handleShowVariables() {
        ShowVariablesStmt showStmt = (ShowVariablesStmt) stmt;
        PatternMatcher matcher = showStmt.getPattern() == null
                ? null
                : PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                        CaseSensibility.VARIABLES.getCaseSensibility());
        resultSet = new ShowResultSet(showStmt.getMetaData(),
                VariableMgr.dump(showStmt.getType(), connectContext.getSessionVariable(), matcher));
    }
/**
 * Handles SHOW CREATE TABLE: internal-catalog tables get full DDL
 * reconstruction; external-catalog tables get a best-effort rendering.
 */
private void handleShowCreateTable() throws AnalysisException {
        ShowCreateTableStmt showStmt = (ShowCreateTableStmt) stmt;
        TableName tbl = showStmt.getTbl();
        String catalogName = tbl.getCatalog() != null ? tbl.getCatalog() : connectContext.getCurrentCatalog();
        if (CatalogMgr.isInternalCatalog(catalogName)) {
            showCreateInternalCatalogTable(showStmt);
        } else {
            showCreateExternalCatalogTable(tbl, catalogName);
        }
    }
/**
 * Renders a best-effort CREATE TABLE statement for an external-catalog table.
 * The output is MySQL-flavored DDL: column list, partition columns (when the
 * table is partitioned and not JDBC), and a "location" property for lake formats.
 */
private void showCreateExternalCatalogTable(TableName tbl, String catalogName) {
        String dbName = tbl.getDb();
        String tableName = tbl.getTbl();
        MetadataMgr metadataMgr = GlobalStateMgr.getCurrentState().getMetadataMgr();
        Database db = metadataMgr.getDb(catalogName, dbName);
        if (db == null) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
        }
        Table table = metadataMgr.getTable(catalogName, dbName, tableName);
        if (table == null) {
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
        }
        StringBuilder createTableSql = new StringBuilder();
        createTableSql.append("CREATE TABLE ")
                .append("`").append(tableName).append("`")
                .append(" (\n");
        // Each column rendered as `name` type DEFAULT NULL [COMMENT "..."].
        List<String> columns = table.getFullSchema().stream().map(
                this::toMysqlDDL).collect(Collectors.toList());
        createTableSql.append(String.join(",\n", columns))
                .append("\n)");
        // JDBC tables do not expose partition columns through this path.
        if (table.getType() != JDBC && !table.isUnPartitioned()) {
            createTableSql.append("\nPARTITION BY ( ")
                    .append(String.join(", ", table.getPartitionColumnNames()))
                    .append(" )");
        }
        // Lake-format tables (Hive/Hudi/Iceberg/Delta) surface their storage location.
        String location = null;
        if (table.isHiveTable() || table.isHudiTable()) {
            location = ((HiveMetaStoreTable) table).getTableLocation();
        } else if (table.isIcebergTable()) {
            location = ((IcebergTable) table).getTableLocation();
        } else if (table.isDeltalakeTable()) {
            location = ((DeltaLakeTable) table).getTableLocation();
        }
        if (!Strings.isNullOrEmpty(location)) {
            createTableSql.append("\nPROPERTIES (\"location\" = \"").append(location).append("\");");
        }
        List<List<String>> rows = Lists.newArrayList();
        rows.add(Lists.newArrayList(tableName, createTableSql.toString()));
        resultSet = new ShowResultSet(stmt.getMetaData(), rows);
    }
/**
 * Renders one column of an external table as a MySQL-style DDL fragment:
 * {@code `name` type DEFAULT NULL [COMMENT "..."]}.
 */
private String toMysqlDDL(Column column) {
        StringBuilder ddl = new StringBuilder()
                .append("  `").append(column.getName()).append("` ")
                .append(column.getType().toSql())
                .append(" DEFAULT NULL");
        if (!Strings.isNullOrEmpty(column.getComment())) {
            ddl.append(" COMMENT \"").append(column.getDisplayComment()).append("\"");
        }
        return ddl.toString();
    }
/**
 * Handles SHOW CREATE TABLE/VIEW/MATERIALIZED VIEW for internal-catalog objects.
 * When the name does not match a table but the statement asks for a materialized
 * view, synchronous rollup MVs of OLAP tables are searched as a fallback.
 * Runs entirely under the database read lock.
 */
private void showCreateInternalCatalogTable(ShowCreateTableStmt showStmt) throws AnalysisException {
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDb());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
        List<List<String>> rows = Lists.newArrayList();
        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTable());
            if (table == null) {
                if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
                } else {
                    // Fallback: the name may refer to a synchronous rollup MV
                    // defined on one of the db's OLAP tables.
                    for (Table tbl : db.getTables()) {
                        if (tbl.getType() == Table.TableType.OLAP) {
                            OlapTable olapTable = (OlapTable) tbl;
                            List<MaterializedIndexMeta> visibleMaterializedViews =
                                    olapTable.getVisibleIndexMetas();
                            for (MaterializedIndexMeta mvMeta : visibleMaterializedViews) {
                                if (olapTable.getIndexNameById(mvMeta.getIndexId()).equals(showStmt.getTable())) {
                                    // Reconstruct the DDL when the original stmt wasn't persisted.
                                    if (mvMeta.getOriginStmt() == null) {
                                        String mvName = olapTable.getIndexNameById(mvMeta.getIndexId());
                                        rows.add(Lists.newArrayList(showStmt.getTable(), buildCreateMVSql(olapTable,
                                                mvName, mvMeta), "utf8", "utf8_general_ci"));
                                    } else {
                                        rows.add(Lists.newArrayList(showStmt.getTable(), mvMeta.getOriginStmt(),
                                                "utf8", "utf8_general_ci"));
                                    }
                                    resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
                                    return;
                                }
                            }
                        }
                    }
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTable());
                }
            }
            List<String> createTableStmt = Lists.newArrayList();
            GlobalStateMgr.getDdlStmt(table, createTableStmt, null, null, false, true /* hide password */);
            if (createTableStmt.isEmpty()) {
                resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
                return;
            }
            // The object kind must agree with the statement variant (TABLE/VIEW/MV),
            // otherwise report ERR_WRONG_OBJECT.
            if (table instanceof View) {
                if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.MATERIALIZED_VIEW) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                            showStmt.getTable(), "MATERIALIZED VIEW");
                }
                rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0), "utf8", "utf8_general_ci"));
                resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
            } else if (table instanceof MaterializedView) {
                // SHOW CREATE VIEW on an async MV renders it as a plain view definition.
                if (showStmt.getType() == ShowCreateTableStmt.CreateTableType.VIEW) {
                    MaterializedView mv = (MaterializedView) table;
                    String sb = "CREATE VIEW `" + table.getName() + "` AS " + mv.getViewDefineSql();
                    rows.add(Lists.newArrayList(table.getName(), sb, "utf8", "utf8_general_ci"));
                    resultSet = new ShowResultSet(ShowCreateTableStmt.getViewMetaData(), rows);
                } else {
                    rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
                    resultSet = new ShowResultSet(ShowCreateTableStmt.getMaterializedViewMetaData(), rows);
                }
            } else {
                if (showStmt.getType() != ShowCreateTableStmt.CreateTableType.TABLE) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
                            showStmt.getTable(), showStmt.getType().getValue());
                }
                rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
                resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
            }
        } finally {
            db.readUnlock();
        }
    }
/** Handles DESCRIBE: rows were already computed during statement analysis. */
private void handleDescribe() throws AnalysisException {
        DescribeStmt describeStmt = (DescribeStmt) stmt;
        resultSet = new ShowResultSet(describeStmt.getMetaData(), describeStmt.getResultRows());
    }
/**
 * Handles SHOW [FULL] COLUMNS: one row per base-schema column, optionally
 * filtered by a LIKE pattern. Verbose mode adds collation/privilege/comment
 * placeholder columns to match MySQL's SHOW FULL COLUMNS layout.
 */
private void handleShowColumn() throws AnalysisException {
        ShowColumnStmt showStmt = (ShowColumnStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        String catalogName = showStmt.getCatalog();
        if (catalogName == null) {
            catalogName = connectContext.getCurrentCatalog();
        }
        String dbName = showStmt.getDb();
        Database db = metadataMgr.getDb(catalogName, dbName);
        MetaUtils.checkDbNullAndReport(db, showStmt.getDb());
        db.readLock();
        try {
            Table table = metadataMgr.getTable(catalogName, dbName, showStmt.getTable());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                        showStmt.getDb() + "." + showStmt.getTable());
            }
            PatternMatcher matcher = null;
            if (showStmt.getPattern() != null) {
                matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
                        CaseSensibility.COLUMN.getCaseSensibility());
            }
            List<Column> columns = table.getBaseSchema();
            for (Column col : columns) {
                if (matcher != null && !matcher.match(col.getName())) {
                    continue;
                }
                final String columnName = col.getName();
                final String columnType = col.getType().canonicalName().toLowerCase();
                final String isAllowNull = col.isAllowNull() ? "YES" : "NO";
                final String isKey = col.isKey() ? "YES" : "NO";
                final String defaultValue = col.getMetaDefaultValue(Lists.newArrayList());
                // Implicit aggregation types (e.g. on key columns) are not shown.
                final String aggType = col.getAggregationType() == null
                        || col.isAggregationTypeImplicit() ? "" : col.getAggregationType().toSql();
                if (showStmt.isVerbose()) {
                    // Field, Type, Collation, Null, Key, Default, Extra, Privileges, Comment
                    rows.add(Lists.newArrayList(columnName,
                            columnType,
                            "",
                            isAllowNull,
                            isKey,
                            defaultValue,
                            aggType,
                            "",
                            col.getDisplayComment()));
                } else {
                    // Field, Type, Null, Key, Default, Extra
                    rows.add(Lists.newArrayList(columnName,
                            columnType,
                            isAllowNull,
                            isKey,
                            defaultValue,
                            aggType));
                }
            }
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW INDEX: lists the secondary indexes of an OLAP table.
 * Non-OLAP tables produce an empty result set (the original code carried a
 * dead empty else-branch for that case, removed here).
 */
private void handleShowIndex() throws AnalysisException {
        ShowIndexStmt showStmt = (ShowIndexStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        db.readLock();
        try {
            Table table = db.getTable(showStmt.getTableName().getTbl());
            if (table == null) {
                ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR,
                        db.getOriginName() + "." + showStmt.getTableName().toString());
            } else if (table instanceof OlapTable) {
                // Row layout mirrors MySQL SHOW INDEX; unsupported columns are empty.
                List<Index> indexes = ((OlapTable) table).getIndexes();
                for (Index index : indexes) {
                    rows.add(Lists.newArrayList(showStmt.getTableName().toString(), "", index.getIndexName(),
                            "", String.join(",", index.getColumns()), "", "", "", "",
                            "", index.getIndexType().name(), index.getComment()));
                }
            }
            // Other table types: fall through with no rows.
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/**
 * Handles HELP 'keyword'.
 * Resolution order: exact topic -> keyword search (unique hit resolved, multiple
 * hits listed) -> category search (unique category expands to its topics and
 * sub-categories, multiple categories listed, none yields an empty set).
 */
private void handleHelp() {
        HelpStmt helpStmt = (HelpStmt) stmt;
        String mark = helpStmt.getMask();
        HelpModule module = HelpModule.getInstance();
        HelpTopic topic = module.getTopic(mark);
        // No exact topic: try to resolve via keyword search.
        if (topic == null) {
            List<String> topics = module.listTopicByKeyword(mark);
            if (topics.size() == 0) {
                topic = null;
            } else if (topics.size() == 1) {
                // Unique keyword match: promote it to the topic.
                topic = module.getTopic(topics.get(0));
            } else {
                // Multiple matches: list topics ("N") and matching categories ("Y").
                List<List<String>> rows = Lists.newArrayList();
                for (String str : topics) {
                    rows.add(Lists.newArrayList(str, "N"));
                }
                List<String> categories = module.listCategoryByName(mark);
                for (String str : categories) {
                    rows.add(Lists.newArrayList(str, "Y"));
                }
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
                return;
            }
        }
        if (topic != null) {
            // Single topic resolved: show its full description and example.
            resultSet = new ShowResultSet(helpStmt.getMetaData(), Lists.<List<String>>newArrayList(
                    Lists.newArrayList(topic.getName(), topic.getDescription(), topic.getExample())));
        } else {
            List<String> categories = module.listCategoryByName(mark);
            if (categories.isEmpty()) {
                // Nothing matched at all.
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), EMPTY_SET);
            } else if (categories.size() > 1) {
                resultSet = new ShowResultSet(helpStmt.getCategoryMetaData(),
                        Lists.<List<String>>newArrayList(categories));
            } else {
                // Unique category: list its topics ("N") and sub-categories ("Y").
                List<List<String>> rows = Lists.newArrayList();
                List<String> topics = module.listTopicByCategory(categories.get(0));
                for (String str : topics) {
                    rows.add(Lists.newArrayList(str, "N"));
                }
                List<String> subCategories = module.listCategoryByCategory(categories.get(0));
                for (String str : subCategories) {
                    rows.add(Lists.newArrayList(str, "Y"));
                }
                resultSet = new ShowResultSet(helpStmt.getKeywordMetaData(), rows);
            }
        }
    }
/**
 * Handles SHOW LOAD: lists load jobs, optionally across all databases,
 * filtered by label/state, sorted by ORDER BY pairs, then paginated by
 * LIMIT/OFFSET on the sorted rows.
 */
private void handleShowLoad() throws AnalysisException {
        ShowLoadStmt showStmt = (ShowLoadStmt) stmt;
        GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
        // dbId == -1 means "all databases"; otherwise resolve the db by name.
        // (The original re-assigned -1 inside an isAll() branch; that redundant
        // branch is collapsed here.)
        long dbId = -1;
        if (!showStmt.isAll()) {
            Database db = globalStateMgr.getDb(showStmt.getDbName());
            MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
            dbId = db.getId();
        }
        Set<String> statesValue = showStmt.getStates() == null ? null : showStmt.getStates().stream()
                .map(Enum::name)
                .collect(Collectors.toSet());
        List<List<Comparable>> loadInfos =
                globalStateMgr.getLoadMgr().getLoadJobInfosByDb(dbId, showStmt.getLabelValue(),
                        showStmt.isAccurateMatch(),
                        statesValue);
        // Default sort is by the first column when no ORDER BY is given.
        List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
        ListComparator<List<Comparable>> comparator;
        if (orderByPairs != null) {
            OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
            comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
        } else {
            comparator = new ListComparator<>(0);
        }
        loadInfos.sort(comparator);
        List<List<String>> rows = Lists.newArrayList();
        for (List<Comparable> loadInfo : loadInfos) {
            List<String> oneInfo = new ArrayList<>(loadInfo.size());
            for (Comparable element : loadInfo) {
                oneInfo.add(element.toString());
            }
            rows.add(oneInfo);
        }
        // Apply OFFSET/LIMIT after sorting; offset defaults to 0, limit -1 = unlimited.
        long limit = showStmt.getLimit();
        long offset = showStmt.getOffset() == -1L ? 0 : showStmt.getOffset();
        if (offset >= rows.size()) {
            rows = Lists.newArrayList();
        } else if (limit != -1L) {
            if ((limit + offset) < rows.size()) {
                rows = rows.subList((int) offset, (int) (limit + offset));
            } else {
                rows = rows.subList((int) offset, rows.size());
            }
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW ROUTINE LOAD: fetches jobs for the database (optionally a single
 * job by name, optionally including history), removes jobs the user has no
 * table privilege on, then applies WHERE/ORDER BY/LIMIT from the statement.
 */
private void handleShowRoutineLoad() throws AnalysisException {
        ShowRoutineLoadStmt showRoutineLoadStmt = (ShowRoutineLoadStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        List<RoutineLoadJob> routineLoadJobList;
        try {
            routineLoadJobList = GlobalStateMgr.getCurrentState().getRoutineLoadMgr()
                    .getJob(showRoutineLoadStmt.getDbFullName(),
                            showRoutineLoadStmt.getName(),
                            showRoutineLoadStmt.isIncludeHistory());
        } catch (MetaNotFoundException e) {
            LOG.warn(e.getMessage(), e);
            throw new AnalysisException(e.getMessage());
        }
        if (routineLoadJobList != null) {
            // Drop jobs whose target table the user cannot act on.
            Iterator<RoutineLoadJob> iterator = routineLoadJobList.iterator();
            while (iterator.hasNext()) {
                RoutineLoadJob routineLoadJob = iterator.next();
                try {
                    if (!PrivilegeActions.checkAnyActionOnTable(connectContext,
                            routineLoadJob.getDbFullName(),
                            routineLoadJob.getTableName())) {
                        iterator.remove();
                    }
                } catch (MetaNotFoundException e) {
                    // Fix: previously swallowed silently. Keep the job in the result
                    // (unchanged behavior) but record why the check could not run.
                    LOG.warn("failed to resolve db/table of routine load job for privilege check, "
                            + "keeping job in result", e);
                }
            }
            // Apply the statement's WHERE/ORDER BY/LIMIT over the remaining jobs.
            RoutineLoadFunctionalExprProvider fProvider =
                    showRoutineLoadStmt.getFunctionalExprProvider(this.connectContext);
            rows = routineLoadJobList.parallelStream()
                    .filter(fProvider.getPredicateChain())
                    .sorted(fProvider.getOrderComparator())
                    .skip(fProvider.getSkipCount())
                    .limit(fProvider.getLimitCount())
                    .map(RoutineLoadJob::getShowInfo)
                    .collect(Collectors.toList());
        }
        // A named lookup that found nothing is an error, with a hint about history.
        if (!Strings.isNullOrEmpty(showRoutineLoadStmt.getName()) && rows.isEmpty()) {
            throw new AnalysisException("There is no running job named " + showRoutineLoadStmt.getName()
                    + " in db " + showRoutineLoadStmt.getDbFullName()
                    + ". Include history? " + showRoutineLoadStmt.isIncludeHistory()
                    + ", you can try `show all routine load job for job_name` if you want to list stopped and cancelled jobs");
        }
        resultSet = new ShowResultSet(showRoutineLoadStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW ROUTINE LOAD TASK: lists the task-level status of one routine
 * load job. Users without any privilege on the target table get an empty set
 * rather than an error.
 */
private void handleShowRoutineLoadTask() throws AnalysisException {
        ShowRoutineLoadTaskStmt showRoutineLoadTaskStmt = (ShowRoutineLoadTaskStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        RoutineLoadJob routineLoadJob;
        try {
            routineLoadJob =
                    GlobalStateMgr.getCurrentState().getRoutineLoadMgr()
                            .getJob(showRoutineLoadTaskStmt.getDbFullName(),
                                    showRoutineLoadTaskStmt.getJobName());
        } catch (MetaNotFoundException e) {
            LOG.warn(e.getMessage(), e);
            throw new AnalysisException(e.getMessage());
        }
        if (routineLoadJob == null) {
            // Fix: the original message lacked the space after the job name
            // ("Xdoes not exists") and was ungrammatical.
            throw new AnalysisException("The job named " + showRoutineLoadTaskStmt.getJobName()
                    + " does not exist or job state is stopped or cancelled");
        }
        String dbFullName = showRoutineLoadTaskStmt.getDbFullName();
        String tableName;
        try {
            tableName = routineLoadJob.getTableName();
        } catch (MetaNotFoundException e) {
            throw new AnalysisException(
                    "The table metadata of job has been changed. The job will be cancelled automatically", e);
        }
        // No privilege on the table: return an empty result set silently.
        if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbFullName, tableName)) {
            resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
            return;
        }
        rows.addAll(routineLoadJob.getTasksShowInfo());
        resultSet = new ShowResultSet(showRoutineLoadTaskStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW STREAM LOAD: fetches stream load tasks for the database
 * (optionally a single label, optionally including history) and applies the
 * statement's WHERE/ORDER BY/LIMIT clauses.
 */
private void handleShowStreamLoad() throws AnalysisException {
        ShowStreamLoadStmt showStreamLoadStmt = (ShowStreamLoadStmt) stmt;
        List<StreamLoadTask> streamLoadTaskList;
        try {
            streamLoadTaskList = GlobalStateMgr.getCurrentState().getStreamLoadMgr()
                    .getTask(showStreamLoadStmt.getDbFullName(),
                            showStreamLoadStmt.getName(),
                            showStreamLoadStmt.isIncludeHistory());
        } catch (MetaNotFoundException e) {
            LOG.warn(e.getMessage(), e);
            throw new AnalysisException(e.getMessage());
        }
        List<List<String>> rows = Lists.newArrayList();
        if (streamLoadTaskList != null) {
            // Filter/sort/paginate per the statement's functional expressions.
            StreamLoadFunctionalExprProvider fProvider =
                    showStreamLoadStmt.getFunctionalExprProvider(this.connectContext);
            rows = streamLoadTaskList.parallelStream()
                    .filter(fProvider.getPredicateChain())
                    .sorted(fProvider.getOrderComparator())
                    .skip(fProvider.getSkipCount())
                    .limit(fProvider.getLimitCount())
                    .map(StreamLoadTask::getShowInfo)
                    .collect(Collectors.toList());
        }
        // A named label lookup that found nothing is an error, with a history hint.
        if (!Strings.isNullOrEmpty(showStreamLoadStmt.getName()) && rows.isEmpty()) {
            throw new AnalysisException("There is no label named " + showStreamLoadStmt.getName()
                    + " in db " + showStreamLoadStmt.getDbFullName()
                    + ". Include history? " + showStreamLoadStmt.isIncludeHistory());
        }
        resultSet = new ShowResultSet(showStreamLoadStmt.getMetaData(), rows);
    }
/** Handles SHOW PROPERTY: rows are computed by the statement itself. */
private void handleShowUserProperty() throws AnalysisException {
        ShowUserPropertyStmt showStmt = (ShowUserPropertyStmt) stmt;
        resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getRows(connectContext));
    }
/** Handles SHOW DELETE: lists the delete operations recorded for one database. */
private void handleShowDelete() throws AnalysisException {
        ShowDeleteStmt showStmt = (ShowDeleteStmt) stmt;
        GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
        Database db = globalStateMgr.getDb(showStmt.getDbName());
        MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
        DeleteMgr deleteHandler = globalStateMgr.getDeleteMgr();
        List<List<String>> rows = Lists.newArrayList();
        // Stringify each delete-info record into one result row.
        for (List<Comparable> deleteInfo : deleteHandler.getDeleteInfosByDb(db.getId())) {
            List<String> oneInfo = new ArrayList<>(deleteInfo.size());
            for (Comparable element : deleteInfo) {
                oneInfo.add(element.toString());
            }
            rows.add(oneInfo);
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW ALTER: delegates to the proc node. Schema-change proc dirs
 * support server-side filter/order/limit; other nodes return raw rows.
 */
private void handleShowAlter() throws AnalysisException {
        ShowAlterStmt showStmt = (ShowAlterStmt) stmt;
        ProcNodeInterface procNodeI = showStmt.getNode();
        Preconditions.checkNotNull(procNodeI);
        List<List<String>> rows = procNodeI instanceof SchemaChangeProcDir
                ? ((SchemaChangeProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),
                        showStmt.getOrderPairs(), showStmt.getLimitElement()).getRows()
                : procNodeI.fetchResult().getRows();
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
/**
 * Handles SHOW COLLATION: a fixed list reported for MySQL protocol
 * compatibility. Columns: Collation, Charset, Id, Default, Compiled, Sortlen.
 */
private void handleShowCollation() {
        ShowCollationStmt showStmt = (ShowCollationStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        rows.add(Lists.newArrayList("utf8_general_ci", "utf8", "33", "Yes", "Yes", "1"));
        rows.add(Lists.newArrayList("binary", "binary", "63", "Yes", "Yes", "1"));
        rows.add(Lists.newArrayList("gbk_chinese_ci", "gbk", "28", "Yes", "Yes", "1"));
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
    // Handles SHOW DATA [FROM <table>] under the db read lock.
    // Without a table: one size/replica row per accessible native table (sorted
    // by name) plus Total/Quota/Left summary rows for the database.
    // With a table: one row per materialized index plus a Total row.
    private void handleShowData() {
        ShowDataStmt showStmt = (ShowDataStmt) stmt;
        String dbName = showStmt.getDbName();
        Database db = GlobalStateMgr.getCurrentState().getDb(dbName);
        if (db == null) {
            // Presumably throws, so execution stops here for an unknown db —
            // otherwise readLock() below would NPE. TODO confirm.
            ErrorReport.reportSemanticException(ErrorCode.ERR_BAD_DB_ERROR, dbName);
        }
        db.readLock();
        try {
            String tableName = showStmt.getTableName();
            // Rows are accumulated directly into the statement's result buffer,
            // which is also what the final ShowResultSet is built from.
            List<List<String>> totalRows = showStmt.getResultRows();
            if (tableName == null) {
                long totalSize = 0;
                long totalReplicaCount = 0;
                // Only tables the current user can act on are shown, in name order.
                List<Table> tables = db.getTables();
                SortedSet<Table> sortedTables = new TreeSet<>(Comparator.comparing(Table::getName));
                for (Table table : tables) {
                    if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, table.getName())) {
                        continue;
                    }
                    sortedTables.add(table);
                }
                for (Table table : sortedTables) {
                    // Size/replica accounting only applies to native OLAP tables
                    // and materialized views.
                    if (!table.isNativeTableOrMaterializedView()) {
                        continue;
                    }
                    OlapTable olapTable = (OlapTable) table;
                    long tableSize = olapTable.getDataSize();
                    long replicaCount = olapTable.getReplicaCount();
                    Pair<Double, String> tableSizePair = DebugUtil.getByteUint(tableSize);
                    String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(tableSizePair.first) + " "
                            + tableSizePair.second;
                    List<String> row = Arrays.asList(table.getName(), readableSize, String.valueOf(replicaCount));
                    totalRows.add(row);
                    totalSize += tableSize;
                    totalReplicaCount += replicaCount;
                }
                // Summary row: total consumption across the listed tables.
                Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                        + totalSizePair.second;
                List<String> total = Arrays.asList("Total", readableSize, String.valueOf(totalReplicaCount));
                totalRows.add(total);
                // Quota row: the database's configured data/replica quotas.
                long quota = db.getDataQuota();
                long replicaQuota = db.getReplicaQuota();
                Pair<Double, String> quotaPair = DebugUtil.getByteUint(quota);
                String readableQuota = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(quotaPair.first) + " "
                        + quotaPair.second;
                List<String> quotaRow = Arrays.asList("Quota", readableQuota, String.valueOf(replicaQuota));
                totalRows.add(quotaRow);
                // Left row: remaining headroom, clamped at zero.
                long left = Math.max(0, quota - totalSize);
                long replicaCountLeft = Math.max(0, replicaQuota - totalReplicaCount);
                Pair<Double, String> leftPair = DebugUtil.getByteUint(left);
                String readableLeft = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(leftPair.first) + " "
                        + leftPair.second;
                List<String> leftRow = Arrays.asList("Left", readableLeft, String.valueOf(replicaCountLeft));
                totalRows.add(leftRow);
            } else {
                // Single-table mode: the user needs some privilege on the table.
                if (!PrivilegeActions.checkAnyActionOnTable(connectContext, dbName, tableName)) {
                    ErrorReport.reportSemanticException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW DATA",
                            connectContext.getQualifiedUser(),
                            connectContext.getRemoteIP(),
                            tableName);
                }
                Table table = db.getTable(tableName);
                if (table == null) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, tableName);
                }
                if (!table.isNativeTableOrMaterializedView()) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, tableName);
                }
                OlapTable olapTable = (OlapTable) table;
                int i = 0;
                long totalSize = 0;
                long totalReplicaCount = 0;
                // One row per materialized index, in index-name order; sizes and
                // counts are summed over all partitions.
                Map<String, Long> indexNames = olapTable.getIndexNameToId();
                Map<String, Long> sortedIndexNames = new TreeMap<>(indexNames);
                for (Long indexId : sortedIndexNames.values()) {
                    long indexSize = 0;
                    long indexReplicaCount = 0;
                    long indexRowCount = 0;
                    for (Partition partition : olapTable.getAllPartitions()) {
                        // NOTE(review): getIndex may return null if a partition
                        // lacks this index — looks like an NPE risk; confirm.
                        MaterializedIndex mIndex = partition.getIndex(indexId);
                        indexSize += mIndex.getDataSize();
                        indexReplicaCount += mIndex.getReplicaCount();
                        indexRowCount += mIndex.getRowCount();
                    }
                    Pair<Double, String> indexSizePair = DebugUtil.getByteUint(indexSize);
                    String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(indexSizePair.first) + " "
                            + indexSizePair.second;
                    List<String> row = null;
                    // The table name is printed only on the first row.
                    if (i == 0) {
                        row = Arrays.asList(tableName,
                                olapTable.getIndexNameById(indexId),
                                readableSize, String.valueOf(indexReplicaCount),
                                String.valueOf(indexRowCount));
                    } else {
                        row = Arrays.asList("",
                                olapTable.getIndexNameById(indexId),
                                readableSize, String.valueOf(indexReplicaCount),
                                String.valueOf(indexRowCount));
                    }
                    totalSize += indexSize;
                    totalReplicaCount += indexReplicaCount;
                    totalRows.add(row);
                    i++;
                }
                Pair<Double, String> totalSizePair = DebugUtil.getByteUint(totalSize);
                String readableSize = DebugUtil.DECIMAL_FORMAT_SCALE_3.format(totalSizePair.first) + " "
                        + totalSizePair.second;
                List<String> row = Arrays.asList("", "Total", readableSize, String.valueOf(totalReplicaCount), "");
                totalRows.add(row);
            }
        } catch (AnalysisException e) {
            throw new SemanticException(e.getMessage());
        } finally {
            db.readUnlock();
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), showStmt.getResultRows());
    }
private void handleShowPartitions() throws AnalysisException {
ShowPartitionsStmt showStmt = (ShowPartitionsStmt) stmt;
ProcNodeInterface procNodeI = showStmt.getNode();
Preconditions.checkNotNull(procNodeI);
List<List<String>> rows = ((PartitionsProcDir) procNodeI).fetchResultByFilter(showStmt.getFilterMap(),
showStmt.getOrderByPairs(), showStmt.getLimitElement()).getRows();
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
    // Handles SHOW TABLET. Two modes:
    //  - SHOW TABLET <id>: reverse-resolves one tablet through the inverted index
    //    to its db/table/partition/index names and reports whether the inverted
    //    index is consistent ("IsSync") with catalog metadata.
    //  - SHOW TABLET FROM <table>: lists tablets of a table, filtered by
    //    partition/index/version/backend/replica-state, then sorted and limited.
    private void handleShowTablet() throws AnalysisException {
        ShowTabletStmt showStmt = (ShowTabletStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
        if (showStmt.isShowSingleTablet()) {
            long tabletId = showStmt.getTabletId();
            TabletInvertedIndex invertedIndex = GlobalStateMgr.getCurrentInvertedIndex();
            TabletMeta tabletMeta = invertedIndex.getTabletMeta(tabletId);
            // Ids fall back to NOT_EXIST_VALUE when the tablet is unknown to the index.
            Long dbId = tabletMeta != null ? tabletMeta.getDbId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            String dbName = null;
            Long tableId = tabletMeta != null ? tabletMeta.getTableId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            String tableName = null;
            Long partitionId = tabletMeta != null ? tabletMeta.getPartitionId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            String partitionName = null;
            Long indexId = tabletMeta != null ? tabletMeta.getIndexId() : TabletInvertedIndex.NOT_EXIST_VALUE;
            String indexName = null;
            Boolean isSync = true;
            // do { ... } while (false) is a breakable block: any missing level of
            // metadata flags the tablet as out-of-sync and stops resolution.
            do {
                Database db = globalStateMgr.getDb(dbId);
                if (db == null) {
                    isSync = false;
                    break;
                }
                dbName = db.getFullName();
                db.readLock();
                try {
                    Table table = db.getTable(tableId);
                    if (!(table instanceof OlapTable)) {
                        isSync = false;
                        break;
                    }
                    tableName = table.getName();
                    OlapTable olapTable = (OlapTable) table;
                    Partition partition = olapTable.getPartition(partitionId);
                    if (partition == null) {
                        isSync = false;
                        break;
                    }
                    partitionName = partition.getName();
                    MaterializedIndex index = partition.getIndex(indexId);
                    if (index == null) {
                        isSync = false;
                        break;
                    }
                    indexName = olapTable.getIndexNameById(indexId);
                    // Cloud-native tables stop here, before the local-replica
                    // cross-checks below (no LocalTablet to inspect).
                    if (table.isCloudNativeTableOrMaterializedView()) {
                        break;
                    }
                    LocalTablet tablet = (LocalTablet) index.getTablet(tabletId);
                    if (tablet == null) {
                        isSync = false;
                        break;
                    }
                    // Verify each catalog replica is the exact same object the
                    // inverted index holds; any mismatch means out-of-sync.
                    List<Replica> replicas = tablet.getImmutableReplicas();
                    for (Replica replica : replicas) {
                        Replica tmp = invertedIndex.getReplica(tabletId, replica.getBackendId());
                        if (tmp == null) {
                            isSync = false;
                            break;
                        }
                        // Identity comparison is intentional here.
                        if (tmp != replica) {
                            isSync = false;
                            break;
                        }
                    }
                } finally {
                    db.readUnlock();
                }
            } while (false);
            // Offer a follow-up SHOW PROC command for drilling into the tablet.
            String detailCmd = String.format("SHOW PROC '/dbs/%d/%d/partitions/%d/%d/%d';",
                    dbId, tableId, partitionId, indexId, tabletId);
            rows.add(Lists.newArrayList(dbName, tableName, partitionName, indexName,
                    dbId.toString(), tableId.toString(),
                    partitionId.toString(), indexId.toString(),
                    isSync.toString(), detailCmd));
        } else {
            Database db = globalStateMgr.getDb(showStmt.getDbName());
            MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
            db.readLock();
            try {
                Table table = db.getTable(showStmt.getTableName());
                if (table == null) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getTableName());
                }
                if (!table.isNativeTableOrMaterializedView()) {
                    ErrorReport.reportAnalysisException(ErrorCode.ERR_NOT_OLAP_TABLE, showStmt.getTableName());
                }
                OlapTable olapTable = (OlapTable) table;
                // sizeLimit = offset + limit: how many rows we need before
                // applying the offset slice below; -1 means "no limit".
                long sizeLimit = -1;
                if (showStmt.hasOffset() && showStmt.hasLimit()) {
                    sizeLimit = showStmt.getOffset() + showStmt.getLimit();
                } else if (showStmt.hasLimit()) {
                    sizeLimit = showStmt.getLimit();
                }
                boolean stop = false;
                // Restrict to the named partitions when given, else all partitions.
                Collection<Partition> partitions = new ArrayList<>();
                if (showStmt.hasPartition()) {
                    PartitionNames partitionNames = showStmt.getPartitionNames();
                    for (String partName : partitionNames.getPartitionNames()) {
                        Partition partition = olapTable.getPartition(partName, partitionNames.isTemp());
                        if (partition == null) {
                            throw new AnalysisException("Unknown partition: " + partName);
                        }
                        partitions.add(partition);
                    }
                } else {
                    partitions = olapTable.getPartitions();
                }
                List<List<Comparable>> tabletInfos = new ArrayList<>();
                // Optional single-index filter; -1 means "all indexes".
                String indexName = showStmt.getIndexName();
                long indexId = -1;
                if (indexName != null) {
                    Long id = olapTable.getIndexIdByName(indexName);
                    if (id == null) {
                        ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_TABLE_ERROR, showStmt.getIndexName());
                    }
                    indexId = id;
                }
                for (Partition partition : partitions) {
                    if (stop) {
                        break;
                    }
                    for (MaterializedIndex index : partition.getMaterializedIndices(IndexExtState.ALL)) {
                        if (indexId > -1 && index.getId() != indexId) {
                            continue;
                        }
                        if (olapTable.isCloudNativeTableOrMaterializedView()) {
                            LakeTabletsProcNode procNode = new LakeTabletsProcNode(db, olapTable, index);
                            tabletInfos.addAll(procNode.fetchComparableResult());
                        } else {
                            LocalTabletsProcDir procDir = new LocalTabletsProcDir(db, olapTable, index);
                            tabletInfos.addAll(procDir.fetchComparableResult(
                                    showStmt.getVersion(), showStmt.getBackendId(), showStmt.getReplicaState()));
                        }
                        // Early stop is only safe when no ORDER BY was requested,
                        // since sorting needs the full candidate set.
                        if (sizeLimit > -1 && CollectionUtils.isEmpty(showStmt.getOrderByPairs())
                                && tabletInfos.size() >= sizeLimit) {
                            stop = true;
                            break;
                        }
                    }
                }
                // Sort by the requested columns, defaulting to columns 0 and 1.
                List<OrderByPair> orderByPairs = showStmt.getOrderByPairs();
                ListComparator<List<Comparable>> comparator;
                if (orderByPairs != null) {
                    OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
                    comparator = new ListComparator<>(orderByPairs.toArray(orderByPairArr));
                } else {
                    comparator = new ListComparator<>(0, 1);
                }
                tabletInfos.sort(comparator);
                // Apply the offset/limit window after sorting.
                if (sizeLimit > -1 && tabletInfos.size() >= sizeLimit) {
                    tabletInfos = tabletInfos.subList((int) showStmt.getOffset(), (int) sizeLimit);
                }
                for (List<Comparable> tabletInfo : tabletInfos) {
                    List<String> oneTablet = new ArrayList<>(tabletInfo.size());
                    for (Comparable column : tabletInfo) {
                        oneTablet.add(column.toString());
                    }
                    rows.add(oneTablet);
                }
            } finally {
                db.readUnlock();
            }
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
    }
private void handleShowBroker() {
ShowBrokerStmt showStmt = (ShowBrokerStmt) stmt;
List<List<String>> rowSet = GlobalStateMgr.getCurrentState().getBrokerMgr().getBrokersInfo();
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleShowResources() {
ShowResourcesStmt showStmt = (ShowResourcesStmt) stmt;
List<List<String>> rowSet = GlobalStateMgr.getCurrentState().getResourceMgr().getResourcesInfo();
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleShowExport() throws AnalysisException {
ShowExportStmt showExportStmt = (ShowExportStmt) stmt;
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
Database db = globalStateMgr.getDb(showExportStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showExportStmt.getDbName());
long dbId = db.getId();
ExportMgr exportMgr = globalStateMgr.getExportMgr();
Set<ExportJob.JobState> states = null;
ExportJob.JobState state = showExportStmt.getJobState();
if (state != null) {
states = Sets.newHashSet(state);
}
List<List<String>> infos = exportMgr.getExportJobInfosByIdOrState(
dbId, showExportStmt.getJobId(), states, showExportStmt.getQueryId(),
showExportStmt.getOrderByPairs(), showExportStmt.getLimit());
resultSet = new ShowResultSet(showExportStmt.getMetaData(), infos);
}
private void handleShowBackends() {
final ShowBackendsStmt showStmt = (ShowBackendsStmt) stmt;
List<List<String>> backendInfos = BackendsProcDir.getClusterBackendInfos();
resultSet = new ShowResultSet(showStmt.getMetaData(), backendInfos);
}
private void handleShowFrontends() {
final ShowFrontendsStmt showStmt = (ShowFrontendsStmt) stmt;
List<List<String>> infos = Lists.newArrayList();
FrontendsProcNode.getFrontendsInfo(GlobalStateMgr.getCurrentState(), infos);
resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
private void handleShowRepositories() {
final ShowRepositoriesStmt showStmt = (ShowRepositoriesStmt) stmt;
List<List<String>> repoInfos = GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getReposInfo();
resultSet = new ShowResultSet(showStmt.getMetaData(), repoInfos);
}
private void handleShowSnapshot() throws AnalysisException {
final ShowSnapshotStmt showStmt = (ShowSnapshotStmt) stmt;
Repository repo =
GlobalStateMgr.getCurrentState().getBackupHandler().getRepoMgr().getRepo(showStmt.getRepoName());
if (repo == null) {
throw new AnalysisException("Repository " + showStmt.getRepoName() + " does not exist");
}
List<List<String>> snapshotInfos = repo.getSnapshotInfos(showStmt.getSnapshotName(), showStmt.getTimestamp(),
showStmt.getSnapshotNames());
resultSet = new ShowResultSet(showStmt.getMetaData(), snapshotInfos);
}
    // Handles SHOW BACKUP: reports the current backup job of the named database,
    // or of every database when no db filter resolves. EXPORT privilege is
    // required on every table referenced by a job for it to be shown.
    private void handleShowBackup() {
        ShowBackupStmt showStmt = (ShowBackupStmt) stmt;
        Database filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());
        List<List<String>> infos = Lists.newArrayList();
        List<Database> dbs = Lists.newArrayList();
        if (filterDb == null) {
            // No (or unknown) db filter: inspect every database.
            for (Map.Entry<Long, Database> entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {
                dbs.add(entry.getValue());
            }
        } else {
            dbs.add(filterDb);
        }
        for (Database db : dbs) {
            AbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());
            if (!(jobI instanceof BackupJob)) {
                // No backup job on this db. NOTE(review): this assignment is
                // overwritten by the final assignment below unless the
                // privilege-deny early return fires first — confirm intended.
                resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
                continue;
            }
            BackupJob backupJob = (BackupJob) jobI;
            List<TableRef> tableRefs = backupJob.getTableRef();
            AtomicBoolean privilegeDeny = new AtomicBoolean(false);
            // The EXPORT privilege is checked for every table in the job.
            tableRefs.forEach(tableRef -> {
                TableName tableName = tableRef.getName();
                if (!PrivilegeActions.checkTableAction(connectContext, tableName.getDb(), tableName.getTbl(),
                        PrivilegeType.EXPORT)) {
                    privilegeDeny.set(true);
                }
            });
            if (privilegeDeny.get()) {
                // NOTE(review): returns (not continue) — a deny on one db also
                // hides jobs of all remaining dbs; confirm this is intended.
                resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
                return;
            }
            List<String> info = backupJob.getInfo();
            infos.add(info);
        }
        resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
    }
private void handleShowRestore() {
ShowRestoreStmt showStmt = (ShowRestoreStmt) stmt;
Database filterDb = GlobalStateMgr.getCurrentState().getDb(showStmt.getDbName());
List<List<String>> infos = Lists.newArrayList();
List<Database> dbs = Lists.newArrayList();
if (filterDb == null) {
for (Map.Entry<Long, Database> entry : GlobalStateMgr.getCurrentState().getIdToDb().entrySet()) {
dbs.add(entry.getValue());
}
} else {
dbs.add(filterDb);
}
for (Database db : dbs) {
AbstractJob jobI = GlobalStateMgr.getCurrentState().getBackupHandler().getJob(db.getId());
if (!(jobI instanceof RestoreJob)) {
resultSet = new ShowResultSet(showStmt.getMetaData(), EMPTY_SET);
continue;
}
RestoreJob restoreJob = (RestoreJob) jobI;
List<String> info = restoreJob.getInfo();
infos.add(info);
}
resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
private String getCatalogNameById(long catalogId) throws MetaNotFoundException {
if (CatalogMgr.isInternalCatalog(catalogId)) {
return InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;
}
CatalogMgr catalogMgr = GlobalStateMgr.getCurrentState().getCatalogMgr();
Optional<Catalog> catalogOptional = catalogMgr.getCatalogById(catalogId);
if (!catalogOptional.isPresent()) {
throw new MetaNotFoundException("cannot find catalog");
}
return catalogOptional.get().getName();
}
private String getCatalogNameFromPEntry(ObjectType objectType, PrivilegeCollection.PrivilegeEntry privilegeEntry)
throws MetaNotFoundException {
if (objectType.equals(ObjectType.CATALOG)) {
CatalogPEntryObject catalogPEntryObject =
(CatalogPEntryObject) privilegeEntry.getObject();
if (catalogPEntryObject.getId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {
return null;
} else {
return getCatalogNameById(catalogPEntryObject.getId());
}
} else if (objectType.equals(ObjectType.DATABASE)) {
DbPEntryObject dbPEntryObject = (DbPEntryObject) privilegeEntry.getObject();
if (dbPEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {
return null;
}
return getCatalogNameById(dbPEntryObject.getCatalogId());
} else if (objectType.equals(ObjectType.TABLE)) {
TablePEntryObject tablePEntryObject = (TablePEntryObject) privilegeEntry.getObject();
if (tablePEntryObject.getCatalogId() == PrivilegeBuiltinConstants.ALL_CATALOGS_ID) {
return null;
}
return getCatalogNameById(tablePEntryObject.getCatalogId());
} else {
return InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME;
}
}
    // Renders every privilege entry of a user/role as one SHOW GRANTS row:
    // [grantee, catalog, GRANT-statement SQL]. Entries whose catalog can no
    // longer be resolved are silently skipped.
    private List<List<String>> privilegeToRowString(AuthorizationMgr authorizationManager, GrantRevokeClause userOrRoleName,
                                                    Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>>
                                                            typeToPrivilegeEntryList) throws PrivilegeException {
        List<List<String>> infos = new ArrayList<>();
        for (Map.Entry<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntry
                : typeToPrivilegeEntryList.entrySet()) {
            for (PrivilegeCollection.PrivilegeEntry privilegeEntry : typeToPrivilegeEntry.getValue()) {
                ObjectType objectType = typeToPrivilegeEntry.getKey();
                String catalogName;
                try {
                    catalogName = getCatalogNameFromPEntry(objectType, privilegeEntry);
                } catch (MetaNotFoundException e) {
                    // The catalog referenced by this entry is gone; skip the row.
                    continue;
                }
                List<String> info = new ArrayList<>();
                // Grantee column: role name, or the user identity when no role.
                info.add(userOrRoleName.getRoleName() != null ?
                        userOrRoleName.getRoleName() : userOrRoleName.getUserIdentity().toString());
                info.add(catalogName);
                // Rebuild a GrantPrivilegeStmt so AstToSQLBuilder can print the
                // entry back as executable GRANT SQL.
                GrantPrivilegeStmt grantPrivilegeStmt = new GrantPrivilegeStmt(new ArrayList<>(), objectType.name(),
                        userOrRoleName, null, privilegeEntry.isWithGrantOption());
                grantPrivilegeStmt.setObjectType(objectType);
                ActionSet actionSet = privilegeEntry.getActionSet();
                List<PrivilegeType> privList = authorizationManager.analyzeActionSet(objectType, actionSet);
                grantPrivilegeStmt.setPrivilegeTypes(privList);
                grantPrivilegeStmt.setObjectList(Lists.newArrayList(privilegeEntry.getObject()));
                try {
                    info.add(AstToSQLBuilder.toSQL(grantPrivilegeStmt));
                    infos.add(info);
                } catch (com.starrocks.sql.common.MetaNotFoundException e) {
                    // NOTE(review): swallowed — rows whose object cannot be
                    // rendered to SQL are dropped without logging; confirm intended.
                }
            }
        }
        return infos;
    }
private void handleShowGrants() {
ShowGrantsStmt showStmt = (ShowGrantsStmt) stmt;
AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
try {
List<List<String>> infos = new ArrayList<>();
if (showStmt.getRole() != null) {
List<String> granteeRole = authorizationManager.getGranteeRoleDetailsForRole(showStmt.getRole());
if (granteeRole != null) {
infos.add(granteeRole);
}
Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntryList =
authorizationManager.getTypeToPrivilegeEntryListByRole(showStmt.getRole());
infos.addAll(privilegeToRowString(authorizationManager,
new GrantRevokeClause(null, showStmt.getRole()), typeToPrivilegeEntryList));
} else {
List<String> granteeRole = authorizationManager.getGranteeRoleDetailsForUser(showStmt.getUserIdent());
if (granteeRole != null) {
infos.add(granteeRole);
}
Map<ObjectType, List<PrivilegeCollection.PrivilegeEntry>> typeToPrivilegeEntryList =
authorizationManager.getTypeToPrivilegeEntryListByUser(showStmt.getUserIdent());
infos.addAll(privilegeToRowString(authorizationManager,
new GrantRevokeClause(showStmt.getUserIdent(), null), typeToPrivilegeEntryList));
}
resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
} catch (PrivilegeException e) {
throw new SemanticException(e.getMessage());
}
}
private void handleShowRoles() {
ShowRolesStmt showStmt = (ShowRolesStmt) stmt;
List<List<String>> infos = new ArrayList<>();
AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
List<String> roles = authorizationManager.getAllRoles();
roles.forEach(e -> infos.add(Lists.newArrayList(e,
authorizationManager.isBuiltinRole(e) ? "true" : "false",
authorizationManager.getRoleComment(e))));
resultSet = new ShowResultSet(showStmt.getMetaData(), infos);
}
private void handleShowUser() {
List<List<String>> rowSet = Lists.newArrayList();
ShowUserStmt showUserStmt = (ShowUserStmt) stmt;
if (showUserStmt.isAll()) {
AuthorizationMgr authorizationManager = GlobalStateMgr.getCurrentState().getAuthorizationMgr();
List<String> users = authorizationManager.getAllUsers();
users.forEach(u -> rowSet.add(Lists.newArrayList(u)));
} else {
List<String> row = Lists.newArrayList();
row.add(connectContext.getCurrentUserIdentity().toString());
rowSet.add(row);
}
resultSet = new ShowResultSet(stmt.getMetaData(), rowSet);
}
private void handleAdminShowTabletStatus() throws AnalysisException {
AdminShowReplicaStatusStmt showStmt = (AdminShowReplicaStatusStmt) stmt;
List<List<String>> results;
try {
results = MetadataViewer.getTabletStatus(showStmt);
} catch (DdlException e) {
throw new AnalysisException(e.getMessage());
}
resultSet = new ShowResultSet(showStmt.getMetaData(), results);
}
private void handleAdminShowTabletDistribution() throws AnalysisException {
AdminShowReplicaDistributionStmt showStmt = (AdminShowReplicaDistributionStmt) stmt;
List<List<String>> results;
try {
results = MetadataViewer.getTabletDistribution(showStmt);
} catch (DdlException e) {
throw new AnalysisException(e.getMessage());
}
resultSet = new ShowResultSet(showStmt.getMetaData(), results);
}
private void handleAdminShowConfig() throws AnalysisException {
AdminShowConfigStmt showStmt = (AdminShowConfigStmt) stmt;
List<List<String>> results;
try {
PatternMatcher matcher = null;
if (showStmt.getPattern() != null) {
matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
CaseSensibility.CONFIG.getCaseSensibility());
}
results = ConfigBase.getConfigInfo(matcher);
results.sort(Comparator.comparing(o -> o.get(0)));
} catch (DdlException e) {
throw new AnalysisException(e.getMessage());
}
resultSet = new ShowResultSet(showStmt.getMetaData(), results);
}
private void handleShowSmallFiles() throws AnalysisException {
ShowSmallFilesStmt showStmt = (ShowSmallFilesStmt) stmt;
List<List<String>> results;
try {
results = GlobalStateMgr.getCurrentState().getSmallFileMgr().getInfo(showStmt.getDbName());
} catch (DdlException e) {
throw new AnalysisException(e.getMessage());
}
resultSet = new ShowResultSet(showStmt.getMetaData(), results);
}
    // Handles SHOW DYNAMIC PARTITION TABLES: one row per accessible OLAP table
    // in the db that has a dynamic-partition policy, combining the policy with
    // the scheduler's latest runtime info for that table.
    private void handleShowDynamicPartition() {
        ShowDynamicPartitionStmt showDynamicPartitionStmt = (ShowDynamicPartitionStmt) stmt;
        List<List<String>> rows = Lists.newArrayList();
        Database db = connectContext.getGlobalStateMgr().getDb(showDynamicPartitionStmt.getDb());
        // NOTE(review): when the db does not exist, resultSet is left unset —
        // confirm callers tolerate that for this path.
        if (db != null) {
            db.readLock();
            try {
                for (Table tbl : db.getTables()) {
                    if (!(tbl instanceof OlapTable)) {
                        continue;
                    }
                    DynamicPartitionScheduler dynamicPartitionScheduler =
                            GlobalStateMgr.getCurrentState().getDynamicPartitionScheduler();
                    OlapTable olapTable = (OlapTable) tbl;
                    // Tables without a dynamic-partition property also get their
                    // stale scheduler runtime info cleaned up here.
                    if (!olapTable.dynamicPartitionExists()) {
                        dynamicPartitionScheduler.removeRuntimeInfo(olapTable.getName());
                        continue;
                    }
                    // Skip tables the current user holds no privilege on.
                    if (!PrivilegeActions.checkAnyActionOnTable(ConnectContext.get(),
                            db.getFullName(), olapTable.getName())) {
                        continue;
                    }
                    DynamicPartitionProperty dynamicPartitionProperty =
                            olapTable.getTableProperty().getDynamicPartitionProperty();
                    String tableName = olapTable.getName();
                    int replicationNum = dynamicPartitionProperty.getReplicationNum();
                    // NOTE(review): when replicationNum IS set, it is replaced by
                    // RunMode.defaultReplicationNum() rather than kept — verify
                    // that this branch order is intended.
                    replicationNum = (replicationNum == DynamicPartitionProperty.NOT_SET_REPLICATION_NUM) ?
                            olapTable.getDefaultReplicationNum() : RunMode.defaultReplicationNum();
                    rows.add(Lists.newArrayList(
                            tableName,
                            String.valueOf(dynamicPartitionProperty.getEnable()),
                            dynamicPartitionProperty.getTimeUnit().toUpperCase(),
                            String.valueOf(dynamicPartitionProperty.getStart()),
                            String.valueOf(dynamicPartitionProperty.getEnd()),
                            dynamicPartitionProperty.getPrefix(),
                            String.valueOf(dynamicPartitionProperty.getBuckets()),
                            String.valueOf(replicationNum),
                            dynamicPartitionProperty.getStartOfInfo(),
                            dynamicPartitionScheduler
                                    .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_UPDATE_TIME),
                            dynamicPartitionScheduler
                                    .getRuntimeInfo(tableName, DynamicPartitionScheduler.LAST_SCHEDULER_TIME),
                            dynamicPartitionScheduler
                                    .getRuntimeInfo(tableName, DynamicPartitionScheduler.DYNAMIC_PARTITION_STATE),
                            dynamicPartitionScheduler
                                    .getRuntimeInfo(tableName, DynamicPartitionScheduler.CREATE_PARTITION_MSG),
                            dynamicPartitionScheduler
                                    .getRuntimeInfo(tableName, DynamicPartitionScheduler.DROP_PARTITION_MSG)));
                }
            } finally {
                db.readUnlock();
            }
            resultSet = new ShowResultSet(showDynamicPartitionStmt.getMetaData(), rows);
        }
    }
private void handleShowTransaction() throws AnalysisException {
ShowTransactionStmt showStmt = (ShowTransactionStmt) stmt;
Database db = connectContext.getGlobalStateMgr().getDb(showStmt.getDbName());
MetaUtils.checkDbNullAndReport(db, showStmt.getDbName());
long txnId = showStmt.getTxnId();
GlobalTransactionMgr transactionMgr = GlobalStateMgr.getCurrentGlobalTransactionMgr();
resultSet = new ShowResultSet(showStmt.getMetaData(), transactionMgr.getSingleTranInfo(db.getId(), txnId));
}
private void handleShowPlugins() {
ShowPluginsStmt pluginsStmt = (ShowPluginsStmt) stmt;
List<List<String>> rows = GlobalStateMgr.getCurrentPluginMgr().getPluginShowInfos();
resultSet = new ShowResultSet(pluginsStmt.getMetaData(), rows);
}
private void handleShowCharset() {
ShowCharsetStmt showCharsetStmt = (ShowCharsetStmt) stmt;
List<List<String>> rows = Lists.newArrayList();
List<String> row = Lists.newArrayList();
row.add("utf8");
row.add("UTF-8 Unicode");
row.add("utf8_general_ci");
row.add("3");
rows.add(row);
resultSet = new ShowResultSet(showCharsetStmt.getMetaData(), rows);
}
private void handleShowSqlBlackListStmt() {
ShowSqlBlackListStmt showStmt = (ShowSqlBlackListStmt) stmt;
List<List<String>> rows = new ArrayList<>();
for (Map.Entry<String, BlackListSql> entry : SqlBlackList.getInstance().sqlBlackListMap.entrySet()) {
List<String> oneSql = new ArrayList<>();
oneSql.add(String.valueOf(entry.getValue().id));
oneSql.add(entry.getKey());
rows.add(oneSql);
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleShowAnalyzeJob() {
List<AnalyzeJob> jobs = connectContext.getGlobalStateMgr().getAnalyzeMgr().getAllAnalyzeJobList();
List<List<String>> rows = Lists.newArrayList();
jobs.sort(Comparator.comparing(AnalyzeJob::getId));
for (AnalyzeJob job : jobs) {
try {
List<String> result = ShowAnalyzeJobStmt.showAnalyzeJobs(connectContext, job);
if (result != null) {
rows.add(result);
}
} catch (MetaNotFoundException e) {
}
}
rows = doPredicate(stmt, stmt.getMetaData(), rows);
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
private void handleShowAnalyzeStatus() {
List<AnalyzeStatus> statuses = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
.getAnalyzeStatusMap().values());
List<List<String>> rows = Lists.newArrayList();
statuses.sort(Comparator.comparing(AnalyzeStatus::getId));
for (AnalyzeStatus status : statuses) {
try {
List<String> result = ShowAnalyzeStatusStmt.showAnalyzeStatus(connectContext, status);
if (result != null) {
rows.add(result);
}
} catch (MetaNotFoundException e) {
}
}
rows = doPredicate(stmt, stmt.getMetaData(), rows);
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
private void handleShowBasicStatsMeta() {
List<BasicStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
.getBasicStatsMetaMap().values());
List<List<String>> rows = Lists.newArrayList();
for (BasicStatsMeta meta : metas) {
try {
List<String> result = ShowBasicStatsMetaStmt.showBasicStatsMeta(connectContext, meta);
if (result != null) {
rows.add(result);
}
} catch (MetaNotFoundException e) {
}
}
rows = doPredicate(stmt, stmt.getMetaData(), rows);
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
private void handleShowHistogramStatsMeta() {
List<HistogramStatsMeta> metas = new ArrayList<>(connectContext.getGlobalStateMgr().getAnalyzeMgr()
.getHistogramStatsMetaMap().values());
List<List<String>> rows = Lists.newArrayList();
for (HistogramStatsMeta meta : metas) {
try {
List<String> result = ShowHistogramStatsMetaStmt.showHistogramStatsMeta(connectContext, meta);
if (result != null) {
rows.add(result);
}
} catch (MetaNotFoundException e) {
}
}
rows = doPredicate(stmt, stmt.getMetaData(), rows);
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
private void handleShowResourceGroup() throws AnalysisException {
ShowResourceGroupStmt showResourceGroupStmt = (ShowResourceGroupStmt) stmt;
List<List<String>> rows =
GlobalStateMgr.getCurrentState().getResourceGroupMgr().showResourceGroup(showResourceGroupStmt);
resultSet = new ShowResultSet(showResourceGroupStmt.getMetaData(), rows);
}
private void handleShowCatalogs() {
ShowCatalogsStmt showCatalogsStmt = (ShowCatalogsStmt) stmt;
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
CatalogMgr catalogMgr = globalStateMgr.getCatalogMgr();
List<List<String>> rowSet = catalogMgr.getCatalogsInfo().stream()
.filter(row -> {
if (!InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equals(row.get(0))) {
return PrivilegeActions.checkAnyActionOnOrInCatalog(
connectContext.getCurrentUserIdentity(),
connectContext.getCurrentRoleIds(), row.get(0));
}
return true;
}
)
.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());
resultSet = new ShowResultSet(showCatalogsStmt.getMetaData(), rowSet);
}
private void handleShowWarehouses() {
ShowWarehousesStmt showStmt = (ShowWarehousesStmt) stmt;
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
WarehouseManager warehouseMgr = globalStateMgr.getWarehouseMgr();
List<List<String>> rowSet = warehouseMgr.getWarehousesInfo().stream()
.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private void handleShowClusters() {
ShowClustersStmt showStmt = (ShowClustersStmt) stmt;
WarehouseManager warehouseMgr = GlobalStateMgr.getCurrentWarehouseMgr();
Warehouse warehouse = warehouseMgr.getWarehouse(showStmt.getWarehouseName());
List<List<String>> rowSet = warehouse.getClusterInfo().stream()
.sorted(Comparator.comparing(o -> o.get(0))).collect(Collectors.toList());
resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
}
private List<List<String>> doPredicate(ShowStmt showStmt,
ShowResultSetMetaData showResultSetMetaData,
List<List<String>> rows) {
Predicate predicate = showStmt.getPredicate();
if (predicate == null) {
return rows;
}
SlotRef slotRef = (SlotRef) predicate.getChild(0);
StringLiteral stringLiteral = (StringLiteral) predicate.getChild(1);
List<List<String>> returnRows = new ArrayList<>();
BinaryPredicate binaryPredicate = (BinaryPredicate) predicate;
int idx = showResultSetMetaData.getColumnIdx(slotRef.getColumnName());
if (binaryPredicate.getOp().isEquivalence()) {
for (List<String> row : rows) {
if (row.get(idx).equals(stringLiteral.getStringValue())) {
returnRows.add(row);
}
}
}
return returnRows;
}
private void handleShowCreateExternalCatalog() throws AnalysisException {
ShowCreateExternalCatalogStmt showStmt = (ShowCreateExternalCatalogStmt) stmt;
String catalogName = showStmt.getCatalogName();
List<List<String>> rows = Lists.newArrayList();
if (InternalCatalog.DEFAULT_INTERNAL_CATALOG_NAME.equalsIgnoreCase(catalogName)) {
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
return;
}
Catalog catalog = connectContext.getGlobalStateMgr().getCatalogMgr().getCatalogByName(catalogName);
if (catalog == null) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_CATALOG_ERROR, catalogName);
}
StringBuilder createCatalogSql = new StringBuilder();
createCatalogSql.append("CREATE EXTERNAL CATALOG ")
.append("`").append(catalogName).append("`")
.append("\n");
String comment = catalog.getComment();
if (comment != null) {
createCatalogSql.append("comment \"").append(catalog.getDisplayComment()).append("\"\n");
}
Map<String, String> clonedConfig = new HashMap<>(catalog.getConfig());
CloudCredentialUtil.maskCloudCredential(clonedConfig);
createCatalogSql.append("PROPERTIES (")
.append(new PrintableMap<>(clonedConfig, " = ", true, true))
.append("\n)");
rows.add(Lists.newArrayList(catalogName, createCatalogSql.toString()));
resultSet = new ShowResultSet(stmt.getMetaData(), rows);
}
private void handleShowStorageVolumes() throws DdlException {
ShowStorageVolumesStmt showStmt = (ShowStorageVolumesStmt) stmt;
GlobalStateMgr globalStateMgr = GlobalStateMgr.getCurrentState();
StorageVolumeMgr storageVolumeMgr = globalStateMgr.getStorageVolumeMgr();
List<String> storageVolumeNames = storageVolumeMgr.listStorageVolumeNames();
PatternMatcher matcher = null;
List<List<String>> rows = Lists.newArrayList();
if (!showStmt.getPattern().isEmpty()) {
matcher = PatternMatcher.createMysqlPattern(showStmt.getPattern(),
CaseSensibility.TABLE.getCaseSensibility());
}
PatternMatcher finalMatcher = matcher;
storageVolumeNames = storageVolumeNames.stream()
.filter(storageVolumeName -> finalMatcher == null || finalMatcher.match(storageVolumeName))
.filter(storageVolumeName -> PrivilegeActions.checkAnyActionOnStorageVolume(connectContext, storageVolumeName))
.collect(Collectors.toList());
for (String storageVolumeName : storageVolumeNames) {
rows.add(Lists.newArrayList(storageVolumeName));
}
resultSet = new ShowResultSet(showStmt.getMetaData(), rows);
}
private void handleDescStorageVolume() throws AnalysisException {
DescStorageVolumeStmt desc = (DescStorageVolumeStmt) stmt;
resultSet = new ShowResultSet(desc.getMetaData(), desc.getResultRows());
}
} |
Can we please use one term? Extra vs Additional. | private BType checkInvocationParam(BLangInvocation iExpr, AnalyzerData data) {
if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
return symTable.semanticError;
}
BType invocableType = Types.getReferredType(iExpr.symbol.type);
if (invocableType.tag != TypeTags.INVOKABLE) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
return symTable.noType;
}
BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
List<BType> paramTypes = ((BInvokableType) invocableType).getParameterTypes();
List<BVarSymbol> incRecordParams = new ArrayList<>();
BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
incRecordParams);
int parameterCountForPositionalArgs = paramTypes.size();
int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
iExpr.requiredArgs = new ArrayList<>();
for (BVarSymbol symbol : invokableSymbol.params) {
if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {
continue;
}
LinkedHashMap<String, BField> fields =
((BRecordType) Types.getReferredType(symbol.type)).fields;
if (fields.isEmpty()) {
continue;
}
for (String field : fields.keySet()) {
if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {
parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
break;
}
}
}
int i = 0;
BLangExpression vararg = null;
boolean foundNamedArg = false;
boolean isIncRecordAllowExtraFields = incRecordParamAllowAdditionalFields != null;
for (BLangExpression expr : iExpr.argExprs) {
switch (expr.getKind()) {
case NAMED_ARGS_EXPR:
foundNamedArg = true;
BLangNamedArgsExpression namedArg = (BLangNamedArgsExpression) expr;
boolean isNamedArgForIncRecordParam =
isNamedArgForIncRecordParam(namedArg.name.value, incRecordParamAllowAdditionalFields);
if (i < parameterCountForNamedArgs) {
if (isNamedArgForIncRecordParam) {
isIncRecordAllowExtraFields = false;
}
iExpr.requiredArgs.add(expr);
} else {
if (isIncRecordAllowExtraFields && !isNamedArgForIncRecordParam) {
iExpr.requiredArgs.add(expr);
} else {
dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
}
}
i++;
break;
case REST_ARGS_EXPR:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
continue;
}
vararg = expr;
break;
default:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
}
if (i < parameterCountForPositionalArgs) {
iExpr.requiredArgs.add(expr);
} else {
iExpr.restArgs.add(expr);
}
i++;
break;
}
}
return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
incRecordParamAllowAdditionalFields, data);
} | boolean isIncRecordAllowExtraFields = incRecordParamAllowAdditionalFields != null; | private BType checkInvocationParam(BLangInvocation iExpr, AnalyzerData data) {
if (Symbols.isFlagOn(iExpr.symbol.type.flags, Flags.ANY_FUNCTION)) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_POINTER_INVOCATION_WITH_TYPE);
return symTable.semanticError;
}
BType invocableType = Types.getReferredType(iExpr.symbol.type);
if (invocableType.tag != TypeTags.INVOKABLE) {
dlog.error(iExpr.pos, DiagnosticErrorCode.INVALID_FUNCTION_INVOCATION, iExpr.symbol.type);
return symTable.noType;
}
BInvokableSymbol invokableSymbol = ((BInvokableSymbol) iExpr.symbol);
List<BType> paramTypes = ((BInvokableType) invocableType).getParameterTypes();
List<BVarSymbol> incRecordParams = new ArrayList<>();
BVarSymbol incRecordParamAllowAdditionalFields = checkForIncRecordParamAllowAdditionalFields(invokableSymbol,
incRecordParams);
int parameterCountForPositionalArgs = paramTypes.size();
int parameterCountForNamedArgs = parameterCountForPositionalArgs + incRecordParams.size();
iExpr.requiredArgs = new ArrayList<>();
for (BVarSymbol symbol : invokableSymbol.params) {
if (!Symbols.isFlagOn(Flags.asMask(symbol.getFlags()), Flags.INCLUDED) ||
Types.getReferredType(symbol.type).tag != TypeTags.RECORD) {
continue;
}
LinkedHashMap<String, BField> fields =
((BRecordType) Types.getReferredType(symbol.type)).fields;
if (fields.isEmpty()) {
continue;
}
for (String field : fields.keySet()) {
if (Types.getReferredType(fields.get(field).type).tag != TypeTags.NEVER) {
parameterCountForNamedArgs = parameterCountForNamedArgs - 1;
break;
}
}
}
int i = 0;
BLangExpression vararg = null;
boolean foundNamedArg = false;
boolean incRecordAllowAdditionalFields = incRecordParamAllowAdditionalFields != null;
for (BLangExpression expr : iExpr.argExprs) {
switch (expr.getKind()) {
case NAMED_ARGS_EXPR:
foundNamedArg = true;
boolean namedArgForIncRecordParam =
isNamedArgForIncRecordParam(((BLangNamedArgsExpression) expr).name.value,
incRecordParamAllowAdditionalFields);
if (i < parameterCountForNamedArgs) {
if (namedArgForIncRecordParam) {
incRecordAllowAdditionalFields = false;
}
iExpr.requiredArgs.add(expr);
} else {
if (incRecordAllowAdditionalFields && !namedArgForIncRecordParam) {
iExpr.requiredArgs.add(expr);
} else {
checkTypeParamExpr(expr, new BNoType(TypeTags.NONE), iExpr.langLibInvocation, data);
dlog.error(expr.pos, DiagnosticErrorCode.TOO_MANY_ARGS_FUNC_CALL, iExpr.name.value);
}
}
i++;
break;
case REST_ARGS_EXPR:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.REST_ARG_DEFINED_AFTER_NAMED_ARG);
continue;
}
vararg = expr;
break;
default:
if (foundNamedArg) {
dlog.error(expr.pos, DiagnosticErrorCode.POSITIONAL_ARG_DEFINED_AFTER_NAMED_ARG);
}
if (i < parameterCountForPositionalArgs) {
if (Symbols.isFlagOn(invokableSymbol.params.get(i).flags, Flags.INCLUDED)) {
incRecordAllowAdditionalFields = false;
}
iExpr.requiredArgs.add(expr);
} else {
iExpr.restArgs.add(expr);
}
i++;
break;
}
}
return checkInvocationArgs(iExpr, paramTypes, vararg, incRecordParams,
incRecordParamAllowAdditionalFields, data);
} | class InferredTupleDetails {
List<BType> fixedMemberTypes = new ArrayList<>();
List<BType> restMemberTypes = new ArrayList<>();
} | class InferredTupleDetails {
List<BType> fixedMemberTypes = new ArrayList<>();
List<BType> restMemberTypes = new ArrayList<>();
} |
How about return both `slot id` and `expr`. I'm afraid that `slot id` may be used for some tracing debug. And please fix all related unit tests after you change here. | public String toSqlImpl() {
StringBuilder sb = new StringBuilder();
if (tblName != null) {
return tblName.toSql() + "." + label + sb.toString();
} else if (label != null) {
return label + sb.toString();
} else if (desc.getSourceExprs() != null) {
for (Expr expr : desc.getSourceExprs()) {
sb.append(expr.toSql());
sb.append(" ");
}
return sb.toString();
} else {
return "<slot " + Integer.toString(desc.getId().asInt()) + ">" + sb.toString();
}
} | return sb.toString(); | public String toSqlImpl() {
StringBuilder sb = new StringBuilder();
if (tblName != null) {
return tblName.toSql() + "." + label + sb.toString();
} else if (label != null) {
return label + sb.toString();
} else if (desc.getSourceExprs() != null) {
if (desc.getId().asInt() != 1) {
sb.append("<slot " + Integer.toString(desc.getId().asInt()) + ">");
}
for (Expr expr : desc.getSourceExprs()) {
sb.append(" ");
sb.append(expr.toSql());
}
return sb.toString();
} else {
return "<slot " + Integer.toString(desc.getId().asInt()) + ">" + sb.toString();
}
} | class SlotRef extends Expr {
private static final Logger LOG = LogManager.getLogger(SlotRef.class);
private TableName tblName;
private String col;
private String label;
protected SlotDescriptor desc;
private SlotRef() {
super();
}
public SlotRef(TableName tblName, String col) {
super();
this.tblName = tblName;
this.col = col;
this.label = "`" + col + "`";
}
public SlotRef(SlotDescriptor desc) {
super();
this.tblName = null;
this.col = null;
this.desc = desc;
this.type = desc.getType();
this.label = null;
if (this.type.equals(Type.CHAR)) {
this.type = Type.VARCHAR;
}
analysisDone();
}
protected SlotRef(SlotRef other) {
super(other);
tblName = other.tblName;
col = other.col;
label = other.label;
desc = other.desc;
}
@Override
public Expr clone() {
return new SlotRef(this);
}
public SlotDescriptor getDesc() {
Preconditions.checkState(isAnalyzed);
Preconditions.checkNotNull(desc);
return desc;
}
public SlotId getSlotId() {
Preconditions.checkState(isAnalyzed);
Preconditions.checkNotNull(desc);
return desc.getId();
}
public void setTblName(TableName name) {
this.tblName = name;
}
@Override
public void vectorizedAnalyze(Analyzer analyzer) {
computeOutputColumn(analyzer);
}
@Override
public void computeOutputColumn(Analyzer analyzer) {
outputColumn = desc.getSlotOffset();
LOG.debug("SlotRef: " + debugString() + " outputColumn: " + outputColumn);
}
@Override
public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
desc = analyzer.registerColumnRef(tblName, col);
type = desc.getType();
if (this.type.equals(Type.CHAR)) {
this.type = Type.VARCHAR;
}
if (!type.isSupported()) {
throw new AnalysisException(
"Unsupported type '" + type.toString() + "' in '" + toSql() + "'.");
}
numDistinctValues = desc.getStats().getNumDistinctValues();
if (type.equals(Type.BOOLEAN)) {
selectivity = DEFAULT_SELECTIVITY;
}
}
@Override
public String debugString() {
MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
helper.add("slotDesc", desc != null ? desc.debugString() : "null");
helper.add("col", col);
helper.add("label", label);
helper.add("tblName", tblName != null ? tblName.toSql() : "null");
return helper.toString();
}
@Override
@Override
public String toMySql() {
if (col != null) {
return col;
} else {
return "<slot " + Integer.toString(desc.getId().asInt()) + ">";
}
}
public TableName getTableName() {
return tblName;
}
@Override
public String toColumnLabel() {
return col;
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.SLOT_REF;
msg.slot_ref = new TSlotRef(desc.getId().asInt(), desc.getParent().getId().asInt());
msg.setOutput_column(outputColumn);
}
@Override
public void markAgg() {
desc.setIsAgg(true);
}
@Override
public int hashCode() {
if (desc != null) {
return desc.getId().hashCode();
}
return Objects.hashCode((tblName == null ? "" : tblName.toSql() + "." + label).toLowerCase());
}
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) {
return false;
}
SlotRef other = (SlotRef) obj;
if (desc != null && other.desc != null) {
return desc.getId().equals(other.desc.getId());
}
if ((tblName == null) != (other.tblName == null)) {
return false;
}
if (tblName != null && !tblName.equals(other.tblName)) {
return false;
}
if ((col == null) != (other.col == null)) {
return false;
}
if (col != null && !col.toLowerCase().equals(other.col.toLowerCase())) {
return false;
}
return true;
}
@Override
protected boolean isConstantImpl() { return false; }
@Override
public boolean isBoundByTupleIds(List<TupleId> tids) {
Preconditions.checkState(desc != null);
for (TupleId tid: tids) {
if (tid.equals(desc.getParent().getId())) return true;
}
return false;
}
@Override
public boolean isBound(SlotId slotId) {
Preconditions.checkState(isAnalyzed);
return desc.getId().equals(slotId);
}
@Override
public void getIds(List<TupleId> tupleIds, List<SlotId> slotIds) {
Preconditions.checkState(type != Type.INVALID);
Preconditions.checkState(desc != null);
if (slotIds != null) {
slotIds.add(desc.getId());
}
if (tupleIds != null) {
tupleIds.add(desc.getParent().getId());
}
}
@Override
public void getTableNameToColumnNames(Map<String, Set<String>> tupleDescToColumnNames) {
Preconditions.checkState(desc != null);
if (!desc.isMaterialized()) {
return;
}
if (col == null) {
for (Expr expr : desc.getSourceExprs()) {
expr.getTableNameToColumnNames(tupleDescToColumnNames);
}
} else {
Table table = desc.getParent().getTable();
if (table == null) {
return;
}
String tableName = table.getName();
Set<String> columnNames = tupleDescToColumnNames.get(tableName);
if (columnNames == null) {
columnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
tupleDescToColumnNames.put(tableName, columnNames);
}
columnNames.add(desc.getColumn().getName());
}
}
public String getColumnName() {
return col;
}
public void setCol(String col) {
this.col = col;
}
@Override
public boolean supportSerializable() {
return true;
}
@Override
public void write(DataOutput out) throws IOException {
if (tblName == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
tblName.write(out);
}
Text.writeString(out, col);
}
public void readFields(DataInput in) throws IOException {
if (in.readBoolean()) {
tblName = new TableName();
tblName.readFields(in);
}
col = Text.readString(in);
}
public static SlotRef read(DataInput in) throws IOException {
SlotRef slotRef = new SlotRef();
slotRef.readFields(in);
return slotRef;
}
} | class SlotRef extends Expr {
private static final Logger LOG = LogManager.getLogger(SlotRef.class);
private TableName tblName;
private String col;
private String label;
protected SlotDescriptor desc;
private SlotRef() {
super();
}
public SlotRef(TableName tblName, String col) {
super();
this.tblName = tblName;
this.col = col;
this.label = "`" + col + "`";
}
public SlotRef(SlotDescriptor desc) {
super();
this.tblName = null;
this.col = null;
this.desc = desc;
this.type = desc.getType();
this.label = null;
if (this.type.equals(Type.CHAR)) {
this.type = Type.VARCHAR;
}
analysisDone();
}
protected SlotRef(SlotRef other) {
super(other);
tblName = other.tblName;
col = other.col;
label = other.label;
desc = other.desc;
}
@Override
public Expr clone() {
return new SlotRef(this);
}
public SlotDescriptor getDesc() {
Preconditions.checkState(isAnalyzed);
Preconditions.checkNotNull(desc);
return desc;
}
public SlotId getSlotId() {
Preconditions.checkState(isAnalyzed);
Preconditions.checkNotNull(desc);
return desc.getId();
}
public void setTblName(TableName name) {
this.tblName = name;
}
@Override
public void vectorizedAnalyze(Analyzer analyzer) {
computeOutputColumn(analyzer);
}
@Override
public void computeOutputColumn(Analyzer analyzer) {
outputColumn = desc.getSlotOffset();
LOG.debug("SlotRef: " + debugString() + " outputColumn: " + outputColumn);
}
@Override
public void analyzeImpl(Analyzer analyzer) throws AnalysisException {
desc = analyzer.registerColumnRef(tblName, col);
type = desc.getType();
if (this.type.equals(Type.CHAR)) {
this.type = Type.VARCHAR;
}
if (!type.isSupported()) {
throw new AnalysisException(
"Unsupported type '" + type.toString() + "' in '" + toSql() + "'.");
}
numDistinctValues = desc.getStats().getNumDistinctValues();
if (type.equals(Type.BOOLEAN)) {
selectivity = DEFAULT_SELECTIVITY;
}
}
@Override
public String debugString() {
MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
helper.add("slotDesc", desc != null ? desc.debugString() : "null");
helper.add("col", col);
helper.add("label", label);
helper.add("tblName", tblName != null ? tblName.toSql() : "null");
return helper.toString();
}
@Override
@Override
public String toMySql() {
if (col != null) {
return col;
} else {
return "<slot " + Integer.toString(desc.getId().asInt()) + ">";
}
}
public TableName getTableName() {
return tblName;
}
@Override
public String toColumnLabel() {
return col;
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.SLOT_REF;
msg.slot_ref = new TSlotRef(desc.getId().asInt(), desc.getParent().getId().asInt());
msg.setOutput_column(outputColumn);
}
@Override
public void markAgg() {
desc.setIsAgg(true);
}
@Override
public int hashCode() {
if (desc != null) {
return desc.getId().hashCode();
}
return Objects.hashCode((tblName == null ? "" : tblName.toSql() + "." + label).toLowerCase());
}
@Override
public boolean equals(Object obj) {
if (!super.equals(obj)) {
return false;
}
SlotRef other = (SlotRef) obj;
if (desc != null && other.desc != null) {
return desc.getId().equals(other.desc.getId());
}
if ((tblName == null) != (other.tblName == null)) {
return false;
}
if (tblName != null && !tblName.equals(other.tblName)) {
return false;
}
if ((col == null) != (other.col == null)) {
return false;
}
if (col != null && !col.toLowerCase().equals(other.col.toLowerCase())) {
return false;
}
return true;
}
@Override
protected boolean isConstantImpl() { return false; }
@Override
public boolean isBoundByTupleIds(List<TupleId> tids) {
Preconditions.checkState(desc != null);
for (TupleId tid: tids) {
if (tid.equals(desc.getParent().getId())) return true;
}
return false;
}
@Override
public boolean isBound(SlotId slotId) {
Preconditions.checkState(isAnalyzed);
return desc.getId().equals(slotId);
}
@Override
public void getIds(List<TupleId> tupleIds, List<SlotId> slotIds) {
Preconditions.checkState(type != Type.INVALID);
Preconditions.checkState(desc != null);
if (slotIds != null) {
slotIds.add(desc.getId());
}
if (tupleIds != null) {
tupleIds.add(desc.getParent().getId());
}
}
@Override
public void getTableNameToColumnNames(Map<String, Set<String>> tupleDescToColumnNames) {
Preconditions.checkState(desc != null);
if (!desc.isMaterialized()) {
return;
}
if (col == null) {
for (Expr expr : desc.getSourceExprs()) {
expr.getTableNameToColumnNames(tupleDescToColumnNames);
}
} else {
Table table = desc.getParent().getTable();
if (table == null) {
return;
}
String tableName = table.getName();
Set<String> columnNames = tupleDescToColumnNames.get(tableName);
if (columnNames == null) {
columnNames = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
tupleDescToColumnNames.put(tableName, columnNames);
}
columnNames.add(desc.getColumn().getName());
}
}
public String getColumnName() {
return col;
}
public void setCol(String col) {
this.col = col;
}
@Override
public boolean supportSerializable() {
return true;
}
@Override
public void write(DataOutput out) throws IOException {
if (tblName == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
tblName.write(out);
}
Text.writeString(out, col);
}
public void readFields(DataInput in) throws IOException {
if (in.readBoolean()) {
tblName = new TableName();
tblName.readFields(in);
}
col = Text.readString(in);
}
public static SlotRef read(DataInput in) throws IOException {
SlotRef slotRef = new SlotRef();
slotRef.readFields(in);
return slotRef;
}
} |
Don't we need to change the doc comment with respect to the change? As this method only sets compression header now. | public static void setCompressionHeaders(Context context, HTTPCarbonMessage outboundMessage) {
AnnAttachmentInfo configAnn = context.getServiceInfo().getAnnotationAttachmentInfo(
HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.ANN_NAME_CONFIG);
if (configAnn != null) {
AnnAttributeValue compressionEnabled = configAnn.getAttributeValue(
HttpConstants.ANN_CONFIG_ATTR_COMPRESSION_ENABLED);
if (compressionEnabled != null && !compressionEnabled.getBooleanValue()) {
outboundMessage.setHeader(HttpHeaderNames.CONTENT_ENCODING.toString(),
Constants.HTTP_TRANSFER_ENCODING_IDENTITY);
}
}
} | AnnAttributeValue compressionEnabled = configAnn.getAttributeValue( | public static void setCompressionHeaders(Context context, HTTPCarbonMessage outboundMessage) {
AnnAttachmentInfo configAnn = context.getServiceInfo().getAnnotationAttachmentInfo(
HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.ANN_NAME_CONFIG);
if (configAnn != null) {
AnnAttributeValue compressionEnabled = configAnn.getAttributeValue(
HttpConstants.ANN_CONFIG_ATTR_COMPRESSION_ENABLED);
if (compressionEnabled != null && !compressionEnabled.getBooleanValue()) {
outboundMessage.setHeader(HttpHeaderNames.CONTENT_ENCODING.toString(),
Constants.HTTP_TRANSFER_ENCODING_IDENTITY);
}
}
} | class HttpUtil {
private static final Logger log = LoggerFactory.getLogger(HttpUtil.class);
private static final String METHOD_ACCESSED = "isMethodAccessed";
private static final String IO_EXCEPTION_OCCURED = "I/O exception occurred";
public static BValue[] getProperty(Context context,
AbstractNativeFunction abstractNativeFunction, boolean isRequest) {
BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0);
HTTPCarbonMessage httpCarbonMessage = HttpUtil
.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
String propertyName = abstractNativeFunction.getStringArgument(context, 0);
Object propertyValue = httpCarbonMessage.getProperty(propertyName);
if (propertyValue == null) {
return AbstractNativeFunction.VOID_RETURN;
}
if (propertyValue instanceof String) {
return abstractNativeFunction.getBValues(new BString((String) propertyValue));
} else {
throw new BallerinaException("Property value is of unknown type : " + propertyValue.getClass().getName());
}
}
public static BValue[] setProperty(Context context,
AbstractNativeFunction abstractNativeFunction, boolean isRequest) {
BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0);
String propertyName = abstractNativeFunction.getStringArgument(context, 0);
String propertyValue = abstractNativeFunction.getStringArgument(context, 1);
if (propertyName != null && propertyValue != null) {
HTTPCarbonMessage httpCarbonMessage = HttpUtil
.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
httpCarbonMessage.setProperty(propertyName, propertyValue);
}
return AbstractNativeFunction.VOID_RETURN;
}
/**
* Set the given entity to request or response message.
*
* @param context Ballerina context
* @param abstractNativeFunction Reference to abstract native ballerina function
* @param isRequest boolean representing whether the message is a request or a response
* @return void return
*/
public static BValue[] setEntity(Context context, AbstractNativeFunction abstractNativeFunction,
boolean isRequest) {
BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, HTTP_MESSAGE_INDEX);
HTTPCarbonMessage httpCarbonMessage = HttpUtil
.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
httpCarbonMessage.waitAndReleaseAllEntities();
BStruct entity = (BStruct) abstractNativeFunction.getRefArgument(context, ENTITY_INDEX);
String baseType = MimeUtil.getContentType(entity);
if (baseType == null) {
baseType = OCTET_STREAM;
}
HttpUtil.setHeaderToEntity(entity, CONTENT_TYPE, baseType);
httpMessageStruct.addNativeData(MESSAGE_ENTITY, entity);
httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, EntityBodyHandler
.checkEntityBodyAvailability(entity));
return AbstractNativeFunction.VOID_RETURN;
}
/**
* Get the entity from request or response.
*
* @param context Ballerina context
* @param abstractNativeFunction Reference to abstract native ballerina function
* @param isRequest boolean representing whether the message is a request or a response
* @param isEntityBodyRequired boolean representing whether the entity body is required
* @return Entity of the request or response
*/
public static BValue[] getEntity(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest,
boolean isEntityBodyRequired) {
BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, HTTP_MESSAGE_INDEX);
BStruct entity = (BStruct) httpMessageStruct.getNativeData(MESSAGE_ENTITY);
boolean isByteChannelAlreadySet = false;
if (httpMessageStruct.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET) != null) {
isByteChannelAlreadySet = (Boolean) httpMessageStruct.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
}
if (entity != null && isEntityBodyRequired && !isByteChannelAlreadySet) {
populateEntityBody(context, httpMessageStruct, entity, isRequest);
}
if (entity == null) {
entity = createNewEntity(context, httpMessageStruct);
}
return abstractNativeFunction.getBValues(entity);
}
/**
* Populate entity with the relevant body content.
*
* @param context Represent ballerina context
* @param httpMessageStruct Represent ballerina request/response
* @param entity Represent an entity
* @param isRequest boolean representing whether the message is a request or a response
*/
protected static void populateEntityBody(Context context, BStruct httpMessageStruct, BStruct entity,
boolean isRequest) {
HTTPCarbonMessage httpCarbonMessage = HttpUtil
.getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage);
String contentType = httpCarbonMessage.getHeader(CONTENT_TYPE);
if (isRequest && MimeUtil.isNotNullAndEmpty(contentType) && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)
&& context != null) {
MultipartDecoder.parseBody(context, entity, contentType, httpMessageDataStreamer.getInputStream());
} else {
int contentLength = NO_CONTENT_LENGTH_FOUND;
String lengthStr = httpCarbonMessage.getHeader(HttpConstants.HTTP_CONTENT_LENGTH);
try {
contentLength = lengthStr != null ? Integer.parseInt(lengthStr) : contentLength;
if (contentLength == NO_CONTENT_LENGTH_FOUND) {
contentLength = httpCarbonMessage.getFullMessageLength();
}
MimeUtil.setContentLength(entity, contentLength);
} catch (NumberFormatException e) {
throw new BallerinaException("Invalid content length");
}
EntityBodyHandler.setDiscreteMediaTypeBodyContent(entity, httpMessageDataStreamer
.getInputStream());
}
httpMessageStruct.addNativeData(MESSAGE_ENTITY, entity);
httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, true);
}
public static void closeMessageOutputStream(OutputStream messageOutputStream) {
try {
if (messageOutputStream != null) {
messageOutputStream.close();
}
} catch (IOException e) {
log.error("Couldn't close message output stream", e);
}
}
/**
* Helper method to start pending http server connectors.
*
* @throws BallerinaConnectorException
*/
public static void startPendingHttpConnectors(BallerinaHttpServerConnector httpServerConnector)
throws BallerinaConnectorException {
try {
HttpConnectionManager.getInstance().startPendingHTTPConnectors(httpServerConnector);
} catch (ServerConnectorException e) {
throw new BallerinaConnectorException(e);
}
}
public static void prepareOutboundResponse(Context context, HTTPCarbonMessage inboundRequestMsg,
HTTPCarbonMessage outboundResponseMsg, BStruct outboundResponseStruct) {
HttpUtil.checkEntityAvailability(context, outboundResponseStruct);
HttpUtil.addHTTPSessionAndCorsHeaders(context, inboundRequestMsg, outboundResponseMsg);
setCompressionHeaders(context, outboundResponseMsg);
HttpUtil.enrichOutboundMessage(outboundResponseMsg, outboundResponseStruct);
}
public static BStruct createSessionStruct(Context context, Session session) {
BStruct sessionStruct = ConnectorUtils
.createAndGetStruct(context, HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.SESSION);
sessionStruct.addNativeData(HttpConstants.HTTP_SESSION, session);
return sessionStruct;
}
public static String getSessionID(String cookieHeader) {
return Arrays.stream(cookieHeader.split(";"))
.filter(cookie -> cookie.trim().startsWith(HttpConstants.SESSION_ID))
.findFirst().get().trim().substring(HttpConstants.SESSION_ID.length());
}
public static void addHTTPSessionAndCorsHeaders(Context context, HTTPCarbonMessage requestMsg,
HTTPCarbonMessage responseMsg) {
Session session = (Session) requestMsg.getProperty(HttpConstants.HTTP_SESSION);
if (session != null) {
boolean isSecureRequest = false;
AnnAttachmentInfo configAnn = context.getServiceInfo().getAnnotationAttachmentInfo(
HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.ANN_NAME_CONFIG);
if (configAnn != null) {
AnnAttributeValue httpsPortAttrVal = configAnn
.getAttributeValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT);
if (httpsPortAttrVal != null) {
Integer listenerPort = (Integer) requestMsg.getProperty(HttpConstants.LISTENER_PORT);
if (listenerPort != null && httpsPortAttrVal.getIntValue() == listenerPort) {
isSecureRequest = true;
}
}
}
session.generateSessionHeader(responseMsg, isSecureRequest);
}
if (requestMsg.getHeader(HttpConstants.ORIGIN) != null) {
CorsHeaderGenerator.process(requestMsg, responseMsg, true);
}
}
public static HttpResponseFuture sendOutboundResponse(HTTPCarbonMessage requestMsg,
HTTPCarbonMessage responseMsg) {
HttpResponseFuture responseFuture;
try {
responseFuture = requestMsg.respond(responseMsg);
} catch (org.wso2.transport.http.netty.contract.ServerConnectorException e) {
throw new BallerinaConnectorException("Error occurred during response", e);
}
return responseFuture;
}
public static void handleFailure(HTTPCarbonMessage requestMessage, BallerinaConnectorException ex) {
Object carbonStatusCode = requestMessage.getProperty(HttpConstants.HTTP_STATUS_CODE);
int statusCode = (carbonStatusCode == null) ? 500 : Integer.parseInt(carbonStatusCode.toString());
String errorMsg = ex.getMessage();
log.error(errorMsg);
ErrorHandlerUtils.printError(ex);
sendOutboundResponse(requestMessage, createErrorMessage(errorMsg, statusCode));
}
public static HTTPCarbonMessage createErrorMessage(String payload, int statusCode) {
HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false);
response.waitAndReleaseAllEntities();
if (payload != null) {
payload = lowerCaseTheFirstLetter(payload);
response.addHttpContent(new DefaultLastHttpContent(Unpooled.wrappedBuffer(payload.getBytes())));
} else {
response.addHttpContent(new DefaultLastHttpContent());
}
setHttpStatusCodes(statusCode, response);
return response;
}
private static String lowerCaseTheFirstLetter(String payload) {
if (!payload.isEmpty()) {
char[] characters = payload.toCharArray();
characters[0] = Character.toLowerCase(characters[0]);
payload = new String(characters);
}
return payload;
}
private static void setHttpStatusCodes(int statusCode, HTTPCarbonMessage response) {
HttpHeaders httpHeaders = response.getHeaders();
httpHeaders.set(org.wso2.transport.http.netty.common.Constants.HTTP_CONTENT_TYPE,
org.wso2.transport.http.netty.common.Constants.TEXT_PLAIN);
response.setProperty(org.wso2.transport.http.netty.common.Constants.HTTP_STATUS_CODE, statusCode);
}
public static BStruct getServerConnectorError(Context context, Throwable throwable) {
PackageInfo httpPackageInfo = context.getProgramFile()
.getPackageInfo(HttpConstants.PROTOCOL_PACKAGE_HTTP);
StructInfo errorStructInfo = httpPackageInfo.getStructInfo(HttpConstants.HTTP_CONNECTOR_ERROR);
BStruct httpConnectorError = new BStruct(errorStructInfo.getType());
if (throwable.getMessage() == null) {
httpConnectorError.setStringField(0, IO_EXCEPTION_OCCURED);
} else {
httpConnectorError.setStringField(0, throwable.getMessage());
}
return httpConnectorError;
}
/**
 * Returns the transport message cached on the struct; when none is attached yet,
 * caches and returns the supplied default.
 */
public static HTTPCarbonMessage getCarbonMsg(BStruct struct, HTTPCarbonMessage defaultMsg) {
    HTTPCarbonMessage cached = (HTTPCarbonMessage) struct.getNativeData(HttpConstants.TRANSPORT_MESSAGE);
    if (cached == null) {
        addCarbonMsg(struct, defaultMsg);
        return defaultMsg;
    }
    return cached;
}
/**
 * Attaches the given transport message to the struct as native data.
 */
public static void addCarbonMsg(BStruct struct, HTTPCarbonMessage httpCarbonMessage) {
    struct.addNativeData(HttpConstants.TRANSPORT_MESSAGE, httpCarbonMessage);
}
/**
 * Populates the ballerina inbound request struct from the transport-level request:
 * attaches the carbon message, copies request-line info and headers, and wires up
 * the entity and media-type structs.
 *
 * @param inboundRequestStruct ballerina struct representing the inbound request
 * @param entity               entity struct to fill with headers and content info
 * @param mediaType            media-type struct associated with the entity
 * @param inboundRequestMsg    transport HTTP carbon message of the request
 */
public static void populateInboundRequest(BStruct inboundRequestStruct, BStruct entity, BStruct mediaType,
                                          HTTPCarbonMessage inboundRequestMsg) {
    inboundRequestStruct.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundRequestMsg);
    // Mark the struct as an inbound request so later header handling can distinguish it.
    inboundRequestStruct.addNativeData(HttpConstants.IN_REQUEST, true);
    enrichWithInboundRequestInfo(inboundRequestStruct, inboundRequestMsg);
    enrichWithInboundRequestHeaders(inboundRequestStruct, inboundRequestMsg);
    populateEntity(entity, mediaType, inboundRequestMsg);
    inboundRequestStruct.addNativeData(MESSAGE_ENTITY, entity);
    // Body has not been read into a byte channel yet.
    inboundRequestStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
}
/**
 * Moves the User-Agent header (when present) from the transport message into the
 * dedicated string field on the inbound request struct.
 */
private static void enrichWithInboundRequestHeaders(BStruct inboundRequestStruct,
                                                    HTTPCarbonMessage inboundRequestMsg) {
    String userAgent = inboundRequestMsg.getHeader(HttpConstants.USER_AGENT_HEADER);
    if (userAgent == null) {
        return;
    }
    inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_USER_AGENT_INDEX, userAgent);
    inboundRequestMsg.removeHeader(HttpConstants.USER_AGENT_HEADER);
}
/**
 * Copies request-line details (raw path, method, HTTP version) and the extra-path
 * resource argument from the transport message onto the request struct.
 */
private static void enrichWithInboundRequestInfo(BStruct inboundRequestStruct,
                                                 HTTPCarbonMessage inboundRequestMsg) {
    inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_RAW_PATH_INDEX,
            (String) inboundRequestMsg.getProperty(HttpConstants.REQUEST_URL));
    inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_METHOD_INDEX,
            (String) inboundRequestMsg.getProperty(HttpConstants.HTTP_METHOD));
    inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_VERSION_INDEX,
            (String) inboundRequestMsg.getProperty(HttpConstants.HTTP_VERSION));
    // NOTE(review): unchecked cast — assumes RESOURCE_ARGS is always a
    // Map<String, String> set by the dispatcher; an NPE follows if it is absent.
    Map<String, String> resourceArgValues =
            (Map<String, String>) inboundRequestMsg.getProperty(HttpConstants.RESOURCE_ARGS);
    inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_EXTRA_PATH_INFO_INDEX,
            resourceArgValues.get(HttpConstants.EXTRA_PATH_INFO));
}
/**
 * Stores the transport message on the connection struct and fills in the local
 * host name and listener port the request arrived on.
 */
public static void enrichConnectionInfo(BStruct connection, HTTPCarbonMessage cMsg) {
    connection.addNativeData(HttpConstants.TRANSPORT_MESSAGE, cMsg);
    connection.setStringField(HttpConstants.CONNECTION_HOST_INDEX,
            ((InetSocketAddress) cMsg.getProperty(HttpConstants.LOCAL_ADDRESS)).getHostName());
    connection.setIntField(HttpConstants.CONNECTION_PORT_INDEX,
            (Integer) cMsg.getProperty(HttpConstants.LISTENER_PORT));
}
/**
 * Populate inbound response with headers and entity.
 *
 * @param inboundResponse    Ballerina struct to represent response
 * @param entity             Entity of the response
 * @param mediaType          Content type of the response
 * @param inboundResponseMsg Represent carbon message.
 */
public static void populateInboundResponse(BStruct inboundResponse, BStruct entity, BStruct mediaType,
                                           HTTPCarbonMessage inboundResponseMsg) {
    inboundResponse.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundResponseMsg);
    int statusCode = (Integer) inboundResponseMsg.getProperty(HttpConstants.HTTP_STATUS_CODE);
    inboundResponse.setIntField(HttpConstants.IN_RESPONSE_STATUS_CODE_INDEX, statusCode);
    // The reason phrase is derived from the status code rather than read off the wire.
    inboundResponse.setStringField(HttpConstants.IN_RESPONSE_REASON_PHRASE_INDEX,
            HttpResponseStatus.valueOf(statusCode).reasonPhrase());
    // The Server header is surfaced as a struct field and removed from the header map.
    if (inboundResponseMsg.getHeader(HttpConstants.SERVER_HEADER) != null) {
        inboundResponse.setStringField(HttpConstants.IN_RESPONSE_SERVER_INDEX,
                inboundResponseMsg.getHeader(HttpConstants.SERVER_HEADER));
        inboundResponseMsg.removeHeader(HttpConstants.SERVER_HEADER);
    }
    populateEntity(entity, mediaType, inboundResponseMsg);
    inboundResponse.addNativeData(MESSAGE_ENTITY, entity);
    inboundResponse.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
}
/**
 * Populate entity with headers, content-type and content-length.
 *
 * @param entity    Represent an entity struct
 * @param mediaType mediaType struct that needs to be set to the entity
 * @param cMsg      Represent a carbon message
 */
private static void populateEntity(BStruct entity, BStruct mediaType, HTTPCarbonMessage cMsg) {
    String contentType = cMsg.getHeader(CONTENT_TYPE);
    MimeUtil.setContentType(mediaType, entity, contentType);
    // -1 signals "content length unknown" when the header is absent.
    int contentLength = -1;
    String lengthStr = cMsg.getHeader(HttpConstants.HTTP_CONTENT_LENGTH);
    try {
        contentLength = lengthStr != null ? Integer.parseInt(lengthStr) : contentLength;
        MimeUtil.setContentLength(entity, contentLength);
    } catch (NumberFormatException e) {
        // NOTE(review): the original cause is dropped here; consider chaining it
        // if BallerinaException exposes a (String, Throwable) constructor — confirm.
        throw new BallerinaException("Invalid content length");
    }
    entity.setRefField(ENTITY_HEADERS_INDEX, prepareEntityHeaderMap(cMsg.getHeaders(), new BMap<>()));
}
/**
 * Folds the transport header entries into the given ballerina map, collecting
 * repeated header names into string arrays; returns the same map instance.
 */
private static BMap<String, BValue> prepareEntityHeaderMap(HttpHeaders headers, BMap<String, BValue> headerMap) {
    for (Map.Entry<String, String> entry : headers.entries()) {
        String name = entry.getKey();
        String value = entry.getValue();
        if (!headerMap.keySet().contains(name)) {
            // First occurrence of this header name.
            headerMap.put(name, new BStringArray(new String[]{value}));
        } else {
            // Repeated header: append to the existing value array.
            BStringArray existing = (BStringArray) headerMap.get(name);
            existing.add(existing.size(), value);
        }
    }
    return headerMap;
}
/**
 * Set headers and properties of request/response struct to the outbound transport message.
 *
 * @param outboundMsg    transport Http carbon message.
 * @param outboundStruct req/resp struct.
 */
public static void enrichOutboundMessage(HTTPCarbonMessage outboundMsg, BStruct outboundStruct) {
    setHeadersToTransportMessage(outboundMsg, outboundStruct);
    setPropertiesToTransportMessage(outboundMsg, outboundStruct);
}
/**
 * Copies entity headers from the struct into the outbound transport message.
 * For inbound structs only the previously extracted header fields (User-Agent /
 * Server) are restored — the transport map already holds the rest.
 */
@SuppressWarnings("unchecked")
private static void setHeadersToTransportMessage(HTTPCarbonMessage outboundMsg, BStruct struct) {
    BStruct entityStruct = (BStruct) struct.getNativeData(MESSAGE_ENTITY);
    HttpHeaders transportHeaders = outboundMsg.getHeaders();
    if (isInboundRequestStruct(struct) || isInboundResponseStruct(struct)) {
        addRemovedPropertiesBackToHeadersMap(struct, transportHeaders);
        return;
    }
    BMap<String, BValue> entityHeaders = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);
    if (entityHeaders == null) {
        return;
    }
    Set<String> keys = entityHeaders.keySet();
    for (String key : keys) {
        BStringArray headerValues = (BStringArray) entityHeaders.get(key);
        // A header name may map to multiple values; add each one separately.
        for (int i = 0; i < headerValues.size(); i++) {
            transportHeaders.add(key, headerValues.get(i));
        }
    }
}
// Struct-type predicates: distinguish the ballerina request/response struct
// kinds by their declared type name.
private static boolean isInboundRequestStruct(BStruct struct) {
    return struct.getType().getName().equals(HttpConstants.IN_REQUEST);
}

private static boolean isInboundResponseStruct(BStruct struct) {
    return struct.getType().getName().equals(HttpConstants.IN_RESPONSE);
}

private static boolean isOutboundResponseStruct(BStruct struct) {
    return struct.getType().getName().equals(HttpConstants.OUT_RESPONSE);
}
/**
 * Restores headers that were earlier moved into struct fields (User-Agent for
 * inbound requests, Server for inbound responses) back into the transport map.
 */
private static void addRemovedPropertiesBackToHeadersMap(BStruct struct, HttpHeaders transportHeaders) {
    if (isInboundRequestStruct(struct)) {
        String userAgent = struct.getStringField(HttpConstants.IN_REQUEST_USER_AGENT_INDEX);
        if (!userAgent.isEmpty()) {
            transportHeaders.add(HttpConstants.USER_AGENT_HEADER, userAgent);
        }
        return;
    }
    String server = struct.getStringField(HttpConstants.IN_RESPONSE_SERVER_INDEX);
    if (!server.isEmpty()) {
        transportHeaders.add(HttpConstants.SERVER_HEADER, server);
    }
}
/**
 * For outbound responses, transfers the user-set status code and reason phrase
 * from the struct onto the transport message. Zero / empty values mean "unset".
 */
private static void setPropertiesToTransportMessage(HTTPCarbonMessage outboundResponseMsg, BStruct struct) {
    if (isOutboundResponseStruct(struct)) {
        if (struct.getIntField(HttpConstants.OUT_RESPONSE_STATUS_CODE_INDEX) != 0) {
            // getIntValue rejects a long that does not fit in an int.
            outboundResponseMsg.setProperty(HttpConstants.HTTP_STATUS_CODE, getIntValue(
                    struct.getIntField(HttpConstants.OUT_RESPONSE_STATUS_CODE_INDEX)));
        }
        if (!struct.getStringField(HttpConstants.OUT_RESPONSE_REASON_PHRASE_INDEX).isEmpty()) {
            outboundResponseMsg.setProperty(HttpConstants.HTTP_REASON_PHRASE,
                    struct.getStringField(HttpConstants.OUT_RESPONSE_REASON_PHRASE_INDEX));
        }
    }
}
/**
 * Adds a single header to the entity's header map, creating the map when absent.
 */
private static void setHeaderToEntity(BStruct struct, String key, String value) {
    BMap<String, BValue> headerMap = struct.getRefField(ENTITY_HEADERS_INDEX) == null
            ? new BMap<>()
            : (BMap) struct.getRefField(ENTITY_HEADERS_INDEX);
    struct.setRefField(ENTITY_HEADERS_INDEX,
            prepareEntityHeaderMap(new DefaultHttpHeaders().add(key, value), headerMap));
}
/**
 * Check the existence of entity. Set new entity of not present.
 *
 * @param context ballerina context.
 * @param struct  request/response struct.
 */
public static void checkEntityAvailability(Context context, BStruct struct) {
    BStruct existingEntity = (BStruct) struct.getNativeData(MESSAGE_ENTITY);
    if (existingEntity == null) {
        createNewEntity(context, struct);
    }
}
/**
 * Set new entity to in/out request/response struct.
 *
 * @param context ballerina context.
 * @param struct  request/response struct.
 * @return the freshly created entity struct, already attached to the given struct
 */
public static BStruct createNewEntity(Context context, BStruct struct) {
    BStruct entity = ConnectorUtils.createAndGetStruct(context
            , org.ballerinalang.mime.util.Constants.PROTOCOL_PACKAGE_MIME
            , org.ballerinalang.mime.util.Constants.ENTITY);
    // Start with an empty header map so later header writes have a target.
    entity.setRefField(ENTITY_HEADERS_INDEX, new BMap<>());
    struct.addNativeData(MESSAGE_ENTITY, entity);
    struct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
    return entity;
}
/*
 * NOTE(review): orphaned Javadoc — the method it documented ("Set connection
 * Keep-Alive and content-encoding headers to transport message", taking a
 * ballerina context and an outbound transport message) is no longer present
 * here. Kept as a plain comment for reference; safe to remove once confirmed
 * unused.
 */
/**
 * Extract the listener configurations from the config annotation.
 *
 * @param annotationInfo configuration annotation info.
 * @return the set of {@link ListenerConfiguration} which were extracted from config annotation.
 */
public static Set<ListenerConfiguration> getDefaultOrDynamicListenerConfig(Annotation annotationInfo) {
    if (annotationInfo == null) {
        return HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet();
    }
    Set<ListenerConfiguration> listenerConfSet = new HashSet<>();
    extractBasicConfig(annotationInfo, listenerConfSet);
    extractHttpsConfig(annotationInfo, listenerConfSet);
    // Fall back to the default listeners when the annotation configured none.
    return listenerConfSet.isEmpty()
            ? HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet()
            : listenerConfSet;
}
/**
 * Builds the listener interface id in host:port form, defaulting a null host to 0.0.0.0.
 */
private static String getListenerInterface(String host, int port) {
    String effectiveHost = (host != null) ? host : "0.0.0.0";
    return effectiveHost + ":" + port;
}
/**
 * Builds an HTTP (non-TLS) listener configuration from the config annotation and
 * adds it to the given set. Nothing is added unless a positive port is configured.
 *
 * @param configInfo      service configuration annotation
 * @param listenerConfSet output set the derived configuration is added to
 * @throws BallerinaConnectorException on an unsupported transfer-encoding value
 *                                     or non-positive size limits
 */
private static void extractBasicConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
    AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HOST);
    AnnAttrValue portAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_PORT);
    AnnAttrValue keepAliveAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEEP_ALIVE);
    AnnAttrValue transferEncoding = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_TRANSFER_ENCODING);
    AnnAttrValue chunking = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CHUNKING);
    AnnAttrValue maxUriLength = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_URL_LENGTH);
    AnnAttrValue maxHeaderSize = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_HEADER_SIZE);
    AnnAttrValue maxEntityBodySize = configInfo.getAnnAttrValue(
            HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_ENTITY_BODY_SIZE);
    ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
    // Only a positive port produces a listener; otherwise the annotation is ignored.
    if (portAttrVal != null && portAttrVal.getIntValue() > 0) {
        listenerConfiguration.setPort(Math.toIntExact(portAttrVal.getIntValue()));
        listenerConfiguration.setScheme(HttpConstants.PROTOCOL_HTTP);
        if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
            listenerConfiguration.setHost(hostAttrVal.getStringValue());
        } else {
            listenerConfiguration.setHost(HttpConstants.HTTP_DEFAULT_HOST);
        }
        // Only chunked transfer-encoding is supported; any other value is rejected.
        if (transferEncoding != null && !HttpConstants.ANN_CONFIG_ATTR_CHUNKING
                .equalsIgnoreCase(transferEncoding.getStringValue())) {
            throw new BallerinaConnectorException("Unsupported configuration found for Transfer-Encoding : "
                    + transferEncoding.getStringValue());
        }
        // Chunking and keep-alive both default to AUTO when unconfigured.
        if (chunking != null) {
            ChunkKeepAliveConfig chunkConfig = getKeepAliveChunkConfig(chunking.getStringValue());
            listenerConfiguration.setChunkConfig(chunkConfig);
        } else {
            listenerConfiguration.setChunkConfig(ChunkKeepAliveConfig.AUTO);
        }
        if (keepAliveAttrVal != null) {
            ChunkKeepAliveConfig keepAliveConfig = getKeepAliveChunkConfig(keepAliveAttrVal.getStringValue());
            listenerConfiguration.setKeepAliveConfig(keepAliveConfig);
        } else {
            listenerConfiguration.setKeepAliveConfig(ChunkKeepAliveConfig.AUTO);
        }
        // Request-size limits: each must be strictly positive when configured.
        RequestSizeValidationConfig requestSizeValidationConfig =
                listenerConfiguration.getRequestSizeValidationConfig();
        if (maxUriLength != null) {
            if (maxUriLength.getIntValue() > 0) {
                requestSizeValidationConfig.setMaxUriLength(Math.toIntExact(maxUriLength.getIntValue()));
            } else {
                throw new BallerinaConnectorException("Invalid configuration found for maxUriLength : "
                        + maxUriLength.getIntValue());
            }
        }
        if (maxHeaderSize != null) {
            if (maxHeaderSize.getIntValue() > 0) {
                requestSizeValidationConfig.setMaxHeaderSize(Math.toIntExact(maxHeaderSize.getIntValue()));
            } else {
                throw new BallerinaConnectorException("Invalid configuration found for maxHeaderSize : "
                        + maxHeaderSize.getIntValue());
            }
        }
        if (maxEntityBodySize != null) {
            if (maxEntityBodySize.getIntValue() > 0) {
                requestSizeValidationConfig.setMaxEntityBodySize(Math.toIntExact(maxEntityBodySize.getIntValue()));
            } else {
                throw new BallerinaConnectorException("Invalid configuration found for maxEntityBodySize : "
                        + maxEntityBodySize.getIntValue());
            }
        }
        // The id doubles as the host:port interface identifier.
        listenerConfiguration
                .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
        listenerConfSet.add(listenerConfiguration);
    }
}
/**
 * Maps an AUTO/ALWAYS/NEVER configuration string (case-insensitive) to the
 * corresponding {@link ChunkKeepAliveConfig}; any other value is rejected.
 *
 * @throws BallerinaConnectorException for an unrecognized configuration value
 */
public static ChunkKeepAliveConfig getKeepAliveChunkConfig(String keepAliveChunkConfig) {
    if (HttpConstants.CHUNKING_KEEP_ALIVE_AUTO.equalsIgnoreCase(keepAliveChunkConfig)) {
        return ChunkKeepAliveConfig.AUTO;
    }
    if (HttpConstants.CHUNKING_KEEP_ALIVE_ALWAYS.equalsIgnoreCase(keepAliveChunkConfig)) {
        return ChunkKeepAliveConfig.ALWAYS;
    }
    if (HttpConstants.CHUNKING_KEEP_ALIVE_NEVER.equalsIgnoreCase(keepAliveChunkConfig)) {
        return ChunkKeepAliveConfig.NEVER;
    }
    throw new BallerinaConnectorException(
            "Invalid configuration found for Transfer-Encoding : " + keepAliveChunkConfig);
}
/**
 * Builds an HTTPS (TLS) listener configuration from the config annotation and
 * adds it to the given set. Nothing is added unless a positive HTTPS/WSS port is
 * configured. Keystore location/password and certificate password are mandatory;
 * truststore settings are mandatory only when client verification is enabled.
 *
 * @param configInfo      service configuration annotation
 * @param listenerConfSet output set the derived configuration is added to
 * @throws BallerinaConnectorException when mandatory keystore settings are missing
 * @throws BallerinaException          when mutual-SSL is enabled without a truststore
 */
private static void extractHttpsConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
    // The WSS port attribute is accepted as a fallback for WebSocket services.
    AnnAttrValue httpsPortAttrVal;
    if (configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT) == null) {
        httpsPortAttrVal =
                configInfo.getAnnAttrValue(WebSocketConstants.ANN_CONFIG_ATTR_WSS_PORT);
    } else {
        httpsPortAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT);
    }
    AnnAttrValue keyStoreFileAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEY_STORE_FILE);
    AnnAttrValue keyStorePasswordAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEY_STORE_PASS);
    AnnAttrValue certPasswordAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CERT_PASS);
    AnnAttrValue trustStoreFileAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_TRUST_STORE_FILE);
    AnnAttrValue trustStorePasswordAttrVal = configInfo.getAnnAttrValue(
            HttpConstants.ANN_CONFIG_ATTR_TRUST_STORE_PASS);
    AnnAttrValue sslVerifyClientAttrVal = configInfo.getAnnAttrValue(
            HttpConstants.ANN_CONFIG_ATTR_SSL_VERIFY_CLIENT);
    AnnAttrValue sslEnabledProtocolsAttrVal = configInfo
            .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS);
    AnnAttrValue ciphersAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CIPHERS);
    AnnAttrValue sslProtocolAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_SSL_PROTOCOL);
    AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HOST);
    AnnAttrValue certificateValidationEnabledAttrValue = configInfo
            .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_VALIDATE_CERT_ENABLED);
    AnnAttrValue cacheSizeAttrValue = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CACHE_SIZE);
    AnnAttrValue cacheValidityPeriodAttrValue = configInfo
            .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CACHE_VALIDITY_PERIOD);
    ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
    // Only a positive port produces a listener; otherwise the annotation is ignored.
    if (httpsPortAttrVal != null && httpsPortAttrVal.getIntValue() > 0) {
        listenerConfiguration.setPort(Math.toIntExact(httpsPortAttrVal.getIntValue()));
        listenerConfiguration.setScheme(HttpConstants.PROTOCOL_HTTPS);
        if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
            listenerConfiguration.setHost(hostAttrVal.getStringValue());
        } else {
            listenerConfiguration.setHost(HttpConstants.HTTP_DEFAULT_HOST);
        }
        // Keystore settings are mandatory for any TLS listener.
        if (keyStoreFileAttrVal == null || keyStoreFileAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException("Keystore location must be provided for secure connection");
        }
        if (keyStorePasswordAttrVal == null || keyStorePasswordAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException("Keystore password value must be provided for secure connection");
        }
        if (certPasswordAttrVal == null || certPasswordAttrVal.getStringValue() == null) {
            throw new BallerinaConnectorException(
                    "Certificate password value must be provided for secure connection");
        }
        // Truststore settings become mandatory only when client verification is on.
        if ((trustStoreFileAttrVal == null || trustStoreFileAttrVal.getStringValue() == null)
                && sslVerifyClientAttrVal != null) {
            throw new BallerinaException("Truststore location must be provided to enable Mutual SSL");
        }
        if ((trustStorePasswordAttrVal == null || trustStorePasswordAttrVal.getStringValue() == null)
                && sslVerifyClientAttrVal != null) {
            throw new BallerinaException("Truststore password value must be provided to enable Mutual SSL");
        }
        listenerConfiguration.setTLSStoreType(HttpConstants.PKCS_STORE_TYPE);
        listenerConfiguration.setKeyStoreFile(keyStoreFileAttrVal.getStringValue());
        listenerConfiguration.setKeyStorePass(keyStorePasswordAttrVal.getStringValue());
        listenerConfiguration.setCertPass(certPasswordAttrVal.getStringValue());
        if (sslVerifyClientAttrVal != null) {
            listenerConfiguration.setVerifyClient(sslVerifyClientAttrVal.getStringValue());
        }
        if (trustStoreFileAttrVal != null) {
            listenerConfiguration.setTrustStoreFile(trustStoreFileAttrVal.getStringValue());
        }
        if (trustStorePasswordAttrVal != null) {
            listenerConfiguration.setTrustStorePass(trustStorePasswordAttrVal.getStringValue());
        }
        // Optional certificate revocation validation with tunable cache settings.
        if (certificateValidationEnabledAttrValue != null && certificateValidationEnabledAttrValue
                .getBooleanValue()) {
            listenerConfiguration.setValidateCertEnabled(certificateValidationEnabledAttrValue.getBooleanValue());
            if (cacheSizeAttrValue != null) {
                listenerConfiguration.setCacheSize((int) cacheSizeAttrValue.getIntValue());
            }
            if (cacheValidityPeriodAttrValue != null) {
                listenerConfiguration.setCacheValidityPeriod((int) cacheValidityPeriodAttrValue.getIntValue());
            }
        }
        // Enabled protocols and cipher suites are passed through as transport parameters.
        List<Parameter> serverParams = new ArrayList<>();
        Parameter serverCiphers;
        if (sslEnabledProtocolsAttrVal != null && sslEnabledProtocolsAttrVal.getStringValue() != null) {
            serverCiphers = new Parameter(HttpConstants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS,
                    sslEnabledProtocolsAttrVal.getStringValue());
            serverParams.add(serverCiphers);
        }
        if (ciphersAttrVal != null && ciphersAttrVal.getStringValue() != null) {
            serverCiphers = new Parameter(HttpConstants.ANN_CONFIG_ATTR_CIPHERS, ciphersAttrVal.getStringValue());
            serverParams.add(serverCiphers);
        }
        if (!serverParams.isEmpty()) {
            listenerConfiguration.setParameters(serverParams);
        }
        if (sslProtocolAttrVal != null) {
            listenerConfiguration.setSSLProtocol(sslProtocolAttrVal.getStringValue());
        }
        listenerConfiguration
                .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
        listenerConfSet.add(listenerConfiguration);
    }
}
/**
 * Creates a fresh transport message: a GET request skeleton when isRequest is
 * true, otherwise a 200 OK response skeleton. End-of-message is pre-marked on
 * both variants.
 */
public static HTTPCarbonMessage createHttpCarbonMessage(boolean isRequest) {
    HTTPCarbonMessage httpCarbonMessage;
    if (isRequest) {
        httpCarbonMessage = new HTTPCarbonMessage(
                new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, ""));
    } else {
        httpCarbonMessage = new HTTPCarbonMessage(
                new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
    }
    // Common to both branches; hoisted out of the conditional.
    httpCarbonMessage.setEndOfMsgAdded(true);
    return httpCarbonMessage;
}
/**
 * Validates that a connection-scoped function may run: the transport message
 * must exist and the function must not have been invoked already.
 */
public static void checkFunctionValidity(BStruct connectionStruct, HTTPCarbonMessage reqMsg) {
    serverConnectionStructCheck(reqMsg);
    methodInvocationCheck(connectionStruct, reqMsg);
}
/**
 * Guards against invoking respond-style functions more than once: the first
 * non-100-continue invocation marks the struct, and any later call (or a call
 * with no transport message) fails.
 *
 * @throws IllegalStateException when already invoked or the request message is null
 */
private static void methodInvocationCheck(BStruct bStruct, HTTPCarbonMessage reqMsg) {
    if (bStruct.getNativeData(METHOD_ACCESSED) != null || reqMsg == null) {
        throw new IllegalStateException("illegal function invocation");
    }
    // 100-continue interactions may legitimately respond twice, so don't mark them.
    if (!is100ContinueRequest(reqMsg)) {
        bStruct.addNativeData(METHOD_ACCESSED, true);
    }
}
/**
 * Ensures the connection actually carries a transport message.
 *
 * @throws BallerinaException when the request message is null
 */
private static void serverConnectionStructCheck(HTTPCarbonMessage reqMsg) {
    if (reqMsg == null) {
        throw new BallerinaException("operation not allowed:invalid Connection variable");
    }
}
/**
 * True when the request carries an "Expect: 100-continue" header (case-insensitive).
 */
private static boolean is100ContinueRequest(HTTPCarbonMessage reqMsg) {
    return HttpConstants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(reqMsg.getHeader(HttpConstants.EXPECT_HEADER));
}
/**
 * Returns the single configuration annotation of the service, or null when absent.
 *
 * @throws BallerinaException when more than one config annotation is attached
 */
public static Annotation getServiceConfigAnnotation(Service service, String pkgPath) {
    List<Annotation> annotationList = service.getAnnotationList(pkgPath, HttpConstants.ANN_NAME_CONFIG);
    if (annotationList == null || annotationList.isEmpty()) {
        return null;
    }
    if (annotationList.size() > 1) {
        throw new BallerinaException(
                "multiple service configuration annotations found in service: " + service.getName());
    }
    return annotationList.get(0);
}
/**
 * Returns the single configuration annotation of the resource, or null when absent.
 *
 * @throws BallerinaException when more than one config annotation is attached
 */
public static Annotation getResourceConfigAnnotation(Resource resource, String pkgPath) {
    List<Annotation> annotationList = resource.getAnnotationList(pkgPath, HttpConstants.ANN_NAME_RESOURCE_CONFIG);
    if (annotationList == null || annotationList.isEmpty()) {
        return null;
    }
    if (annotationList.size() > 1) {
        throw new BallerinaException(
                "multiple resource configuration annotations found in resource: " +
                        resource.getServiceName() + "." + resource.getName());
    }
    return annotationList.get(0);
}
/**
 * Narrows a long to int, rejecting values outside the int range.
 *
 * @throws IllegalArgumentException when the value does not fit in an int
 */
private static int getIntValue(long val) {
    if (val < Integer.MIN_VALUE || val > Integer.MAX_VALUE) {
        throw new IllegalArgumentException("invalid argument: " + val);
    }
    return (int) val;
}
/**
 * Extract generic error message.
 *
 * @param context Represent ballerina context.
 * @param errMsg  Error message.
 * @return Generic error message.
 */
public static BStruct getGenericError(Context context, String errMsg) {
    StructInfo errorStructInfo = context.getProgramFile()
            .getPackageInfo(PACKAGE_BUILTIN)
            .getStructInfo(STRUCT_GENERIC_ERROR);
    BStruct genericError = new BStruct(errorStructInfo.getType());
    genericError.setStringField(0, errMsg);
    return genericError;
}
} | class HttpUtil {
// Class-wide logger for transport/utility diagnostics.
private static final Logger log = LoggerFactory.getLogger(HttpUtil.class);
// Native-data key marking that a respond-style method has already been invoked.
private static final String METHOD_ACCESSED = "isMethodAccessed";
// Fallback error text used when a throwable carries no message.
private static final String IO_EXCEPTION_OCCURED = "I/O exception occurred";
/**
 * Reads a named property off the request/response transport message and returns
 * it as a ballerina string; returns void when the property is absent.
 *
 * @throws BallerinaException when the property value is not a String
 */
public static BValue[] getProperty(Context context,
                                   AbstractNativeFunction abstractNativeFunction, boolean isRequest) {
    BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0);
    HTTPCarbonMessage httpCarbonMessage = HttpUtil
            .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
    String propertyName = abstractNativeFunction.getStringArgument(context, 0);
    Object propertyValue = httpCarbonMessage.getProperty(propertyName);
    if (propertyValue == null) {
        return AbstractNativeFunction.VOID_RETURN;
    }
    if (propertyValue instanceof String) {
        return abstractNativeFunction.getBValues(new BString((String) propertyValue));
    } else {
        throw new BallerinaException("Property value is of unknown type : " + propertyValue.getClass().getName());
    }
}
/**
 * Writes a string property onto the request/response transport message.
 * Silently does nothing when either the name or the value argument is null.
 */
public static BValue[] setProperty(Context context,
                                   AbstractNativeFunction abstractNativeFunction, boolean isRequest) {
    BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, 0);
    String propertyName = abstractNativeFunction.getStringArgument(context, 0);
    String propertyValue = abstractNativeFunction.getStringArgument(context, 1);
    if (propertyName != null && propertyValue != null) {
        HTTPCarbonMessage httpCarbonMessage = HttpUtil
                .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
        httpCarbonMessage.setProperty(propertyName, propertyValue);
    }
    return AbstractNativeFunction.VOID_RETURN;
}
/**
 * Set the given entity to request or response message.
 *
 * @param context                Ballerina context
 * @param abstractNativeFunction Reference to abstract native ballerina function
 * @param isRequest              boolean representing whether the message is a request or a response
 * @return void return
 */
public static BValue[] setEntity(Context context, AbstractNativeFunction abstractNativeFunction,
                                 boolean isRequest) {
    BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, HTTP_MESSAGE_INDEX);
    HTTPCarbonMessage httpCarbonMessage = HttpUtil
            .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
    // Release any previously buffered content before replacing the entity.
    httpCarbonMessage.waitAndReleaseAllEntities();
    BStruct entity = (BStruct) abstractNativeFunction.getRefArgument(context, ENTITY_INDEX);
    String contentType = MimeUtil.getContentTypeWithParameters(entity);
    if (contentType == null) {
        // Fall back to application/octet-stream for unknown payloads.
        contentType = OCTET_STREAM;
    }
    HttpUtil.setHeaderToEntity(entity, HttpHeaderNames.CONTENT_TYPE.toString(), contentType);
    httpMessageStruct.addNativeData(MESSAGE_ENTITY, entity);
    // Record whether the new entity already carries a body byte channel.
    httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, EntityBodyHandler
            .checkEntityBodyAvailability(entity));
    return AbstractNativeFunction.VOID_RETURN;
}
/**
 * Get the entity from request or response.
 *
 * @param context                Ballerina context
 * @param abstractNativeFunction Reference to abstract native ballerina function
 * @param isRequest              boolean representing whether the message is a request or a response
 * @param isEntityBodyRequired   boolean representing whether the entity body is required
 * @return Entity of the request or response
 */
public static BValue[] getEntity(Context context, AbstractNativeFunction abstractNativeFunction, boolean isRequest,
                                 boolean isEntityBodyRequired) {
    BStruct httpMessageStruct = (BStruct) abstractNativeFunction.getRefArgument(context, HTTP_MESSAGE_INDEX);
    BStruct entity = (BStruct) httpMessageStruct.getNativeData(MESSAGE_ENTITY);
    boolean isByteChannelAlreadySet = false;
    if (httpMessageStruct.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET) != null) {
        isByteChannelAlreadySet = (Boolean) httpMessageStruct.getNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET);
    }
    // Read the body off the transport only once, and only when actually needed.
    if (entity != null && isEntityBodyRequired && !isByteChannelAlreadySet) {
        populateEntityBody(context, httpMessageStruct, entity, isRequest);
    }
    if (entity == null) {
        entity = createNewEntity(context, httpMessageStruct);
    }
    return abstractNativeFunction.getBValues(entity);
}
/**
 * Populate entity with the relevant body content.
 *
 * @param context           Represent ballerina context
 * @param httpMessageStruct Represent ballerina request/response
 * @param entity            Represent an entity
 * @param isRequest         boolean representing whether the message is a request or a response
 */
protected static void populateEntityBody(Context context, BStruct httpMessageStruct, BStruct entity,
                                         boolean isRequest) {
    HTTPCarbonMessage httpCarbonMessage = HttpUtil
            .getCarbonMsg(httpMessageStruct, HttpUtil.createHttpCarbonMessage(isRequest));
    HttpMessageDataStreamer httpMessageDataStreamer = new HttpMessageDataStreamer(httpCarbonMessage);
    String contentType = httpCarbonMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
    // Multipart bodies are decoded part-by-part; anything else is treated as a
    // single discrete payload.
    if (MimeUtil.isNotNullAndEmpty(contentType) && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)
            && context != null) {
        MultipartDecoder.parseBody(context, entity, contentType, httpMessageDataStreamer.getInputStream());
    } else {
        int contentLength = NO_CONTENT_LENGTH_FOUND;
        String lengthStr = httpCarbonMessage.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString());
        try {
            contentLength = lengthStr != null ? Integer.parseInt(lengthStr) : contentLength;
            if (contentLength == NO_CONTENT_LENGTH_FOUND) {
                // No header present: fall back to the full buffered message length.
                contentLength = httpCarbonMessage.getFullMessageLength();
            }
            MimeUtil.setContentLength(entity, contentLength);
        } catch (NumberFormatException e) {
            throw new BallerinaException("Invalid content length");
        }
        EntityBodyHandler.setDiscreteMediaTypeBodyContent(entity, httpMessageDataStreamer
                .getInputStream());
    }
    httpMessageStruct.addNativeData(MESSAGE_ENTITY, entity);
    // Body is now backed by a byte channel; do not re-read from the transport.
    httpMessageStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, true);
}
/**
 * Best-effort close of the message output stream; failures are logged, not thrown.
 */
public static void closeMessageOutputStream(OutputStream messageOutputStream) {
    if (messageOutputStream == null) {
        return;
    }
    try {
        messageOutputStream.close();
    } catch (IOException e) {
        log.error("Couldn't close message output stream", e);
    }
}
/**
 * Helper method to start pending http server connectors.
 *
 * @throws BallerinaConnectorException when the underlying transport fails to start
 */
public static void startPendingHttpConnectors(BallerinaHttpServerConnector httpServerConnector)
        throws BallerinaConnectorException {
    try {
        HttpConnectionManager.getInstance().startPendingHTTPConnectors(httpServerConnector);
    } catch (ServerConnectorException e) {
        // Re-wrap the transport-level failure, preserving it as the cause.
        throw new BallerinaConnectorException(e);
    }
}
/**
 * Prepares an outbound response before it is handed to the transport: ensures an
 * entity exists, adds session/CORS headers, applies compression headers, and
 * copies struct headers/properties onto the transport message.
 */
public static void prepareOutboundResponse(Context context, HTTPCarbonMessage inboundRequestMsg,
                                           HTTPCarbonMessage outboundResponseMsg, BStruct outboundResponseStruct) {
    HttpUtil.checkEntityAvailability(context, outboundResponseStruct);
    HttpUtil.addHTTPSessionAndCorsHeaders(context, inboundRequestMsg, outboundResponseMsg);
    setCompressionHeaders(context, outboundResponseMsg);
    HttpUtil.enrichOutboundMessage(outboundResponseMsg, outboundResponseStruct);
}
/**
 * Wraps the given HTTP session in a ballerina Session struct.
 */
public static BStruct createSessionStruct(Context context, Session session) {
    BStruct sessionStruct = ConnectorUtils.createAndGetStruct(
            context, HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.SESSION);
    sessionStruct.addNativeData(HttpConstants.HTTP_SESSION, session);
    return sessionStruct;
}
/**
 * Extracts the session id value from a Cookie header by locating the cookie
 * whose name starts with the configured session-id prefix.
 *
 * @param cookieHeader raw Cookie header value ("name=value; name2=value2; ...")
 * @return the session id (the text following the session-id cookie prefix)
 * @throws java.util.NoSuchElementException when no session id cookie is present
 */
public static String getSessionID(String cookieHeader) {
    // Fail with a descriptive message instead of Optional.get()'s bare
    // NoSuchElementException when the session cookie is missing.
    return Arrays.stream(cookieHeader.split(";"))
            .filter(cookie -> cookie.trim().startsWith(HttpConstants.SESSION_ID))
            .findFirst()
            .orElseThrow(() -> new java.util.NoSuchElementException(
                    "session id cookie not found in cookie header"))
            .trim().substring(HttpConstants.SESSION_ID.length());
}
/**
 * Adds the session cookie header (when a session exists on the request) and CORS
 * headers (when the request carries an Origin header) to the response message.
 */
public static void addHTTPSessionAndCorsHeaders(Context context, HTTPCarbonMessage requestMsg,
                                                HTTPCarbonMessage responseMsg) {
    Session session = (Session) requestMsg.getProperty(HttpConstants.HTTP_SESSION);
    if (session != null) {
        // The session cookie is flagged secure only when the request arrived on
        // the service's configured HTTPS port.
        boolean isSecureRequest = false;
        AnnAttachmentInfo configAnn = context.getServiceInfo().getAnnotationAttachmentInfo(
                HttpConstants.PROTOCOL_PACKAGE_HTTP, HttpConstants.ANN_NAME_CONFIG);
        if (configAnn != null) {
            AnnAttributeValue httpsPortAttrVal = configAnn
                    .getAttributeValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT);
            if (httpsPortAttrVal != null) {
                Integer listenerPort = (Integer) requestMsg.getProperty(HttpConstants.LISTENER_PORT);
                if (listenerPort != null && httpsPortAttrVal.getIntValue() == listenerPort) {
                    isSecureRequest = true;
                }
            }
        }
        session.generateSessionHeader(responseMsg, isSecureRequest);
    }
    if (requestMsg.getHeader(HttpHeaderNames.ORIGIN.toString()) != null) {
        CorsHeaderGenerator.process(requestMsg, responseMsg, true);
    }
}
/**
 * Hands the outbound response to the transport for delivery.
 *
 * @return future that resolves once the response has been written
 * @throws BallerinaConnectorException when the transport rejects the response
 */
public static HttpResponseFuture sendOutboundResponse(HTTPCarbonMessage requestMsg,
                                                      HTTPCarbonMessage responseMsg) {
    HttpResponseFuture responseFuture;
    try {
        responseFuture = requestMsg.respond(responseMsg);
    } catch (org.wso2.transport.http.netty.contract.ServerConnectorException e) {
        throw new BallerinaConnectorException("Error occurred during response", e);
    }
    return responseFuture;
}
/**
 * Sends an error response for a failed request: reuses the status code already on
 * the message (500 when none), logs the failure, and responds with the error text.
 */
public static void handleFailure(HTTPCarbonMessage requestMessage, BallerinaConnectorException ex) {
    Object carbonStatusCode = requestMessage.getProperty(HttpConstants.HTTP_STATUS_CODE);
    int statusCode = (carbonStatusCode == null) ? 500 : Integer.parseInt(carbonStatusCode.toString());
    String errorMsg = ex.getMessage();
    // Log the exception itself, not just its message, so the stack trace is kept.
    log.error(errorMsg, ex);
    ErrorHandlerUtils.printError(ex);
    sendOutboundResponse(requestMessage, createErrorMessage(errorMsg, statusCode));
}
/**
     * Creates an outbound error response message carrying the given payload (if any)
     * as a plain-text body with the given status code.
     *
     * @param payload    error text to send, may be null for an empty body
     * @param statusCode HTTP status code to set on the response
     * @return a completed carbon message ready to be sent out
     */
    public static HTTPCarbonMessage createErrorMessage(String payload, int statusCode) {
        HTTPCarbonMessage response = HttpUtil.createHttpCarbonMessage(false);
        response.waitAndReleaseAllEntities();
        if (payload != null) {
            payload = lowerCaseTheFirstLetter(payload);
            // Encode explicitly as UTF-8: the no-arg getBytes() uses the platform
            // default charset, which would make the error body platform-dependent.
            response.addHttpContent(new DefaultLastHttpContent(
                    Unpooled.wrappedBuffer(payload.getBytes(java.nio.charset.StandardCharsets.UTF_8))));
        } else {
            response.addHttpContent(new DefaultLastHttpContent());
        }
        setHttpStatusCodes(statusCode, response);
        return response;
    }
// Returns the payload with its first character lower-cased; empty input is
    // returned unchanged.
    private static String lowerCaseTheFirstLetter(String payload) {
        if (payload.isEmpty()) {
            return payload;
        }
        return Character.toLowerCase(payload.charAt(0)) + payload.substring(1);
    }
// Marks the response as text/plain and records the status code as a message
    // property for the transport layer.
    private static void setHttpStatusCodes(int statusCode, HTTPCarbonMessage response) {
        HttpHeaders httpHeaders = response.getHeaders();
        httpHeaders.set(HttpHeaderNames.CONTENT_TYPE, org.wso2.transport.http.netty.common.Constants.TEXT_PLAIN);
        response.setProperty(org.wso2.transport.http.netty.common.Constants.HTTP_STATUS_CODE, statusCode);
    }
// Builds an HttpConnectorError struct for the given throwable. Field 0 carries
    // the throwable's message, or a generic I/O message when there is none.
    public static BStruct getServerConnectorError(Context context, Throwable throwable) {
        PackageInfo httpPackageInfo = context.getProgramFile()
                .getPackageInfo(HttpConstants.PROTOCOL_PACKAGE_HTTP);
        StructInfo errorStructInfo = httpPackageInfo.getStructInfo(HttpConstants.HTTP_CONNECTOR_ERROR);
        BStruct httpConnectorError = new BStruct(errorStructInfo.getType());
        String message = throwable.getMessage();
        httpConnectorError.setStringField(0, message == null ? IO_EXCEPTION_OCCURED : message);
        return httpConnectorError;
    }
// Returns the transport message cached on the struct, or caches and returns
    // the supplied default when none is present yet.
    public static HTTPCarbonMessage getCarbonMsg(BStruct struct, HTTPCarbonMessage defaultMsg) {
        HTTPCarbonMessage httpCarbonMessage = (HTTPCarbonMessage) struct
                .getNativeData(HttpConstants.TRANSPORT_MESSAGE);
        if (httpCarbonMessage != null) {
            return httpCarbonMessage;
        }
        addCarbonMsg(struct, defaultMsg);
        return defaultMsg;
    }
// Caches the transport message on the struct as native data.
    public static void addCarbonMsg(BStruct struct, HTTPCarbonMessage httpCarbonMessage) {
        struct.addNativeData(HttpConstants.TRANSPORT_MESSAGE, httpCarbonMessage);
    }
// Populates the inbound request struct from the transport message: caches the
    // message, copies request-line info and headers, and attaches a fresh entity.
    public static void populateInboundRequest(BStruct inboundRequestStruct, BStruct entity, BStruct mediaType,
                                              HTTPCarbonMessage inboundRequestMsg) {
        inboundRequestStruct.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundRequestMsg);
        inboundRequestStruct.addNativeData(HttpConstants.IN_REQUEST, true);
        enrichWithInboundRequestInfo(inboundRequestStruct, inboundRequestMsg);
        enrichWithInboundRequestHeaders(inboundRequestStruct, inboundRequestMsg);
        populateEntity(entity, mediaType, inboundRequestMsg);
        inboundRequestStruct.addNativeData(MESSAGE_ENTITY, entity);
        // Body channel has not been handed to user code yet.
        inboundRequestStruct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
    }
// Moves the User-Agent header out of the transport message and into the
    // struct's user-agent field; it is re-added to the headers when the message
    // is forwarded (see addRemovedPropertiesBackToHeadersMap).
    private static void enrichWithInboundRequestHeaders(BStruct inboundRequestStruct,
                                                        HTTPCarbonMessage inboundRequestMsg) {
        String userAgentHeaderName = HttpHeaderNames.USER_AGENT.toString();
        String userAgent = inboundRequestMsg.getHeader(userAgentHeaderName);
        if (userAgent != null) {
            inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_USER_AGENT_INDEX, userAgent);
            inboundRequestMsg.removeHeader(userAgentHeaderName);
        }
    }
// Copies request-line details (raw path, method, HTTP version) and the extra
    // path info resolved during dispatching into the request struct.
    private static void enrichWithInboundRequestInfo(BStruct inboundRequestStruct,
                                                     HTTPCarbonMessage inboundRequestMsg) {
        inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_RAW_PATH_INDEX,
                (String) inboundRequestMsg.getProperty(HttpConstants.REQUEST_URL));
        inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_METHOD_INDEX,
                (String) inboundRequestMsg.getProperty(HttpConstants.HTTP_METHOD));
        inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_VERSION_INDEX,
                (String) inboundRequestMsg.getProperty(HttpConstants.HTTP_VERSION));
        // RESOURCE_ARGS is set by the dispatcher; assumed non-null here —
        // NOTE(review): confirm the dispatcher always populates it.
        Map<String, String> resourceArgValues =
                (Map<String, String>) inboundRequestMsg.getProperty(HttpConstants.RESOURCE_ARGS);
        inboundRequestStruct.setStringField(HttpConstants.IN_REQUEST_EXTRA_PATH_INFO_INDEX,
                resourceArgValues.get(HttpConstants.EXTRA_PATH_INFO));
    }
// Fills the connection struct with the local endpoint's host and port and
    // caches the transport message on it.
    public static void enrichConnectionInfo(BStruct connection, HTTPCarbonMessage cMsg) {
        connection.addNativeData(HttpConstants.TRANSPORT_MESSAGE, cMsg);
        connection.setStringField(HttpConstants.CONNECTION_HOST_INDEX,
                ((InetSocketAddress) cMsg.getProperty(HttpConstants.LOCAL_ADDRESS)).getHostName());
        connection.setIntField(HttpConstants.CONNECTION_PORT_INDEX,
                (Integer) cMsg.getProperty(HttpConstants.LISTENER_PORT));
    }
/**
     * Populate inbound response with headers and entity.
     *
     * @param inboundResponse    Ballerina struct to represent response
     * @param entity             Entity of the response
     * @param mediaType          Content type of the response
     * @param inboundResponseMsg Represent carbon message.
     */
    public static void populateInboundResponse(BStruct inboundResponse, BStruct entity, BStruct mediaType,
                                               HTTPCarbonMessage inboundResponseMsg) {
        inboundResponse.addNativeData(HttpConstants.TRANSPORT_MESSAGE, inboundResponseMsg);
        int statusCode = (Integer) inboundResponseMsg.getProperty(HttpConstants.HTTP_STATUS_CODE);
        inboundResponse.setIntField(HttpConstants.IN_RESPONSE_STATUS_CODE_INDEX, statusCode);
        inboundResponse.setStringField(HttpConstants.IN_RESPONSE_REASON_PHRASE_INDEX,
                HttpResponseStatus.valueOf(statusCode).reasonPhrase());
        // Move the Server header out of the transport message into the struct field;
        // it is restored on forwarding (see addRemovedPropertiesBackToHeadersMap).
        String serverHeaderName = HttpHeaderNames.SERVER.toString();
        String serverHeader = inboundResponseMsg.getHeader(serverHeaderName);
        if (serverHeader != null) {
            inboundResponse.setStringField(HttpConstants.IN_RESPONSE_SERVER_INDEX, serverHeader);
            inboundResponseMsg.removeHeader(serverHeaderName);
        }
        populateEntity(entity, mediaType, inboundResponseMsg);
        inboundResponse.addNativeData(MESSAGE_ENTITY, entity);
        inboundResponse.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
    }
/**
     * Populate entity with headers, content-type and content-length.
     *
     * @param entity    Represent an entity struct
     * @param mediaType mediaType struct that needs to be set to the entity
     * @param cMsg      Represent a carbon message
     * @throws BallerinaException if the Content-Length header is not a valid integer
     */
    private static void populateEntity(BStruct entity, BStruct mediaType, HTTPCarbonMessage cMsg) {
        String contentType = cMsg.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
        MimeUtil.setContentType(mediaType, entity, contentType);
        // -1 signals "unknown length" when no Content-Length header is present.
        int contentLength = -1;
        String lengthStr = cMsg.getHeader(HttpHeaderNames.CONTENT_LENGTH.toString());
        try {
            contentLength = lengthStr != null ? Integer.parseInt(lengthStr) : contentLength;
            MimeUtil.setContentLength(entity, contentLength);
        } catch (NumberFormatException e) {
            throw new BallerinaException("Invalid content length");
        }
        // Copy every transport header into the entity's header map.
        entity.setRefField(ENTITY_HEADERS_INDEX, prepareEntityHeaderMap(cMsg.getHeaders(), new BMap<>()));
    }
// Folds transport headers into the given map of header-name -> BStringArray,
    // appending to the value array when a name repeats. Returns the same map.
    private static BMap<String, BValue> prepareEntityHeaderMap(HttpHeaders headers, BMap<String, BValue> headerMap) {
        for (Map.Entry<String, String> header : headers.entries()) {
            if (headerMap.keySet().contains(header.getKey())) {
                BStringArray valueArray = (BStringArray) headerMap.get(header.getKey());
                valueArray.add(valueArray.size(), header.getValue());
            } else {
                BStringArray valueArray = new BStringArray(new String[]{header.getValue()});
                headerMap.put(header.getKey(), valueArray);
            }
        }
        return headerMap;
    }
/**
     * Set headers and properties of request/response struct to the outbound transport message.
     *
     * @param outboundMsg    transport Http carbon message.
     * @param outboundStruct req/resp struct.
     */
    public static void enrichOutboundMessage(HTTPCarbonMessage outboundMsg, BStruct outboundStruct) {
        setHeadersToTransportMessage(outboundMsg, outboundStruct);
        setPropertiesToTransportMessage(outboundMsg, outboundStruct);
    }
@SuppressWarnings("unchecked")
    private static void setHeadersToTransportMessage(HTTPCarbonMessage outboundMsg, BStruct struct) {
        BStruct entityStruct = (BStruct) struct.getNativeData(MESSAGE_ENTITY);
        HttpHeaders transportHeaders = outboundMsg.getHeaders();
        // Inbound structs keep their transport headers; only the headers that were
        // lifted into struct fields (User-Agent / Server) need to be restored.
        if (isInboundRequestStruct(struct) || isInboundResponseStruct(struct)) {
            addRemovedPropertiesBackToHeadersMap(struct, transportHeaders);
            return;
        }
        // Outbound structs: copy every entity header (multi-valued) onto the message.
        BMap<String, BValue> entityHeaders = (BMap) entityStruct.getRefField(ENTITY_HEADERS_INDEX);
        if (entityHeaders == null) {
            return;
        }
        Set<String> keys = entityHeaders.keySet();
        for (String key : keys) {
            BStringArray headerValues = (BStringArray) entityHeaders.get(key);
            for (int i = 0; i < headerValues.size(); i++) {
                transportHeaders.add(key, headerValues.get(i));
            }
        }
    }
// True when the struct is the inbound-request struct type.
    private static boolean isInboundRequestStruct(BStruct struct) {
        return struct.getType().getName().equals(HttpConstants.IN_REQUEST);
    }
// True when the struct is the inbound-response struct type.
    private static boolean isInboundResponseStruct(BStruct struct) {
        return struct.getType().getName().equals(HttpConstants.IN_RESPONSE);
    }
// True when the struct is the outbound-response struct type.
    private static boolean isOutboundResponseStruct(BStruct struct) {
        return struct.getType().getName().equals(HttpConstants.OUT_RESPONSE);
    }
// Restores headers that were lifted into struct fields on arrival: User-Agent
    // for inbound requests, Server for inbound responses. Empty fields mean the
    // header was never present, so nothing is added.
    private static void addRemovedPropertiesBackToHeadersMap(BStruct struct, HttpHeaders transportHeaders) {
        if (isInboundRequestStruct(struct)) {
            if (!struct.getStringField(HttpConstants.IN_REQUEST_USER_AGENT_INDEX).isEmpty()) {
                transportHeaders.add(HttpHeaderNames.USER_AGENT.toString(),
                        struct.getStringField(HttpConstants.IN_REQUEST_USER_AGENT_INDEX));
            }
        } else {
            if (!struct.getStringField(HttpConstants.IN_RESPONSE_SERVER_INDEX).isEmpty()) {
                transportHeaders.add(HttpHeaderNames.SERVER.toString(),
                        struct.getStringField(HttpConstants.IN_RESPONSE_SERVER_INDEX));
            }
        }
    }
// For outbound responses, copies a non-default status code and a non-empty
    // reason phrase from the struct onto the transport message.
    private static void setPropertiesToTransportMessage(HTTPCarbonMessage outboundResponseMsg, BStruct struct) {
        if (isOutboundResponseStruct(struct)) {
            // 0 is the unset sentinel for the status-code field.
            if (struct.getIntField(HttpConstants.OUT_RESPONSE_STATUS_CODE_INDEX) != 0) {
                outboundResponseMsg.setProperty(HttpConstants.HTTP_STATUS_CODE, getIntValue(
                        struct.getIntField(HttpConstants.OUT_RESPONSE_STATUS_CODE_INDEX)));
            }
            if (!struct.getStringField(HttpConstants.OUT_RESPONSE_REASON_PHRASE_INDEX).isEmpty()) {
                outboundResponseMsg.setProperty(HttpConstants.HTTP_REASON_PHRASE,
                        struct.getStringField(HttpConstants.OUT_RESPONSE_REASON_PHRASE_INDEX));
            }
        }
    }
// Adds a single header key/value to the struct's entity header map, creating
    // the map when it does not exist yet.
    private static void setHeaderToEntity(BStruct struct, String key, String value) {
        BMap<String, BValue> headerMap = struct.getRefField(ENTITY_HEADERS_INDEX) != null ?
                (BMap) struct.getRefField(ENTITY_HEADERS_INDEX) : new BMap<>();
        struct.setRefField(ENTITY_HEADERS_INDEX,
                prepareEntityHeaderMap(new DefaultHttpHeaders().add(key, value), headerMap));
    }
/**
     * Check the existence of the entity. Sets a new entity if not present.
     *
     * @param context ballerina context.
     * @param struct  request/response struct.
     */
    public static void checkEntityAvailability(Context context, BStruct struct) {
        BStruct entity = (BStruct) struct.getNativeData(MESSAGE_ENTITY);
        if (entity == null) {
            HttpUtil.createNewEntity(context, struct);
        }
    }
/**
     * Set new entity to in/out request/response struct.
     *
     * @param context ballerina context.
     * @param struct  request/response struct.
     * @return the freshly created entity struct, already attached to {@code struct}
     */
    public static BStruct createNewEntity(Context context, BStruct struct) {
        BStruct entity = ConnectorUtils.createAndGetStruct(context
                , org.ballerinalang.mime.util.Constants.PROTOCOL_PACKAGE_MIME
                , org.ballerinalang.mime.util.Constants.ENTITY);
        // Start with an empty header map and mark the body channel as not yet set.
        entity.setRefField(ENTITY_HEADERS_INDEX, new BMap<>());
        struct.addNativeData(MESSAGE_ENTITY, entity);
        struct.addNativeData(IS_BODY_BYTE_CHANNEL_ALREADY_SET, false);
        return entity;
    }
    /*
     * NOTE(review): stale Javadoc. The comment below documented a method
     * "Set connection content-encoding headers to transport message"
     * (@param context, @param outboundMessage) that no longer exists here;
     * converted to a plain comment so the dangling doc does not attach to
     * getDefaultOrDynamicListenerConfig below. Remove once confirmed dead.
     */
/**
     * Extract the listener configurations from the config annotation.
     *
     * @param annotationInfo configuration annotation info.
     * @return the set of {@link ListenerConfiguration} which were extracted from config annotation.
     */
    public static Set<ListenerConfiguration> getDefaultOrDynamicListenerConfig(Annotation annotationInfo) {
        // No annotation at all -> fall back to the globally configured defaults.
        if (annotationInfo == null) {
            return HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet();
        }
        Set<ListenerConfiguration> listenerConfSet = new HashSet<>();
        extractBasicConfig(annotationInfo, listenerConfSet);
        extractHttpsConfig(annotationInfo, listenerConfSet);
        // Annotation present but with neither HTTP nor HTTPS port -> defaults again.
        if (listenerConfSet.isEmpty()) {
            listenerConfSet = HttpConnectionManager.getInstance().getDefaultListenerConfiugrationSet();
        }
        return listenerConfSet;
    }
// Builds the listener id as "host:port", defaulting a null host to the
    // wildcard address 0.0.0.0.
    private static String getListenerInterface(String host, int port) {
        String effectiveHost = (host == null) ? "0.0.0.0" : host;
        return effectiveHost + ":" + port;
    }
// Builds the plain-HTTP ListenerConfiguration from the @http:config annotation
    // and adds it to listenerConfSet. Nothing is added unless a positive "port"
    // attribute is present. Invalid size limits or an unsupported
    // Transfer-Encoding value fail fast with BallerinaConnectorException.
    private static void extractBasicConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
        AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HOST);
        AnnAttrValue portAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_PORT);
        AnnAttrValue keepAliveAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEEP_ALIVE);
        AnnAttrValue transferEncoding = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_TRANSFER_ENCODING);
        AnnAttrValue chunking = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CHUNKING);
        AnnAttrValue maxUriLength = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_URL_LENGTH);
        AnnAttrValue maxHeaderSize = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_HEADER_SIZE);
        AnnAttrValue maxEntityBodySize = configInfo.getAnnAttrValue(
                HttpConstants.ANN_CONFIG_ATTR_MAXIMUM_ENTITY_BODY_SIZE);
        ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
        if (portAttrVal != null && portAttrVal.getIntValue() > 0) {
            listenerConfiguration.setPort(Math.toIntExact(portAttrVal.getIntValue()));
            listenerConfiguration.setScheme(HttpConstants.PROTOCOL_HTTP);
            if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
                listenerConfiguration.setHost(hostAttrVal.getStringValue());
            } else {
                listenerConfiguration.setHost(HttpConstants.HTTP_DEFAULT_HOST);
            }
            // Only "chunking" is supported as a Transfer-Encoding value; anything
            // else is rejected (chunking itself is configured via the chunking attr).
            if (transferEncoding != null && !HttpConstants.ANN_CONFIG_ATTR_CHUNKING
                    .equalsIgnoreCase(transferEncoding.getStringValue())) {
                throw new BallerinaConnectorException("Unsupported configuration found for Transfer-Encoding : "
                        + transferEncoding.getStringValue());
            }
            if (chunking != null) {
                ChunkConfig chunkConfig = getChunkConfig(chunking.getStringValue());
                listenerConfiguration.setChunkConfig(chunkConfig);
            } else {
                listenerConfiguration.setChunkConfig(ChunkConfig.AUTO);
            }
            if (keepAliveAttrVal != null) {
                KeepAliveConfig keepAliveConfig = getKeepAliveConfig(keepAliveAttrVal.getStringValue());
                listenerConfiguration.setKeepAliveConfig(keepAliveConfig);
            } else {
                listenerConfiguration.setKeepAliveConfig(KeepAliveConfig.AUTO);
            }
            // Request size limits: each must be strictly positive when supplied.
            RequestSizeValidationConfig requestSizeValidationConfig =
                    listenerConfiguration.getRequestSizeValidationConfig();
            if (maxUriLength != null) {
                if (maxUriLength.getIntValue() > 0) {
                    requestSizeValidationConfig.setMaxUriLength(Math.toIntExact(maxUriLength.getIntValue()));
                } else {
                    throw new BallerinaConnectorException("Invalid configuration found for maxUriLength : "
                            + maxUriLength.getIntValue());
                }
            }
            if (maxHeaderSize != null) {
                if (maxHeaderSize.getIntValue() > 0) {
                    requestSizeValidationConfig.setMaxHeaderSize(Math.toIntExact(maxHeaderSize.getIntValue()));
                } else {
                    throw new BallerinaConnectorException("Invalid configuration found for maxHeaderSize : "
                            + maxHeaderSize.getIntValue());
                }
            }
            if (maxEntityBodySize != null) {
                if (maxEntityBodySize.getIntValue() > 0) {
                    requestSizeValidationConfig.setMaxEntityBodySize(Math.toIntExact(maxEntityBodySize.getIntValue()));
                } else {
                    throw new BallerinaConnectorException("Invalid configuration found for maxEntityBodySize : "
                            + maxEntityBodySize.getIntValue());
                }
            }
            listenerConfiguration
                    .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
            listenerConfSet.add(listenerConfiguration);
        }
    }
// Maps the annotation's chunking string (auto/always/never, case-insensitive)
    // to the transport's ChunkConfig; any other value is rejected.
    public static ChunkConfig getChunkConfig(String chunkConfig) {
        if (HttpConstants.AUTO.equalsIgnoreCase(chunkConfig)) {
            return ChunkConfig.AUTO;
        } else if (HttpConstants.ALWAYS.equalsIgnoreCase(chunkConfig)) {
            return ChunkConfig.ALWAYS;
        } else if (HttpConstants.NEVER.equalsIgnoreCase(chunkConfig)) {
            return ChunkConfig.NEVER;
        } else {
            throw new BallerinaConnectorException(
                    "Invalid configuration found for Transfer-Encoding: " + chunkConfig);
        }
    }
// Maps the annotation's keep-alive string (auto/always/never, case-insensitive)
    // to the transport's KeepAliveConfig; any other value is rejected.
    public static KeepAliveConfig getKeepAliveConfig(String keepAliveConfig) {
        if (HttpConstants.AUTO.equalsIgnoreCase(keepAliveConfig)) {
            return KeepAliveConfig.AUTO;
        } else if (HttpConstants.ALWAYS.equalsIgnoreCase(keepAliveConfig)) {
            return KeepAliveConfig.ALWAYS;
        } else if (HttpConstants.NEVER.equalsIgnoreCase(keepAliveConfig)) {
            return KeepAliveConfig.NEVER;
        } else {
            throw new BallerinaConnectorException(
                    "Invalid configuration found for Keep-Alive: " + keepAliveConfig);
        }
    }
// Maps the "forwarded" config string (enable/transition/disable,
    // case-insensitive) to the transport's ForwardedExtensionConfig; any other
    // value is rejected.
    public static ForwardedExtensionConfig getForwardedExtensionConfig(String forwarded) {
        if (HttpConstants.FORWARDED_ENABLE.equalsIgnoreCase(forwarded)) {
            return ForwardedExtensionConfig.ENABLE;
        }
        if (HttpConstants.FORWARDED_TRANSITION.equalsIgnoreCase(forwarded)) {
            return ForwardedExtensionConfig.TRANSITION;
        }
        if (HttpConstants.FORWARDED_DISABLE.equalsIgnoreCase(forwarded)) {
            return ForwardedExtensionConfig.DISABLE;
        }
        throw new BallerinaConnectorException("Invalid configuration found for Forwarded : " + forwarded);
    }
// Builds the HTTPS ListenerConfiguration from the annotation and adds it to
    // listenerConfSet. Nothing is added unless a positive HTTPS (or WSS) port is
    // present. Keystore file/password and certificate password are mandatory;
    // truststore settings are mandatory only when client verification (mutual
    // SSL) is requested.
    private static void extractHttpsConfig(Annotation configInfo, Set<ListenerConfiguration> listenerConfSet) {
        // The HTTPS port attribute takes precedence; fall back to the WSS port.
        AnnAttrValue httpsPortAttrVal;
        if (configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT) == null) {
            httpsPortAttrVal =
                    configInfo.getAnnAttrValue(WebSocketConstants.ANN_CONFIG_ATTR_WSS_PORT);
        } else {
            httpsPortAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HTTPS_PORT);
        }
        AnnAttrValue keyStoreFileAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEY_STORE_FILE);
        AnnAttrValue keyStorePasswordAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_KEY_STORE_PASS);
        AnnAttrValue certPasswordAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CERT_PASS);
        AnnAttrValue trustStoreFileAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_TRUST_STORE_FILE);
        AnnAttrValue trustStorePasswordAttrVal = configInfo.getAnnAttrValue(
                HttpConstants.ANN_CONFIG_ATTR_TRUST_STORE_PASS);
        AnnAttrValue sslVerifyClientAttrVal = configInfo.getAnnAttrValue(
                HttpConstants.ANN_CONFIG_ATTR_SSL_VERIFY_CLIENT);
        AnnAttrValue sslEnabledProtocolsAttrVal = configInfo
                .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS);
        AnnAttrValue ciphersAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CIPHERS);
        AnnAttrValue sslProtocolAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_SSL_PROTOCOL);
        AnnAttrValue hostAttrVal = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_HOST);
        AnnAttrValue certificateValidationEnabledAttrValue = configInfo
                .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_VALIDATE_CERT_ENABLED);
        AnnAttrValue cacheSizeAttrValue = configInfo.getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CACHE_SIZE);
        AnnAttrValue cacheValidityPeriodAttrValue = configInfo
                .getAnnAttrValue(HttpConstants.ANN_CONFIG_ATTR_CACHE_VALIDITY_PERIOD);
        ListenerConfiguration listenerConfiguration = new ListenerConfiguration();
        if (httpsPortAttrVal != null && httpsPortAttrVal.getIntValue() > 0) {
            listenerConfiguration.setPort(Math.toIntExact(httpsPortAttrVal.getIntValue()));
            listenerConfiguration.setScheme(HttpConstants.PROTOCOL_HTTPS);
            if (hostAttrVal != null && hostAttrVal.getStringValue() != null) {
                listenerConfiguration.setHost(hostAttrVal.getStringValue());
            } else {
                listenerConfiguration.setHost(HttpConstants.HTTP_DEFAULT_HOST);
            }
            // Mandatory TLS material.
            if (keyStoreFileAttrVal == null || keyStoreFileAttrVal.getStringValue() == null) {
                throw new BallerinaConnectorException("Keystore location must be provided for secure connection");
            }
            if (keyStorePasswordAttrVal == null || keyStorePasswordAttrVal.getStringValue() == null) {
                throw new BallerinaConnectorException("Keystore password value must be provided for secure connection");
            }
            if (certPasswordAttrVal == null || certPasswordAttrVal.getStringValue() == null) {
                throw new BallerinaConnectorException(
                        "Certificate password value must be provided for secure connection");
            }
            // Truststore is required only when mutual SSL is requested.
            if ((trustStoreFileAttrVal == null || trustStoreFileAttrVal.getStringValue() == null)
                    && sslVerifyClientAttrVal != null) {
                throw new BallerinaException("Truststore location must be provided to enable Mutual SSL");
            }
            if ((trustStorePasswordAttrVal == null || trustStorePasswordAttrVal.getStringValue() == null)
                    && sslVerifyClientAttrVal != null) {
                throw new BallerinaException("Truststore password value must be provided to enable Mutual SSL");
            }
            listenerConfiguration.setTLSStoreType(HttpConstants.PKCS_STORE_TYPE);
            listenerConfiguration.setKeyStoreFile(keyStoreFileAttrVal.getStringValue());
            listenerConfiguration.setKeyStorePass(keyStorePasswordAttrVal.getStringValue());
            listenerConfiguration.setCertPass(certPasswordAttrVal.getStringValue());
            if (sslVerifyClientAttrVal != null) {
                listenerConfiguration.setVerifyClient(sslVerifyClientAttrVal.getStringValue());
            }
            if (trustStoreFileAttrVal != null) {
                listenerConfiguration.setTrustStoreFile(trustStoreFileAttrVal.getStringValue());
            }
            if (trustStorePasswordAttrVal != null) {
                listenerConfiguration.setTrustStorePass(trustStorePasswordAttrVal.getStringValue());
            }
            // Optional certificate revocation validation with its cache tuning.
            if (certificateValidationEnabledAttrValue != null && certificateValidationEnabledAttrValue
                    .getBooleanValue()) {
                listenerConfiguration.setValidateCertEnabled(certificateValidationEnabledAttrValue.getBooleanValue());
                if (cacheSizeAttrValue != null) {
                    listenerConfiguration.setCacheSize((int) cacheSizeAttrValue.getIntValue());
                }
                if (cacheValidityPeriodAttrValue != null) {
                    listenerConfiguration.setCacheValidityPeriod((int) cacheValidityPeriodAttrValue.getIntValue());
                }
            }
            // Optional protocol/cipher restrictions are passed as transport parameters.
            List<Parameter> serverParams = new ArrayList<>();
            Parameter serverCiphers;
            if (sslEnabledProtocolsAttrVal != null && sslEnabledProtocolsAttrVal.getStringValue() != null) {
                serverCiphers = new Parameter(HttpConstants.ANN_CONFIG_ATTR_SSL_ENABLED_PROTOCOLS,
                        sslEnabledProtocolsAttrVal.getStringValue());
                serverParams.add(serverCiphers);
            }
            if (ciphersAttrVal != null && ciphersAttrVal.getStringValue() != null) {
                serverCiphers = new Parameter(HttpConstants.ANN_CONFIG_ATTR_CIPHERS, ciphersAttrVal.getStringValue());
                serverParams.add(serverCiphers);
            }
            if (!serverParams.isEmpty()) {
                listenerConfiguration.setParameters(serverParams);
            }
            if (sslProtocolAttrVal != null) {
                listenerConfiguration.setSSLProtocol(sslProtocolAttrVal.getStringValue());
            }
            listenerConfiguration
                    .setId(getListenerInterface(listenerConfiguration.getHost(), listenerConfiguration.getPort()));
            listenerConfSet.add(listenerConfiguration);
        }
    }
// Creates an empty, already-completed carbon message: a GET request shell when
    // isRequest is true, otherwise a 200 OK response shell.
    public static HTTPCarbonMessage createHttpCarbonMessage(boolean isRequest) {
        HTTPCarbonMessage httpCarbonMessage;
        if (isRequest) {
            httpCarbonMessage = new HTTPCarbonMessage(
                    new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, ""));
        } else {
            httpCarbonMessage = new HTTPCarbonMessage(
                    new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK));
        }
        httpCarbonMessage.completeMessage();
        return httpCarbonMessage;
    }
// Validates that the connection struct is usable and that the respond-style
    // native function has not already been invoked for this request.
    public static void checkFunctionValidity(BStruct connectionStruct, HTTPCarbonMessage reqMsg) {
        serverConnectionStructCheck(reqMsg);
        methodInvocationCheck(connectionStruct, reqMsg);
    }
// Rejects a second invocation on the same struct (or a null message). The
    // METHOD_ACCESSED flag is deliberately not set for "Expect: 100-continue"
    // requests — NOTE(review): presumably to allow a later real invocation after
    // the interim response; confirm against the 100-continue handling code.
    private static void methodInvocationCheck(BStruct bStruct, HTTPCarbonMessage reqMsg) {
        if (bStruct.getNativeData(METHOD_ACCESSED) != null || reqMsg == null) {
            throw new IllegalStateException("illegal function invocation");
        }
        if (!is100ContinueRequest(reqMsg)) {
            bStruct.addNativeData(METHOD_ACCESSED, true);
        }
    }
// A null transport message means the Connection variable was never bound to a
    // real inbound request.
    private static void serverConnectionStructCheck(HTTPCarbonMessage reqMsg) {
        if (reqMsg == null) {
            throw new BallerinaException("operation not allowed:invalid Connection variable");
        }
    }
// True when the request carries "Expect: 100-continue" (case-insensitive).
    private static boolean is100ContinueRequest(HTTPCarbonMessage reqMsg) {
        return HttpConstants.HEADER_VAL_100_CONTINUE.equalsIgnoreCase(
                reqMsg.getHeader(HttpHeaderNames.EXPECT.toString()));
    }
// Returns the single service-level config annotation from the given package,
    // null when absent; more than one such annotation is a user error.
    public static Annotation getServiceConfigAnnotation(Service service, String pkgPath) {
        List<Annotation> annotationList = service.getAnnotationList(pkgPath, HttpConstants.ANN_NAME_CONFIG);
        if (annotationList == null) {
            return null;
        }
        if (annotationList.size() > 1) {
            throw new BallerinaException(
                    "multiple service configuration annotations found in service: " + service.getName());
        }
        return annotationList.isEmpty() ? null : annotationList.get(0);
    }
// Returns the single resource-level config annotation from the given package,
    // null when absent; more than one such annotation is a user error.
    public static Annotation getResourceConfigAnnotation(Resource resource, String pkgPath) {
        List<Annotation> annotationList = resource.getAnnotationList(pkgPath, HttpConstants.ANN_NAME_RESOURCE_CONFIG);
        if (annotationList == null) {
            return null;
        }
        if (annotationList.size() > 1) {
            throw new BallerinaException(
                    "multiple resource configuration annotations found in resource: " +
                            resource.getServiceName() + "." + resource.getName());
        }
        return annotationList.isEmpty() ? null : annotationList.get(0);
    }
// Narrows a long to int, rejecting values outside the int range with the same
    // IllegalArgumentException the callers already expect.
    private static int getIntValue(long val) {
        if (val < Integer.MIN_VALUE || val > Integer.MAX_VALUE) {
            throw new IllegalArgumentException("invalid argument: " + val);
        }
        return (int) val;
    }
/**
     * Returns the Content-Type header of the transport message, or null when the
     * header is absent. The previous {@code x != null ? x : null} ternary was a
     * no-op and looked the header up twice; a single lookup is equivalent because
     * {@code getHeader} already returns null for a missing header.
     *
     * @param transportMessage the message to read the header from
     * @return the Content-Type value, or null if not set
     */
    public static String getContentTypeFromTransportMessage(HTTPCarbonMessage transportMessage) {
        return transportMessage.getHeader(HttpHeaderNames.CONTENT_TYPE.toString());
    }
/**
     * If the given Content-Type header value doesn't have a boundary parameter value, get a new boundary string and
     * append it to Content-Type and set it to transport message.
     *
     * @param transportMessage Represent transport message
     * @param contentType      Represent the Content-Type header value
     * @return The boundary string that was extracted from header or the newly generated one
     */
    public static String addBoundaryIfNotExist(HTTPCarbonMessage transportMessage, String contentType) {
        String boundaryString;
        BString boundaryValue = HeaderUtil.extractBoundaryParameter(contentType);
        // Reuse an existing boundary; otherwise generate one and rewrite the header.
        boundaryString = boundaryValue != null ? boundaryValue.toString() :
                HttpUtil.addBoundaryParameter(transportMessage, contentType);
        return boundaryString;
    }
/**
     * Generate a new boundary string and append it Content-Type and set that to transport message.
     *
     * @param transportMessage Represent transport message
     * @param contentType      Represent the Content-Type header value
     * @return The newly generated boundary string, or null when the content type
     *         is not a multipart type (the header is then left untouched)
     */
    private static String addBoundaryParameter(HTTPCarbonMessage transportMessage, String contentType) {
        String boundaryString = null;
        // Only multipart/* content types carry a boundary parameter.
        if (contentType != null && contentType.startsWith(MULTIPART_AS_PRIMARY_TYPE)) {
            boundaryString = MimeUtil.getNewMultipartDelimiter();
            transportMessage.setHeader(HttpHeaderNames.CONTENT_TYPE.toString(), contentType + "; " + BOUNDARY + "=" +
                    boundaryString);
        }
        return boundaryString;
    }
/**
     * Extract generic error message.
     *
     * @param context Represent ballerina context.
     * @param errMsg  Error message.
     * @return Generic error message.
     */
    public static BStruct getGenericError(Context context, String errMsg) {
        PackageInfo errorPackageInfo = context.getProgramFile().getPackageInfo(PACKAGE_BUILTIN);
        StructInfo errorStructInfo = errorPackageInfo.getStructInfo(STRUCT_GENERIC_ERROR);
        BStruct genericError = new BStruct(errorStructInfo.getType());
        // Field 0 of the builtin error struct carries the message text.
        genericError.setStringField(0, errMsg);
        return genericError;
    }
public static HttpWsConnectorFactory createHttpWsConnectionFactory() {
return new DefaultHttpWsConnectorFactory();
}
} |
In which situation is it NESTED? We made the change originally because it wasn't prefixed, so trying to understand what changed. | private void prepareStateBackend(K key) {
ByteBuffer encodedKey =
FlinkKeyUtils.removeNestedContext(key, (Coder<ByteString>) keyCoder);
keyedStateBackend.setCurrentKey(encodedKey);
} | private void prepareStateBackend(K key) {
ByteBuffer encodedKey = FlinkKeyUtils.fromEncodedKey(key);
keyedStateBackend.setCurrentKey(encodedKey);
} | class BagUserStateFactory<K extends ByteString, V, W extends BoundedWindow>
implements StateRequestHandlers.BagUserStateHandlerFactory<K, V, W> {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
private final Lock stateBackendLock;
/**
   * Creates a factory whose handlers access Beam bag user state through the given
   * Flink {@code KeyedStateBackend}. {@code stateBackendLock} serializes every
   * state access so the backend's current key cannot be switched concurrently.
   */
  private BagUserStateFactory(
      StateInternals stateInternals,
      KeyedStateBackend<ByteBuffer> keyedStateBackend,
      Lock stateBackendLock) {
    this.stateInternals = stateInternals;
    this.keyedStateBackend = keyedStateBackend;
    this.stateBackendLock = stateBackendLock;
  }
@Override
  public StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
      String pTransformId,
      String userStateId,
      Coder<K> keyCoder,
      Coder<V> valueCoder,
      Coder<W> windowCoder) {
    // Every operation follows the same protocol: take the backend lock, point the
    // keyed state backend at the request's key (prepareStateBackend), then resolve
    // the bag state cell addressed by (window namespace, userStateId).
    return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
      @Override
      public Iterable<V> get(K key, W window) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State get for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          return bagState.read();
        } finally {
          stateBackendLock.unlock();
        }
      }
      @Override
      public void append(K key, W window, Iterator<V> values) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State append for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          // Drain the iterator: each element is added to the bag.
          while (values.hasNext()) {
            bagState.add(values.next());
          }
        } finally {
          stateBackendLock.unlock();
        }
      }
      @Override
      public void clear(K key, W window) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State clear for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          bagState.clear();
        } finally {
          stateBackendLock.unlock();
        }
      }
    };
  }
} | class BagUserStateFactory<K extends ByteString, V, W extends BoundedWindow>
implements StateRequestHandlers.BagUserStateHandlerFactory<K, V, W> {
private final StateInternals stateInternals;
private final KeyedStateBackend<ByteBuffer> keyedStateBackend;
private final Lock stateBackendLock;
/**
   * Creates a factory whose handlers access Beam bag user state through the given
   * Flink {@code KeyedStateBackend}. {@code stateBackendLock} serializes every
   * state access so the backend's current key cannot be switched concurrently.
   */
  private BagUserStateFactory(
      StateInternals stateInternals,
      KeyedStateBackend<ByteBuffer> keyedStateBackend,
      Lock stateBackendLock) {
    this.stateInternals = stateInternals;
    this.keyedStateBackend = keyedStateBackend;
    this.stateBackendLock = stateBackendLock;
  }
@Override
  public StateRequestHandlers.BagUserStateHandler<K, V, W> forUserState(
      String pTransformId,
      String userStateId,
      Coder<K> keyCoder,
      Coder<V> valueCoder,
      Coder<W> windowCoder) {
    // Every operation follows the same protocol: take the backend lock, point the
    // keyed state backend at the request's key (prepareStateBackend), then resolve
    // the bag state cell addressed by (window namespace, userStateId).
    return new StateRequestHandlers.BagUserStateHandler<K, V, W>() {
      @Override
      public Iterable<V> get(K key, W window) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State get for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          return bagState.read();
        } finally {
          stateBackendLock.unlock();
        }
      }
      @Override
      public void append(K key, W window, Iterator<V> values) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State append for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          // Drain the iterator: each element is added to the bag.
          while (values.hasNext()) {
            bagState.add(values.next());
          }
        } finally {
          stateBackendLock.unlock();
        }
      }
      @Override
      public void clear(K key, W window) {
        try {
          stateBackendLock.lock();
          prepareStateBackend(key);
          StateNamespace namespace = StateNamespaces.window(windowCoder, window);
          if (LOG.isDebugEnabled()) {
            LOG.debug(
                "State clear for {} {} {} {}",
                pTransformId,
                userStateId,
                Arrays.toString(keyedStateBackend.getCurrentKey().array()),
                window);
          }
          BagState<V> bagState =
              stateInternals.state(namespace, StateTags.bag(userStateId, valueCoder));
          bagState.clear();
        } finally {
          stateBackendLock.unlock();
        }
      }
    };
  }
} | |
> I think we should keep this, because TableEnvironment may support a new operation while SqlCommandParser was forgotten and not updated. I think we should never do this; we should either refactor the code so the SQL CLI stays in sync with the table environment, or keep the logic clean. | private Optional<SqlCommandCall> parseCommand(String line) {
final Optional<SqlCommandCall> parsedLine;
try {
parsedLine = SqlCommandParser.parse(executor.getSqlParser(sessionId), line);
} catch (SqlExecutionException e) {
printExecutionException(e);
return Optional.empty();
}
if (!parsedLine.isPresent()) {
printError(CliStrings.MESSAGE_UNKNOWN_SQL);
}
return parsedLine;
} | printError(CliStrings.MESSAGE_UNKNOWN_SQL); | private Optional<SqlCommandCall> parseCommand(String line) {
final SqlCommandCall parsedLine;
try {
parsedLine = SqlCommandParser.parse(executor.getSqlParser(sessionId), line);
} catch (SqlExecutionException e) {
printExecutionException(e);
return Optional.empty();
}
return Optional.of(parsedLine);
} | class CliClient {
private static final Logger LOG = LoggerFactory.getLogger(CliClient.class);
private final Executor executor;
private final String sessionId;
private final Terminal terminal;
private final LineReader lineReader;
private final String prompt;
private boolean isRunning;
private static final int PLAIN_TERMINAL_WIDTH = 80;
private static final int PLAIN_TERMINAL_HEIGHT = 30;
private static final int SOURCE_MAX_SIZE = 50_000;
/**
 * Creates a CLI instance with a custom terminal. Make sure to close the CLI instance
 * afterwards using {@link #close()}.
 */
@VisibleForTesting
public CliClient(Terminal terminal, String sessionId, Executor executor, Path historyFilePath) {
    this.terminal = terminal;
    this.sessionId = sessionId;
    this.executor = executor;
    // Separate the CLI output from whatever was printed before.
    terminal.writer().println();
    terminal.writer().flush();
    // Line reader with SQL-aware multi-line parsing and completion.
    lineReader = LineReaderBuilder.builder()
        .terminal(terminal)
        .appName(CliStrings.CLI_NAME)
        .parser(new SqlMultiLineParser())
        .completer(new SqlCompleter(sessionId, executor))
        .build();
    // Disable history event expansion so statements containing '!' are taken literally.
    lineReader.option(LineReader.Option.DISABLE_EVENT_EXPANSION, true);
    // NOTE(review): ERRORS controls JLine's completion typo tolerance — confirm 1 is intended.
    lineReader.setVariable(LineReader.ERRORS, 1);
    lineReader.option(LineReader.Option.CASE_INSENSITIVE, true);
    // Persist command history when the file exists or can be created; otherwise warn.
    if (Files.exists(historyFilePath) || CliUtils.createFile(historyFilePath)) {
        String msg = "Command history file path: " + historyFilePath;
        System.out.println(msg);
        LOG.info(msg);
        lineReader.setVariable(LineReader.HISTORY_FILE, historyFilePath);
    } else {
        String msg = "Unable to create history file: " + historyFilePath;
        System.out.println(msg);
        LOG.warn(msg);
    }
    // Green "Flink SQL> " prompt.
    prompt = new AttributedStringBuilder()
        .style(AttributedStyle.DEFAULT.foreground(AttributedStyle.GREEN))
        .append("Flink SQL")
        .style(AttributedStyle.DEFAULT)
        .append("> ")
        .toAnsi();
}
/**
 * Creates a CLI instance with a prepared terminal. Make sure to close the CLI instance
 * afterwards using {@link #close()}.
 */
public CliClient(String sessionId, Executor executor, Path historyFilePath) {
    this(createDefaultTerminal(), sessionId, executor, historyFilePath);
}
/** Returns the terminal this client writes to. */
public Terminal getTerminal() {
    return terminal;
}

/** Returns the identifier of the session driven by this client. */
public String getSessionId() {
    return sessionId;
}

/**
 * Clears the screen. Plain terminals do not understand the clear capability,
 * so the old content is scrolled out of view with blank lines instead.
 */
public void clearTerminal() {
    if (!isPlainTerminal()) {
        terminal.puts(InfoCmp.Capability.clear_screen);
        return;
    }
    for (int line = 0; line < 200; line++) {
        terminal.writer().println();
    }
}

/** A terminal reporting neither a width nor a height is treated as plain (dumb). */
public boolean isPlainTerminal() {
    final boolean noWidth = terminal.getWidth() == 0;
    final boolean noHeight = terminal.getHeight() == 0;
    return noWidth && noHeight;
}

/** Returns the usable width, falling back to a fixed default for plain terminals. */
public int getWidth() {
    return isPlainTerminal() ? PLAIN_TERMINAL_WIDTH : terminal.getWidth();
}

/** Returns the usable height, falling back to a fixed default for plain terminals. */
public int getHeight() {
    return isPlainTerminal() ? PLAIN_TERMINAL_HEIGHT : terminal.getHeight();
}

/** Returns the executor used to run statements against the session. */
public Executor getExecutor() {
    return executor;
}
/**
 * Opens the interactive CLI shell. Blocks, reading and dispatching one statement
 * per iteration, until the user quits or input is exhausted.
 */
public void open() {
    isRunning = true;
    terminal.writer().append(CliStrings.MESSAGE_WELCOME);
    while (isRunning) {
        // Blank line between statements.
        terminal.writer().append("\n");
        terminal.flush();
        final String line;
        try {
            line = lineReader.readLine(prompt, null, (MaskingCallback) null, null);
        } catch (UserInterruptException e) {
            // User interrupt (e.g. Ctrl+C): abandon the current line, keep the shell alive.
            continue;
        } catch (EndOfFileException | IOError e) {
            // Input stream ended or broke: leave the shell.
            break;
        } catch (Throwable t) {
            throw new SqlClientException("Could not read from command line.", t);
        }
        if (line == null) {
            continue;
        }
        // Parse failures are reported inside parseCommand; only valid calls are dispatched.
        final Optional<SqlCommandCall> cmdCall = parseCommand(line);
        cmdCall.ifPresent(this::callCommand);
    }
}
/**
 * Closes the CLI instance and its underlying terminal.
 */
public void close() {
    try {
        terminal.close();
    } catch (IOException e) {
        throw new SqlClientException("Unable to close terminal.", e);
    }
}
/**
 * Submits a SQL update statement and prints status information and/or errors on the terminal.
 *
 * <p>Only INSERT INTO / INSERT OVERWRITE statements are accepted; anything else is
 * rejected with an error message.
 *
 * @param statement SQL update statement
 * @return flag to indicate if the submission was successful or not
 */
public boolean submitUpdate(String statement) {
    // Echo what is about to be executed.
    terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_WILL_EXECUTE).toAnsi());
    terminal.writer().println(new AttributedString(statement).toString());
    terminal.flush();
    final Optional<SqlCommandCall> parsedStatement = parseCommand(statement);
    // A parse failure (empty Optional) counts as an unsuccessful submission.
    return parsedStatement.map(cmdCall -> {
        switch (cmdCall.command) {
            case INSERT_INTO:
            case INSERT_OVERWRITE:
                return callInsert(cmdCall);
            default:
                printError(CliStrings.MESSAGE_UNSUPPORTED_SQL);
                return false;
        }
    }).orElse(false);
}
/**
 * Dispatches a parsed command to the matching call* handler.
 *
 * <p>Every SqlCommand constant must have a case here; a command without a case is a
 * programming error and surfaces as a SqlClientException.
 */
private void callCommand(SqlCommandCall cmdCall) {
    switch (cmdCall.command) {
        case QUIT:
            callQuit();
            break;
        case CLEAR:
            callClear();
            break;
        case RESET:
            callReset();
            break;
        case SET:
            callSet(cmdCall);
            break;
        case HELP:
            callHelp();
            break;
        case SHOW_CATALOGS:
            callShowCatalogs();
            break;
        case SHOW_DATABASES:
            callShowDatabases();
            break;
        case SHOW_TABLES:
            callShowTables();
            break;
        case SHOW_FUNCTIONS:
            callShowFunctions();
            break;
        case SHOW_MODULES:
            callShowModules();
            break;
        case USE_CATALOG:
            callUseCatalog(cmdCall);
            break;
        case USE:
            callUseDatabase(cmdCall);
            break;
        // DESC and DESCRIBE are synonyms.
        case DESC:
        case DESCRIBE:
            callDescribe(cmdCall);
            break;
        case EXPLAIN:
            callExplain(cmdCall);
            break;
        case SELECT:
            callSelect(cmdCall);
            break;
        case INSERT_INTO:
        case INSERT_OVERWRITE:
            callInsert(cmdCall);
            break;
        case CREATE_TABLE:
            callCreateTable(cmdCall);
            break;
        case DROP_TABLE:
            callDropTable(cmdCall);
            break;
        case CREATE_VIEW:
            callCreateView(cmdCall);
            break;
        case DROP_VIEW:
            callDropView(cmdCall);
            break;
        case CREATE_FUNCTION:
            callCreateFunction(cmdCall);
            break;
        case DROP_FUNCTION:
            callDropFunction(cmdCall);
            break;
        case ALTER_FUNCTION:
            callAlterFunction(cmdCall);
            break;
        case SOURCE:
            callSource(cmdCall);
            break;
        case CREATE_DATABASE:
            callCreateDatabase(cmdCall);
            break;
        case DROP_DATABASE:
            callDropDatabase(cmdCall);
            break;
        case ALTER_DATABASE:
            callAlterDatabase(cmdCall);
            break;
        case ALTER_TABLE:
            callAlterTable(cmdCall);
            break;
        default:
            throw new SqlClientException("Unsupported command: " + cmdCall.command);
    }
}
// Stops the interactive loop after the current iteration.
private void callQuit() {
    printInfo(CliStrings.MESSAGE_QUIT);
    isRunning = false;
}
// Clears the screen (terminal-capability aware).
private void callClear() {
    clearTerminal();
}
// Resets all session-specific properties to their defaults.
private void callReset() {
    executor.resetSessionProperties(sessionId);
    printInfo(CliStrings.MESSAGE_RESET);
}
/**
 * SET without operands lists all session properties (sorted key=value lines);
 * SET with operands updates a single property (the value is trimmed).
 */
private void callSet(SqlCommandCall cmdCall) {
    if (cmdCall.operands.length == 0) {
        final Map<String, String> properties;
        try {
            properties = executor.getSessionProperties(sessionId);
        } catch (SqlExecutionException e) {
            printExecutionException(e);
            return;
        }
        if (properties.isEmpty()) {
            terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
        } else {
            properties
                .entrySet()
                .stream()
                .map((e) -> e.getKey() + "=" + e.getValue())
                .sorted()
                .forEach((p) -> terminal.writer().println(p));
        }
    }
    else {
        // NOTE(review): assumes the parser always supplies two operands here — confirm.
        executor.setSessionProperty(sessionId, cmdCall.operands[0], cmdCall.operands[1].trim());
        terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_SET).toAnsi());
    }
    terminal.flush();
}
// Prints the static help text.
private void callHelp() {
    terminal.writer().println(CliStrings.MESSAGE_HELP);
    terminal.flush();
}
/** Prints all catalogs known to the current session. */
private void callShowCatalogs() {
    final List<String> catalogs;
    try {
        catalogs = executor.listCatalogs(sessionId);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    printListOrEmptyMessage(catalogs);
}

/** Prints all databases of the current catalog. */
private void callShowDatabases() {
    final List<String> dbs;
    try {
        dbs = executor.listDatabases(sessionId);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    printListOrEmptyMessage(dbs);
}

/** Prints all tables of the current database. */
private void callShowTables() {
    final List<String> tables;
    try {
        tables = executor.listTables(sessionId);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    printListOrEmptyMessage(tables);
}

/** Prints all functions, sorted alphabetically (the only sorted listing). */
private void callShowFunctions() {
    final List<String> functions;
    try {
        functions = executor.listFunctions(sessionId);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    // Sorting an empty list is a no-op, so this matches the previous behavior
    // of sorting only in the non-empty branch.
    Collections.sort(functions);
    printListOrEmptyMessage(functions);
}

/** Prints all loaded modules. */
private void callShowModules() {
    final List<String> modules;
    try {
        modules = executor.listModules(sessionId);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    printListOrEmptyMessage(modules);
}

/**
 * Shared tail of all SHOW commands (previously duplicated five times): prints an
 * "empty" info message for an empty result, otherwise one item per line, then
 * flushes the terminal.
 */
private void printListOrEmptyMessage(List<String> items) {
    if (items.isEmpty()) {
        terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
    } else {
        items.forEach((v) -> terminal.writer().println(v));
    }
    terminal.flush();
}
// Switches the session to another catalog.
private void callUseCatalog(SqlCommandCall cmdCall) {
    try {
        executor.useCatalog(sessionId, cmdCall.operands[0]);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    terminal.flush();
}
// Switches the session to another database within the current catalog.
private void callUseDatabase(SqlCommandCall cmdCall) {
    try {
        executor.useDatabase(sessionId, cmdCall.operands[0]);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    terminal.flush();
}
// Prints the schema of the given table.
private void callDescribe(SqlCommandCall cmdCall) {
    final TableSchema schema;
    try {
        schema = executor.getTableSchema(sessionId, cmdCall.operands[0]);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    terminal.writer().println(schema.toString());
    terminal.flush();
}
/**
 * Executes an EXPLAIN statement and prints the resulting plan.
 */
private void callExplain(SqlCommandCall cmdCall) {
    final String explanation;
    try {
        TableResult tableResult = executor.executeSql(sessionId, cmdCall.operands[0]);
        // The result is expected to be a single row whose first field is the plan.
        // String.valueOf guards against a null field, which previously caused an
        // uncaught NullPointerException that escaped the SqlExecutionException catch.
        // NOTE(review): the iterator returned by collect() is never closed — confirm
        // whether it must be for EXPLAIN results.
        explanation = String.valueOf(tableResult.collect().next().getField(0));
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    terminal.writer().println(explanation);
    terminal.flush();
}
/**
 * Runs a SELECT query and displays the results either in tableau mode (printed
 * straight to the terminal) or in an interactive result view.
 */
private void callSelect(SqlCommandCall cmdCall) {
    final ResultDescriptor resultDesc;
    try {
        resultDesc = executor.executeQuery(sessionId, cmdCall.operands[0]);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    if (resultDesc.isTableauMode()) {
        // Tableau mode: the view is closed automatically via try-with-resources.
        try (CliTableauResultView tableauResultView = new CliTableauResultView(
                terminal, executor, sessionId, resultDesc)) {
            if (resultDesc.isMaterialized()) {
                tableauResultView.displayBatchResults();
            } else {
                tableauResultView.displayStreamResults();
            }
        } catch (SqlExecutionException e) {
            printExecutionException(e);
        }
    } else {
        // Interactive mode: materialized results get a table view, streaming
        // results a changelog view.
        final CliResultView view;
        if (resultDesc.isMaterialized()) {
            view = new CliTableResultView(this, resultDesc);
        } else {
            view = new CliChangelogResultView(this, resultDesc);
        }
        try {
            // open() blocks until the user leaves the view.
            view.open();
            printInfo(CliStrings.MESSAGE_RESULT_QUIT);
        } catch (SqlExecutionException e) {
            printExecutionException(e);
        }
    }
}
/**
 * Submits an INSERT INTO / INSERT OVERWRITE statement and prints the resulting
 * program target descriptor.
 *
 * @return true if the statement was submitted successfully
 */
private boolean callInsert(SqlCommandCall cmdCall) {
    printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);
    try {
        final ProgramTargetDescriptor programTarget = executor.executeUpdate(sessionId, cmdCall.operands[0]);
        terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_STATEMENT_SUBMITTED).toAnsi());
        terminal.writer().println(programTarget.toString());
        terminal.flush();
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return false;
    }
    return true;
}
// Registers a table from a CREATE TABLE DDL statement.
private void callCreateTable(SqlCommandCall cmdCall) {
    try {
        executor.createTable(sessionId, cmdCall.operands[0]);
        printInfo(CliStrings.MESSAGE_TABLE_CREATED);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
    }
}
// Removes a table via a DROP TABLE DDL statement.
private void callDropTable(SqlCommandCall cmdCall) {
    try {
        executor.dropTable(sessionId, cmdCall.operands[0]);
        printInfo(CliStrings.MESSAGE_TABLE_REMOVED);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
    }
}
/**
 * Registers a named virtual view in the session. Fails if the name is taken.
 * Operand 0 is the view name, operand 1 the defining query.
 */
private void callCreateView(SqlCommandCall cmdCall) {
    final String name = cmdCall.operands[0];
    final String query = cmdCall.operands[1];
    final ViewEntry previousView = executor.listViews(sessionId).get(name);
    if (previousView != null) {
        printExecutionError(CliStrings.MESSAGE_VIEW_ALREADY_EXISTS);
        return;
    }
    try {
        executor.addView(sessionId, name, query);
        printInfo(CliStrings.MESSAGE_VIEW_CREATED);
    } catch (SqlExecutionException e) {
        // Roll back the (possibly partial) registration before reporting the error.
        executor.removeView(sessionId, name);
        printExecutionException(e);
    }
}
/**
 * Removes a named view; the previous definition is restored if removal fails.
 */
private void callDropView(SqlCommandCall cmdCall) {
    final String name = cmdCall.operands[0];
    final ViewEntry view = executor.listViews(sessionId).get(name);
    if (view == null) {
        printExecutionError(CliStrings.MESSAGE_VIEW_NOT_FOUND);
        return;
    }
    try {
        executor.removeView(sessionId, name);
        printInfo(CliStrings.MESSAGE_VIEW_REMOVED);
    } catch (SqlExecutionException e) {
        // Re-add the view so the session state stays consistent.
        executor.addView(sessionId, view.getName(), view.getQuery());
        printExecutionException(CliStrings.MESSAGE_VIEW_NOT_REMOVED, e);
    }
}
/** Registers a function from a CREATE FUNCTION DDL statement. */
private void callCreateFunction(SqlCommandCall cmdCall) {
    executeSqlAndPrint(cmdCall.operands[0], CliStrings.MESSAGE_FUNCTION_CREATED, null);
}

/** Removes a function via a DROP FUNCTION DDL statement. */
private void callDropFunction(SqlCommandCall cmdCall) {
    executeSqlAndPrint(cmdCall.operands[0], CliStrings.MESSAGE_FUNCTION_REMOVED, null);
}

/** Alters a function via an ALTER FUNCTION DDL statement. */
private void callAlterFunction(SqlCommandCall cmdCall) {
    executeSqlAndPrint(cmdCall.operands[0], CliStrings.MESSAGE_ALTER_FUNCTION_SUCCEEDED,
        CliStrings.MESSAGE_ALTER_FUNCTION_FAILED);
}

/**
 * Shared body of the function DDL commands (previously duplicated three times):
 * runs the statement and prints the success message, or reports the failure.
 * A null errorMessage yields the plain generic error prefix, matching the old
 * single-argument printExecutionException calls.
 */
private void executeSqlAndPrint(String stmt, String successMessage, String errorMessage) {
    try {
        executor.executeSql(sessionId, stmt);
        printInfo(successMessage);
    } catch (SqlExecutionException e) {
        printExecutionException(errorMessage, e);
    }
}
/**
 * Reads a SQL statement from the file named by operand 0 and executes it as if
 * it had been typed at the prompt. Oversized files are rejected.
 */
private void callSource(SqlCommandCall cmdCall) {
    final String pathString = cmdCall.operands[0];
    final String stmt;
    try {
        final Path path = Paths.get(pathString);
        byte[] encoded = Files.readAllBytes(path);
        // NOTE(review): decodes with the platform default charset — confirm whether
        // UTF-8 should be enforced so script files are portable across machines.
        stmt = new String(encoded, Charset.defaultCharset());
    } catch (IOException e) {
        printExecutionException(e);
        return;
    }
    // Guard against accidentally sourcing huge files.
    if (stmt.length() > SOURCE_MAX_SIZE) {
        printExecutionError(CliStrings.MESSAGE_MAX_SIZE_EXCEEDED);
        return;
    }
    // Echo the statement, then dispatch it through the normal command path.
    terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_WILL_EXECUTE).toAnsi());
    terminal.writer().println(new AttributedString(stmt).toString());
    terminal.flush();
    final Optional<SqlCommandCall> call = parseCommand(stmt);
    call.ifPresent(this::callCommand);
}
// Creates a database via a CREATE DATABASE DDL statement.
private void callCreateDatabase(SqlCommandCall cmdCall) {
    final String createDatabaseStmt = cmdCall.operands[0];
    try {
        executor.executeUpdate(sessionId, createDatabaseStmt);
        printInfo(CliStrings.MESSAGE_DATABASE_CREATED);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
    }
}
// Removes a database via a DROP DATABASE DDL statement.
private void callDropDatabase(SqlCommandCall cmdCall) {
    final String dropDatabaseStmt = cmdCall.operands[0];
    try {
        executor.executeUpdate(sessionId, dropDatabaseStmt);
        printInfo(CliStrings.MESSAGE_DATABASE_REMOVED);
    } catch (SqlExecutionException e) {
        printExecutionException(e);
    }
}
// Alters database properties; failures get a dedicated message prefix.
private void callAlterDatabase(SqlCommandCall cmdCall) {
    final String alterDatabaseStmt = cmdCall.operands[0];
    try {
        executor.executeUpdate(sessionId, alterDatabaseStmt);
        printInfo(CliStrings.MESSAGE_DATABASE_ALTER_SUCCEEDED);
    } catch (SqlExecutionException e) {
        printExecutionException(CliStrings.MESSAGE_DATABASE_ALTER_FAILED, e);
    }
}
// Alters a table; failures get a dedicated message prefix.
private void callAlterTable(SqlCommandCall cmdCall) {
    final String alterTableStmt = cmdCall.operands[0];
    try {
        executor.executeUpdate(sessionId, alterTableStmt);
        printInfo(CliStrings.MESSAGE_ALTER_TABLE_SUCCEEDED);
    } catch (SqlExecutionException e) {
        printExecutionException(CliStrings.MESSAGE_ALTER_TABLE_FAILED, e);
    }
}
// Reports an execution failure with the generic error prefix only.
private void printExecutionException(Throwable t) {
    printExecutionException(null, t);
}
// Reports an execution failure; a non-null message is appended to the generic prefix.
private void printExecutionException(String message, Throwable t) {
    final String finalMessage;
    if (message == null) {
        finalMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR;
    } else {
        finalMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR + ' ' + message;
    }
    printException(finalMessage, t);
}
// Prints an execution error that carries no exception cause.
private void printExecutionError(String message) {
    terminal.writer().println(CliStrings.messageError(CliStrings.MESSAGE_SQL_EXECUTION_ERROR, message).toAnsi());
    terminal.flush();
}
// Logs the throwable, then renders it on the terminal.
private void printException(String message, Throwable t) {
    LOG.warn(message, t);
    terminal.writer().println(CliStrings.messageError(message, t).toAnsi());
    terminal.flush();
}
// Prints a plain error message.
private void printError(String message) {
    terminal.writer().println(CliStrings.messageError(message).toAnsi());
    terminal.flush();
}
// Prints an informational message.
private void printInfo(String message) {
    terminal.writer().println(CliStrings.messageInfo(message).toAnsi());
    terminal.flush();
}
/**
 * Builds the default JLine terminal used when no custom terminal is supplied.
 * Failures to open it are fatal for the client.
 */
private static Terminal createDefaultTerminal() {
    final TerminalBuilder builder = TerminalBuilder.builder().name(CliStrings.CLI_NAME);
    try {
        return builder.build();
    } catch (IOException e) {
        throw new SqlClientException("Error opening command line interface.", e);
    }
}
} | class CliClient {
private static final Logger LOG = LoggerFactory.getLogger(CliClient.class);
private final Executor executor;
private final String sessionId;
private final Terminal terminal;
private final LineReader lineReader;
private final String prompt;
private boolean isRunning;
private static final int PLAIN_TERMINAL_WIDTH = 80;
private static final int PLAIN_TERMINAL_HEIGHT = 30;
private static final int SOURCE_MAX_SIZE = 50_000;
/**
* Creates a CLI instance with a custom terminal. Make sure to close the CLI instance
* afterwards using {@link
*/
@VisibleForTesting
public CliClient(Terminal terminal, String sessionId, Executor executor, Path historyFilePath) {
this.terminal = terminal;
this.sessionId = sessionId;
this.executor = executor;
terminal.writer().println();
terminal.writer().flush();
lineReader = LineReaderBuilder.builder()
.terminal(terminal)
.appName(CliStrings.CLI_NAME)
.parser(new SqlMultiLineParser())
.completer(new SqlCompleter(sessionId, executor))
.build();
lineReader.option(LineReader.Option.DISABLE_EVENT_EXPANSION, true);
lineReader.setVariable(LineReader.ERRORS, 1);
lineReader.option(LineReader.Option.CASE_INSENSITIVE, true);
if (Files.exists(historyFilePath) || CliUtils.createFile(historyFilePath)) {
String msg = "Command history file path: " + historyFilePath;
System.out.println(msg);
LOG.info(msg);
lineReader.setVariable(LineReader.HISTORY_FILE, historyFilePath);
} else {
String msg = "Unable to create history file: " + historyFilePath;
System.out.println(msg);
LOG.warn(msg);
}
prompt = new AttributedStringBuilder()
.style(AttributedStyle.DEFAULT.foreground(AttributedStyle.GREEN))
.append("Flink SQL")
.style(AttributedStyle.DEFAULT)
.append("> ")
.toAnsi();
}
/**
* Creates a CLI instance with a prepared terminal. Make sure to close the CLI instance
* afterwards using {@link
*/
public CliClient(String sessionId, Executor executor, Path historyFilePath) {
this(createDefaultTerminal(), sessionId, executor, historyFilePath);
}
public Terminal getTerminal() {
return terminal;
}
public String getSessionId() {
return this.sessionId;
}
public void clearTerminal() {
if (isPlainTerminal()) {
for (int i = 0; i < 200; i++) {
terminal.writer().println();
}
} else {
terminal.puts(InfoCmp.Capability.clear_screen);
}
}
public boolean isPlainTerminal() {
return terminal.getWidth() == 0 && terminal.getHeight() == 0;
}
public int getWidth() {
if (isPlainTerminal()) {
return PLAIN_TERMINAL_WIDTH;
}
return terminal.getWidth();
}
public int getHeight() {
if (isPlainTerminal()) {
return PLAIN_TERMINAL_HEIGHT;
}
return terminal.getHeight();
}
public Executor getExecutor() {
return executor;
}
/**
* Opens the interactive CLI shell.
*/
public void open() {
isRunning = true;
terminal.writer().append(CliStrings.MESSAGE_WELCOME);
while (isRunning) {
terminal.writer().append("\n");
terminal.flush();
final String line;
try {
line = lineReader.readLine(prompt, null, (MaskingCallback) null, null);
} catch (UserInterruptException e) {
continue;
} catch (EndOfFileException | IOError e) {
break;
} catch (Throwable t) {
throw new SqlClientException("Could not read from command line.", t);
}
if (line == null) {
continue;
}
final Optional<SqlCommandCall> cmdCall = parseCommand(line);
cmdCall.ifPresent(this::callCommand);
}
}
/**
* Closes the CLI instance.
*/
public void close() {
try {
terminal.close();
} catch (IOException e) {
throw new SqlClientException("Unable to close terminal.", e);
}
}
/**
* Submits a SQL update statement and prints status information and/or errors on the terminal.
*
* @param statement SQL update statement
* @return flag to indicate if the submission was successful or not
*/
public boolean submitUpdate(String statement) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_WILL_EXECUTE).toAnsi());
terminal.writer().println(new AttributedString(statement).toString());
terminal.flush();
final Optional<SqlCommandCall> parsedStatement = parseCommand(statement);
return parsedStatement.map(cmdCall -> {
switch (cmdCall.command) {
case INSERT_INTO:
case INSERT_OVERWRITE:
return callInsert(cmdCall);
default:
printError(CliStrings.MESSAGE_UNSUPPORTED_SQL);
return false;
}
}).orElse(false);
}
private void callCommand(SqlCommandCall cmdCall) {
switch (cmdCall.command) {
case QUIT:
callQuit();
break;
case CLEAR:
callClear();
break;
case RESET:
callReset();
break;
case SET:
callSet(cmdCall);
break;
case HELP:
callHelp();
break;
case SHOW_CATALOGS:
callShowCatalogs();
break;
case SHOW_DATABASES:
callShowDatabases();
break;
case SHOW_TABLES:
callShowTables();
break;
case SHOW_FUNCTIONS:
callShowFunctions();
break;
case SHOW_MODULES:
callShowModules();
break;
case USE_CATALOG:
callUseCatalog(cmdCall);
break;
case USE:
callUseDatabase(cmdCall);
break;
case DESC:
case DESCRIBE:
callDescribe(cmdCall);
break;
case EXPLAIN:
callExplain(cmdCall);
break;
case SELECT:
callSelect(cmdCall);
break;
case INSERT_INTO:
case INSERT_OVERWRITE:
callInsert(cmdCall);
break;
case CREATE_TABLE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_TABLE_CREATED);
break;
case DROP_TABLE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_TABLE_REMOVED);
break;
case CREATE_VIEW:
callCreateView(cmdCall);
break;
case DROP_VIEW:
callDropView(cmdCall);
break;
case CREATE_FUNCTION:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_FUNCTION_CREATED);
break;
case DROP_FUNCTION:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_FUNCTION_REMOVED);
break;
case ALTER_FUNCTION:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_ALTER_FUNCTION_SUCCEEDED,
CliStrings.MESSAGE_ALTER_FUNCTION_FAILED);
break;
case SOURCE:
callSource(cmdCall);
break;
case CREATE_DATABASE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_DATABASE_CREATED);
break;
case DROP_DATABASE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_DATABASE_REMOVED);
break;
case ALTER_DATABASE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_ALTER_DATABASE_SUCCEEDED,
CliStrings.MESSAGE_ALTER_DATABASE_FAILED);
break;
case ALTER_TABLE:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_ALTER_TABLE_SUCCEEDED,
CliStrings.MESSAGE_ALTER_TABLE_FAILED);
break;
case CREATE_CATALOG:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_CATALOG_CREATED);
break;
case DROP_CATALOG:
callDdl(cmdCall.operands[0], CliStrings.MESSAGE_CATALOG_REMOVED);
break;
default:
throw new SqlClientException("Unsupported command: " + cmdCall.command);
}
}
private void callQuit() {
printInfo(CliStrings.MESSAGE_QUIT);
isRunning = false;
}
private void callClear() {
clearTerminal();
}
private void callReset() {
executor.resetSessionProperties(sessionId);
printInfo(CliStrings.MESSAGE_RESET);
}
private void callSet(SqlCommandCall cmdCall) {
if (cmdCall.operands.length == 0) {
final Map<String, String> properties;
try {
properties = executor.getSessionProperties(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (properties.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
properties
.entrySet()
.stream()
.map((e) -> e.getKey() + "=" + e.getValue())
.sorted()
.forEach((p) -> terminal.writer().println(p));
}
}
else {
executor.setSessionProperty(sessionId, cmdCall.operands[0], cmdCall.operands[1].trim());
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_SET).toAnsi());
}
terminal.flush();
}
private void callHelp() {
terminal.writer().println(CliStrings.MESSAGE_HELP);
terminal.flush();
}
private void callShowCatalogs() {
final List<String> catalogs;
try {
catalogs = executor.listCatalogs(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (catalogs.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
catalogs.forEach((v) -> terminal.writer().println(v));
}
terminal.flush();
}
private void callShowDatabases() {
final List<String> dbs;
try {
dbs = executor.listDatabases(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (dbs.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
dbs.forEach((v) -> terminal.writer().println(v));
}
terminal.flush();
}
private void callShowTables() {
final List<String> tables;
try {
tables = executor.listTables(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (tables.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
tables.forEach((v) -> terminal.writer().println(v));
}
terminal.flush();
}
private void callShowFunctions() {
final List<String> functions;
try {
functions = executor.listFunctions(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (functions.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
Collections.sort(functions);
functions.forEach((v) -> terminal.writer().println(v));
}
terminal.flush();
}
private void callShowModules() {
final List<String> modules;
try {
modules = executor.listModules(sessionId);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (modules.isEmpty()) {
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_EMPTY).toAnsi());
} else {
modules.forEach((v) -> terminal.writer().println(v));
}
terminal.flush();
}
private void callUseCatalog(SqlCommandCall cmdCall) {
try {
executor.useCatalog(sessionId, cmdCall.operands[0]);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
terminal.flush();
}
private void callUseDatabase(SqlCommandCall cmdCall) {
try {
executor.useDatabase(sessionId, cmdCall.operands[0]);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
terminal.flush();
}
private void callDescribe(SqlCommandCall cmdCall) {
final TableSchema schema;
try {
schema = executor.getTableSchema(sessionId, cmdCall.operands[0]);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
terminal.writer().println(schema.toString());
terminal.flush();
}
/**
 * Executes an EXPLAIN statement and prints the resulting plan.
 */
private void callExplain(SqlCommandCall cmdCall) {
    final String explanation;
    try {
        TableResult tableResult = executor.executeSql(sessionId, cmdCall.operands[0]);
        // The result is expected to be a single row whose first field is the plan.
        // String.valueOf guards against a null field, which previously caused an
        // uncaught NullPointerException that escaped the SqlExecutionException catch.
        // NOTE(review): the iterator returned by collect() is never closed — confirm
        // whether it must be for EXPLAIN results.
        explanation = String.valueOf(tableResult.collect().next().getField(0));
    } catch (SqlExecutionException e) {
        printExecutionException(e);
        return;
    }
    terminal.writer().println(explanation);
    terminal.flush();
}
private void callSelect(SqlCommandCall cmdCall) {
final ResultDescriptor resultDesc;
try {
resultDesc = executor.executeQuery(sessionId, cmdCall.operands[0]);
} catch (SqlExecutionException e) {
printExecutionException(e);
return;
}
if (resultDesc.isTableauMode()) {
try (CliTableauResultView tableauResultView = new CliTableauResultView(
terminal, executor, sessionId, resultDesc)) {
if (resultDesc.isMaterialized()) {
tableauResultView.displayBatchResults();
} else {
tableauResultView.displayStreamResults();
}
} catch (SqlExecutionException e) {
printExecutionException(e);
}
} else {
final CliResultView view;
if (resultDesc.isMaterialized()) {
view = new CliTableResultView(this, resultDesc);
} else {
view = new CliChangelogResultView(this, resultDesc);
}
try {
view.open();
printInfo(CliStrings.MESSAGE_RESULT_QUIT);
} catch (SqlExecutionException e) {
printExecutionException(e);
}
}
}
private boolean callInsert(SqlCommandCall cmdCall) {
printInfo(CliStrings.MESSAGE_SUBMITTING_STATEMENT);
try {
final ProgramTargetDescriptor programTarget = executor.executeUpdate(sessionId, cmdCall.operands[0]);
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_STATEMENT_SUBMITTED).toAnsi());
terminal.writer().println(programTarget.toString());
terminal.flush();
} catch (SqlExecutionException e) {
printExecutionException(e);
return false;
}
return true;
}
private void callCreateView(SqlCommandCall cmdCall) {
final String name = cmdCall.operands[0];
final String query = cmdCall.operands[1];
final ViewEntry previousView = executor.listViews(sessionId).get(name);
if (previousView != null) {
printExecutionError(CliStrings.MESSAGE_VIEW_ALREADY_EXISTS);
return;
}
try {
executor.addView(sessionId, name, query);
printInfo(CliStrings.MESSAGE_VIEW_CREATED);
} catch (SqlExecutionException e) {
executor.removeView(sessionId, name);
printExecutionException(e);
}
}
private void callDropView(SqlCommandCall cmdCall) {
final String name = cmdCall.operands[0];
final ViewEntry view = executor.listViews(sessionId).get(name);
if (view == null) {
printExecutionError(CliStrings.MESSAGE_VIEW_NOT_FOUND);
return;
}
try {
executor.removeView(sessionId, name);
printInfo(CliStrings.MESSAGE_VIEW_REMOVED);
} catch (SqlExecutionException e) {
executor.addView(sessionId, view.getName(), view.getQuery());
printExecutionException(CliStrings.MESSAGE_VIEW_NOT_REMOVED, e);
}
}
private void callSource(SqlCommandCall cmdCall) {
final String pathString = cmdCall.operands[0];
final String stmt;
try {
final Path path = Paths.get(pathString);
byte[] encoded = Files.readAllBytes(path);
stmt = new String(encoded, Charset.defaultCharset());
} catch (IOException e) {
printExecutionException(e);
return;
}
if (stmt.length() > SOURCE_MAX_SIZE) {
printExecutionError(CliStrings.MESSAGE_MAX_SIZE_EXCEEDED);
return;
}
terminal.writer().println(CliStrings.messageInfo(CliStrings.MESSAGE_WILL_EXECUTE).toAnsi());
terminal.writer().println(new AttributedString(stmt).toString());
terminal.flush();
final Optional<SqlCommandCall> call = parseCommand(stmt);
call.ifPresent(this::callCommand);
}
private void callDdl(String ddl, String successMessage) {
callDdl(ddl, successMessage, null);
}
private void callDdl(String ddl, String successMessage, String errorMessage) {
try {
executor.executeSql(sessionId, ddl);
printInfo(successMessage);
} catch (SqlExecutionException e) {
printExecutionException(errorMessage, e);
}
}
private void printExecutionException(Throwable t) {
printExecutionException(null, t);
}
private void printExecutionException(String message, Throwable t) {
final String finalMessage;
if (message == null) {
finalMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR;
} else {
finalMessage = CliStrings.MESSAGE_SQL_EXECUTION_ERROR + ' ' + message;
}
printException(finalMessage, t);
}
private void printExecutionError(String message) {
terminal.writer().println(CliStrings.messageError(CliStrings.MESSAGE_SQL_EXECUTION_ERROR, message).toAnsi());
terminal.flush();
}
private void printException(String message, Throwable t) {
LOG.warn(message, t);
terminal.writer().println(CliStrings.messageError(message, t).toAnsi());
terminal.flush();
}
private void printError(String message) {
terminal.writer().println(CliStrings.messageError(message).toAnsi());
terminal.flush();
}
private void printInfo(String message) {
terminal.writer().println(CliStrings.messageInfo(message).toAnsi());
terminal.flush();
}
private static Terminal createDefaultTerminal() {
try {
return TerminalBuilder.builder()
.name(CliStrings.CLI_NAME)
.build();
} catch (IOException e) {
throw new SqlClientException("Error opening command line interface.", e);
}
}
} |
Modified it as `'Clazz' is abstract, and cannot be instantiated` per discussion. | private List<Executable> getExecutables(Class<?> clazz, String methodName, JMethodKind kind) {
if (kind == JMethodKind.CONSTRUCTOR) {
if (Modifier.isAbstract(clazz.getModifiers())) {
throw new JInteropException(DiagnosticErrorCode.INSTANTIATION_ERROR,
"'" + clazz.getName() + "' is abstract; cannot be instantiated");
}
return Arrays.asList(getConstructors(clazz));
} else {
List<Executable> list = new ArrayList<>();
for (Method method : getMethods(clazz)) {
if (method.getName().equals(methodName)) {
list.add(method);
}
}
return list;
}
} | throw new JInteropException(DiagnosticErrorCode.INSTANTIATION_ERROR, | private List<Executable> getExecutables(Class<?> clazz, String methodName, JMethodKind kind) {
if (kind == JMethodKind.CONSTRUCTOR) {
if (Modifier.isAbstract(clazz.getModifiers())) {
throw new JInteropException(DiagnosticErrorCode.INSTANTIATION_ERROR,
"'" + clazz.getName() + "' is abstract, and cannot be instantiated");
}
return Arrays.asList(getConstructors(clazz));
} else {
List<Executable> list = new ArrayList<>();
for (Method method : getMethods(clazz)) {
if (method.getName().equals(methodName)) {
list.add(method);
}
}
return list;
}
} | class '" + jMethodRequest.declaringClass + "'");
}
} else {
return resolvedJMethods.get(0);
} | class '" + jMethodRequest.declaringClass + "'");
}
} else {
return resolvedJMethods.get(0);
} |
does it mean we no longer set error if all retries has failed? | private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) {
final String partitionKey = options.getPartitionKey();
final String partitionId = options.getPartitionId();
if (!CoreUtils.isNullOrEmpty(partitionKey)
&& !CoreUtils.isNullOrEmpty(partitionId)) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the"
+ " other can be used. partitionKey: '%s'. partitionId: '%s'",
partitionKey, partitionId)));
}
return getSendLink(options.getPartitionId())
.flatMap(link -> link.getLinkSize()
.flatMap(size -> {
final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES;
final CreateBatchOptions batchOptions = new CreateBatchOptions()
.setPartitionKey(options.getPartitionKey())
.setPartitionId(options.getPartitionId())
.setMaximumSizeInBytes(batchSize);
return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext,
tracerProvider));
})
.flatMap(list -> sendInternal(Flux.fromIterable(list))));
} | final String partitionKey = options.getPartitionKey(); | private Mono<Void> sendInternal(Flux<EventData> events, SendOptions options) {
final String partitionKey = options.getPartitionKey();
final String partitionId = options.getPartitionId();
if (!CoreUtils.isNullOrEmpty(partitionKey)
&& !CoreUtils.isNullOrEmpty(partitionId)) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"SendOptions.getPartitionKey() and SendOptions.getPartitionId() are both set. Only one or the"
+ " other can be used. partitionKey: '%s'. partitionId: '%s'",
partitionKey, partitionId)));
}
return getSendLink(options.getPartitionId())
.flatMap(link -> link.getLinkSize()
.flatMap(size -> {
final int batchSize = size > 0 ? size : MAX_MESSAGE_LENGTH_BYTES;
final CreateBatchOptions batchOptions = new CreateBatchOptions()
.setPartitionKey(options.getPartitionKey())
.setPartitionId(options.getPartitionId())
.setMaximumSizeInBytes(batchSize);
return events.collect(new EventDataCollector(batchOptions, 1, link::getErrorContext,
tracerProvider));
})
.flatMap(list -> sendInternal(Flux.fromIterable(list))));
} | class EventHubProducerAsyncClient implements Closeable {
private static final int MAX_PARTITION_KEY_LENGTH = 128;
private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s";
private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions();
private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions();
private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class);
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final String fullyQualifiedNamespace;
private final String eventHubName;
private final EventHubConnectionProcessor connectionProcessor;
private final AmqpRetryOptions retryOptions;
private final AmqpRetryPolicy retryPolicy;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final boolean isSharedConnection;
/**
* Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition
* when {@link CreateBatchOptions
* load balance the messages amongst available partitions.
*/
EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName,
EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider,
MessageSerializer messageSerializer, boolean isSharedConnection) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor,
"'connectionProcessor' cannot be null.");
this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.retryPolicy = getRetryPolicy(retryOptions);
this.isSharedConnection = isSharedConnection;
}
/**
* Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Event Hubs namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Event Hub name this client interacts with.
*
* @return The Event Hub name this client interacts with.
*/
public String getEventHubName() {
return eventHubName;
}
/**
* Retrieves information about an Event Hub, including the number of partitions present and their identifiers.
*
* @return The set of information for the Event Hub that this client is associated with.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EventHubProperties> getEventHubProperties() {
return connectionProcessor.flatMap(connection -> connection.getManagementNode())
.flatMap(EventHubManagementNode::getEventHubProperties);
}
/**
* Retrieves the identifiers for the partitions of an Event Hub.
*
* @return A Flux of identifiers for the partitions of an Event Hub.
*/
public Flux<String> getPartitionIds() {
return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds()));
}
/**
* Retrieves information about a specific partition for an Event Hub, including elements that describe the available
* events in the partition event stream.
*
* @param partitionId The unique identifier of a partition associated with the Event Hub.
* @return The set of information for the requested partition under the Event Hub this client is associated with.
* @throws NullPointerException if {@code partitionId} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<PartitionProperties> getPartitionProperties(String partitionId) {
return connectionProcessor.flatMap(connection -> connection.getManagementNode())
.flatMap(node -> node.getPartitionProperties(partitionId));
}
/**
* Creates an {@link EventDataBatch} that can fit as many events as the transport allows.
*
* @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
*/
public Mono<EventDataBatch> createBatch() {
return createBatch(DEFAULT_BATCH_OPTIONS);
}
/**
* Creates an {@link EventDataBatch} configured with the options specified.
*
* @param options A set of options used to configure the {@link EventDataBatch}.
* @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
* @throws NullPointerException if {@code options} is null.
*/
public Mono<EventDataBatch> createBatch(CreateBatchOptions options) {
if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
final String partitionKey = options.getPartitionKey();
final String partitionId = options.getPartitionId();
final int batchMaxSize = options.getMaximumSizeInBytes();
if (!CoreUtils.isNullOrEmpty(partitionKey)
&& !CoreUtils.isNullOrEmpty(partitionId)) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. "
+ "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'",
partitionKey, partitionId)));
} else if (!CoreUtils.isNullOrEmpty(partitionKey)
&& partitionKey.length() > MAX_PARTITION_KEY_LENGTH) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey,
MAX_PARTITION_KEY_LENGTH)));
}
return getSendLink(partitionId)
.flatMap(link -> link.getLinkSize()
.flatMap(size -> {
final int maximumLinkSize = size > 0
? size
: MAX_MESSAGE_LENGTH_BYTES;
if (batchMaxSize > maximumLinkSize) {
return monoError(logger,
new IllegalArgumentException(String.format(Locale.US,
"BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).",
batchMaxSize, maximumLinkSize)));
}
final int batchSize = batchMaxSize > 0
? batchMaxSize
: maximumLinkSize;
return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext,
tracerProvider));
}));
}
/**
* Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size
* allowed, an exception will be triggered and the send will fail.
*
* <p>
* For more information regarding the maximum event size allowed, see
* <a href="https:
* Limits</a>.
* </p>
*
* @param event Event to send to the service.
* @return A {@link Mono} that completes when the event is pushed to the service.
*/
Mono<Void> send(EventData event) {
if (event == null) {
return monoError(logger, new NullPointerException("'event' cannot be null."));
}
return send(Flux.just(event));
}
/**
* Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds
* the maximum size allowed, an exception will be triggered and the send will fail.
*
* <p>
* For more information regarding the maximum event size allowed, see
* <a href="https:
* Limits</a>.
* </p>
*
* @param event Event to send to the service.
* @param options The set of options to consider when sending this event.
* @return A {@link Mono} that completes when the event is pushed to the service.
*/
Mono<Void> send(EventData event, SendOptions options) {
if (event == null) {
return monoError(logger, new NullPointerException("'event' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return send(Flux.just(event), options);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Iterable<EventData> events) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
}
return send(Flux.fromIterable(events));
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @param options The set of options to consider when sending this batch.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Iterable<EventData> events, SendOptions options) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return send(Flux.fromIterable(events), options);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Flux<EventData> events) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
}
return send(events, DEFAULT_SEND_OPTIONS);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @param options The set of options to consider when sending this batch.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Flux<EventData> events, SendOptions options) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return sendInternal(events, options);
}
/**
* Sends the batch to the associated Event Hub.
*
* @param batch The batch to send to the service.
* @return A {@link Mono} that completes when the batch is pushed to the service.
* @throws NullPointerException if {@code batch} is {@code null}.
* @see EventHubProducerAsyncClient
* @see EventHubProducerAsyncClient
*/
public Mono<Void> send(EventDataBatch batch) {
if (batch == null) {
return monoError(logger, new NullPointerException("'batch' cannot be null."));
} else if (batch.getEvents().isEmpty()) {
logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
return Mono.empty();
}
if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
} else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
logger.verbose("Sending batch with size[{}] with partitionKey[{}].",
batch.getCount(), batch.getPartitionKey());
} else {
logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
}
final String partitionKey = batch.getPartitionKey();
final boolean isTracingEnabled = tracerProvider.isEnabled();
final AtomicReference<Context> parentContext = isTracingEnabled
? new AtomicReference<>(Context.NONE)
: null;
Context sharedContext = null;
final List<Message> messages = new ArrayList<>();
for (int i = 0; i < batch.getEvents().size(); i++) {
final EventData event = batch.getEvents().get(i);
if (isTracingEnabled) {
parentContext.set(event.getContext());
if (i == 0) {
sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
}
tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
}
final Message message = messageSerializer.serialize(event);
if (!CoreUtils.isNullOrEmpty(partitionKey)) {
final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
? new MessageAnnotations(new HashMap<>())
: message.getMessageAnnotations();
messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
message.setMessageAnnotations(messageAnnotations);
}
messages.add(message);
}
if (isTracingEnabled) {
final Context finalSharedContext = sharedContext == null
? Context.NONE
: sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace);
parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
}
return withRetry(getSendLink(batch.getPartitionId())
.flatMap(link ->
messages.size() == 1
? link.send(messages.get(0))
: link.send(messages)), retryOptions.getTryTimeout(), retryPolicy)
.doOnEach(signal -> {
if (isTracingEnabled) {
tracerProvider.endSpan(parentContext.get(), signal);
}
});
}
private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) {
return eventBatches
.flatMap(this::send)
.then()
.doOnError(error -> {
logger.error(Messages.ERROR_SENDING_BATCH, error);
});
}
private String getEntityPath(String partitionId) {
return CoreUtils.isNullOrEmpty(partitionId)
? eventHubName
: String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId);
}
private Mono<AmqpSendLink> getSendLink(String partitionId) {
final String entityPath = getEntityPath(partitionId);
final String linkName = getEntityPath(partitionId);
return connectionProcessor
.flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions));
}
/**
* Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated connection, the underlying
* connection is also closed.
*/
@Override
public void close() {
if (isDisposed.getAndSet(true)) {
return;
}
if (!isSharedConnection) {
connectionProcessor.dispose();
}
}
/**
* Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is {@code null} then
* it'll collect as many batches as possible. Otherwise, if there are more events than can fit into {@code
* maxNumberOfBatches}, then the collector throws a {@link AmqpException} with {@link
* AmqpErrorCondition
*/
private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>,
List<EventDataBatch>> {
private final String partitionKey;
private final String partitionId;
private final int maxMessageSize;
private final Integer maxNumberOfBatches;
private final ErrorContextProvider contextProvider;
private final TracerProvider tracerProvider;
private volatile EventDataBatch currentBatch;
EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider,
TracerProvider tracerProvider) {
this.maxNumberOfBatches = maxNumberOfBatches;
this.maxMessageSize = options.getMaximumSizeInBytes() > 0
? options.getMaximumSizeInBytes()
: MAX_MESSAGE_LENGTH_BYTES;
this.partitionKey = options.getPartitionKey();
this.partitionId = options.getPartitionId();
this.contextProvider = contextProvider;
this.tracerProvider = tracerProvider;
currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider,
tracerProvider);
}
@Override
public Supplier<List<EventDataBatch>> supplier() {
return ArrayList::new;
}
@Override
public BiConsumer<List<EventDataBatch>, EventData> accumulator() {
return (list, event) -> {
EventDataBatch batch = currentBatch;
if (batch.tryAdd(event)) {
return;
}
if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) {
final String message = String.format(Locale.US,
Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches);
throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message,
contextProvider.getErrorContext());
}
currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider,
tracerProvider);
currentBatch.tryAdd(event);
list.add(batch);
};
}
@Override
public BinaryOperator<List<EventDataBatch>> combiner() {
return (existing, another) -> {
existing.addAll(another);
return existing;
};
}
@Override
public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() {
return list -> {
EventDataBatch batch = currentBatch;
currentBatch = null;
if (batch != null) {
list.add(batch);
}
return list;
};
}
@Override
public Set<Characteristics> characteristics() {
return Collections.emptySet();
}
}
} | class EventHubProducerAsyncClient implements Closeable {
private static final int MAX_PARTITION_KEY_LENGTH = 128;
private static final String SENDER_ENTITY_PATH_FORMAT = "%s/Partitions/%s";
private static final SendOptions DEFAULT_SEND_OPTIONS = new SendOptions();
private static final CreateBatchOptions DEFAULT_BATCH_OPTIONS = new CreateBatchOptions();
private final ClientLogger logger = new ClientLogger(EventHubProducerAsyncClient.class);
private final AtomicBoolean isDisposed = new AtomicBoolean();
private final String fullyQualifiedNamespace;
private final String eventHubName;
private final EventHubConnectionProcessor connectionProcessor;
private final AmqpRetryOptions retryOptions;
private final AmqpRetryPolicy retryPolicy;
private final TracerProvider tracerProvider;
private final MessageSerializer messageSerializer;
private final boolean isSharedConnection;
/**
* Creates a new instance of this {@link EventHubProducerAsyncClient} that can send messages to a single partition
* when {@link CreateBatchOptions
* load balance the messages amongst available partitions.
*/
EventHubProducerAsyncClient(String fullyQualifiedNamespace, String eventHubName,
EventHubConnectionProcessor connectionProcessor, AmqpRetryOptions retryOptions, TracerProvider tracerProvider,
MessageSerializer messageSerializer, boolean isSharedConnection) {
this.fullyQualifiedNamespace = Objects.requireNonNull(fullyQualifiedNamespace,
"'fullyQualifiedNamespace' cannot be null.");
this.eventHubName = Objects.requireNonNull(eventHubName, "'eventHubName' cannot be null.");
this.connectionProcessor = Objects.requireNonNull(connectionProcessor,
"'connectionProcessor' cannot be null.");
this.retryOptions = Objects.requireNonNull(retryOptions, "'retryOptions' cannot be null.");
this.tracerProvider = Objects.requireNonNull(tracerProvider, "'tracerProvider' cannot be null.");
this.messageSerializer = Objects.requireNonNull(messageSerializer, "'messageSerializer' cannot be null.");
this.retryPolicy = getRetryPolicy(retryOptions);
this.isSharedConnection = isSharedConnection;
}
/**
* Gets the fully qualified Event Hubs namespace that the connection is associated with. This is likely similar to
* {@code {yournamespace}.servicebus.windows.net}.
*
* @return The fully qualified Event Hubs namespace that the connection is associated with.
*/
public String getFullyQualifiedNamespace() {
return fullyQualifiedNamespace;
}
/**
* Gets the Event Hub name this client interacts with.
*
* @return The Event Hub name this client interacts with.
*/
public String getEventHubName() {
return eventHubName;
}
/**
* Retrieves information about an Event Hub, including the number of partitions present and their identifiers.
*
* @return The set of information for the Event Hub that this client is associated with.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<EventHubProperties> getEventHubProperties() {
return connectionProcessor.flatMap(connection -> connection.getManagementNode())
.flatMap(EventHubManagementNode::getEventHubProperties);
}
/**
* Retrieves the identifiers for the partitions of an Event Hub.
*
* @return A Flux of identifiers for the partitions of an Event Hub.
*/
public Flux<String> getPartitionIds() {
return getEventHubProperties().flatMapMany(properties -> Flux.fromIterable(properties.getPartitionIds()));
}
/**
* Retrieves information about a specific partition for an Event Hub, including elements that describe the available
* events in the partition event stream.
*
* @param partitionId The unique identifier of a partition associated with the Event Hub.
* @return The set of information for the requested partition under the Event Hub this client is associated with.
* @throws NullPointerException if {@code partitionId} is null.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<PartitionProperties> getPartitionProperties(String partitionId) {
return connectionProcessor.flatMap(connection -> connection.getManagementNode())
.flatMap(node -> node.getPartitionProperties(partitionId));
}
/**
* Creates an {@link EventDataBatch} that can fit as many events as the transport allows.
*
* @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
*/
public Mono<EventDataBatch> createBatch() {
return createBatch(DEFAULT_BATCH_OPTIONS);
}
/**
* Creates an {@link EventDataBatch} configured with the options specified.
*
* @param options A set of options used to configure the {@link EventDataBatch}.
* @return A new {@link EventDataBatch} that can fit as many events as the transport allows.
* @throws NullPointerException if {@code options} is null.
*/
public Mono<EventDataBatch> createBatch(CreateBatchOptions options) {
if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
final String partitionKey = options.getPartitionKey();
final String partitionId = options.getPartitionId();
final int batchMaxSize = options.getMaximumSizeInBytes();
if (!CoreUtils.isNullOrEmpty(partitionKey)
&& !CoreUtils.isNullOrEmpty(partitionId)) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"CreateBatchOptions.getPartitionKey() and CreateBatchOptions.getPartitionId() are both set. "
+ "Only one or the other can be used. partitionKey: '%s'. partitionId: '%s'",
partitionKey, partitionId)));
} else if (!CoreUtils.isNullOrEmpty(partitionKey)
&& partitionKey.length() > MAX_PARTITION_KEY_LENGTH) {
return monoError(logger, new IllegalArgumentException(String.format(Locale.US,
"Partition key '%s' exceeds the maximum allowed length: '%s'.", partitionKey,
MAX_PARTITION_KEY_LENGTH)));
}
return getSendLink(partitionId)
.flatMap(link -> link.getLinkSize()
.flatMap(size -> {
final int maximumLinkSize = size > 0
? size
: MAX_MESSAGE_LENGTH_BYTES;
if (batchMaxSize > maximumLinkSize) {
return monoError(logger,
new IllegalArgumentException(String.format(Locale.US,
"BatchOptions.maximumSizeInBytes (%s bytes) is larger than the link size (%s bytes).",
batchMaxSize, maximumLinkSize)));
}
final int batchSize = batchMaxSize > 0
? batchMaxSize
: maximumLinkSize;
return Mono.just(new EventDataBatch(batchSize, partitionId, partitionKey, link::getErrorContext,
tracerProvider));
}));
}
/**
* Sends a single event to the associated Event Hub. If the size of the single event exceeds the maximum size
* allowed, an exception will be triggered and the send will fail.
*
* <p>
* For more information regarding the maximum event size allowed, see
* <a href="https:
* Limits</a>.
* </p>
*
* @param event Event to send to the service.
* @return A {@link Mono} that completes when the event is pushed to the service.
*/
Mono<Void> send(EventData event) {
if (event == null) {
return monoError(logger, new NullPointerException("'event' cannot be null."));
}
return send(Flux.just(event));
}
/**
* Sends a single event to the associated Event Hub with the send options. If the size of the single event exceeds
* the maximum size allowed, an exception will be triggered and the send will fail.
*
* <p>
* For more information regarding the maximum event size allowed, see
* <a href="https:
* Limits</a>.
* </p>
*
* @param event Event to send to the service.
* @param options The set of options to consider when sending this event.
* @return A {@link Mono} that completes when the event is pushed to the service.
*/
Mono<Void> send(EventData event, SendOptions options) {
if (event == null) {
return monoError(logger, new NullPointerException("'event' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return send(Flux.just(event), options);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Iterable<EventData> events) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
}
return send(Flux.fromIterable(events));
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @param options The set of options to consider when sending this batch.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Iterable<EventData> events, SendOptions options) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return send(Flux.fromIterable(events), options);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Flux<EventData> events) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
}
return send(events, DEFAULT_SEND_OPTIONS);
}
/**
* Sends a set of events to the associated Event Hub using a batched approach. If the size of events exceed the
* maximum size of a single batch, an exception will be triggered and the send will fail. By default, the message
* size is the max amount allowed on the link.
*
* @param events Events to send to the service.
* @param options The set of options to consider when sending this batch.
* @return A {@link Mono} that completes when all events are pushed to the service.
*/
Mono<Void> send(Flux<EventData> events, SendOptions options) {
if (events == null) {
return monoError(logger, new NullPointerException("'events' cannot be null."));
} else if (options == null) {
return monoError(logger, new NullPointerException("'options' cannot be null."));
}
return sendInternal(events, options);
}
/**
 * Sends the batch to the associated Event Hub.
 *
 * @param batch The batch to send to the service.
 * @return A {@link Mono} that completes when the batch is pushed to the service.
 * @throws NullPointerException if {@code batch} is {@code null}.
 * @see EventHubProducerAsyncClient
 */
public Mono<Void> send(EventDataBatch batch) {
    // A null batch is a caller error; an empty batch is tolerated as a warning + no-op.
    if (batch == null) {
        return monoError(logger, new NullPointerException("'batch' cannot be null."));
    } else if (batch.getEvents().isEmpty()) {
        logger.warning(Messages.CANNOT_SEND_EVENT_BATCH_EMPTY);
        return Mono.empty();
    }
    // Log which routing mode applies: explicit partition, partition key, or round-robin.
    if (!CoreUtils.isNullOrEmpty(batch.getPartitionId())) {
        logger.verbose("Sending batch with size[{}] to partitionId[{}].", batch.getCount(), batch.getPartitionId());
    } else if (!CoreUtils.isNullOrEmpty(batch.getPartitionKey())) {
        logger.verbose("Sending batch with size[{}] with partitionKey[{}].",
            batch.getCount(), batch.getPartitionKey());
    } else {
        logger.verbose("Sending batch with size[{}] to be distributed round-robin in service.", batch.getCount());
    }
    final String partitionKey = batch.getPartitionKey();
    // Tracing state is only allocated when a tracer is configured.
    final boolean isTracingEnabled = tracerProvider.isEnabled();
    final AtomicReference<Context> parentContext = isTracingEnabled
        ? new AtomicReference<>(Context.NONE)
        : null;
    Context sharedContext = null;
    final List<Message> messages = new ArrayList<>();
    for (int i = 0; i < batch.getEvents().size(); i++) {
        final EventData event = batch.getEvents().get(i);
        if (isTracingEnabled) {
            parentContext.set(event.getContext());
            // The shared span builder is seeded from the first event's context only; subsequent
            // events are attached to it as span links.
            if (i == 0) {
                sharedContext = tracerProvider.getSharedSpanBuilder(parentContext.get());
            }
            tracerProvider.addSpanLinks(sharedContext.addData(SPAN_CONTEXT_KEY, event.getContext()));
        }
        final Message message = messageSerializer.serialize(event);
        // When a partition key is set, stamp it on every AMQP message's annotations so the
        // service routes the whole batch consistently.
        if (!CoreUtils.isNullOrEmpty(partitionKey)) {
            final MessageAnnotations messageAnnotations = message.getMessageAnnotations() == null
                ? new MessageAnnotations(new HashMap<>())
                : message.getMessageAnnotations();
            messageAnnotations.getValue().put(AmqpConstants.PARTITION_KEY, partitionKey);
            message.setMessageAnnotations(messageAnnotations);
        }
        messages.add(message);
    }
    if (isTracingEnabled) {
        // Enrich the shared context with entity/host data before starting the SEND span.
        final Context finalSharedContext = sharedContext == null
            ? Context.NONE
            : sharedContext.addData(ENTITY_PATH_KEY, eventHubName).addData(HOST_NAME_KEY, fullyQualifiedNamespace);
        parentContext.set(tracerProvider.startSpan(finalSharedContext, ProcessKind.SEND));
    }
    // Retry the link send within the configured try-timeout; a single message is sent directly,
    // multiple messages as one transfer. The span is ended on every terminal signal.
    return withRetry(getSendLink(batch.getPartitionId())
        .flatMap(link ->
            messages.size() == 1
                ? link.send(messages.get(0))
                : link.send(messages)), retryOptions.getTryTimeout(), retryPolicy)
        .doOnEach(signal -> {
            if (isTracingEnabled) {
                tracerProvider.endSpan(parentContext.get(), signal);
            }
        });
}
/**
 * Sends each batch in the stream and completes when all of them are pushed to the service.
 * Failures are logged before the error propagates to the subscriber.
 */
private Mono<Void> sendInternal(Flux<EventDataBatch> eventBatches) {
    final Flux<Void> sends = eventBatches.flatMap(this::send);
    return sends
        .then()
        .doOnError(error -> logger.error(Messages.ERROR_SENDING_BATCH, error));
}
/**
 * Resolves the AMQP entity path: the Event Hub itself when no partition is given, otherwise
 * the partition's sub-path.
 */
private String getEntityPath(String partitionId) {
    if (CoreUtils.isNullOrEmpty(partitionId)) {
        return eventHubName;
    }
    return String.format(Locale.US, SENDER_ENTITY_PATH_FORMAT, eventHubName, partitionId);
}
/**
 * Obtains (or creates) the send link for the given partition.
 *
 * @param partitionId Partition to send to; {@code null}/empty targets the Event Hub itself.
 * @return A {@link Mono} emitting the send link.
 */
private Mono<AmqpSendLink> getSendLink(String partitionId) {
    // The link name and entity path are currently identical; compute the path once instead of
    // invoking getEntityPath twice (the original called it redundantly for both values).
    final String entityPath = getEntityPath(partitionId);
    final String linkName = entityPath;
    return connectionProcessor
        .flatMap(connection -> connection.createSendLink(linkName, entityPath, retryOptions));
}
/**
 * Disposes of the {@link EventHubProducerAsyncClient}. If the client had a dedicated
 * connection, the underlying connection is also closed. Safe to call more than once.
 */
@Override
public void close() {
    // Atomically flip the disposed flag; only the first caller performs the cleanup.
    final boolean alreadyDisposed = isDisposed.getAndSet(true);
    if (alreadyDisposed) {
        return;
    }
    // A shared connection is owned elsewhere and must not be torn down here.
    if (!isSharedConnection) {
        connectionProcessor.dispose();
    }
}
/**
 * Collects EventData into EventDataBatch to send to Event Hubs. If {@code maxNumberOfBatches} is
 * {@code null} then it'll collect as many batches as possible. Otherwise, if there are more events
 * than can fit into {@code maxNumberOfBatches}, the collector throws an {@link AmqpException} with
 * {@link AmqpErrorCondition#LINK_PAYLOAD_SIZE_EXCEEDED}.
 */
private static class EventDataCollector implements Collector<EventData, List<EventDataBatch>,
        List<EventDataBatch>> {
    private final String partitionKey;
    private final String partitionId;
    private final int maxMessageSize;
    private final Integer maxNumberOfBatches;
    private final ErrorContextProvider contextProvider;
    private final TracerProvider tracerProvider;

    // Batch currently being filled; replaced once an event no longer fits.
    private volatile EventDataBatch currentBatch;

    EventDataCollector(CreateBatchOptions options, Integer maxNumberOfBatches, ErrorContextProvider contextProvider,
            TracerProvider tracerProvider) {
        this.maxNumberOfBatches = maxNumberOfBatches;
        // Fall back to the link's maximum when the caller did not constrain the batch size.
        this.maxMessageSize = options.getMaximumSizeInBytes() > 0
            ? options.getMaximumSizeInBytes()
            : MAX_MESSAGE_LENGTH_BYTES;
        this.partitionKey = options.getPartitionKey();
        this.partitionId = options.getPartitionId();
        this.contextProvider = contextProvider;
        this.tracerProvider = tracerProvider;
        currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider,
            tracerProvider);
    }

    @Override
    public Supplier<List<EventDataBatch>> supplier() {
        return ArrayList::new;
    }

    @Override
    public BiConsumer<List<EventDataBatch>, EventData> accumulator() {
        return (list, event) -> {
            EventDataBatch batch = currentBatch;
            if (batch.tryAdd(event)) {
                return;
            }
            // Current batch is full; enforce the optional cap on the number of batches.
            if (maxNumberOfBatches != null && list.size() == maxNumberOfBatches) {
                final String message = String.format(Locale.US,
                    Messages.EVENT_DATA_DOES_NOT_FIT, maxNumberOfBatches);
                throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED, message,
                    contextProvider.getErrorContext());
            }
            currentBatch = new EventDataBatch(maxMessageSize, partitionId, partitionKey, contextProvider,
                tracerProvider);
            // Fix: the result of tryAdd was previously ignored here, silently dropping any single
            // event too large to fit even an empty batch. Surface it as a payload-size error.
            if (!currentBatch.tryAdd(event)) {
                throw new AmqpException(false, AmqpErrorCondition.LINK_PAYLOAD_SIZE_EXCEEDED,
                    String.format(Locale.US,
                        "Event is larger than maximum allowed size. Max size: %d.", maxMessageSize),
                    contextProvider.getErrorContext());
            }
            list.add(batch);
        };
    }

    @Override
    public BinaryOperator<List<EventDataBatch>> combiner() {
        return (existing, another) -> {
            existing.addAll(another);
            return existing;
        };
    }

    @Override
    public Function<List<EventDataBatch>, List<EventDataBatch>> finisher() {
        return list -> {
            // Flush the partially filled batch, if any, and stop accepting further events.
            EventDataBatch batch = currentBatch;
            currentBatch = null;
            if (batch != null) {
                list.add(batch);
            }
            return list;
        };
    }

    @Override
    public Set<Characteristics> characteristics() {
        return Collections.emptySet();
    }
}
} |
Is there absolutely no way for cassandra.start()/cluster.connect() to throw an exception? | public static void closeCassandra() {
session.close();
Cluster cluster = session.getCluster();
if (cluster != null) {
cluster.close();
}
if (cassandra != null) {
cassandra.stop();
}
} | session.close(); | public static void closeCassandra() {
if (session != null) {
session.close();
}
if (cluster != null) {
cluster.close();
}
CASSANDRA_CONTAINER.stop();
} | class CassandraConnectorITCase
extends WriteAheadSinkTestBase<
Tuple3<String, Integer, Integer>,
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>>> {
private static final String IMAGE_TAG = "3.0";
@ClassRule public static CassandraContainer cassandra = createCassandraContainer(IMAGE_TAG);
private static Session session;
private static final int PORT = 9042;
private final ClusterBuilder builderForReading =
createBuilderWithConsistencyLevel(ConsistencyLevel.ONE);
private final ClusterBuilder builderForWriting =
createBuilderWithConsistencyLevel(ConsistencyLevel.ANY);
/**
 * Builds a {@link ClusterBuilder} that connects to the Cassandra test container with the given
 * consistency level for regular reads/writes; lightweight transactions use LOCAL_SERIAL.
 *
 * @param consistencyLevel consistency level applied to all queries from this cluster.
 */
private ClusterBuilder createBuilderWithConsistencyLevel(ConsistencyLevel consistencyLevel) {
    return new ClusterBuilder() {
        @Override
        protected Cluster buildCluster(Cluster.Builder builder) {
            // Contact point uses the container's dynamically mapped port, not the fixed 9042.
            return builder.addContactPointsWithPorts(
                            new InetSocketAddress(
                                    cassandra.getHost(), cassandra.getMappedPort(PORT)))
                    .withQueryOptions(
                            new QueryOptions()
                                    .setConsistencyLevel(consistencyLevel)
                                    .setSerialConsistencyLevel(ConsistencyLevel.LOCAL_SERIAL))
                    // JMX reporting and driver metrics are unnecessary noise in tests.
                    .withoutJMXReporting()
                    .withoutMetrics()
                    .build();
        }
    };
}
private static final String TABLE_NAME_PREFIX = "flink_";
private static final String TABLE_NAME_VARIABLE = "$TABLE";
private static final String CREATE_KEYSPACE_QUERY =
"CREATE KEYSPACE flink WITH replication= {'class':'SimpleStrategy', 'replication_factor':1};";
private static final String CREATE_TABLE_QUERY =
"CREATE TABLE flink."
+ TABLE_NAME_VARIABLE
+ " (id text PRIMARY KEY, counter int, batch_id int);";
private static final String INSERT_DATA_QUERY =
"INSERT INTO flink."
+ TABLE_NAME_VARIABLE
+ " (id, counter, batch_id) VALUES (?, ?, ?)";
private static final String SELECT_DATA_QUERY =
"SELECT * FROM flink." + TABLE_NAME_VARIABLE + ';';
private static final Random random = new Random();
private int tableID;
private static final ArrayList<Tuple3<String, Integer, Integer>> collection =
new ArrayList<>(20);
private static final ArrayList<Row> rowCollection = new ArrayList<>(20);
private static final TypeInformation[] FIELD_TYPES = {
BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO
};
static {
for (int i = 0; i < 20; i++) {
collection.add(new Tuple3<>(UUID.randomUUID().toString(), i, 0));
rowCollection.add(Row.of(UUID.randomUUID().toString(), i, 0));
}
}
/**
 * Creates a Cassandra Testcontainers instance for the given image tag with JMX reporting
 * disabled (the driver's JMX beans are not needed in tests).
 *
 * @param imageTag Docker tag of the "cassandra" image to run.
 */
public static CassandraContainer createCassandraContainer(String imageTag) {
    final DockerImageName image = DockerImageName.parse("cassandra").withTag(imageTag);
    final CassandraContainer container = new CassandraContainer(image);
    container.withJmxReporting(false);
    return container;
}
/**
 * Starts the Cassandra container once per class, connects a shared session, and creates the
 * test keyspace plus an initial table.
 *
 * NOTE(review): if start() or connect() throws, {@code session} stays null — the class-level
 * teardown must null-guard before closing. Verify the teardown handles this.
 */
@BeforeClass
public static void startAndInitializeCassandra() {
    cassandra.start();
    Cluster cluster = cassandra.getCluster();
    session = cluster.connect();
    session.execute(CREATE_KEYSPACE_QUERY);
    session.execute(
            CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, TABLE_NAME_PREFIX + "initial"));
}
/**
 * Creates a fresh, randomly named table before each test so tests do not observe each
 * other's rows; the random id is injected into queries via injectTableName.
 */
@Before
public void createTable() {
    tableID = random.nextInt(Integer.MAX_VALUE);
    session.execute(injectTableName(CREATE_TABLE_QUERY));
}
/**
 * Stops the session, cluster, and container after all tests have run. Each step is guarded
 * because {@code startAndInitializeCassandra()} may have failed part-way through, leaving
 * {@code session} null. (The original code left a dangling {@code @AfterClass} directly on
 * {@code createSink()}, which is invalid: JUnit 4 requires a public static void method.)
 */
@AfterClass
public static void closeCassandra() {
    if (session != null) {
        Cluster cluster = session.getCluster();
        session.close();
        if (cluster != null) {
            cluster.close();
        }
    }
    cassandra.stop();
}

/** Creates the write-ahead sink under test, reading and committing via the ONE-consistency builder. */
@Override
protected CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> createSink()
        throws Exception {
    return new CassandraTupleWriteAheadSink<>(
            injectTableName(INSERT_DATA_QUERY),
            TypeExtractor.getForObject(new Tuple3<>("", 0, 0))
                    .createSerializer(new ExecutionConfig()),
            builderForReading,
            new CassandraCommitter(builderForReading));
}
/** Type information for the records under test: {@code Tuple3<id, counter, checkpointId>}. */
@Override
protected TupleTypeInfo<Tuple3<String, Integer, Integer>> createTypeInfo() {
    return TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Integer.class);
}
/**
 * Generates a record with a random UUID id so repeated counter values never collide on the
 * table's primary key.
 */
@Override
protected Tuple3<String, Integer, Integer> generateValue(int counter, int checkpointID) {
    return new Tuple3<>(UUID.randomUUID().toString(), counter, checkpointID);
}
/**
 * Verifies that all counters 1..60 were written: each counter found in Cassandra is removed
 * from the expected set, which must end up empty.
 */
@Override
protected void verifyResultsIdealCircumstances(
        CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
    ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
    ArrayList<Integer> list = new ArrayList<>();
    for (int x = 1; x <= 60; x++) {
        list.add(x);
    }
    for (com.datastax.driver.core.Row s : result) {
        // Integer.valueOf replaces the deprecated boxing constructor and ensures the
        // remove(Object) overload is chosen, not remove(int index).
        list.remove(Integer.valueOf(s.getInt("counter")));
    }
    Assert.assertTrue(
            "The following ID's were not found in the ResultSet: " + list.toString(),
            list.isEmpty());
}
/**
 * Verifies that all counters 1..60 persisted even though a checkpoint notification was missed:
 * the write-ahead sink must re-deliver rather than lose data.
 */
@Override
protected void verifyResultsDataPersistenceUponMissedNotify(
        CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
    ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
    ArrayList<Integer> list = new ArrayList<>();
    for (int x = 1; x <= 60; x++) {
        list.add(x);
    }
    for (com.datastax.driver.core.Row s : result) {
        // Integer.valueOf replaces the deprecated boxing constructor and ensures the
        // remove(Object) overload is chosen, not remove(int index).
        list.remove(Integer.valueOf(s.getInt("counter")));
    }
    Assert.assertTrue(
            "The following ID's were not found in the ResultSet: " + list.toString(),
            list.isEmpty());
}
/**
 * Verifies that a restore discarded the uncommitted middle range: counters 1..20 and 41..60
 * must be present, 21..40 must have been dropped.
 */
@Override
protected void verifyResultsDataDiscardingUponRestore(
        CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
    ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
    ArrayList<Integer> list = new ArrayList<>();
    for (int x = 1; x <= 20; x++) {
        list.add(x);
    }
    for (int x = 41; x <= 60; x++) {
        list.add(x);
    }
    for (com.datastax.driver.core.Row s : result) {
        // Integer.valueOf replaces the deprecated boxing constructor and ensures the
        // remove(Object) overload is chosen, not remove(int index).
        list.remove(Integer.valueOf(s.getInt("counter")));
    }
    Assert.assertTrue(
            "The following ID's were not found in the ResultSet: " + list.toString(),
            list.isEmpty());
}
@Override
protected void verifyResultsWhenReScaling(
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink,
int startElementCounter,
int endElementCounter) {
ArrayList<Integer> expected = new ArrayList<>();
for (int i = 1; i <= endElementCounter; i++) {
expected.add(i);
}
ArrayList<Integer> actual = new ArrayList<>();
ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
for (com.datastax.driver.core.Row s : result) {
actual.add(s.getInt("counter"));
}
Collections.sort(actual);
Assert.assertArrayEquals(expected.toArray(), actual.toArray());
}
@Test
public void testCassandraCommitter() throws Exception {
String jobID = new JobID().toString();
CassandraCommitter cc1 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc1.setJobId(jobID);
cc1.setOperatorId("operator");
CassandraCommitter cc2 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc2.setJobId(jobID);
cc2.setOperatorId("operator");
CassandraCommitter cc3 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc3.setJobId(jobID);
cc3.setOperatorId("operator1");
cc1.createResource();
cc1.open();
cc2.open();
cc3.open();
Assert.assertFalse(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc2.isCheckpointCommitted(1, 1));
Assert.assertFalse(cc3.isCheckpointCommitted(0, 1));
cc1.commitCheckpoint(0, 1);
Assert.assertTrue(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc2.isCheckpointCommitted(1, 1));
Assert.assertFalse(cc3.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc1.isCheckpointCommitted(0, 2));
cc1.close();
cc2.close();
cc3.close();
cc1 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc1.setJobId(jobID);
cc1.setOperatorId("operator");
cc1.open();
Assert.assertTrue(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc1.isCheckpointCommitted(0, 2));
cc1.close();
}
@Test
public void testCassandraTupleAtLeastOnceSink() throws Exception {
CassandraTupleSink<Tuple3<String, Integer, Integer>> sink =
new CassandraTupleSink<>(injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.open(new Configuration());
for (Tuple3<String, Integer, Integer> value : collection) {
sink.send(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraRowAtLeastOnceSink() throws Exception {
CassandraRowSink sink =
new CassandraRowSink(
FIELD_TYPES.length, injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.open(new Configuration());
for (Row value : rowCollection) {
sink.send(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraPojoAtLeastOnceSink() throws Exception {
session.execute(CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, "test"));
CassandraPojoSink<Pojo> sink = new CassandraPojoSink<>(Pojo.class, builderForWriting);
try {
sink.open(new Configuration());
for (int x = 0; x < 20; x++) {
sink.send(new Pojo(UUID.randomUUID().toString(), x, 0));
}
} finally {
sink.close();
}
ResultSet rs = session.execute(SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "test"));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraPojoNoAnnotatedKeyspaceAtLeastOnceSink() throws Exception {
session.execute(
CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, "testPojoNoAnnotatedKeyspace"));
CassandraPojoSink<PojoNoAnnotatedKeyspace> sink =
new CassandraPojoSink<>(PojoNoAnnotatedKeyspace.class, builderForWriting, "flink");
try {
sink.open(new Configuration());
for (int x = 0; x < 20; x++) {
sink.send(new PojoNoAnnotatedKeyspace(UUID.randomUUID().toString(), x, 0));
}
} finally {
sink.close();
}
ResultSet rs =
session.execute(
SELECT_DATA_QUERY.replace(
TABLE_NAME_VARIABLE, "testPojoNoAnnotatedKeyspace"));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraTableSink() throws Exception {
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(4);
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
DataStreamSource<Row> source = env.fromCollection(rowCollection);
tEnv.createTemporaryView("testFlinkTable", source);
((TableEnvironmentInternal) tEnv)
.registerTableSinkInternal(
"cassandraTable",
new CassandraAppendTableSink(
builderForWriting, injectTableName(INSERT_DATA_QUERY))
.configure(
new String[] {"f0", "f1", "f2"},
new TypeInformation[] {
Types.STRING, Types.INT, Types.INT
}));
tEnv.sqlQuery("select * from testFlinkTable").executeInsert("cassandraTable").await();
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<Row> input = new ArrayList<>(rowCollection);
List<com.datastax.driver.core.Row> output = rs.all();
for (com.datastax.driver.core.Row o : output) {
Row cmp = new Row(3);
cmp.setField(0, o.getString(0));
cmp.setField(1, o.getInt(2));
cmp.setField(2, o.getInt(1));
Assert.assertTrue(
"Row " + cmp + " was written to Cassandra but not in input.",
input.remove(cmp));
}
Assert.assertTrue(
"The input data was not completely written to Cassandra", input.isEmpty());
}
@Test
public void testCassandraBatchPojoFormat() throws Exception {
session.execute(
CREATE_TABLE_QUERY.replace(
TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
OutputFormat<CustomCassandraAnnotatedPojo> sink =
new CassandraPojoOutputFormat<>(
builderForWriting,
CustomCassandraAnnotatedPojo.class,
() -> new Mapper.Option[] {Mapper.Option.saveNullFields(true)});
List<CustomCassandraAnnotatedPojo> customCassandraAnnotatedPojos =
IntStream.range(0, 20)
.mapToObj(
x ->
new CustomCassandraAnnotatedPojo(
UUID.randomUUID().toString(), x, 0))
.collect(Collectors.toList());
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (CustomCassandraAnnotatedPojo customCassandraAnnotatedPojo :
customCassandraAnnotatedPojos) {
sink.writeRecord(customCassandraAnnotatedPojo);
}
} finally {
sink.close();
}
ResultSet rs =
session.execute(
SELECT_DATA_QUERY.replace(
TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
Assert.assertEquals(20, rs.all().size());
InputFormat<CustomCassandraAnnotatedPojo, InputSplit> source =
new CassandraPojoInputFormat<>(
SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "batches"),
builderForReading,
CustomCassandraAnnotatedPojo.class);
List<CustomCassandraAnnotatedPojo> result = new ArrayList<>();
try {
source.configure(new Configuration());
source.open(null);
while (!source.reachedEnd()) {
CustomCassandraAnnotatedPojo temp = source.nextRecord(null);
result.add(temp);
}
} finally {
source.close();
}
Assert.assertEquals(20, result.size());
result.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
customCassandraAnnotatedPojos.sort(
Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
assertThat(result, samePropertyValuesAs(customCassandraAnnotatedPojos));
}
@Test
public void testCassandraBatchTupleFormat() throws Exception {
OutputFormat<Tuple3<String, Integer, Integer>> sink =
new CassandraOutputFormat<>(injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (Tuple3<String, Integer, Integer> value : collection) {
sink.writeRecord(value);
}
} finally {
sink.close();
}
sink =
new CassandraTupleOutputFormat<>(
injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (Tuple3<String, Integer, Integer> value : collection) {
sink.writeRecord(value);
}
} finally {
sink.close();
}
InputFormat<Tuple3<String, Integer, Integer>, InputSplit> source =
new CassandraInputFormat<>(injectTableName(SELECT_DATA_QUERY), builderForReading);
List<Tuple3<String, Integer, Integer>> result = new ArrayList<>();
try {
source.configure(new Configuration());
source.open(null);
while (!source.reachedEnd()) {
result.add(source.nextRecord(new Tuple3<String, Integer, Integer>()));
}
} finally {
source.close();
}
Assert.assertEquals(20, result.size());
}
@Test
public void testCassandraBatchRowFormat() throws Exception {
OutputFormat<Row> sink =
new CassandraRowOutputFormat(injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (Row value : rowCollection) {
sink.writeRecord(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<com.datastax.driver.core.Row> rows = rs.all();
Assert.assertEquals(rowCollection.size(), rows.size());
}
/** Replaces the {@code $TABLE} placeholder in a query with this test's unique table name. */
private String injectTableName(String target) {
    final String tableName = TABLE_NAME_PREFIX + tableID;
    return target.replace(TABLE_NAME_VARIABLE, tableName);
}
@Test
public void testCassandraScalaTupleAtLeastOnceSinkBuilderDetection() throws Exception {
Class<scala.Tuple1<String>> c =
(Class<scala.Tuple1<String>>) new scala.Tuple1<>("hello").getClass();
Seq<TypeInformation<?>> typeInfos =
JavaConverters.asScalaBufferConverter(
Collections.<TypeInformation<?>>singletonList(
BasicTypeInfo.STRING_TYPE_INFO))
.asScala();
Seq<String> fieldNames =
JavaConverters.asScalaBufferConverter(Collections.singletonList("_1")).asScala();
CaseClassTypeInfo<scala.Tuple1<String>> typeInfo =
new CaseClassTypeInfo<scala.Tuple1<String>>(c, null, typeInfos, fieldNames) {
@Override
public TypeSerializer<scala.Tuple1<String>> createSerializer(
ExecutionConfig config) {
return null;
}
};
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
DataStream<scala.Tuple1<String>> input =
env.fromElements(new scala.Tuple1<>("hello")).returns(typeInfo);
CassandraSink.CassandraSinkBuilder<scala.Tuple1<String>> sinkBuilder =
CassandraSink.addSink(input);
assertTrue(sinkBuilder instanceof CassandraSink.CassandraScalaProductSinkBuilder);
}
@Test
public void testCassandraScalaTupleAtLeastSink() throws Exception {
CassandraScalaProductSink<scala.Tuple3<String, Integer, Integer>> sink =
new CassandraScalaProductSink<>(
injectTableName(INSERT_DATA_QUERY), builderForWriting);
List<scala.Tuple3<String, Integer, Integer>> scalaTupleCollection = new ArrayList<>(20);
for (int i = 0; i < 20; i++) {
scalaTupleCollection.add(new scala.Tuple3<>(UUID.randomUUID().toString(), i, 0));
}
try {
sink.open(new Configuration());
for (scala.Tuple3<String, Integer, Integer> value : scalaTupleCollection) {
sink.invoke(value, SinkContextUtil.forTimestamp(0));
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<com.datastax.driver.core.Row> rows = rs.all();
Assert.assertEquals(scalaTupleCollection.size(), rows.size());
for (com.datastax.driver.core.Row row : rows) {
scalaTupleCollection.remove(
new scala.Tuple3<>(
row.getString("id"), row.getInt("counter"), row.getInt("batch_id")));
}
Assert.assertEquals(0, scalaTupleCollection.size());
}
@Test
public void testCassandraScalaTuplePartialColumnUpdate() throws Exception {
CassandraSinkBaseConfig config =
CassandraSinkBaseConfig.newBuilder().setIgnoreNullFields(true).build();
CassandraScalaProductSink<scala.Tuple3<String, Integer, Integer>> sink =
new CassandraScalaProductSink<>(
injectTableName(INSERT_DATA_QUERY), builderForWriting, config);
String id = UUID.randomUUID().toString();
Integer counter = 1;
Integer batchId = 0;
scala.Tuple3<String, Integer, Integer> scalaTupleRecordFirst =
new scala.Tuple3<>(id, counter, null);
scala.Tuple3<String, Integer, Integer> scalaTupleRecordSecond =
new scala.Tuple3<>(id, null, batchId);
try {
sink.open(new Configuration());
sink.invoke(scalaTupleRecordFirst, SinkContextUtil.forTimestamp(0));
sink.invoke(scalaTupleRecordSecond, SinkContextUtil.forTimestamp(0));
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<com.datastax.driver.core.Row> rows = rs.all();
Assert.assertEquals(1, rows.size());
for (com.datastax.driver.core.Row row : rows) {
Assert.assertEquals(
new scala.Tuple3<>(id, counter, batchId),
new scala.Tuple3<>(
row.getString("id"), row.getInt("counter"), row.getInt("batch_id")));
}
}
} | class CassandraConnectorITCase
extends WriteAheadSinkTestBase<
Tuple3<String, Integer, Integer>,
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>>> {
@ClassRule
public static final CassandraContainer CASSANDRA_CONTAINER = createCassandraContainer();
private static final int PORT = 9042;
private static Cluster cluster;
private static Session session;
private final ClusterBuilder builderForReading =
createBuilderWithConsistencyLevel(ConsistencyLevel.ONE);
private final ClusterBuilder builderForWriting =
createBuilderWithConsistencyLevel(ConsistencyLevel.ANY);
private ClusterBuilder createBuilderWithConsistencyLevel(ConsistencyLevel consistencyLevel) {
return new ClusterBuilder() {
@Override
protected Cluster buildCluster(Cluster.Builder builder) {
return builder.addContactPointsWithPorts(
new InetSocketAddress(
CASSANDRA_CONTAINER.getHost(),
CASSANDRA_CONTAINER.getMappedPort(PORT)))
.withQueryOptions(
new QueryOptions()
.setConsistencyLevel(consistencyLevel)
.setSerialConsistencyLevel(ConsistencyLevel.LOCAL_SERIAL))
.withoutJMXReporting()
.withoutMetrics()
.build();
}
};
}
private static final String TABLE_NAME_PREFIX = "flink_";
private static final String TABLE_NAME_VARIABLE = "$TABLE";
private static final String CREATE_KEYSPACE_QUERY =
"CREATE KEYSPACE flink WITH replication= {'class':'SimpleStrategy', 'replication_factor':1};";
private static final String CREATE_TABLE_QUERY =
"CREATE TABLE flink."
+ TABLE_NAME_VARIABLE
+ " (id text PRIMARY KEY, counter int, batch_id int);";
private static final String INSERT_DATA_QUERY =
"INSERT INTO flink."
+ TABLE_NAME_VARIABLE
+ " (id, counter, batch_id) VALUES (?, ?, ?)";
private static final String SELECT_DATA_QUERY =
"SELECT * FROM flink." + TABLE_NAME_VARIABLE + ';';
private static final Random random = new Random();
private int tableID;
private static final ArrayList<Tuple3<String, Integer, Integer>> collection =
new ArrayList<>(20);
private static final ArrayList<Row> rowCollection = new ArrayList<>(20);
private static final TypeInformation[] FIELD_TYPES = {
BasicTypeInfo.STRING_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO, BasicTypeInfo.INT_TYPE_INFO
};
static {
for (int i = 0; i < 20; i++) {
collection.add(new Tuple3<>(UUID.randomUUID().toString(), i, 0));
rowCollection.add(Row.of(UUID.randomUUID().toString(), i, 0));
}
}
public static CassandraContainer createCassandraContainer() {
CassandraContainer cassandra = new CassandraContainer(DockerImageVersions.CASSANDRA_3);
cassandra.withJmxReporting(false);
return cassandra;
}
@BeforeClass
public static void startAndInitializeCassandra() {
CASSANDRA_CONTAINER.start();
cluster = CASSANDRA_CONTAINER.getCluster();
session = cluster.connect();
session.execute(CREATE_KEYSPACE_QUERY);
session.execute(
CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, TABLE_NAME_PREFIX + "initial"));
}
@Before
public void createTable() {
tableID = random.nextInt(Integer.MAX_VALUE);
session.execute(injectTableName(CREATE_TABLE_QUERY));
}
@AfterClass
@Override
protected CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> createSink()
throws Exception {
return new CassandraTupleWriteAheadSink<>(
injectTableName(INSERT_DATA_QUERY),
TypeExtractor.getForObject(new Tuple3<>("", 0, 0))
.createSerializer(new ExecutionConfig()),
builderForReading,
new CassandraCommitter(builderForReading));
}
@Override
protected TupleTypeInfo<Tuple3<String, Integer, Integer>> createTypeInfo() {
return TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Integer.class);
}
@Override
protected Tuple3<String, Integer, Integer> generateValue(int counter, int checkpointID) {
return new Tuple3<>(UUID.randomUUID().toString(), counter, checkpointID);
}
@Override
protected void verifyResultsIdealCircumstances(
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
ArrayList<Integer> list = new ArrayList<>();
for (int x = 1; x <= 60; x++) {
list.add(x);
}
for (com.datastax.driver.core.Row s : result) {
list.remove(new Integer(s.getInt("counter")));
}
Assert.assertTrue(
"The following ID's were not found in the ResultSet: " + list.toString(),
list.isEmpty());
}
@Override
protected void verifyResultsDataPersistenceUponMissedNotify(
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
ArrayList<Integer> list = new ArrayList<>();
for (int x = 1; x <= 60; x++) {
list.add(x);
}
for (com.datastax.driver.core.Row s : result) {
list.remove(new Integer(s.getInt("counter")));
}
Assert.assertTrue(
"The following ID's were not found in the ResultSet: " + list.toString(),
list.isEmpty());
}
@Override
protected void verifyResultsDataDiscardingUponRestore(
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) {
ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
ArrayList<Integer> list = new ArrayList<>();
for (int x = 1; x <= 20; x++) {
list.add(x);
}
for (int x = 41; x <= 60; x++) {
list.add(x);
}
for (com.datastax.driver.core.Row s : result) {
list.remove(new Integer(s.getInt("counter")));
}
Assert.assertTrue(
"The following ID's were not found in the ResultSet: " + list.toString(),
list.isEmpty());
}
@Override
protected void verifyResultsWhenReScaling(
CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink,
int startElementCounter,
int endElementCounter) {
ArrayList<Integer> expected = new ArrayList<>();
for (int i = 1; i <= endElementCounter; i++) {
expected.add(i);
}
ArrayList<Integer> actual = new ArrayList<>();
ResultSet result = session.execute(injectTableName(SELECT_DATA_QUERY));
for (com.datastax.driver.core.Row s : result) {
actual.add(s.getInt("counter"));
}
Collections.sort(actual);
Assert.assertArrayEquals(expected.toArray(), actual.toArray());
}
@Test
public void testCassandraCommitter() throws Exception {
String jobID = new JobID().toString();
CassandraCommitter cc1 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc1.setJobId(jobID);
cc1.setOperatorId("operator");
CassandraCommitter cc2 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc2.setJobId(jobID);
cc2.setOperatorId("operator");
CassandraCommitter cc3 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc3.setJobId(jobID);
cc3.setOperatorId("operator1");
cc1.createResource();
cc1.open();
cc2.open();
cc3.open();
Assert.assertFalse(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc2.isCheckpointCommitted(1, 1));
Assert.assertFalse(cc3.isCheckpointCommitted(0, 1));
cc1.commitCheckpoint(0, 1);
Assert.assertTrue(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc2.isCheckpointCommitted(1, 1));
Assert.assertFalse(cc3.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc1.isCheckpointCommitted(0, 2));
cc1.close();
cc2.close();
cc3.close();
cc1 = new CassandraCommitter(builderForReading, "flink_auxiliary_cc");
cc1.setJobId(jobID);
cc1.setOperatorId("operator");
cc1.open();
Assert.assertTrue(cc1.isCheckpointCommitted(0, 1));
Assert.assertFalse(cc1.isCheckpointCommitted(0, 2));
cc1.close();
}
@Test
public void testCassandraTupleAtLeastOnceSink() throws Exception {
CassandraTupleSink<Tuple3<String, Integer, Integer>> sink =
new CassandraTupleSink<>(injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.open(new Configuration());
for (Tuple3<String, Integer, Integer> value : collection) {
sink.send(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraRowAtLeastOnceSink() throws Exception {
CassandraRowSink sink =
new CassandraRowSink(
FIELD_TYPES.length, injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.open(new Configuration());
for (Row value : rowCollection) {
sink.send(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraPojoAtLeastOnceSink() throws Exception {
session.execute(CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, "test"));
CassandraPojoSink<Pojo> sink = new CassandraPojoSink<>(Pojo.class, builderForWriting);
try {
sink.open(new Configuration());
for (int x = 0; x < 20; x++) {
sink.send(new Pojo(UUID.randomUUID().toString(), x, 0));
}
} finally {
sink.close();
}
ResultSet rs = session.execute(SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "test"));
Assert.assertEquals(20, rs.all().size());
}
@Test
public void testCassandraPojoNoAnnotatedKeyspaceAtLeastOnceSink() throws Exception {
session.execute(
CREATE_TABLE_QUERY.replace(TABLE_NAME_VARIABLE, "testPojoNoAnnotatedKeyspace"));
CassandraPojoSink<PojoNoAnnotatedKeyspace> sink =
new CassandraPojoSink<>(PojoNoAnnotatedKeyspace.class, builderForWriting, "flink");
try {
sink.open(new Configuration());
for (int x = 0; x < 20; x++) {
sink.send(new PojoNoAnnotatedKeyspace(UUID.randomUUID().toString(), x, 0));
}
} finally {
sink.close();
}
ResultSet rs =
session.execute(
SELECT_DATA_QUERY.replace(
TABLE_NAME_VARIABLE, "testPojoNoAnnotatedKeyspace"));
Assert.assertEquals(20, rs.all().size());
}
    @Test
    public void testCassandraTableSink() throws Exception {
        // End-to-end Table API test: register the row collection as a view, insert it
        // into Cassandra via the append table sink, then read everything back and
        // verify the round trip row by row.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(4);
        StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        DataStreamSource<Row> source = env.fromCollection(rowCollection);
        tEnv.createTemporaryView("testFlinkTable", source);
        ((TableEnvironmentInternal) tEnv)
                .registerTableSinkInternal(
                        "cassandraTable",
                        new CassandraAppendTableSink(
                                        builderForWriting, injectTableName(INSERT_DATA_QUERY))
                                .configure(
                                        new String[] {"f0", "f1", "f2"},
                                        new TypeInformation[] {
                                            Types.STRING, Types.INT, Types.INT
                                        }));
        // Blocks until the insert job finishes, so the subsequent SELECT sees all rows.
        tEnv.sqlQuery("select * from testFlinkTable").executeInsert("cassandraTable").await();
        ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
        List<Row> input = new ArrayList<>(rowCollection);
        List<com.datastax.driver.core.Row> output = rs.all();
        for (com.datastax.driver.core.Row o : output) {
            Row cmp = new Row(3);
            cmp.setField(0, o.getString(0));
            // NOTE(review): result columns 1 and 2 are read swapped relative to the
            // input field order — presumably the SELECT column order differs from the
            // insert order; confirm against the table schema.
            cmp.setField(1, o.getInt(2));
            cmp.setField(2, o.getInt(1));
            Assert.assertTrue(
                    "Row " + cmp + " was written to Cassandra but not in input.",
                    input.remove(cmp));
        }
        Assert.assertTrue(
                "The input data was not completely written to Cassandra", input.isEmpty());
    }
@Test
public void testCassandraBatchPojoFormat() throws Exception {
session.execute(
CREATE_TABLE_QUERY.replace(
TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
OutputFormat<CustomCassandraAnnotatedPojo> sink =
new CassandraPojoOutputFormat<>(
builderForWriting,
CustomCassandraAnnotatedPojo.class,
() -> new Mapper.Option[] {Mapper.Option.saveNullFields(true)});
List<CustomCassandraAnnotatedPojo> customCassandraAnnotatedPojos =
IntStream.range(0, 20)
.mapToObj(
x ->
new CustomCassandraAnnotatedPojo(
UUID.randomUUID().toString(), x, 0))
.collect(Collectors.toList());
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (CustomCassandraAnnotatedPojo customCassandraAnnotatedPojo :
customCassandraAnnotatedPojos) {
sink.writeRecord(customCassandraAnnotatedPojo);
}
} finally {
sink.close();
}
ResultSet rs =
session.execute(
SELECT_DATA_QUERY.replace(
TABLE_NAME_VARIABLE, CustomCassandraAnnotatedPojo.TABLE_NAME));
Assert.assertEquals(20, rs.all().size());
InputFormat<CustomCassandraAnnotatedPojo, InputSplit> source =
new CassandraPojoInputFormat<>(
SELECT_DATA_QUERY.replace(TABLE_NAME_VARIABLE, "batches"),
builderForReading,
CustomCassandraAnnotatedPojo.class);
List<CustomCassandraAnnotatedPojo> result = new ArrayList<>();
try {
source.configure(new Configuration());
source.open(null);
while (!source.reachedEnd()) {
CustomCassandraAnnotatedPojo temp = source.nextRecord(null);
result.add(temp);
}
} finally {
source.close();
}
Assert.assertEquals(20, result.size());
result.sort(Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
customCassandraAnnotatedPojos.sort(
Comparator.comparingInt(CustomCassandraAnnotatedPojo::getCounter));
assertThat(result, samePropertyValuesAs(customCassandraAnnotatedPojos));
}
    @Test
    public void testCassandraBatchTupleFormat() throws Exception {
        // Write the collection once with the deprecated CassandraOutputFormat ...
        OutputFormat<Tuple3<String, Integer, Integer>> sink =
                new CassandraOutputFormat<>(injectTableName(INSERT_DATA_QUERY), builderForWriting);
        try {
            sink.configure(new Configuration());
            sink.open(0, 1);
            for (Tuple3<String, Integer, Integer> value : collection) {
                sink.writeRecord(value);
            }
        } finally {
            sink.close();
        }
        // ... and again with CassandraTupleOutputFormat. Both runs write the same
        // records, so the final count presumably stays at 20 because the second run
        // upserts on the same primary keys — TODO confirm the key columns make the
        // second write idempotent.
        sink =
                new CassandraTupleOutputFormat<>(
                        injectTableName(INSERT_DATA_QUERY), builderForWriting);
        try {
            sink.configure(new Configuration());
            sink.open(0, 1);
            for (Tuple3<String, Integer, Integer> value : collection) {
                sink.writeRecord(value);
            }
        } finally {
            sink.close();
        }
        // Read everything back with the batch input format and verify the row count.
        InputFormat<Tuple3<String, Integer, Integer>, InputSplit> source =
                new CassandraInputFormat<>(injectTableName(SELECT_DATA_QUERY), builderForReading);
        List<Tuple3<String, Integer, Integer>> result = new ArrayList<>();
        try {
            source.configure(new Configuration());
            source.open(null);
            while (!source.reachedEnd()) {
                result.add(source.nextRecord(new Tuple3<String, Integer, Integer>()));
            }
        } finally {
            source.close();
        }
        Assert.assertEquals(20, result.size());
    }
@Test
public void testCassandraBatchRowFormat() throws Exception {
OutputFormat<Row> sink =
new CassandraRowOutputFormat(injectTableName(INSERT_DATA_QUERY), builderForWriting);
try {
sink.configure(new Configuration());
sink.open(0, 1);
for (Row value : rowCollection) {
sink.writeRecord(value);
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<com.datastax.driver.core.Row> rows = rs.all();
Assert.assertEquals(rowCollection.size(), rows.size());
}
private String injectTableName(String target) {
return target.replace(TABLE_NAME_VARIABLE, TABLE_NAME_PREFIX + tableID);
}
    @Test
    public void testCassandraScalaTupleAtLeastOnceSinkBuilderDetection() throws Exception {
        // Builds a minimal CaseClassTypeInfo for scala.Tuple1<String> by hand so that
        // CassandraSink.addSink can inspect the stream's type information.
        Class<scala.Tuple1<String>> c =
                (Class<scala.Tuple1<String>>) new scala.Tuple1<>("hello").getClass();
        Seq<TypeInformation<?>> typeInfos =
                JavaConverters.asScalaBufferConverter(
                                Collections.<TypeInformation<?>>singletonList(
                                        BasicTypeInfo.STRING_TYPE_INFO))
                        .asScala();
        Seq<String> fieldNames =
                JavaConverters.asScalaBufferConverter(Collections.singletonList("_1")).asScala();
        CaseClassTypeInfo<scala.Tuple1<String>> typeInfo =
                new CaseClassTypeInfo<scala.Tuple1<String>>(c, null, typeInfos, fieldNames) {
                    @Override
                    public TypeSerializer<scala.Tuple1<String>> createSerializer(
                            ExecutionConfig config) {
                        // Never invoked in this test — only the type metadata is consulted.
                        return null;
                    }
                };
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<scala.Tuple1<String>> input =
                env.fromElements(new scala.Tuple1<>("hello")).returns(typeInfo);
        CassandraSink.CassandraSinkBuilder<scala.Tuple1<String>> sinkBuilder =
                CassandraSink.addSink(input);
        // A Scala product stream must be routed to the Scala-specific sink builder.
        assertTrue(sinkBuilder instanceof CassandraSink.CassandraScalaProductSinkBuilder);
    }
@Test
public void testCassandraScalaTupleAtLeastSink() throws Exception {
CassandraScalaProductSink<scala.Tuple3<String, Integer, Integer>> sink =
new CassandraScalaProductSink<>(
injectTableName(INSERT_DATA_QUERY), builderForWriting);
List<scala.Tuple3<String, Integer, Integer>> scalaTupleCollection = new ArrayList<>(20);
for (int i = 0; i < 20; i++) {
scalaTupleCollection.add(new scala.Tuple3<>(UUID.randomUUID().toString(), i, 0));
}
try {
sink.open(new Configuration());
for (scala.Tuple3<String, Integer, Integer> value : scalaTupleCollection) {
sink.invoke(value, SinkContextUtil.forTimestamp(0));
}
} finally {
sink.close();
}
ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
List<com.datastax.driver.core.Row> rows = rs.all();
Assert.assertEquals(scalaTupleCollection.size(), rows.size());
for (com.datastax.driver.core.Row row : rows) {
scalaTupleCollection.remove(
new scala.Tuple3<>(
row.getString("id"), row.getInt("counter"), row.getInt("batch_id")));
}
Assert.assertEquals(0, scalaTupleCollection.size());
}
    @Test
    public void testCassandraScalaTuplePartialColumnUpdate() throws Exception {
        // With ignoreNullFields enabled, a null tuple field leaves the corresponding
        // column untouched instead of overwriting it with null.
        CassandraSinkBaseConfig config =
                CassandraSinkBaseConfig.newBuilder().setIgnoreNullFields(true).build();
        CassandraScalaProductSink<scala.Tuple3<String, Integer, Integer>> sink =
                new CassandraScalaProductSink<>(
                        injectTableName(INSERT_DATA_QUERY), builderForWriting, config);
        String id = UUID.randomUUID().toString();
        Integer counter = 1;
        Integer batchId = 0;
        // Two writes to the same key, each filling a different column.
        scala.Tuple3<String, Integer, Integer> scalaTupleRecordFirst =
                new scala.Tuple3<>(id, counter, null);
        scala.Tuple3<String, Integer, Integer> scalaTupleRecordSecond =
                new scala.Tuple3<>(id, null, batchId);
        try {
            sink.open(new Configuration());
            sink.invoke(scalaTupleRecordFirst, SinkContextUtil.forTimestamp(0));
            sink.invoke(scalaTupleRecordSecond, SinkContextUtil.forTimestamp(0));
        } finally {
            sink.close();
        }
        ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
        List<com.datastax.driver.core.Row> rows = rs.all();
        Assert.assertEquals(1, rows.size());
        // The single row must contain the union of both partial writes.
        for (com.datastax.driver.core.Row row : rows) {
            Assert.assertEquals(
                    new scala.Tuple3<>(id, counter, batchId),
                    new scala.Tuple3<>(
                            row.getString("id"), row.getInt("counter"), row.getInt("batch_id")));
        }
    }
} |
I see. We rearrange the node in the binding pattern scenario anyway. e.g. a `SimpleNameReferenceNode` will be converted to a `CaptureBindingPatternNode`. Therefore shall add the `MemberTypeDescriptorNode` conversion at the same place? https://github.com/ballerina-platform/ballerina-lang/blob/31e1d2c7b78e23ab880f16650ac74a11dcc0c89b/compiler/ballerina-parser/src/main/java/io/ballerina/compiler/internal/parser/BallerinaParser.java#L18650 | private boolean isServiceDeclStart(ParserRuleContext currentContext, int lookahead) {
switch (peek(lookahead + 1).kind) {
case IDENTIFIER_TOKEN:
SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
switch (tokenAfterIdentifier) {
case ON_KEYWORD:
case OPEN_BRACE_TOKEN:
return true;
case EQUAL_TOKEN:
case SEMICOLON_TOKEN:
case QUESTION_MARK_TOKEN:
return false;
default:
return false;
}
case ON_KEYWORD:
return true;
default:
return false;
}
}
    /**
     * Parse listener declaration, given the qualifier.
     * <p>
     * <code>
     * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
     * </code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the listener declaration
     * @return Parsed node
     */
    private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.LISTENER_DECL);
        STNode listenerKeyword = parseListenerKeyword();
        if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
            // An identifier here is ambiguous: it may be the (optional) type name or the
            // variable name. Delegate to the shared const/listener path that resolves it.
            STNode listenerDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
            endContext();
            return listenerDecl;
        }
        // Unambiguous case: a type descriptor is present before the variable name.
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }
/**
* Parse listener keyword.
*
* @return Parsed node
*/
private STNode parseListenerKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LISTENER_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LISTENER_KEYWORD);
return parseListenerKeyword();
}
}
    /**
     * Parse constant declaration, given the qualifier.
     * <p>
     * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant declaration
     * @return Parsed node
     */
    private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.CONSTANT_DECL);
        STNode constKeyword = parseConstantKeyword();
        // The CONSTANT_DECL context opened here is closed inside parseConstDecl.
        return parseConstDecl(metadata, qualifier, constKeyword);
    }
    /**
     * Parse the components that follows after the const keyword of a constant declaration.
     *
     * @param metadata Metadata
     * @param qualifier Qualifier that precedes the constant decl
     * @param constKeyword Const keyword
     * @return Parsed node
     */
    private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case ANNOTATION_KEYWORD:
                // `const annotation ...` is an annotation declaration, not a constant.
                endContext();
                return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
            case IDENTIFIER_TOKEN:
                // Ambiguous: the identifier may be the type name or the variable name.
                STNode constantDecl =
                        parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
                endContext();
                return constantDecl;
            default:
                // A type-starting token breaks out of the switch into the typed-decl
                // path below; anything else triggers recovery and a retry.
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.CONST_DECL_TYPE);
                return parseConstDecl(metadata, qualifier, constKeyword);
        }
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
        STNode variableName = parseVariableName();
        STNode equalsToken = parseAssignOp();
        STNode initializer = parseExpression();
        STNode semicolonToken = parseSemicolon();
        endContext();
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
                equalsToken, initializer, semicolonToken);
    }
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
boolean isListener) {
STNode varNameOrTypeName = parseStatementStartIdentifier();
return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
}
    /**
     * Parse the component that follows the first identifier in a const decl. The identifier
     * can be either the type-name (a user defined type) or the var-name when the type-name
     * is not present.
     *
     * @param metadata Metadata that precedes the declaration
     * @param qualifier Qualifier that precedes the constant decl
     * @param keyword Keyword
     * @param typeOrVarName Identifier that follows the const-keyword
     * @param isListener Whether this is a listener declaration (else a constant declaration)
     * @return Parsed node
     */
    private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                                  STNode typeOrVarName, boolean isListener) {
        // A qualified name (`mod:Type`) can only be the type, never the variable name.
        if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            STNode type = typeOrVarName;
            STNode variableName = parseVariableName();
            return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
        }
        STNode type;
        STNode variableName;
        switch (peek().kind) {
            case IDENTIFIER_TOKEN:
                // Another identifier follows: the first one was the type name.
                type = typeOrVarName;
                variableName = parseVariableName();
                break;
            case EQUAL_TOKEN:
                // `=` follows: the identifier was the variable name and the type is absent.
                variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
                type = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.CONST_DECL_RHS);
                return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
        }
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
STNode type, STNode variableName) {
STNode equalsToken = parseAssignOp();
STNode initializer = parseExpression();
STNode semicolonToken = parseSemicolon();
if (isListener) {
return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
equalsToken, initializer, semicolonToken);
}
/**
* Parse const keyword.
*
* @return Parsed node
*/
private STNode parseConstantKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.CONST_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.CONST_KEYWORD);
return parseConstantKeyword();
}
}
/**
* Parse typeof expression.
* <p>
* <code>
* typeof-expr := typeof expression
* </code>
*
* @param isRhsExpr
* @return Typeof expression node
*/
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode typeofKeyword = parseTypeofKeyword();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}
/**
* Parse typeof-keyword.
*
* @return Typeof-keyword node
*/
private STNode parseTypeofKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TYPEOF_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TYPEOF_KEYWORD);
return parseTypeofKeyword();
}
}
/**
* Parse optional type descriptor given the type.
* <p>
* <code>optional-type-descriptor := type-descriptor `?`</code>
* </p>
*
* @param typeDescriptorNode Preceding type descriptor
* @return Parsed node
*/
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
STNode questionMarkToken = parseQuestionMark();
endContext();
return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);
}
    // Attaches `?` to a type descriptor. Because `?` binds tighter than `|` and `&`,
    // it is pushed down recursively onto the right-most member of a union/intersection
    // and the outer node is rebuilt around the result.
    private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {
        if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {
            STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
        } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;
            STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);
            typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                    intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
        } else {
            // Base case: reject `var?` and wrap the type in an optional-type node.
            typeDescNode = validateForUsageOfVar(typeDescNode);
            typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);
        }
        return typeDescNode;
    }
/**
* Parse unary expression.
* <p>
* <code>
* unary-expr := + expression | - expression | ~ expression | ! expression
* </code>
*
* @param isRhsExpr
* @return Unary expression node
*/
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode unaryOperator = parseUnaryOperator();
STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}
/**
* Parse unary operator.
* <code>UnaryOperator := + | - | ~ | !</code>
*
* @return Parsed node
*/
private STNode parseUnaryOperator() {
STToken token = peek();
if (isUnaryOperator(token.kind)) {
return consume();
} else {
recover(token, ParserRuleContext.UNARY_OPERATOR);
return parseUnaryOperator();
}
}
/**
* Check whether the given token kind is a unary operator.
*
* @param kind STToken kind
* @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
*/
private boolean isUnaryOperator(SyntaxKind kind) {
switch (kind) {
case PLUS_TOKEN:
case MINUS_TOKEN:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse array type descriptor.
* <p>
* <code>
* array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]
* array-member-type-descriptor := type-descriptor
* array-length :=
* int-literal
* | constant-reference-expr
* | inferred-array-length
* inferred-array-length := *
* </code>
* </p>
*
* @param memberTypeDesc
* @return Parsed Node
*/
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
STNode openBracketToken = parseOpenBracket();
STNode arrayLengthNode = parseArrayLength();
STNode closeBracketToken = parseCloseBracket();
endContext();
return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);
}
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
STNode closeBracketToken) {
memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
if (arrayLengthNode != null) {
switch (arrayLengthNode.kind) {
case ASTERISK_LITERAL:
case SIMPLE_NAME_REFERENCE:
case QUALIFIED_NAME_REFERENCE:
break;
case NUMERIC_LITERAL:
SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
break;
}
default:
openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
arrayLengthNode = STNodeFactory.createEmptyNode();
}
}
List<STNode> arrayDimensions = new ArrayList();
if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {
STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;
STNode innerArrayDimensions = innerArrayType.dimensions;
int dimensionCount = innerArrayDimensions.bucketCount();
for (int i = 0; i < dimensionCount; i++) {
arrayDimensions.add(innerArrayDimensions.childInBucket(i));
}
memberTypeDesc = innerArrayType.memberTypeDesc;
}
STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode,
closeBracketToken);
arrayDimensions.add(arrayDimension);
STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions);
return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);
}
    /**
     * Parse array length.
     * <p>
     * <code>
     * array-length :=
     *    int-literal
     *    | constant-reference-expr
     *    | inferred-array-length
     * constant-reference-expr := variable-reference-expr
     * </code>
     * </p>
     *
     * @return Parsed array length
     */
    private STNode parseArrayLength() {
        STToken token = peek();
        switch (token.kind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case ASTERISK_TOKEN:
                return parseBasicLiteral();
            case CLOSE_BRACKET_TOKEN:
                // `[]` — no length given; represented as an empty node.
                return STNodeFactory.createEmptyNode();
            case IDENTIFIER_TOKEN:
                // A (possibly qualified) constant reference, e.g. `[LEN]` or `[mod:LEN]`.
                return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
            default:
                recover(token, ParserRuleContext.ARRAY_LENGTH);
                return parseArrayLength();
        }
    }
/**
* Parse annotations.
* <p>
* <i>Note: In the <a href="https:
* annotations-list is specified as one-or-more annotations. And the usage is marked as
* optional annotations-list. However, for the consistency of the tree, here we make the
* annotation-list as zero-or-more annotations, and the usage is not-optional.</i>
* <p>
* <code>annots := annotation*</code>
*
* @return Parsed node
*/
private STNode parseOptionalAnnotations() {
startContext(ParserRuleContext.ANNOTATIONS);
List<STNode> annotList = new ArrayList<>();
STToken nextToken = peek();
while (nextToken.kind == SyntaxKind.AT_TOKEN) {
annotList.add(parseAnnotation());
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(annotList);
}
/**
* Parse annotation list with at least one annotation.
*
* @return Annotation list
*/
private STNode parseAnnotations() {
startContext(ParserRuleContext.ANNOTATIONS);
List<STNode> annotList = new ArrayList<>();
annotList.add(parseAnnotation());
while (peek().kind == SyntaxKind.AT_TOKEN) {
annotList.add(parseAnnotation());
}
endContext();
return STNodeFactory.createNodeList(annotList);
}
    /**
     * Parse annotation attachment.
     * <p>
     * <code>annotation := @ annot-tag-reference annot-value</code>
     *
     * @return Parsed node
     */
    private STNode parseAnnotation() {
        STNode atToken = parseAtToken();
        STNode annotReference;
        if (isPredeclaredIdentifier(peek().kind)) {
            annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
        } else {
            // No usable identifier after `@`: synthesize a missing one so the tree
            // stays well-formed.
            annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);
        }
        STNode annotValue;
        if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
            // The annotation value is an optional mapping constructor.
            annotValue = parseMappingConstructorExpr();
        } else {
            annotValue = STNodeFactory.createEmptyNode();
        }
        return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
    }
/**
* Parse '@' token.
*
* @return Parsed node
*/
private STNode parseAtToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.AT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.AT);
return parseAtToken();
}
}
    /**
     * Parse metadata. Meta data consist of optional doc string and
     * an annotations list.
     * <p>
     * <code>metadata := [DocumentationString] annots</code>
     *
     * @return Parse node
     */
    private STNode parseMetaData() {
        STNode docString;
        STNode annotations;
        switch (peek().kind) {
            case DOCUMENTATION_STRING:
                // Doc string first, then any annotations that follow it.
                docString = parseMarkdownDocumentation();
                annotations = parseOptionalAnnotations();
                break;
            case AT_TOKEN:
                // Annotations without a doc string.
                docString = STNodeFactory.createEmptyNode();
                annotations = parseOptionalAnnotations();
                break;
            default:
                // No metadata at all: return an empty node rather than an empty metadata node.
                return STNodeFactory.createEmptyNode();
        }
        return createMetadata(docString, annotations);
    }
/**
* Create metadata node.
*
* @return A metadata node
*/
private STNode createMetadata(STNode docString, STNode annotations) {
if (annotations == null && docString == null) {
return STNodeFactory.createEmptyNode();
} else {
return STNodeFactory.createMetadataNode(docString, annotations);
}
}
/**
* Parse type test expression.
* <code>
* type-test-expr := expression (is | !is) type-descriptor
* </code>
*
* @param lhsExpr Preceding expression of the is expression
* @return Is expression node
*/
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();
STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);
return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);
}
/**
* Parse `is` keyword or `!is` keyword.
*
* @return is-keyword or not-is-keyword node
*/
private STNode parseIsOrNotIsKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IS_KEYWORD ||
token.kind == SyntaxKind.NOT_IS_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.IS_KEYWORD);
return parseIsOrNotIsKeyword();
}
}
    /**
     * Parse local type definition statement statement.
     * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
     *
     * @param annots Annotations that precede the statement
     * @return local type definition statement statement
     */
    private STNode parseLocalTypeDefinitionStatement(STNode annots) {
        startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
        STNode typeKeyword = parseTypeKeyword();
        STNode typeName = parseTypeName();
        STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
                semicolon);
    }
/**
* Parse statement which is only consists of an action or expression.
*
* @param annots Annotations
* @return Statement node
*/
private STNode parseExpressionStatement(STNode annots) {
startContext(ParserRuleContext.EXPRESSION_STATEMENT);
STNode expression = parseActionOrExpressionInLhs(annots);
return getExpressionAsStatement(expression);
}
/**
* Parse statements that starts with an expression.
*
* @return Statement node
*/
private STNode parseStatementStartWithExpr(STNode annots) {
startContext(ParserRuleContext.AMBIGUOUS_STMT);
STNode expr = parseActionOrExpressionInLhs(annots);
return parseStatementStartWithExprRhs(expr);
}
    /**
     * Parse the component followed by the expression, at the beginning of a statement.
     *
     * @param expression Action or expression in LHS
     * @return Statement node
     */
    private STNode parseStatementStartWithExprRhs(STNode expression) {
        SyntaxKind nextTokenKind = peek().kind;
        // An action, or a plain `;`, terminates the statement as an expression/action stmt.
        if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
            return getExpressionAsStatement(expression);
        }
        switch (nextTokenKind) {
            case EQUAL_TOKEN:
                switchContext(ParserRuleContext.ASSIGNMENT_STMT);
                return parseAssignmentStmtRhs(expression);
            case IDENTIFIER_TOKEN:
                // Intentional fall-through: an identifier here is unexpected and is
                // handled the same way as any other token, via recovery below.
            default:
                if (isCompoundAssignment(nextTokenKind)) {
                    return parseCompoundAssignmentStmtRhs(expression);
                }
                // Pick the recovery context based on whether the parsed expression could
                // legally stand alone as a statement.
                ParserRuleContext context;
                if (isPossibleExpressionStatement(expression)) {
                    context = ParserRuleContext.EXPR_STMT_RHS;
                } else {
                    context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
                }
                recover(peek(), context);
                return parseStatementStartWithExprRhs(expression);
        }
    }
    // Returns true if the given node is an expression/action kind that may legally
    // stand alone as a statement (call, check, or one of the action kinds).
    private boolean isPossibleExpressionStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
            case CHECK_EXPRESSION:
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return true;
            default:
                return false;
        }
    }
    // Wraps a parsed expression/action into the matching statement node. Anything that
    // is not a call, check, or action is not a valid expression statement: it is still
    // wrapped, but flagged with an invalid-expression-statement diagnostic.
    private STNode getExpressionAsStatement(STNode expression) {
        switch (expression.kind) {
            case METHOD_CALL:
            case FUNCTION_CALL:
                return parseCallStatement(expression);
            case CHECK_EXPRESSION:
                return parseCheckStatement(expression);
            case REMOTE_METHOD_CALL_ACTION:
            case CHECK_ACTION:
            case BRACED_ACTION:
            case START_ACTION:
            case TRAP_ACTION:
            case FLUSH_ACTION:
            case ASYNC_SEND_ACTION:
            case SYNC_SEND_ACTION:
            case RECEIVE_ACTION:
            case WAIT_ACTION:
            case QUERY_ACTION:
            case COMMIT_ACTION:
                return parseActionStatement(expression);
            default:
                // Invalid expression statement: still consume the `;`, close the
                // statement context, and emit a diagnostic on the resulting node.
                STNode semicolon = parseSemicolon();
                endContext();
                expression = getExpression(expression);
                STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                        expression, semicolon);
                exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
                return exprStmt;
        }
    }
    // Re-interprets an indexed expression (`T[x]`) as an array type descriptor.
    // The single key expression, if any, becomes the array length; invalid lengths are
    // attached to the open bracket as invalid minutiae with a diagnostic.
    private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
        STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
        STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
        if (lengthExprs.isEmpty()) {
            return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                    indexedExpr.closeBracket);
        }
        // Validate the single array-length expression.
        STNode lengthExpr = lengthExprs.get(0);
        switch (lengthExpr.kind) {
            case SIMPLE_NAME_REFERENCE:
                // A missing name means no length was actually present.
                STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
                if (nameRef.name.isMissing()) {
                    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                            indexedExpr.closeBracket);
                }
                break;
            case ASTERISK_LITERAL:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                // Only integer literals are valid lengths; other numeric literals
                // intentionally fall through to the invalid-length handling below.
                SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
                if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
            default:
                STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                        indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
                lengthExpr = STNodeFactory.createEmptyNode();
        }
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
    }
    /**
     * <p>
     * Parse call statement, given the call expression.
     * </p>
     * <code>
     * call-stmt := call-expr ;
     * <br/>
     * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
     * </code>
     *
     * @param expression Call expression associated with the call statement
     * @return Call statement node
     */
    private STNode parseCallStatement(STNode expression) {
        // Call and check statements share the same `expr ;` tail.
        return parseCallStatementOrCheckStatement(expression);
    }
    /**
     * <p>
     * Parse checking statement.
     * </p>
     * <code>
     * checking-stmt := checking-expr ;
     * <br/>
     * checking-expr := checking-keyword expr ;
     * </code>
     *
     * @param expression Checking expression associated with the checking statement
     * @return Checking statement node
     */
    private STNode parseCheckStatement(STNode expression) {
        // Call and check statements share the same `expr ;` tail.
        return parseCallStatementOrCheckStatement(expression);
    }
    // Shared tail of call/check statements: consume the `;`, close the current
    // statement context (opened by the caller), and wrap as a CALL_STATEMENT.
    private STNode parseCallStatementOrCheckStatement(STNode expression) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolon);
    }
    // Wraps a parsed action into an ACTION_STATEMENT: consume the `;` and close the
    // current statement context (opened by the caller).
    private STNode parseActionStatement(STNode action) {
        STNode semicolon = parseSemicolon();
        endContext();
        return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolon);
    }
/**
* Parse remote method call action, given the starting expression.
* <p>
* <code>
* remote-method-call-action := expression -> method-name ( arg-list )
* <br/>
* async-send-action := expression -> peer-worker ;
* </code>
*
* @param isRhsExpr Is this an RHS action
* @param expression LHS expression
* @return
*/
    private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
        // Both forms start with `expression ->`; the tokens after the arrow disambiguate.
        STNode rightArrow = parseRightArrow();
        return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }
    private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
        STNode name;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case FUNCTION_KEYWORD:
                // `-> function` form: the keyword names the peer worker; always an async send.
                STNode functionKeyword = consume();
                name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
                return parseAsyncSendAction(expression, rightArrow, name);
            case IDENTIFIER_TOKEN:
                // Could be a method name or a worker name; decided by what follows.
                name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
                break;
            case CONTINUE_KEYWORD:
            case COMMIT_KEYWORD:
                // These keywords are valid as names here.
                name = getKeywordAsSimpleNameRef();
                break;
            default:
                STToken token = peek();
                recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS);
                return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
        }
        return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
return parseRemoteMethodCallAction(expression, rightArrow, name);
case SEMICOLON_TOKEN:
return parseAsyncSendAction(expression, rightArrow, name);
default:
recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);
return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
}
    private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
        // async-send-action := expression -> peer-worker
        return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
    }
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
STNode openParenToken = parseArgListOpenParenthesis();
STNode arguments = parseArgsList();
STNode closeParenToken = parseArgListCloseParenthesis();
return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParenToken, arguments,
closeParenToken);
}
/**
* Parse right arrow (<code>-></code>) token.
*
* @return Parsed node
*/
private STNode parseRightArrow() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.RIGHT_ARROW_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.RIGHT_ARROW);
return parseRightArrow();
}
}
/**
* Parse map type descriptor.
* map-type-descriptor := `map` type-parameter
*
* @return Parsed node
*/
private STNode parseMapTypeDescriptor(STNode mapKeyword) {
STNode typeParameter = parseTypeParameter();
return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, typeParameter);
}
/**
* Parse parameterized type descriptor.
* parameterized-type-descriptor := `typedesc` [type-parameter]
* <br/> | `future` [type-parameter]
* <br/> | `xml` [type-parameter]
* <br/> | `error` [type-parameter]
*
* @return Parsed node
*/
private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
STNode typeParamNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.LT_TOKEN) {
typeParamNode = parseTypeParameter();
} else {
typeParamNode = STNodeFactory.createEmptyNode();
}
SyntaxKind parameterizedTypeDescKind = getParameterizedTypeDescKind(keywordToken);
return STNodeFactory.createParameterizedTypeDescriptorNode(parameterizedTypeDescKind, keywordToken,
typeParamNode);
}
private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
switch (keywordToken.kind) {
case TYPEDESC_KEYWORD:
return SyntaxKind.TYPEDESC_TYPE_DESC;
case FUTURE_KEYWORD:
return SyntaxKind.FUTURE_TYPE_DESC;
case XML_KEYWORD:
return SyntaxKind.XML_TYPE_DESC;
case ERROR_KEYWORD:
default:
return SyntaxKind.ERROR_TYPE_DESC;
}
}
/**
     * Parse <code> > </code> token.
*
* @return Parsed node
*/
private STNode parseGTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.GT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.GT);
return parseGTToken();
}
}
/**
     * Parse <code> < </code> token.
*
* @return Parsed node
*/
private STNode parseLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.LT);
return parseLTToken();
}
}
/**
     * Parse nil literal. Here the nil literal refers only to the <code>( )</code> form.
*
* @return Parsed node
*/
private STNode parseNilLiteral() {
startContext(ParserRuleContext.NIL_LITERAL);
STNode openParenthesisToken = parseOpenParenthesis();
STNode closeParenthesisToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createNilLiteralNode(openParenthesisToken, closeParenthesisToken);
}
/**
* Parse annotation declaration, given the qualifier.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the listener declaration
* @param constKeyword Const keyword
* @return Parsed node
*/
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
startContext(ParserRuleContext.ANNOTATION_DECL);
STNode annotationKeyword = parseAnnotationKeyword();
STNode annotDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
endContext();
return annotDecl;
}
/**
* Parse annotation keyword.
*
* @return Parsed node
*/
private STNode parseAnnotationKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ANNOTATION_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ANNOTATION_KEYWORD);
return parseAnnotationKeyword();
}
}
/**
* Parse the components that follows after the annotation keyword of a annotation declaration.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the constant decl
* @param constKeyword Const keyword
* @param annotationKeyword
* @return Parsed node
*/
    private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // An identifier may be either the annot-tag itself or the start of a
                // user-defined type; delegate the disambiguation.
                return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
            default:
                if (isTypeStartingToken(nextToken.kind)) {
                    break;
                }
                recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
                return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
        }
        // A type-starting token: parse `type-desc annot-tag`, then the attach points.
        STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
/**
* Parse annotation tag.
* <p>
* <code>annot-tag := identifier</code>
*
     * @return Parsed annotation tag node
*/
private STNode parseAnnotationTag() {
STToken token = peek();
if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
recover(peek(), ParserRuleContext.ANNOTATION_TAG);
return parseAnnotationTag();
}
}
    private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                       STNode annotationKeyword) {
        // A qualified name (`module:Type`) can only be the type; the tag must follow.
        STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
        if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                    typeDescOrAnnotTag, annotTag);
        }
        // If another identifier or a type-continuation token follows, the first
        // identifier was the start of a type descriptor.
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
            STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                    ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
            STNode annotTag = parseAnnotationTag();
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
        }
        // Otherwise the single identifier may be the annot-tag itself; let the RHS decide.
        STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
        return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
    }
/**
* Parse the component that follows the first identifier in an annotation decl. The identifier
* can be either the type-name (a user defined type) or the annot-tag, where the type-name
* is not present.
*
* @param metadata Metadata
* @param qualifier Qualifier that precedes the annotation decl
* @param constKeyword Const keyword
* @param annotationKeyword Annotation keyword
* @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
* @return Parsed node
*/
    private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                          STNode annotationKeyword, STNode typeDescOrAnnotTag) {
        STToken nextToken = peek();
        STNode typeDesc;
        STNode annotTag;
        switch (nextToken.kind) {
            case IDENTIFIER_TOKEN:
                // Another identifier follows: the earlier one was the type, this is the tag.
                typeDesc = typeDescOrAnnotTag;
                annotTag = parseAnnotationTag();
                break;
            case SEMICOLON_TOKEN:
            case ON_KEYWORD:
                // Declaration continues or ends: the earlier identifier was the annot-tag.
                typeDesc = STNodeFactory.createEmptyNode();
                annotTag = typeDescOrAnnotTag;
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);
                return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
        }
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
        STNode onKeyword;
        STNode attachPoints;
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case SEMICOLON_TOKEN:
                // No attach points: `annotation [T] tag;`
                onKeyword = STNodeFactory.createEmptyNode();
                attachPoints = STNodeFactory.createEmptyNodeList();
                break;
            case ON_KEYWORD:
                onKeyword = parseOnKeyword();
                attachPoints = parseAnnotationAttachPoints();
                // Flag the `on` keyword with a diagnostic when no attach point followed it.
                onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                break;
            default:
                recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);
                return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                        annotTag);
        }
        STNode semicolonToken = parseSemicolon();
        return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
                typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
    }
/**
* Parse annotation attach points.
* <p>
* <code>
* annot-attach-points := annot-attach-point (, annot-attach-point)*
* <br/><br/>
* annot-attach-point := dual-attach-point | source-only-attach-point
* <br/><br/>
* dual-attach-point := [source] dual-attach-point-ident
* <br/><br/>
* dual-attach-point-ident :=
* type
* | class
* | [object|service remote] function
* | parameter
* | return
* | service
* | [object|record] field
* <br/><br/>
* source-only-attach-point := source source-only-attach-point-ident
* <br/><br/>
* source-only-attach-point-ident :=
* annotation
* | external
* | var
* | const
* | listener
* | worker
* </code>
*
* @return Parsed node
*/
    private STNode parseAnnotationAttachPoints() {
        startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
        List<STNode> attachPoints = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndAnnotAttachPointList(nextToken.kind)) {
            endContext();
            return STNodeFactory.createEmptyNodeList();
        }
        // Parse the first attach point, then `, attach-point` pairs until the list ends.
        STNode attachPoint = parseAnnotationAttachPoint();
        attachPoints.add(attachPoint);
        nextToken = peek();
        STNode leadingComma;
        while (!isEndAnnotAttachPointList(nextToken.kind)) {
            leadingComma = parseAttachPointEnd();
            if (leadingComma == null) {
                // Semicolon reached: the list ends here.
                break;
            }
            attachPoints.add(leadingComma);
            attachPoint = parseAnnotationAttachPoint();
            if (attachPoint == null) {
                // EOF after a comma: fabricate a missing attach point carrying a diagnostic.
                STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);
                STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);
                attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);
                attachPoint = SyntaxErrors.addDiagnostic(attachPoint,
                        DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
                attachPoints.add(attachPoint);
                break;
            }
            attachPoints.add(attachPoint);
            nextToken = peek();
        }
        // If the final attach point ends with a missing token and an identifier follows on
        // the same line, consume that identifier as an invalid-token diagnostic.
        if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&
                !this.tokenReader.head().hasTrailingNewline()) {
            STToken nextNonVirtualToken = this.tokenReader.read();
            updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());
        }
        endContext();
        return STNodeFactory.createNodeList(attachPoints);
    }
/**
* Parse annotation attach point end.
*
* @return Parsed node
*/
private STNode parseAttachPointEnd() {
switch (peek().kind) {
case SEMICOLON_TOKEN:
return null;
case COMMA_TOKEN:
return consume();
default:
recover(peek(), ParserRuleContext.ATTACH_POINT_END);
return parseAttachPointEnd();
}
}
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case SEMICOLON_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse annotation attach point.
*
* @return Parsed node
*/
    private STNode parseAnnotationAttachPoint() {
        switch (peek().kind) {
            case EOF_TOKEN:
                // null signals to the caller that no attach point could be parsed.
                return null;
            // Source-only attach points: these idents require a leading `source` keyword;
            // parseSourceKeyword recovers when it is absent.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
            case SOURCE_KEYWORD:
                STNode sourceKeyword = parseSourceKeyword();
                return parseAttachPointIdent(sourceKeyword);
            // Dual attach points starting without a `source` keyword.
            case OBJECT_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case RECORD_KEYWORD:
            case CLASS_KEYWORD:
                sourceKeyword = STNodeFactory.createEmptyNode();
                STNode firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT);
                return parseAnnotationAttachPoint();
        }
    }
/**
* Parse source keyword.
*
* @return Parsed node
*/
private STNode parseSourceKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.SOURCE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.SOURCE_KEYWORD);
return parseSourceKeyword();
}
}
/**
     * Parse attach point ident, given the preceding source keyword (if any).
* <p>
* <code>
* source-only-attach-point-ident := annotation | external | var | const | listener | worker
* <br/><br/>
* dual-attach-point-ident := type | class | [object|service remote] function | parameter
* | return | service | [object|record] field
* </code>
*
* @param sourceKeyword Source keyword
* @return Parsed node
*/
    private STNode parseAttachPointIdent(STNode sourceKeyword) {
        switch (peek().kind) {
            // Source-only attach points: exactly one ident.
            case ANNOTATION_KEYWORD:
            case EXTERNAL_KEYWORD:
            case VAR_KEYWORD:
            case CONST_KEYWORD:
            case LISTENER_KEYWORD:
            case WORKER_KEYWORD:
                STNode firstIdent = consume();
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            // Dual attach points: may be followed by further idents.
            case OBJECT_KEYWORD:
            case RESOURCE_KEYWORD:
            case RECORD_KEYWORD:
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case SERVICE_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
                firstIdent = consume();
                return parseDualAttachPointIdent(sourceKeyword, firstIdent);
            default:
                recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
                return parseAttachPointIdent(sourceKeyword);
        }
    }
/**
* Parse dual-attach-point ident.
*
* @param sourceKeyword Source keyword
* @param firstIdent first part of the dual attach-point
* @return Parsed node
*/
    private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
        STNode secondIdent;
        switch (firstIdent.kind) {
            case OBJECT_KEYWORD:
                // `object function` or `object field`
                secondIdent = parseIdentAfterObjectIdent();
                break;
            case RESOURCE_KEYWORD:
                // `resource function`
                secondIdent = parseFunctionIdent();
                break;
            case RECORD_KEYWORD:
                // `record field`
                secondIdent = parseFieldIdent();
                break;
            case SERVICE_KEYWORD:
                // `service` alone, or `service remote function`
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
            case TYPE_KEYWORD:
            case FUNCTION_KEYWORD:
            case PARAMETER_KEYWORD:
            case RETURN_KEYWORD:
            case FIELD_KEYWORD:
            case CLASS_KEYWORD:
            default:
                // Single-ident attach point.
                STNode identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        }
        STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
        return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
/**
* Parse remote ident.
*
* @return Parsed node
*/
private STNode parseRemoteIdent() {
STToken token = peek();
if (token.kind == SyntaxKind.REMOTE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.REMOTE_IDENT);
return parseRemoteIdent();
}
}
/**
* Parse service attach point.
* <code>service-attach-point := service | service remote function</code>
*
* @return Parsed node
*/
    private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
        STNode identList;
        STToken token = peek();
        switch (token.kind) {
            case REMOTE_KEYWORD:
                // `service remote function`
                STNode secondIdent = parseRemoteIdent();
                STNode thirdIdent = parseFunctionIdent();
                identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            case COMMA_TOKEN:
            case SEMICOLON_TOKEN:
                // Plain `service` attach point.
                identList = STNodeFactory.createNodeList(firstIdent);
                return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
            default:
                recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
                return parseServiceAttachPoint(sourceKeyword, firstIdent);
        }
    }
/**
* Parse the idents that are supported after object-ident.
*
* @return Parsed node
*/
private STNode parseIdentAfterObjectIdent() {
STToken token = peek();
switch (token.kind) {
case FUNCTION_KEYWORD:
case FIELD_KEYWORD:
return consume();
default:
recover(token, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
return parseIdentAfterObjectIdent();
}
}
/**
* Parse function ident.
*
* @return Parsed node
*/
private STNode parseFunctionIdent() {
STToken token = peek();
if (token.kind == SyntaxKind.FUNCTION_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FUNCTION_IDENT);
return parseFunctionIdent();
}
}
/**
* Parse field ident.
*
* @return Parsed node
*/
private STNode parseFieldIdent() {
STToken token = peek();
if (token.kind == SyntaxKind.FIELD_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FIELD_IDENT);
return parseFieldIdent();
}
}
/**
* Parse XML namespace declaration.
* <p>
* <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
* <br/>
* xml-namespace-uri := simple-const-expr
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
     * @return Parsed XML namespace declaration node
*/
    private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
        startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
        STNode xmlnsKeyword = parseXMLNSKeyword();
        STNode namespaceUri = parseSimpleConstExpr();
        // Keep consuming const-exprs until a valid URI form is found, attaching each
        // invalid one to the `xmlns` keyword as invalid-node minutiae.
        while (!isValidXMLNameSpaceURI(namespaceUri)) {
            xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                    DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
            namespaceUri = parseSimpleConstExpr();
        }
        STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        endContext();
        return xmlnsDecl;
    }
/**
* Parse xmlns keyword.
*
* @return Parsed node
*/
private STNode parseXMLNSKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XMLNS_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.XMLNS_KEYWORD);
return parseXMLNSKeyword();
}
}
private boolean isValidXMLNameSpaceURI(STNode expr) {
switch (expr.kind) {
case STRING_LITERAL:
case QUALIFIED_NAME_REFERENCE:
case SIMPLE_NAME_REFERENCE:
return true;
case IDENTIFIER_TOKEN:
default:
return false;
}
}
private STNode parseSimpleConstExpr() {
startContext(ParserRuleContext.CONSTANT_EXPRESSION);
STNode expr = parseSimpleConstExprInternal();
endContext();
return expr;
}
/**
* Parse simple constants expr.
*
* @return Parsed node
*/
    private STNode parseSimpleConstExprInternal() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case STRING_LITERAL_TOKEN:
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case NULL_KEYWORD:
                return parseBasicLiteral();
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // Signed numeric literal.
                return parseSignedIntOrFloat();
            case OPEN_PAREN_TOKEN:
                // `( )` — the nil literal.
                return parseNilLiteral();
            default:
                if (isPredeclaredIdentifier(nextToken.kind)) {
                    // A (possibly qualified) constant reference.
                    return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
                }
                recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
                return parseSimpleConstExprInternal();
        }
    }
/**
     * Parse the portion after the namespace-uri of an XML declaration.
*
* @param xmlnsKeyword XMLNS keyword
* @param namespaceUri Namespace URI
* @return Parsed node
*/
    private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
        STNode asKeyword = STNodeFactory.createEmptyNode();
        STNode namespacePrefix = STNodeFactory.createEmptyNode();
        switch (peek().kind) {
            case AS_KEYWORD:
                // Optional `as prefix` clause.
                asKeyword = parseAsKeyword();
                namespacePrefix = parseNamespacePrefix();
                break;
            case SEMICOLON_TOKEN:
                break;
            default:
                recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
                return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
        }
        STNode semicolon = parseSemicolon();
        // Module-level and local xmlns declarations produce different node types.
        if (isModuleVar) {
            return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                    namespacePrefix, semicolon);
        }
        return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
                semicolon);
    }
/**
* Parse import prefix.
*
* @return Parsed node
*/
private STNode parseNamespacePrefix() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
return parseNamespacePrefix();
}
}
/**
* Parse named worker declaration.
* <p>
* <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
* </code>
*
* @param annots Annotations attached to the worker decl
* @param qualifiers Preceding transactional keyword in a list
* @return Parsed node
*/
    private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
        startContext(ParserRuleContext.NAMED_WORKER_DECL);
        // Only `transactional` may qualify a worker decl; others are flagged and dropped.
        STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
        STNode workerKeyword = parseWorkerKeyword();
        STNode workerName = parseWorkerName();
        STNode returnTypeDesc = parseReturnTypeDescriptor();
        STNode workerBody = parseBlockNode();
        endContext();
        return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,
                returnTypeDesc, workerBody);
    }
    private STNode getTransactionalKeyword(List<STNode> qualifierList) {
        // Validate the qualifier list: keep the first `transactional`, and attach every
        // other qualifier (duplicate or disallowed) to a neighbouring node as a diagnostic.
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate of an already-accepted qualifier.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last qualifier is invalid: attach it to the next real token.
                addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                        ((STToken) qualifier).text());
            } else {
                // Invalid qualifier mid-list: attach it to the following qualifier.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        STNode transactionalKeyword;
        if (validatedList.isEmpty()) {
            transactionalKeyword = STNodeFactory.createEmptyNode();
        } else {
            transactionalKeyword = validatedList.get(0);
        }
        return transactionalKeyword;
    }
private STNode parseReturnTypeDescriptor() {
STToken token = peek();
if (token.kind != SyntaxKind.RETURNS_KEYWORD) {
return STNodeFactory.createEmptyNode();
}
STNode returnsKeyword = consume();
STNode annot = parseOptionalAnnotations();
STNode type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annot, type);
}
/**
* Parse worker keyword.
*
* @return Parsed node
*/
private STNode parseWorkerKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.WORKER_KEYWORD) {
return consume();
} else {
recover(peek(), ParserRuleContext.WORKER_KEYWORD);
return parseWorkerKeyword();
}
}
/**
* Parse worker name.
* <p>
* <code>worker-name := identifier</code>
*
* @return Parsed node
*/
private STNode parseWorkerName() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
return consume();
} else {
recover(peek(), ParserRuleContext.WORKER_NAME);
return parseWorkerName();
}
}
/**
* Parse lock statement.
* <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
*
* @return Lock statement
*/
private STNode parseLockStatement() {
startContext(ParserRuleContext.LOCK_STMT);
STNode lockKeyword = parseLockKeyword();
STNode blockStatement = parseBlockNode();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createLockStatementNode(lockKeyword, blockStatement, onFailClause);
}
/**
* Parse lock-keyword.
*
* @return lock-keyword node
*/
private STNode parseLockKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LOCK_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LOCK_KEYWORD);
return parseLockKeyword();
}
}
/**
* Parse union type descriptor.
* union-type-descriptor := type-descriptor | type-descriptor
*
* @param leftTypeDesc Type desc in the LHS os the union type desc.
* @param context Current context.
* @return parsed union type desc node
*/
    private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                            boolean isTypedBindingPattern) {
        // NOTE(review): consume() assumes the caller has verified that the next token
        // is the pipe token — confirm at call sites.
        STNode pipeToken = consume();
        STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
                TypePrecedence.UNION);
        // mergeTypesWithUnion handles precedence/associativity when combining the sides.
        return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);
    }
/**
* Creates a union type descriptor after validating lhs and rhs types.
* <p>
* <i>Note: Since type precedence and associativity are not taken into account here,
* this method should not be called directly when types are unknown.
* <br/>
     * Call {@code mergeTypesWithUnion} instead.</i>
*
* @param leftTypeDesc lhs type
* @param pipeToken pipe token
* @param rightTypeDesc rhs type
* @return a UnionTypeDescriptorNode
*/
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
* Parse pipe token.
*
* @return parsed pipe token node
*/
private STNode parsePipeToken() {
STToken token = peek();
if (token.kind == SyntaxKind.PIPE_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.PIPE);
return parsePipeToken();
}
}
    private boolean isTypeStartingToken(SyntaxKind nodeKind) {
        // Convenience overload that supplies the next-next token for lookahead.
        return isTypeStartingToken(nodeKind, getNextNextToken());
    }
private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {
switch (nextTokenKind) {
case IDENTIFIER_TOKEN:
case SERVICE_KEYWORD:
case RECORD_KEYWORD:
case OBJECT_KEYWORD:
case ABSTRACT_KEYWORD:
case CLIENT_KEYWORD:
case OPEN_PAREN_TOKEN:
case MAP_KEYWORD:
case STREAM_KEYWORD:
case TABLE_KEYWORD:
case FUNCTION_KEYWORD:
case OPEN_BRACKET_TOKEN:
case DISTINCT_KEYWORD:
case ISOLATED_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case TRANSACTION_KEYWORD:
return true;
default:
if (isParameterizedTypeToken(nextTokenKind)) {
return true;
}
if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {
return true;
}
return isSimpleType(nextTokenKind);
}
}
/**
* Check if the token kind is a type descriptor in terminal expression.
* <p>
* simple-type-in-expr :=
* boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
*
* @param nodeKind token kind to check
* @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
*/
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
switch (nodeKind) {
case VAR_KEYWORD:
case READONLY_KEYWORD:
return false;
default:
return isSimpleType(nodeKind);
}
}
static boolean isSimpleType(SyntaxKind nodeKind) {
switch (nodeKind) {
case INT_KEYWORD:
case FLOAT_KEYWORD:
case DECIMAL_KEYWORD:
case BOOLEAN_KEYWORD:
case STRING_KEYWORD:
case BYTE_KEYWORD:
case JSON_KEYWORD:
case HANDLE_KEYWORD:
case ANY_KEYWORD:
case ANYDATA_KEYWORD:
case NEVER_KEYWORD:
case VAR_KEYWORD:
case READONLY_KEYWORD:
return true;
default:
return false;
}
}
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
switch (nodeKind) {
case BOOLEAN_KEYWORD:
case DECIMAL_KEYWORD:
case ERROR_KEYWORD:
case FLOAT_KEYWORD:
case FUTURE_KEYWORD:
case INT_KEYWORD:
case MAP_KEYWORD:
case OBJECT_KEYWORD:
case STREAM_KEYWORD:
case STRING_KEYWORD:
case TABLE_KEYWORD:
case TRANSACTION_KEYWORD:
case TYPEDESC_KEYWORD:
case XML_KEYWORD:
return true;
default:
return false;
}
}
    private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
        // A predeclared prefix followed by a colon (e.g. `int:`) starts a qualified identifier.
        return isPredeclaredPrefix(nodeKind) && getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
    }
    private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
        // Maps a built-in type keyword to its corresponding type-descriptor syntax kind.
        switch (typeKeyword) {
            case INT_KEYWORD:
                return SyntaxKind.INT_TYPE_DESC;
            case FLOAT_KEYWORD:
                return SyntaxKind.FLOAT_TYPE_DESC;
            case DECIMAL_KEYWORD:
                return SyntaxKind.DECIMAL_TYPE_DESC;
            case BOOLEAN_KEYWORD:
                return SyntaxKind.BOOLEAN_TYPE_DESC;
            case STRING_KEYWORD:
                return SyntaxKind.STRING_TYPE_DESC;
            case BYTE_KEYWORD:
                return SyntaxKind.BYTE_TYPE_DESC;
            case JSON_KEYWORD:
                return SyntaxKind.JSON_TYPE_DESC;
            case HANDLE_KEYWORD:
                return SyntaxKind.HANDLE_TYPE_DESC;
            case ANY_KEYWORD:
                return SyntaxKind.ANY_TYPE_DESC;
            case ANYDATA_KEYWORD:
                return SyntaxKind.ANYDATA_TYPE_DESC;
            case NEVER_KEYWORD:
                return SyntaxKind.NEVER_TYPE_DESC;
            case VAR_KEYWORD:
                return SyntaxKind.VAR_TYPE_DESC;
            case READONLY_KEYWORD:
                return SyntaxKind.READONLY_TYPE_DESC;
            default:
                // Should be unreachable: callers pass only built-in type keywords.
                assert false : typeKeyword + " is not a built-in type";
                return SyntaxKind.TYPE_REFERENCE;
        }
    }
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FORK_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FORK_KEYWORD);
return parseForkKeyword();
}
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
    private STNode parseForkStatement() {
        startContext(ParserRuleContext.FORK_STMT);
        STNode forkKeyword = parseForkKeyword();
        STNode openBrace = parseOpenBrace();
        // Collect named worker declarations; any other statement is attached as an
        // invalid node to the nearest anchor (the open brace or the previous worker).
        ArrayList<STNode> workers = new ArrayList<>();
        while (!isEndOfStatements()) {
            STNode stmt = parseStatement();
            if (stmt == null) {
                break;
            }
            if (validateStatement(stmt)) {
                continue;
            }
            switch (stmt.kind) {
                case NAMED_WORKER_DECLARATION:
                    workers.add(stmt);
                    break;
                default:
                    if (workers.isEmpty()) {
                        openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    } else {
                        updateLastNodeInListWithInvalidNode(workers, stmt,
                                DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                    }
            }
        }
        STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
        STNode closeBrace = parseCloseBrace();
        endContext();
        STNode forkStmt =
                STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
        // A fork statement must contain at least one named worker declaration.
        if (isNodeListEmpty(namedWorkerDeclarations)) {
            return SyntaxErrors.addDiagnostic(forkStmt,
                    DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
        }
        return forkStmt;
    }
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Whether this is a RHS expression or not
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
STNode trapKeyword = parseTrapKeyword();
STNode expr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
if (isAction(expr)) {
return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_ACTION, trapKeyword, expr);
}
return STNodeFactory.createTrapExpressionNode(SyntaxKind.TRAP_EXPRESSION, trapKeyword, expr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TRAP_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TRAP_KEYWORD);
return parseTrapKeyword();
}
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ list-members ] ]
* <br/>
* list-members := list-member (, list-member)*
* <br/>
* list-member := expression | spread-member
* <br/>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
startContext(ParserRuleContext.LIST_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode listMembers = parseListMembers();
STNode closeBracket = parseCloseBracket();
endContext();
return STNodeFactory.createListConstructorExpressionNode(openBracket, listMembers, closeBracket);
}
/**
* Parse optional list member list.
*
* @return Parsed node
*/
private STNode parseListMembers() {
List<STNode> listMembers = new ArrayList<>();
if (isEndOfListConstructor(peek().kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode listMember = parseListMember();
listMembers.add(listMember);
return parseListMembers(listMembers);
}
private STNode parseListMembers(List<STNode> listMembers) {
STNode listConstructorMemberEnd;
while (!isEndOfListConstructor(peek().kind)) {
listConstructorMemberEnd = parseListConstructorMemberEnd();
if (listConstructorMemberEnd == null) {
break;
}
listMembers.add(listConstructorMemberEnd);
STNode listMember = parseListMember();
listMembers.add(listMember);
}
return STNodeFactory.createNodeList(listMembers);
}
/**
* Parse list member.
* <p>
* <code>
* list-member := expression | spread-member
* </code>
*
* @return Parsed node
*/
private STNode parseListMember() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
return parseSpreadMember();
} else {
return parseExpression();
}
}
/**
* Parse spread member.
* <p>
* <code>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseSpreadMember() {
STNode ellipsis = parseEllipsis();
STNode expr = parseExpression();
return STNodeFactory.createSpreadMemberNode(ellipsis, expr);
}
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACKET_TOKEN:
return true;
default:
return false;
}
}
private STNode parseListConstructorMemberEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case COMMA_TOKEN:
return consume();
case CLOSE_BRACKET_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
return parseListConstructorMemberEnd();
}
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
startContext(ParserRuleContext.FOREACH_STMT);
STNode forEachKeyword = parseForEachKeyword();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
STNode inKeyword = parseInKeyword();
STNode actionOrExpr = parseActionOrExpression();
STNode blockStatement = parseBlockNode();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createForEachStatementNode(forEachKeyword, typedBindingPattern, inKeyword, actionOrExpr,
blockStatement, onFailClause);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.FOREACH_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.FOREACH_KEYWORD);
return parseForEachKeyword();
}
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.IN_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.IN_KEYWORD);
return parseInKeyword();
}
}
    /**
     * Parse type cast expression.
     * <p>
     * <code>
     * type-cast-expr := < type-cast-param > expression
     * <br/>
     * type-cast-param := [annots] type-descriptor | annots
     * </code>
     *
     * @param isRhsExpr Whether this is a RHS expression
     * @param allowActions Whether actions are allowed in the casted expression
     * @param isInConditionalExpr Whether this occurs inside a conditional expression
     * @return Parsed node
     */
    private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
        // The TYPE_CAST context opened here is closed by the overload below.
        startContext(ParserRuleContext.TYPE_CAST);
        STNode ltToken = parseLTToken();
        return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
    }
private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
boolean isInConditionalExpr) {
STNode typeCastParam = parseTypeCastParam();
STNode gtToken = parseGTToken();
endContext();
STNode expression =
parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
return STNodeFactory.createTypeCastExpressionNode(ltToken, typeCastParam, gtToken, expression);
}
private STNode parseTypeCastParam() {
STNode annot;
STNode type;
STToken token = peek();
switch (token.kind) {
case AT_TOKEN:
annot = parseOptionalAnnotations();
token = peek();
if (isTypeStartingToken(token.kind)) {
type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
} else {
type = STNodeFactory.createEmptyNode();
}
break;
default:
annot = STNodeFactory.createEmptyNode();
type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
break;
}
return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
STNode openBracket = parseOpenBracket();
STNode rowList = parseRowList();
STNode closeBracket = parseCloseBracket();
return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracket, rowList,
closeBracket);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TABLE_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TABLE_KEYWORD);
return parseTableKeyword();
}
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
private STNode parseRowList() {
STToken nextToken = peek();
if (isEndOfTableRowList(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
List<STNode> mappings = new ArrayList<>();
STNode mapExpr = parseMappingConstructorExpr();
mappings.add(mapExpr);
nextToken = peek();
STNode rowEnd;
while (!isEndOfTableRowList(nextToken.kind)) {
rowEnd = parseTableRowEnd();
if (rowEnd == null) {
break;
}
mappings.add(rowEnd);
mapExpr = parseMappingConstructorExpr();
mappings.add(mapExpr);
nextToken = peek();
}
return STNodeFactory.createNodeList(mappings);
}
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
switch (tokenKind) {
case EOF_TOKEN:
case CLOSE_BRACKET_TOKEN:
return true;
case COMMA_TOKEN:
case OPEN_BRACE_TOKEN:
return false;
default:
return isEndOfMappingConstructor(tokenKind);
}
}
private STNode parseTableRowEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.TABLE_ROW_END);
return parseTableRowEnd();
}
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode keyKeyword = parseKeyKeyword();
STNode openParen = parseOpenParenthesis();
STNode fieldNames = parseFieldNames();
STNode closeParen = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeyword, openParen, fieldNames, closeParen);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.KEY_KEYWORD) {
return consume();
}
if (isKeyKeyword(token)) {
return getKeyKeyword(consume());
}
recover(token, ParserRuleContext.KEY_KEYWORD);
return parseKeyKeyword();
}
static boolean isKeyKeyword(STToken token) {
return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
}
    /**
     * Convert an identifier token spelling "key" into a KEY_KEYWORD token,
     * preserving the original token's minutiae and diagnostics.
     *
     * @param token Source token (expected to be an identifier with text "key")
     * @return KEY_KEYWORD token
     */
    private STNode getKeyKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
                token.diagnostics());
    }
    /**
     * Convert the given token into an UNDERSCORE_KEYWORD token, preserving
     * the original token's minutiae and diagnostics.
     *
     * @param token Source token
     * @return UNDERSCORE_KEYWORD token
     */
    private STToken getUnderscoreKeyword(STToken token) {
        return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
                token.trailingMinutiae(), token.diagnostics());
    }
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
private STNode parseFieldNames() {
STToken nextToken = peek();
if (isEndOfFieldNamesList(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
List<STNode> fieldNames = new ArrayList<>();
STNode fieldName = parseVariableName();
fieldNames.add(fieldName);
nextToken = peek();
STNode leadingComma;
while (!isEndOfFieldNamesList(nextToken.kind)) {
leadingComma = parseComma();
fieldNames.add(leadingComma);
fieldName = parseVariableName();
fieldNames.add(fieldName);
nextToken = peek();
}
return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
switch (tokenKind) {
case COMMA_TOKEN:
case IDENTIFIER_TOKEN:
return false;
default:
return true;
}
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ERROR_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ERROR_KEYWORD);
return parseErrorKeyword();
}
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* <br/>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
STNode streamTypeParamsNode;
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.LT_TOKEN) {
streamTypeParamsNode = parseStreamTypeParamsNode();
} else {
streamTypeParamsNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, streamTypeParamsNode);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
STNode ltToken = parseLTToken();
startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
STNode leftTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
STNode streamTypedesc = parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
endContext();
return streamTypedesc;
}
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
STNode commaToken, rightTypeDescNode, gtToken;
switch (peek().kind) {
case COMMA_TOKEN:
commaToken = parseComma();
rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
break;
case GT_TOKEN:
commaToken = STNodeFactory.createEmptyNode();
rightTypeDescNode = STNodeFactory.createEmptyNode();
break;
default:
recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
}
gtToken = parseGTToken();
return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
gtToken);
}
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STREAM_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.STREAM_KEYWORD);
return parseStreamKeyword();
}
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
STNode letKeyword = parseLetKeyword();
STNode letVarDeclarations = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
STNode inKeyword = parseInKeyword();
letKeyword = cloneWithDiagnosticIfListEmpty(letVarDeclarations, letKeyword,
DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false, isInConditionalExpr);
return STNodeFactory.createLetExpressionNode(letKeyword, letVarDeclarations, inKeyword, expression);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.LET_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.LET_KEYWORD);
return parseLetKeyword();
}
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
startContext(context);
List<STNode> varDecls = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {
endContext();
return STNodeFactory.createEmptyNodeList();
}
STNode varDec = parseLetVarDecl(isRhsExpr);
varDecls.add(varDec);
nextToken = peek();
STNode leadingComma;
while (!isEndOfLetVarDeclarations(nextToken.kind, getNextNextToken())) {
leadingComma = parseComma();
varDecls.add(leadingComma);
varDec = parseLetVarDecl(isRhsExpr);
varDecls.add(varDec);
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(varDecls);
}
static boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind, STToken nextNextToken) {
switch (tokenKind) {
case COMMA_TOKEN:
case AT_TOKEN:
return false;
case IN_KEYWORD:
return true;
default:
return !isTypeStartingToken(tokenKind, nextNextToken);
}
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDecl(boolean isRhsExpr) {
STNode annot = parseOptionalAnnotations();
STNode typedBindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
STNode assign = parseAssignOp();
STNode expression = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
return STNodeFactory.createLetVariableDeclarationNode(annot, typedBindingPattern, assign, expression);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
STNode type = STNodeFactory.createEmptyNode();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
private STNode parseTemplateContent() {
List<STNode> items = new ArrayList<>();
STToken nextToken = peek();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode contentItem = parseTemplateItem();
items.add(contentItem);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
switch (kind) {
case EOF_TOKEN:
case BACKTICK_TOKEN:
return true;
default:
return false;
}
}
private STNode parseTemplateItem() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return parseInterpolation();
}
return consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
STNode type = parseStringKeyword();
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
STNode content = parseTemplateContent();
STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, type, startingBackTick,
content, endingBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.STRING_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.STRING_KEYWORD);
return parseStringKeyword();
}
}
    /**
     * Parse XML template expression.
     * <p>
     * <code>xml-template-expr := xml BacktickString</code>
     *
     * @return XML template expression
     */
    private STNode parseXMLTemplateExpression() {
        STNode xmlKeyword = parseXMLKeyword();
        STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
        STNode content;
        STNode endingBackTick;
        if (startingBackTick.isMissing()) {
            // No backtick string at all: synthesize an empty template with missing
            // backticks and report a single missing-backtick-string diagnostic,
            // instead of letting the XML parser run on nothing.
            startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
            content = STNodeFactory.createEmptyNodeList();
            STNode templateExpr = STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION,
                    xmlKeyword, startingBackTick, content, endingBackTick);
            templateExpr = SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);
            return templateExpr;
        }
        // Re-parse the raw template content as XML (see parseTemplateContentAsXML).
        content = parseTemplateContentAsXML();
        endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
                startingBackTick, content, endingBackTick);
    }
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.XML_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.XML_KEYWORD);
return parseXMLKeyword();
}
}
    /**
     * Parse the content of the template string as XML. This method first read the
     * input in the same way as the raw-backtick-template (BacktickString). Then
     * it parses the content as XML.
     *
     * @return XML node
     */
    private STNode parseTemplateContentAsXML() {
        ArrayDeque<STNode> expressions = new ArrayDeque<>();
        StringBuilder xmlStringBuilder = new StringBuilder();
        STToken nextToken = peek();
        while (!isEndOfBacktickContent(nextToken.kind)) {
            STNode contentItem = parseTemplateItem();
            if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
                xmlStringBuilder.append(((STToken) contentItem).text());
            } else {
                // Replace each interpolation with a "${}" placeholder in the text to be
                // lexed as XML, and queue the expression node; the XML parser later
                // re-associates the queued expressions with the placeholders in order.
                xmlStringBuilder.append("${}");
                expressions.add(contentItem);
            }
            nextToken = peek();
        }
        // Re-lex the accumulated text with the XML lexer and hand it, together with the
        // interpolated expressions, to the XML parser.
        CharReader charReader = CharReader.from(xmlStringBuilder.toString());
        AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
        XMLParser xmlParser = new XMLParser(tokenReader, expressions);
        return xmlParser.parse();
    }
    /**
     * Parse interpolation of a back-tick string.
     * <p>
     * <code>
     * interpolation := ${ expression }
     * </code>
     *
     * @return Interpolation node
     */
    private STNode parseInterpolation() {
        startContext(ParserRuleContext.INTERPOLATION);
        STNode interpolStart = parseInterpolationStart();
        STNode expr = parseExpression();
        // Consume and attach any leftover tokens before the closing brace as invalid-node
        // minutiae, so the interpolation always ends cleanly at the "}" or backtick.
        while (!isEndOfInterpolation()) {
            STToken nextToken = consume();
            expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
        }
        STNode closeBrace = parseCloseBrace();
        endContext();
        return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
    }
    private boolean isEndOfInterpolation() {
        SyntaxKind nextTokenKind = peek().kind;
        switch (nextTokenKind) {
            case EOF_TOKEN:
            case BACKTICK_TOKEN:
                return true;
            default:
                // A "}" only terminates the interpolation once the lexer has left the
                // interpolation modes — i.e. it is the interpolation's own closing brace,
                // not a brace belonging to a nested construct inside the expression.
                ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
                return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                        currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
        }
    }
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
STToken token = peek();
if (token.kind == SyntaxKind.INTERPOLATION_START_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.INTERPOLATION_START_TOKEN);
return parseInterpolationStart();
}
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
STToken token = peek();
if (token.kind == SyntaxKind.BACKTICK_TOKEN) {
return consume();
} else {
recover(token, ctx);
return parseBacktickToken(ctx);
}
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
STNode rowTypeParameterNode = parseRowTypeParameter();
STNode keyConstraintNode;
STToken nextToken = peek();
if (isKeyKeyword(nextToken)) {
STNode keyKeywordToken = getKeyKeyword(consume());
keyConstraintNode = parseKeyConstraint(keyKeywordToken);
} else {
keyConstraintNode = STNodeFactory.createEmptyNode();
}
return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParameterNode, keyConstraintNode);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
startContext(ParserRuleContext.ROW_TYPE_PARAM);
STNode rowTypeParameterNode = parseTypeParameter();
endContext();
return rowTypeParameterNode;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
STNode ltToken = parseLTToken();
STNode typeNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
STNode gtToken = parseGTToken();
return STNodeFactory.createTypeParameterNode(ltToken, typeNode, gtToken);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
switch (peek().kind) {
case OPEN_PAREN_TOKEN:
return parseKeySpecifier(keyKeywordToken);
case LT_TOKEN:
return parseKeyTypeConstraint(keyKeywordToken);
default:
recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);
return parseKeyConstraint(keyKeywordToken);
}
}
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
startContext(ParserRuleContext.KEY_SPECIFIER);
STNode openParenToken = parseOpenParenthesis();
STNode fieldNamesNode = parseFieldNames();
STNode closeParenToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNamesNode, closeParenToken);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
STNode typeParameterNode = parseTypeParameter();
return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParameterNode);
}
    /**
     * Parse function type descriptor.
     * <p>
     * <code>
     * function-type-descriptor := function-quals function function-signature
     * <br/> | [isolated] function
     * <br/>
     * function-quals := (transactional | isolated)*
     * </code>
     *
     * @param qualifiers Preceding type descriptor qualifiers
     * @return Function type descriptor node
     */
    private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
        startContext(ParserRuleContext.FUNC_TYPE_DESC);
        STNode functionKeyword = parseFunctionKeyword();
        boolean hasFuncSignature = false;
        STNode signature = STNodeFactory.createEmptyNode();
        // A signature is parsed when "(" follows, or when "transactional" was given
        // (the transactional qualifier is only valid on the signatured form).
        if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {
            signature = parseFuncSignature(true);
            hasFuncSignature = true;
        }
        // Validate the qualifiers; invalid ones are attached as invalid-node minutiae,
        // and the function keyword may be rewritten in the process.
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);
        STNode qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
    }
    /**
     * Get the last node of the given node list.
     *
     * @param nodeList Node list; assumed non-empty
     * @return Last node in the list
     */
    private STNode getLastNodeInList(List<STNode> nodeList) {
        return nodeList.get(nodeList.size() - 1);
    }
    /**
     * Validate the qualifiers of a function type descriptor and build the qualifier
     * node list. Duplicates and disallowed qualifiers are attached as invalid-node
     * minutiae rather than dropped, keeping the tree lossless.
     *
     * @param qualifierList Raw qualifiers as parsed
     * @param functionKeyword Function keyword; may be cloned to carry an invalid qualifier
     * @param hasFuncSignature Whether the type descriptor has a function signature
     *                         (transactional is only allowed in that case)
     * @return Two-element array: [validated qualifier node list, (possibly updated) function keyword]
     */
    private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword,
                                                boolean hasFuncSignature) {
        List<STNode> validatedList = new ArrayList<>();
        for (int i = 0; i < qualifierList.size(); i++) {
            STNode qualifier = qualifierList.get(i);
            int nextIndex = i + 1;
            if (isSyntaxKindInList(validatedList, qualifier.kind)) {
                // Duplicate qualifier: attach to the previously accepted one.
                updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                        DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
            } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
                validatedList.add(qualifier);
            } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
                // "isolated" is valid even without a signature.
                validatedList.add(qualifier);
            } else if (qualifierList.size() == nextIndex) {
                // Last qualifier is invalid: attach it to the function keyword.
                functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            } else {
                // Invalid qualifier in the middle: attach it to the following qualifier.
                updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                        DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
            }
        }
        STNode nodeList = STNodeFactory.createNodeList(validatedList);
        return new STNode[]{ nodeList, functionKeyword };
    }
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
switch (tokenKind) {
case ISOLATED_KEYWORD:
case TRANSACTIONAL_KEYWORD:
return true;
default:
return false;
}
}
/**
* Parse explicit anonymous function expression.
* <p>
* <code>explicit-anonymous-function-expr :=
* [annots] (isolated| transactional) function function-signature anon-func-body</code>
*
* @param annots Annotations.
* @param qualifiers Function qualifiers
* @param isRhsExpr Is expression in rhs context
* @return Anonymous function expression node
*/
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
STNode funcKeyword = parseFunctionKeyword();
STNode[] nodes = createFuncTypeQualNodeList(qualifiers, funcKeyword, true);
STNode qualifierList = nodes[0];
funcKeyword = nodes[1];
STNode funcSignature = parseFuncSignature(false);
STNode funcBody = parseAnonFuncBody(isRhsExpr);
return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList, funcKeyword,
funcSignature, funcBody);
}
/**
* Parse anonymous function body.
* <p>
* <code>anon-func-body := block-function-body | expr-function-body</code>
*
* @param isRhsExpr Is expression in rhs context
* @return Anon function body node
*/
private STNode parseAnonFuncBody(boolean isRhsExpr) {
switch (peek().kind) {
case OPEN_BRACE_TOKEN:
case EOF_TOKEN:
STNode body = parseFunctionBodyBlock(true);
endContext();
return body;
case RIGHT_DOUBLE_ARROW_TOKEN:
endContext();
return parseExpressionFuncBody(true, isRhsExpr);
default:
recover(peek(), ParserRuleContext.ANON_FUNC_BODY);
return parseAnonFuncBody(isRhsExpr);
}
}
/**
* Parse expression function body.
* <p>
* <code>expr-function-body := => expression</code>
*
* @param isAnon Is anonymous function.
* @param isRhsExpr Is expression in rhs context
* @return Expression function body node
*/
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
STNode rightDoubleArrow = parseDoubleRightArrow();
STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
STNode semiColon;
if (isAnon) {
semiColon = STNodeFactory.createEmptyNode();
} else {
semiColon = parseSemicolon();
}
return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
* Parse '=>' token.
*
* @return Double right arrow token
*/
private STNode parseDoubleRightArrow() {
STToken token = peek();
if (token.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.EXPR_FUNC_BODY_START);
return parseDoubleRightArrow();
}
}
    /**
     * Parse the arrow and body of an implicit anonymous function, normalizing the
     * already-parsed parameter expression into a valid parameter node first.
     *
     * @param params Expression parsed before the "=>"; rewritten per its kind below
     * @param isRhsExpr Is expression in rhs context
     * @return Implicit anonymous function expression node
     */
    private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
        switch (params.kind) {
            case SIMPLE_NAME_REFERENCE:
            case INFER_PARAM_LIST:
                // Already in a valid parameter form; use as-is.
                break;
            case BRACED_EXPRESSION:
                // "(x)" — unwrap into a single-parameter infer-param list.
                params = getAnonFuncParam((STBracedExpressionNode) params);
                break;
            case NIL_LITERAL:
                // "()" — reuse the nil literal's parens as an empty parameter list.
                STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;
                params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,
                        STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);
                break;
            default:
                // Invalid parameter expression: replace it with a missing identifier and
                // attach the original expression as invalid-node minutiae.
                STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
                syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                        DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
                params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
        }
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
        return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
    }
/**
* Create a new anon-func-param node from a braced expression.
*
* @param bracedExpression Braced expression
* @return Anon-func param node
*/
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
List<STNode> paramList = new ArrayList<>();
STNode innerExpression = bracedExpression.expression;
STNode openParen = bracedExpression.openParen;
if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
paramList.add(innerExpression);
} else {
openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
}
return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}
/**
* Parse implicit anon function expression.
*
* @param openParen Open parenthesis token
* @param firstParam First parameter
* @param isRhsExpr Is expression in rhs context
* @return Implicit anon function expression node
*/
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
List<STNode> paramList = new ArrayList<>();
paramList.add(firstParam);
STToken nextToken = peek();
STNode paramEnd;
STNode param;
while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
paramEnd = parseImplicitAnonFuncParamEnd();
if (paramEnd == null) {
break;
}
paramList.add(paramEnd);
param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
param = STNodeFactory.createSimpleNameReferenceNode(param);
paramList.add(param);
nextToken = peek();
}
STNode params = STNodeFactory.createNodeList(paramList);
STNode closeParen = parseCloseParenthesis();
endContext();
STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}
private STNode parseImplicitAnonFuncParamEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ANON_FUNC_PARAM_RHS);
return parseImplicitAnonFuncParamEnd();
}
}
/**
 * Check whether the given token terminates an anon-func parameter list.
 *
 * @param tokenKind Kind of the next token
 * @return {@code true} if the parameter list ends here
 */
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
            tokenKind == SyntaxKind.SEMICOLON_TOKEN ||
            tokenKind == SyntaxKind.RETURNS_KEYWORD ||
            tokenKind == SyntaxKind.TYPE_KEYWORD ||
            tokenKind == SyntaxKind.LISTENER_KEYWORD ||
            tokenKind == SyntaxKind.IF_KEYWORD ||
            tokenKind == SyntaxKind.WHILE_KEYWORD ||
            tokenKind == SyntaxKind.DO_KEYWORD ||
            tokenKind == SyntaxKind.OPEN_BRACE_TOKEN ||
            tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN;
}
/**
* Parse tuple type descriptor.
* <p>
* <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
* <br/><br/>
* tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
* | [ tuple-rest-descriptor ]
* <br/><br/>
* member-type-descriptor := [annots] type-descriptor
* tuple-rest-descriptor := type-descriptor ...
* </code>
*
* @return
*/
private STNode parseTupleTypeDesc() {
    // [ member (, member)* [, rest...] ]
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TUPLE_MEMBERS);
    STNode members = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // An empty member list is an error; attach the diagnostic to the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(members, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, members, closeBracket);
}
/**
* Parse tuple member type descriptors.
*
* @return Parsed node
*/
/**
 * Parse tuple member type descriptors.
 *
 * @return Parsed node
 */
private STNode parseTupleMemberTypeDescList() {
    // Empty tuple body: produce an empty list without consuming anything.
    if (isEndOfTypeList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode firstMember = parseMemberDescriptor();
    return parseTupleTypeMembers(firstMember, new ArrayList<>());
}
/**
 * Parse the remaining tuple members, given the first already-parsed member.
 *
 * @param membertypeDesc First (already parsed) member
 * @param memberTypeDescList Accumulator for members and separators
 * @return Node list of tuple members
 */
private STNode parseTupleTypeMembers(STNode membertypeDesc, List<STNode> memberTypeDescList) {
    while (!isEndOfTypeList(peek().kind)) {
        if (membertypeDesc.kind == SyntaxKind.REST_TYPE) {
            // A rest descriptor must be the last member; flag whatever follows.
            membertypeDesc = invalidateTypeDescAfterRestDesc(membertypeDesc);
            break;
        }
        STNode separator = parseTupleMemberRhs();
        if (separator == null) {
            break;
        }
        memberTypeDescList.add(membertypeDesc);
        memberTypeDescList.add(separator);
        membertypeDesc = parseMemberDescriptor();
    }
    memberTypeDescList.add(membertypeDesc);
    return STNodeFactory.createNodeList(memberTypeDescList);
}
/**
 * Parse one tuple member: optional annotations followed by a type descriptor,
 * then classified as a plain member or a rest descriptor.
 *
 * @return Member or rest-descriptor node
 */
private STNode parseMemberDescriptor() {
    STNode annotations = parseOptionalAnnotations();
    STNode memberType = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return createMemberOrRestNode(annotations, memberType);
}
/**
 * Wrap a parsed type as a plain tuple member, or as a rest descriptor if an
 * ellipsis follows. Annotations are not allowed on rest descriptors.
 *
 * @param annot Optional annotation list
 * @param typeDesc Parsed type descriptor
 * @return Member or rest-descriptor node
 */
private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
    STNode ellipsis = parseTypeDescInTupleRhs();
    if (ellipsis == null) {
        return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
    }
    if (!((STNodeList) annot).isEmpty()) {
        // Annotations before a rest descriptor are invalid; attach them as trivia.
        typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
    }
    return STNodeFactory.createRestDescriptorNode(typeDesc, ellipsis);
}
// Consume and invalidate any members appearing after a rest descriptor,
// attaching them (and their separators) to the rest descriptor as invalid trivia.
private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
while (!isEndOfTypeList(peek().kind)) {
STNode tupleMemberRhs = parseTupleMemberRhs();
if (tupleMemberRhs == null) {
break;
}
// Separator carries no diagnostic; the member itself carries the error.
restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseMemberDescriptor(),
DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
}
return restDescriptor;
}
/**
 * Parse what follows a tuple member: a comma separator, or the closing
 * bracket (returned as {@code null} to signal end-of-list).
 *
 * @return Comma token, or {@code null} at the close bracket
 */
private STNode parseTupleMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
    return parseTupleMemberRhs();
}
/**
 * Check for an ellipsis after a tuple member's type descriptor.
 *
 * @return Ellipsis token if present, {@code null} if the member ends here
 */
private STNode parseTypeDescInTupleRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseEllipsis();
    }
    recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
    return parseTypeDescInTupleRhs();
}
/**
 * Check whether the given token terminates a type-descriptor list.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} if the type list ends here
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
            nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN ||
            nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN ||
            nextTokenKind == SyntaxKind.EOF_TOKEN ||
            nextTokenKind == SyntaxKind.EQUAL_TOKEN ||
            nextTokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
* Parse table constructor or query expression.
* <p>
* <code>
* table-constructor-or-query-expr := table-constructor-expr | query-expr
* <br/>
* table-constructor-expr := table [key-specifier] [ [row-list] ]
* <br/>
* query-expr := [query-construct-type] query-pipeline select-clause
* [query-construct-type] query-pipeline select-clause on-conflict-clause?
* <br/>
* query-construct-type := table key-specifier | stream
* </code>
*
* @return Parsed node
*/
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    // Wrap the actual parse in its own context so recovery knows where we are.
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode result = parseTableConstructorOrQueryInternal(isRhsExpr);
    endContext();
    return result;
}
/**
 * Dispatch on the leading token: {@code from} starts a plain query,
 * {@code stream} a stream-typed query, {@code table} either a table
 * constructor or a table-typed query.
 *
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.FROM_KEYWORD) {
        return parseQueryExprRhs(STNodeFactory.createEmptyNode(), isRhsExpr);
    }
    if (nextTokenKind == SyntaxKind.STREAM_KEYWORD) {
        STNode constructType = parseQueryConstructType(parseStreamKeyword(), null);
        return parseQueryExprRhs(constructType, isRhsExpr);
    }
    if (nextTokenKind == SyntaxKind.TABLE_KEYWORD) {
        return parseTableConstructorOrQuery(parseTableKeyword(), isRhsExpr);
    }
    recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
    return parseTableConstructorOrQueryInternal(isRhsExpr);
}
// After `table`: an open bracket means a table constructor with no key
// specifier; `key` (keyword or contextual identifier) means a key specifier
// follows, after which it may still be either form. Anything else recovers.
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
STNode keySpecifier;
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_BRACKET_TOKEN:
keySpecifier = STNodeFactory.createEmptyNode();
return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
case KEY_KEYWORD:
keySpecifier = parseKeySpecifier();
return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
case IDENTIFIER_TOKEN:
// `key` may arrive as a plain identifier; treat it as the keyword if it matches.
if (isKeyKeyword(nextToken)) {
keySpecifier = parseKeySpecifier();
return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
}
break;
default:
break;
}
// Unexpected token after `table`: recover and retry this method.
recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
}
/**
 * After `table [key-specifier]`: decide between a query expression
 * ({@code from} follows) and a table constructor ({@code [} follows).
 *
 * @param tableKeyword Table keyword
 * @param keySpecifier Parsed key specifier
 * @param isRhsExpr Is expression in rhs context
 * @return Parsed node
 */
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.FROM_KEYWORD) {
        return parseQueryExprRhs(parseQueryConstructType(tableKeyword, keySpecifier), isRhsExpr);
    }
    if (nextTokenKind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
    }
    recover(peek(), ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
}
/**
* Parse query construct type.
* <p>
* <code>query-construct-type := table key-specifier | stream</code>
*
* @return Parsed node
*/
// keyword is `table` or `stream`; keySpecifier may be null/empty for streams.
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}
/**
* Parse query action or expression.
* <p>
* <code>
* query-expr-rhs := query-pipeline select-clause
* query-pipeline select-clause on-conflict-clause?
* <br/>
* query-pipeline := from-clause intermediate-clause*
* </code>
*
* @param queryConstructType queryConstructType that precedes this rhs
* @return Parsed node
*/
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
switchContext(ParserRuleContext.QUERY_EXPRESSION);
// query-pipeline := from-clause intermediate-clause*
STNode fromClause = parseFromClause(isRhsExpr);
List<STNode> clauses = new ArrayList<>();
STNode intermediateClause;
STNode selectClause = null;
while (!isEndOfIntermediateClause(peek().kind)) {
intermediateClause = parseIntermediateClause(isRhsExpr);
if (intermediateClause == null) {
break;
}
// Once a select clause is seen, any further clause is invalid; attach it
// to the select clause as invalid trivia.
if (selectClause != null) {
selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
continue;
}
if (intermediateClause.kind != SyntaxKind.SELECT_CLAUSE) {
clauses.add(intermediateClause);
continue;
}
selectClause = intermediateClause;
// For a nested query, or when no further query clause can start here,
// stop so the outer query can claim the remaining clauses.
if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek().kind)) {
break;
}
}
// `do` after the pipeline makes this a query action, not a query expression.
if (peek().kind == SyntaxKind.DO_KEYWORD) {
STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
return parseQueryAction(queryConstructType, queryPipeline, selectClause);
}
// No select clause: synthesize a missing one and attach the diagnostic to
// the last clause (or to the from-clause when there are no clauses).
if (selectClause == null) {
STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
STNode expr = STNodeFactory
.createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
if (clauses.isEmpty()) {
fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
} else {
int lastIndex = clauses.size() - 1;
STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
clauses.set(lastIndex, intClauseWithDiagnostic);
}
}
STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
// Optional trailing `on conflict` clause.
STNode onConflictClause = parseOnConflictClause(isRhsExpr);
return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
onConflictClause);
}
/**
* Check whether currently parsing query expr is a nested query expression.
*
* @return <code>true</code> if currently parsing query-expr is a nested query-expr. <code>false</code> otherwise.
*/
private boolean isNestedQueryExpr() {
// More than one QUERY_EXPRESSION context on the stack means we are inside
// a query that is itself inside another query.
return Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION) > 1;
}
/**
 * Check whether the given token can start another intermediate query clause.
 *
 * @param syntaxKind Kind of the next token
 * @return {@code true} if an intermediate clause can start here
 */
private boolean isValidIntermediateQueryStart(SyntaxKind syntaxKind) {
    return syntaxKind == SyntaxKind.FROM_KEYWORD ||
            syntaxKind == SyntaxKind.WHERE_KEYWORD ||
            syntaxKind == SyntaxKind.LET_KEYWORD ||
            syntaxKind == SyntaxKind.SELECT_KEYWORD ||
            syntaxKind == SyntaxKind.JOIN_KEYWORD ||
            syntaxKind == SyntaxKind.OUTER_KEYWORD ||
            syntaxKind == SyntaxKind.ORDER_KEYWORD ||
            syntaxKind == SyntaxKind.BY_KEYWORD ||
            syntaxKind == SyntaxKind.ASCENDING_KEYWORD ||
            syntaxKind == SyntaxKind.DESCENDING_KEYWORD ||
            syntaxKind == SyntaxKind.LIMIT_KEYWORD;
}
/**
* Parse an intermediate clause.
* <p>
* <code>
* intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
* </code>
*
* @return Parsed node
*/
private STNode parseIntermediateClause(boolean isRhsExpr) {
switch (peek().kind) {
case FROM_KEYWORD:
return parseFromClause(isRhsExpr);
case WHERE_KEYWORD:
return parseWhereClause(isRhsExpr);
case LET_KEYWORD:
return parseLetClause(isRhsExpr);
case SELECT_KEYWORD:
return parseSelectClause(isRhsExpr);
case JOIN_KEYWORD:
case OUTER_KEYWORD:
// `outer` always introduces an (outer) join clause.
return parseJoinClause(isRhsExpr);
case ORDER_KEYWORD:
case BY_KEYWORD:
case ASCENDING_KEYWORD:
case DESCENDING_KEYWORD:
// Any order-by-related keyword starts (or recovers into) an order-by clause.
return parseOrderByClause(isRhsExpr);
case LIMIT_KEYWORD:
return parseLimitClause(isRhsExpr);
case DO_KEYWORD:
case SEMICOLON_TOKEN:
case ON_KEYWORD:
case CONFLICT_KEYWORD:
// These end the intermediate-clause list; null tells the caller to stop.
return null;
default:
recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
return parseIntermediateClause(isRhsExpr);
}
}
/**
* Parse join-keyword.
*
* @return Join-keyword node
*/
private STNode parseJoinKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.JOIN_KEYWORD) {
        recover(nextToken, ParserRuleContext.JOIN_KEYWORD);
        return parseJoinKeyword();
    }
    return consume();
}
/**
* Parse equals keyword.
*
* @return Parsed node
*/
private STNode parseEqualsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EQUALS_KEYWORD) {
        recover(nextToken, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
    return consume();
}
// Tokens that terminate the intermediate-clause list of a query pipeline.
// Falls back to "would this start an expression rhs" for everything else.
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
switch (tokenKind) {
case CLOSE_BRACE_TOKEN:
case CLOSE_PAREN_TOKEN:
case CLOSE_BRACKET_TOKEN:
case OPEN_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case PUBLIC_KEYWORD:
case FUNCTION_KEYWORD:
case EOF_TOKEN:
case RESOURCE_KEYWORD:
case LISTENER_KEYWORD:
case DOCUMENTATION_STRING:
case PRIVATE_KEYWORD:
case RETURNS_KEYWORD:
case SERVICE_KEYWORD:
case TYPE_KEYWORD:
case CONST_KEYWORD:
case FINAL_KEYWORD:
case DO_KEYWORD:
case ON_KEYWORD:
case CONFLICT_KEYWORD:
return true;
default:
return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
}
}
/**
* Parse from clause.
* <p>
* <code>from-clause := from typed-binding-pattern in expression</code>
*
* @return Parsed node
*/
private STNode parseFromClause(boolean isRhsExpr) {
    // from typed-binding-pattern in expression
    STNode fromKeyword = parseFromKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, bindingPattern, inKeyword, collectionExpr);
}
/**
* Parse from-keyword.
*
* @return From-keyword node
*/
private STNode parseFromKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FROM_KEYWORD) {
        recover(nextToken, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
    return consume();
}
/**
* Parse where clause.
* <p>
* <code>where-clause := where expression</code>
*
* @return Parsed node
*/
private STNode parseWhereClause(boolean isRhsExpr) {
    // where expression
    STNode whereKeyword = parseWhereKeyword();
    STNode condition = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, condition);
}
/**
* Parse where-keyword.
*
* @return Where-keyword node
*/
private STNode parseWhereKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHERE_KEYWORD) {
        recover(nextToken, ParserRuleContext.WHERE_KEYWORD);
        return parseWhereKeyword();
    }
    return consume();
}
/**
* Parse limit-keyword.
*
* @return limit-keyword node
*/
private STNode parseLimitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.LIMIT_KEYWORD);
        return parseLimitKeyword();
    }
    return consume();
}
/**
* Parse let clause.
* <p>
* <code>let-clause := let let-var-decl [, let-var-decl]* </code>
*
* @return Parsed node
*/
private STNode parseLetClause(boolean isRhsExpr) {
    // let let-var-decl [, let-var-decl]*
    STNode letKeyword = parseLetKeyword();
    STNode varDecls = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
    // An empty declaration list is an error; attach the diagnostic to `let`.
    letKeyword = cloneWithDiagnosticIfListEmpty(varDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    return STNodeFactory.createLetClauseNode(letKeyword, varDecls);
}
/**
* Parse order-keyword.
*
* @return Order-keyword node
*/
private STNode parseOrderKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ORDER_KEYWORD) {
        recover(nextToken, ParserRuleContext.ORDER_KEYWORD);
        return parseOrderKeyword();
    }
    return consume();
}
/**
* Parse by-keyword.
*
* @return By-keyword node
*/
private STNode parseByKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BY_KEYWORD) {
        recover(nextToken, ParserRuleContext.BY_KEYWORD);
        return parseByKeyword();
    }
    return consume();
}
/**
* Parse order by clause.
* <p>
* <code>order-by-clause := order by order-key-list
* </code>
*
* @return Parsed node
*/
private STNode parseOrderByClause(boolean isRhsExpr) {
    // order by order-key-list
    STNode orderKeyword = parseOrderKeyword();
    STNode byKeyword = parseByKeyword();
    STNode keyList = parseOrderKeyList(isRhsExpr);
    // An empty key list is an error; attach the diagnostic to `by`.
    byKeyword = cloneWithDiagnosticIfListEmpty(keyList, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
    return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, keyList);
}
/**
* Parse order key.
* <p>
* <code>order-key-list := order-key [, order-key]*</code>
*
* @return Parsed node
*/
private STNode parseOrderKeyList(boolean isRhsExpr) {
startContext(ParserRuleContext.ORDER_KEY_LIST);
// Accumulates order keys and comma separators in alternating order.
List<STNode> orderKeys = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfOrderKeys(nextToken.kind)) {
endContext();
return STNodeFactory.createEmptyNodeList();
}
STNode orderKey = parseOrderKey(isRhsExpr);
orderKeys.add(orderKey);
nextToken = peek();
STNode orderKeyListMemberEnd;
while (!isEndOfOrderKeys(nextToken.kind)) {
// Separator (comma); null means the key list ended.
orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
if (orderKeyListMemberEnd == null) {
break;
}
orderKeys.add(orderKeyListMemberEnd);
orderKey = parseOrderKey(isRhsExpr);
orderKeys.add(orderKey);
nextToken = peek();
}
endContext();
return STNodeFactory.createNodeList(orderKeys);
}
/**
 * Check whether the given token terminates an order-key list.
 *
 * @param tokenKind Kind of the next token
 * @return {@code true} if the order-key list ends here
 */
private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {
    // Commas and direction keywords continue the list.
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.ASCENDING_KEYWORD ||
            tokenKind == SyntaxKind.DESCENDING_KEYWORD) {
        return false;
    }
    if (tokenKind == SyntaxKind.SEMICOLON_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    // Otherwise the list ends exactly when the next query clause begins.
    return isQueryClauseStartToken(tokenKind);
}
/**
 * Check whether the given token starts a query clause.
 *
 * @param tokenKind Kind of the next token
 * @return {@code true} if a query clause starts here
 */
private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.SELECT_KEYWORD ||
            tokenKind == SyntaxKind.LET_KEYWORD ||
            tokenKind == SyntaxKind.WHERE_KEYWORD ||
            tokenKind == SyntaxKind.OUTER_KEYWORD ||
            tokenKind == SyntaxKind.JOIN_KEYWORD ||
            tokenKind == SyntaxKind.ORDER_KEYWORD ||
            tokenKind == SyntaxKind.DO_KEYWORD ||
            tokenKind == SyntaxKind.FROM_KEYWORD ||
            tokenKind == SyntaxKind.LIMIT_KEYWORD;
}
/**
 * Parse what follows an order key: a comma separator, or {@code null} when
 * the list ends (EOF or start of the next query clause).
 *
 * @return Comma token, or {@code null} at end of the key list
 */
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken.kind)) {
        return null;
    }
    recover(nextToken, ParserRuleContext.ORDER_KEY_LIST_END);
    return parseOrderKeyListMemberEnd();
}
/**
* Parse order key.
* <p>
* <code>order-key := expression (ascending | descending)?</code>
*
* @return Parsed node
*/
private STNode parseOrderKey(boolean isRhsExpr) {
    // order-key := expression (ascending | descending)?
    STNode keyExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STToken nextToken = peek();
    STNode direction;
    if (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD || nextToken.kind == SyntaxKind.DESCENDING_KEYWORD) {
        direction = consume();
    } else {
        // Direction is optional; absence yields an empty node.
        direction = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createOrderKeyNode(keyExpr, direction);
}
/**
* Parse select clause.
* <p>
* <code>select-clause := select expression</code>
*
* @return Parsed node
*/
private STNode parseSelectClause(boolean isRhsExpr) {
    // select expression
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKeyword = parseSelectKeyword();
    STNode selectExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKeyword, selectExpr);
}
/**
* Parse select-keyword.
*
* @return Select-keyword node
*/
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
    return consume();
}
/**
* Parse on-conflict clause.
* <p>
* <code>
* onConflictClause := on conflict expression
* </code>
*
* @return On conflict clause node
*/
private STNode parseOnConflictClause(boolean isRhsExpr) {
    // `on conflict expression` is optional; absence yields an empty node.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind != SyntaxKind.ON_KEYWORD && nextTokenKind != SyntaxKind.CONFLICT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode conflictExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, conflictExpr);
}
/**
* Parse conflict keyword.
*
* @return Conflict keyword node
*/
private STNode parseConflictKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
    return consume();
}
/**
* Parse limit clause.
* <p>
* <code>limitClause := limit expression</code>
*
* @return Limit expression node
*/
private STNode parseLimitClause(boolean isRhsExpr) {
    // limit expression
    STNode limitKeyword = parseLimitKeyword();
    STNode limitExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, limitExpr);
}
/**
* Parse join clause.
* <p>
* <code>
* join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
* <br/>
* join-var-decl := join (typeName | var) bindingPattern
* <br/>
* outer-join-var-decl := outer join var binding-pattern
* </code>
*
* @return Join clause
*/
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    // `outer` is optional and, when present, precedes `join`.
    STNode outerKeyword =
            peek().kind == SyntaxKind.OUTER_KEYWORD ? consume() : STNodeFactory.createEmptyNode();
    STNode joinKeyword = parseJoinKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    // The on-clause is parsed outside the join context (it may be synthesized).
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, bindingPattern, inKeyword,
            collectionExpr, onCondition);
}
/**
* Parse on clause.
* <p>
* <code>on clause := `on` expression `equals` expression</code>
*
* @return On clause node
*/
private STNode parseOnClause(boolean isRhsExpr) {
    // If the next query clause already starts, the on-clause is missing:
    // synthesize one with missing tokens instead of consuming anything.
    if (isQueryClauseStartToken(peek().kind)) {
        return createMissingOnClauseNode();
    }
    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    endContext();
    STNode rhs = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhs, equalsKeyword, rhs);
}
/**
 * Build an on-clause made entirely of missing tokens, for use when the
 * clause is absent from the source.
 *
 * @return Synthesized on-clause node with missing-token diagnostics
 */
private STNode createMissingOnClauseNode() {
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    // Both operands reuse the same missing identifier, wrapped separately.
    STNode lhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode rhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode onKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode equalsKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    return STNodeFactory.createOnClauseNode(onKeyword, lhs, equalsKeyword, rhs);
}
/**
* Parse start action.
* <p>
* <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
*
* @return Start action node
*/
private STNode parseStartAction(STNode annots) {
STNode startKeyword = parseStartKeyword();
STNode expr = parseActionOrExpression();
// Only call-shaped expressions are valid after `start`.
switch (expr.kind) {
case FUNCTION_CALL:
case METHOD_CALL:
case REMOTE_METHOD_CALL_ACTION:
break;
case SIMPLE_NAME_REFERENCE:
case QUALIFIED_NAME_REFERENCE:
case FIELD_ACCESS:
case ASYNC_SEND_ACTION:
// Close-but-invalid forms: rewrite into the matching call shape with
// missing parens (see generateValidExprForStartAction).
expr = generateValidExprForStartAction(expr);
break;
default:
// Anything else: attach the whole expression to `start` as invalid
// trivia and substitute a fully-missing function call.
startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
STNodeFactory.createEmptyNodeList(), closeParenToken);
break;
}
return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
// Convert an almost-valid `start` operand into the corresponding call form
// by appending a missing argument list: field access -> method call,
// async send -> remote method call, name reference -> function call.
private STNode generateValidExprForStartAction(STNode expr) {
STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
STNode arguments = STNodeFactory.createEmptyNodeList();
STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
switch (expr.kind) {
case FIELD_ACCESS:
STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
closeParenToken);
case ASYNC_SEND_ACTION:
STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
closeParenToken);
default:
// SIMPLE_NAME_REFERENCE / QUALIFIED_NAME_REFERENCE become a function call.
return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
}
}
/**
* Parse start keyword.
*
* @return Start keyword node
*/
private STNode parseStartKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.START_KEYWORD) {
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
    return consume();
}
/**
* Parse flush action.
* <p>
* <code>flush-action := flush [peer-worker]</code>
*
* @return flush action node
*/
private STNode parseFlushAction() {
    // flush [peer-worker] — keyword first, then the optional worker name.
    STNode flushKeyword = parseFlushKeyword();
    return STNodeFactory.createFlushActionNode(flushKeyword, parseOptionalPeerWorkerName());
}
/**
* Parse flush keyword.
*
* @return flush keyword node
*/
private STNode parseFlushKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
    return consume();
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | function</code>
*
* @return peer worker name node
*/
private STNode parseOptionalPeerWorkerName() {
    // peer-worker := worker-name | function; absence yields an empty node.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    return STNodeFactory.createEmptyNode();
}
/**
* Parse intersection type descriptor.
* <p>
* intersection-type-descriptor := type-descriptor & type-descriptor
* </p>
*
* @return Parsed node
*/
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
boolean isTypedBindingPattern) {
// The `&` token is already the current token here.
STNode bitwiseAndToken = consume();
// Parse the rhs at INTERSECTION precedence so tighter-binding types attach first.
STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
TypePrecedence.INTERSECTION);
// Merging (rather than plain node creation) handles precedence/associativity.
return mergeTypesWithIntersection(leftTypeDesc, bitwiseAndToken, rightTypeDesc);
}
/**
* Creates an intersection type descriptor after validating lhs and rhs types.
* <p>
* <i>Note: Since type precedence and associativity are not taken into account here,
* this method should not be called directly when types are unknown.
* <br/>
* Call {@link
*
* @param leftTypeDesc lhs type
* @param bitwiseAndToken bitwise-and token
* @param rightTypeDesc rhs type
* @return an IntersectionTypeDescriptorNode
*/
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    // `var` is not a valid operand of an intersection; validate both sides.
    STNode lhs = validateForUsageOfVar(leftTypeDesc);
    STNode rhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(lhs, bitwiseAndToken, rhs);
}
/**
* Parse singleton type descriptor.
* <p>
* singleton-type-descriptor := simple-const-expr
* simple-const-expr :=
* nil-literal
* | boolean-literal
* | [Sign] int-literal
* | [Sign] floating-point-literal
* | string-literal
* | constant-reference-expr
* </p>
*/
private STNode parseSingletonTypeDesc() {
    // A singleton type is just a simple constant expression used as a type.
    return STNodeFactory.createSingletonTypeDescriptorNode(parseSimpleConstExpr());
}
// Parse a sign (+/-) followed by a numeric literal, as a unary expression.
private STNode parseSignedIntOrFloat() {
STNode operator = parseUnaryOperator();
STNode literal;
STToken nextToken = peek();
switch (nextToken.kind) {
case HEX_INTEGER_LITERAL_TOKEN:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
literal = parseBasicLiteral();
break;
default:
// Anything else is treated (or recovered) as a decimal int literal.
literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
}
return STNodeFactory.createUnaryExpressionNode(operator, literal);
}
/**
 * Check whether a token can begin a singleton type descriptor. A sign token
 * counts only when the token after it is a numeric literal.
 *
 * @param tokenKind Kind of the next token
 * @param nextNextToken Token after the next token
 * @return {@code true} if a singleton type descriptor can start here
 */
private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
    if (tokenKind == SyntaxKind.PLUS_TOKEN || tokenKind == SyntaxKind.MINUS_TOKEN) {
        return isIntOrFloat(nextNextToken);
    }
    return tokenKind == SyntaxKind.STRING_LITERAL_TOKEN ||
            tokenKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
            tokenKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN ||
            tokenKind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN ||
            tokenKind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN ||
            tokenKind == SyntaxKind.TRUE_KEYWORD ||
            tokenKind == SyntaxKind.FALSE_KEYWORD ||
            tokenKind == SyntaxKind.NULL_KEYWORD;
}
/**
 * Check whether the given token is an integer or floating-point literal.
 *
 * @param token Token to inspect
 * @return {@code true} for any int/float literal token
 */
static boolean isIntOrFloat(STToken token) {
    return token.kind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
            token.kind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN ||
            token.kind == SyntaxKind.DECIMAL_FLOATING_POINT_LITERAL_TOKEN ||
            token.kind == SyntaxKind.HEX_FLOATING_POINT_LITERAL_TOKEN;
}
/**
* Check whether the parser reached to a valid expression start.
*
* @param nextTokenKind Kind of the next immediate token.
* @param nextTokenIndex Index to the next token.
* @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
*/
private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
nextTokenIndex++;
switch (nextTokenKind) {
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case STRING_LITERAL_TOKEN:
case NULL_KEYWORD:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
// A literal followed by `|` or `&` may be a singleton type in a
// union/intersection; keep scanning past the operator.
if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
nextTokenIndex++;
return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
}
return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||
nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
case IDENTIFIER_TOKEN:
// An identifier starts an expression only if what follows can continue one.
return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
case OPEN_PAREN_TOKEN:
case CHECK_KEYWORD:
case CHECKPANIC_KEYWORD:
case OPEN_BRACE_TOKEN:
case TYPEOF_KEYWORD:
case NEGATION_TOKEN:
case EXCLAMATION_MARK_TOKEN:
case TRAP_KEYWORD:
case OPEN_BRACKET_TOKEN:
case LT_TOKEN:
case FROM_KEYWORD:
case LET_KEYWORD:
case BACKTICK_TOKEN:
case NEW_KEYWORD:
case LEFT_ARROW_TOKEN:
case FUNCTION_KEYWORD:
case TRANSACTIONAL_KEYWORD:
case ISOLATED_KEYWORD:
case BASE16_KEYWORD:
case BASE64_KEYWORD:
return true;
case PLUS_TOKEN:
case MINUS_TOKEN:
// A sign is an expression start iff what follows is one.
return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
case TABLE_KEYWORD:
// `table` starts an expression only as a table-typed query (`table from ...`).
return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
case STREAM_KEYWORD:
STToken nextNextToken = peek(nextTokenIndex);
return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
case ERROR_KEYWORD:
// `error` starts an expression only as an error constructor `error(...)`.
return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
case XML_KEYWORD:
case STRING_KEYWORD:
// Template expressions: `xml` / `string` followed by a backtick.
return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
case START_KEYWORD:
case FLUSH_KEYWORD:
case WAIT_KEYWORD:
default:
return false;
}
}
/**
* Parse sync send action.
* <p>
* <code>sync-send-action := expression ->> peer-worker</code>
*
* @param expression LHS expression of the sync send action
* @return Sync send action node
*/
private STNode parseSyncSendAction(STNode expression) {
    // expression ->> peer-worker
    STNode syncSendToken = parseSyncSendToken();
    return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, parsePeerWorkerName());
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | function</code>
*
* @return peer worker name node
*/
private STNode parsePeerWorkerName() {
STToken token = peek();
switch (token.kind) {
case IDENTIFIER_TOKEN:
case FUNCTION_KEYWORD:
return STNodeFactory.createSimpleNameReferenceNode(consume());
default:
recover(token, ParserRuleContext.PEER_WORKER_NAME);
return parsePeerWorkerName();
}
}
/**
* Parse sync send token.
* <p>
* <code>sync-send-token := ->> </code>
*
* @return sync send token
*/
private STNode parseSyncSendToken() {
STToken token = peek();
if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
return parseSyncSendToken();
}
}
/**
* Parse receive action.
* <p>
* <code>receive-action := single-receive-action | multiple-receive-action</code>
*
* @return Receive action
*/
private STNode parseReceiveAction() {
STNode leftArrow = parseLeftArrowToken();
STNode receiveWorkers = parseReceiveWorkers();
return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}
private STNode parseReceiveWorkers() {
switch (peek().kind) {
case FUNCTION_KEYWORD:
case IDENTIFIER_TOKEN:
return parsePeerWorkerName();
case OPEN_BRACE_TOKEN:
return parseMultipleReceiveWorkers();
default:
recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
return parseReceiveWorkers();
}
}
/**
* Parse multiple worker receivers.
* <p>
* <code>{ receive-field (, receive-field)* }</code>
*
* @return Multiple worker receiver node
*/
private STNode parseMultipleReceiveWorkers() {
startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
STNode openBrace = parseOpenBrace();
STNode receiveFields = parseReceiveFields();
STNode closeBrace = parseCloseBrace();
endContext();
openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
}
private STNode parseReceiveFields() {
List<STNode> receiveFields = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfReceiveFields(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode receiveField = parseReceiveField();
receiveFields.add(receiveField);
nextToken = peek();
STNode recieveFieldEnd;
while (!isEndOfReceiveFields(nextToken.kind)) {
recieveFieldEnd = parseReceiveFieldEnd();
if (recieveFieldEnd == null) {
break;
}
receiveFields.add(recieveFieldEnd);
receiveField = parseReceiveField();
receiveFields.add(receiveField);
nextToken = peek();
}
return STNodeFactory.createNodeList(receiveFields);
}
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return true;
default:
return false;
}
}
private STNode parseReceiveFieldEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
return parseReceiveFieldEnd();
}
}
/**
* Parse receive field.
* <p>
* <code>receive-field := peer-worker | field-name : peer-worker</code>
*
* @return Receiver field node
*/
private STNode parseReceiveField() {
switch (peek().kind) {
case FUNCTION_KEYWORD:
STNode functionKeyword = consume();
return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
case IDENTIFIER_TOKEN:
STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
return createQualifiedReceiveField(identifier);
default:
recover(peek(), ParserRuleContext.RECEIVE_FIELD);
return parseReceiveField();
}
}
private STNode createQualifiedReceiveField(STNode identifier) {
if (peek().kind != SyntaxKind.COLON_TOKEN) {
return identifier;
}
STNode colon = parseColon();
STNode peerWorker = parsePeerWorkerName();
return createQualifiedNameReferenceNode(identifier, colon, peerWorker);
}
/**
* Parse left arrow (<-) token.
*
* @return left arrow token
*/
private STNode parseLeftArrowToken() {
STToken token = peek();
if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
return parseLeftArrowToken();
}
}
/**
* Parse signed right shift token (>>).
* This method should only be called by seeing a `DOUBLE_GT_TOKEN` or
* by seeing a `GT_TOKEN` followed by a `GT_TOKEN`
*
* @return Parsed node
*/
private STNode parseSignedRightShiftToken() {
STNode firstToken = consume();
if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
return firstToken;
}
STToken endLGToken = consume();
STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),
endLGToken.trailingMinutiae());
if (hasTrailingMinutiae(firstToken)) {
doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
}
return doubleGTToken;
}
/**
* Parse unsigned right shift token (>>>).
* This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or
* by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s
*
* @return Parsed node
*/
private STNode parseUnsignedRightShiftToken() {
STNode firstToken = consume();
if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
return firstToken;
}
STNode middleGTToken = consume();
STNode endLGToken = consume();
STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());
boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);
boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
if (validOpenGTToken && validMiddleGTToken) {
return unsignedRightShiftToken;
}
unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
return unsignedRightShiftToken;
}
/**
* Parse wait action.
* <p>
* <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
*
* @return Wait action node
*/
private STNode parseWaitAction() {
STNode waitKeyword = parseWaitKeyword();
if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
return parseMultiWaitAction(waitKeyword);
}
return parseSingleOrAlternateWaitAction(waitKeyword);
}
/**
* Parse wait keyword.
*
* @return wait keyword
*/
private STNode parseWaitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WAIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.WAIT_KEYWORD);
return parseWaitKeyword();
}
}
    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();
        // `wait` immediately followed by a terminator: synthesize a missing identifier
        // so the tree still contains a wait-future expression, with a diagnostic.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }
        // Collect `expr (| expr)*`: expressions at even indices, pipe tokens at odd indices.
        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }
            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }
        endContext();
        // NOTE(review): only element 0 of the collected list reaches the tree — every
        // alternate expression after the first `|` is parsed but discarded. Presumably a
        // placeholder until alternate-wait is fully supported; confirm against the spec.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case OPEN_BRACE_TOKEN:
return true;
case PIPE_TOKEN:
default:
return false;
}
}
private STNode parseWaitFutureExpr() {
STNode waitFutureExpr = parseActionOrExpression();
if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
} else if (isAction(waitFutureExpr)) {
waitFutureExpr =
SyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
}
return waitFutureExpr;
}
private STNode parseWaitFutureExprEnd() {
STToken nextToken = peek();
switch (nextToken.kind) {
case PIPE_TOKEN:
return parsePipeToken();
default:
if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
return null;
}
recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
return parseWaitFutureExprEnd();
}
}
/**
* Parse multiple wait action.
* <p>
* <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
*
* @param waitKeyword Wait keyword
* @return Multiple wait action node
*/
private STNode parseMultiWaitAction(STNode waitKeyword) {
startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
STNode openBrace = parseOpenBrace();
STNode waitFields = parseWaitFields();
STNode closeBrace = parseCloseBrace();
endContext();
openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
}
private STNode parseWaitFields() {
List<STNode> waitFields = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfWaitFields(nextToken.kind)) {
return STNodeFactory.createEmptyNodeList();
}
STNode waitField = parseWaitField();
waitFields.add(waitField);
nextToken = peek();
STNode waitFieldEnd;
while (!isEndOfWaitFields(nextToken.kind)) {
waitFieldEnd = parseWaitFieldEnd();
if (waitFieldEnd == null) {
break;
}
waitFields.add(waitFieldEnd);
waitField = parseWaitField();
waitFields.add(waitField);
nextToken = peek();
}
return STNodeFactory.createNodeList(waitFields);
}
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return true;
default:
return false;
}
}
private STNode parseWaitFieldEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.WAIT_FIELD_END);
return parseWaitFieldEnd();
}
}
/**
* Parse wait field.
* <p>
* <code>wait-field := variable-name | field-name : wait-future-expr</code>
*
* @return Receiver field node
*/
private STNode parseWaitField() {
switch (peek().kind) {
case IDENTIFIER_TOKEN:
STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
return createQualifiedWaitField(identifier);
default:
recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
return parseWaitField();
}
}
private STNode createQualifiedWaitField(STNode identifier) {
if (peek().kind != SyntaxKind.COLON_TOKEN) {
return identifier;
}
STNode colon = parseColon();
STNode waitFutureExpr = parseWaitFutureExpr();
return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
}
/**
* Parse annot access expression.
* <p>
* <code>
* annot-access-expr := expression .@ annot-tag-reference
* <br/>
* annot-tag-reference := qualified-identifier | identifier
* </code>
*
* @param lhsExpr Preceding expression of the annot access access
* @return Parsed node
*/
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode annotAccessToken = parseAnnotChainingToken();
STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
}
/**
* Parse annot-chaining-token.
*
* @return Parsed node
*/
private STNode parseAnnotChainingToken() {
STToken token = peek();
if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
return parseAnnotChainingToken();
}
}
/**
* Parse field access identifier.
* <p>
* <code>field-access-identifier := qualified-identifier | identifier</code>
*
* @return Parsed node
*/
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
STToken nextToken = peek();
if (!isPredeclaredIdentifier(nextToken.kind)) {
STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
return parseQualifiedIdentifier(identifier, isInConditionalExpr);
}
return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}
/**
* Parse query action.
* <p>
* <code>query-action := query-pipeline do-clause
* <br/>
* do-clause := do block-stmt
* </code>
*
* @param queryConstructType Query construct type. This is only for validation
* @param queryPipeline Query pipeline
* @param selectClause Select clause if any This is only for validation.
* @return Query action node
*/
private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
if (queryConstructType != null) {
queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
}
if (selectClause != null) {
queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
}
startContext(ParserRuleContext.DO_CLAUSE);
STNode doKeyword = parseDoKeyword();
STNode blockStmt = parseBlockNode();
endContext();
return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
}
/**
* Parse 'do' keyword.
*
* @return do keyword node
*/
private STNode parseDoKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.DO_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.DO_KEYWORD);
return parseDoKeyword();
}
}
/**
* Parse optional field access or xml optional attribute access expression.
* <p>
* <code>
* optional-field-access-expr := expression ?. field-name
* <br/>
* xml-optional-attribute-access-expr := expression ?. xml-attribute-name
* <br/>
* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
* <br/>
* xml-qualified-name := xml-namespace-prefix : identifier
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
* @param lhsExpr Preceding expression of the optional access
* @return Parsed node
*/
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode optionalFieldAccessToken = parseOptionalChainingToken();
STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
}
/**
* Parse optional chaining token.
*
* @return parsed node
*/
private STNode parseOptionalChainingToken() {
STToken token = peek();
if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
return parseOptionalChainingToken();
}
}
    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     * <p>
     * The middle expression is ambiguous with a qualified name reference
     * (`a ? b : c` vs `a ? b:c` where `b:c` is module-qualified): when no
     * standalone colon follows, a qualified name ref embedded in the middle
     * expression is split so its colon serves as the conditional's colon.
     *
     * @param lhsExpr Preceding expression of the question mark
     * @param isInConditionalExpr whether calling from a conditional-expr
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        // The middle expression is parsed with isInConditionalExpr=true so embedded
        // qualified name refs are preserved for the splitting below.
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            // No explicit colon: the colon must come from a qualified name ref
            // inside the middle expression.
            if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
                // Nested conditional: split the qualified name ref found inside the
                // inner conditional's middle expression, preferring the right-most one.
                STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
                STNode innerMiddleExpr = innerConditionalExpr.middleExpression;
                STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
                if (rightMostQNameRef != null) {
                    middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
                STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
                if (leftMostQNameRef != null) {
                    middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
            }
            // Non-nested case: split a qualified name ref directly inside the middle
            // expression, right-most first, then left-most.
            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
            if (rightMostQNameRef != null) {
                endContext();
                return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
            }
            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
            if (leftMostQNameRef != null) {
                endContext();
                return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
            }
        }
        // Explicit colon present (or no splittable qualified name ref found):
        // parse `: end-expr` normally. parseConditionalExprRhs ends the context.
        return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
    }
    /**
     * Build a conditional expression by splitting the right-most qualified name
     * reference in the middle expression: its prefix stays in the middle
     * expression, its colon becomes the conditional's colon, and its identifier
     * becomes the end expression.
     *
     * @param lhsExpr Expression before the question mark
     * @param questionMark Question mark token
     * @param middleExpr Middle expression containing the qualified name ref
     * @param rightMostQualifiedNameRef The qualified name ref to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                       STNode rightMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef =
                (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
        STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        STNode simpleNameRef =
                ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        // Replace the qualified name ref inside the middle expression with just its prefix.
        middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }
    /**
     * Build a conditional expression by splitting the left-most qualified name
     * reference in the middle expression: its prefix becomes the middle
     * expression, its colon becomes the conditional's colon, and the rest of
     * the original middle expression becomes the end expression.
     *
     * @param lhsExpr Expression before the question mark
     * @param questionMark Question mark token
     * @param middleExpr Middle expression containing the qualified name ref
     * @param leftMostQualifiedNameRef The qualified name ref to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                      STNode leftMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        // The end expression is the middle expression with the qualified name ref
        // reduced to its bare identifier.
        STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef);
        middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }
private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
boolean isInConditionalExpr) {
STNode colon = parseColon();
endContext();
STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false,
isInConditionalExpr);
return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
}
/**
* Parse enum declaration.
* <p>
* module-enum-decl :=
* metadata
* [public] enum identifier { enum-member (, enum-member)* }
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @param metadata
* @param qualifier
* @return Parsed enum node.
*/
private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
STNode enumKeywordToken = parseEnumKeyword();
STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
STNode openBraceToken = parseOpenBrace();
STNode enumMemberList = parseEnumMemberList();
STNode closeBraceToken = parseCloseBrace();
endContext();
openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
openBraceToken, enumMemberList, closeBraceToken);
}
/**
* Parse 'enum' keyword.
*
* @return enum keyword node
*/
private STNode parseEnumKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ENUM_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ENUM_KEYWORD);
return parseEnumKeyword();
}
}
/**
* Parse enum member list.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return enum member list node.
*/
private STNode parseEnumMemberList() {
startContext(ParserRuleContext.ENUM_MEMBER_LIST);
if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
return STNodeFactory.createEmptyNodeList();
}
List<STNode> enumMemberList = new ArrayList<>();
STNode enumMember = parseEnumMember();
STNode enumMemberRhs;
while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
enumMemberRhs = parseEnumMemberEnd();
if (enumMemberRhs == null) {
break;
}
enumMemberList.add(enumMember);
enumMemberList.add(enumMemberRhs);
enumMember = parseEnumMember();
}
enumMemberList.add(enumMember);
endContext();
return STNodeFactory.createNodeList(enumMemberList);
}
/**
* Parse enum member.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return Parsed enum member node.
*/
private STNode parseEnumMember() {
STNode metadata;
switch (peek().kind) {
case DOCUMENTATION_STRING:
case AT_TOKEN:
metadata = parseMetaData();
break;
default:
metadata = STNodeFactory.createEmptyNode();
}
STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
return parseEnumMemberRhs(metadata, identifierNode);
}
private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
STNode equalToken, constExprNode;
switch (peek().kind) {
case EQUAL_TOKEN:
equalToken = parseAssignOp();
constExprNode = parseExpression();
break;
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
equalToken = STNodeFactory.createEmptyNode();
constExprNode = STNodeFactory.createEmptyNode();
break;
default:
recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
return parseEnumMemberRhs(metadata, identifierNode);
}
return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
}
private STNode parseEnumMemberEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
return parseEnumMemberEnd();
}
}
    /**
     * Disambiguate between a transaction statement and a variable declaration whose
     * type descriptor starts with the predeclared `transaction` module prefix
     * (e.g. `transaction:Info info = ...`).
     *
     * @param annots Annotations attached to the statement
     * @param qualifiers Qualifiers collected so far
     * @param transactionKeyword Already-consumed `transaction` keyword token
     * @return Transaction statement or variable declaration node
     */
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                // `transaction {` — a transaction statement; annotations/qualifiers are invalid here.
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                // `transaction:Identifier` — a qualified type reference, so this is a var decl.
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // fall through: colon not followed by an identifier — recover below
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
                // If recovery kept the token or inserted the colon, treat it as a type reference.
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }
/**
* Parse transaction statement.
* <p>
* <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
*
* @return Transaction statement node
*/
private STNode parseTransactionStatement(STNode transactionKeyword) {
startContext(ParserRuleContext.TRANSACTION_STMT);
STNode blockStmt = parseBlockNode();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
}
/**
* Parse commit action.
* <p>
* <code>commit-action := "commit"</code>
*
* @return Commit action node
*/
private STNode parseCommitAction() {
STNode commitKeyword = parseCommitKeyword();
return STNodeFactory.createCommitActionNode(commitKeyword);
}
/**
* Parse commit keyword.
*
* @return parsed node
*/
private STNode parseCommitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.COMMIT_KEYWORD);
return parseCommitKeyword();
}
}
/**
* Parse retry statement.
* <p>
* <code>
* retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
* <br/>
* retry-spec := [type-parameter] [ `(` arg-list `)` ]
* </code>
*
* @return Retry statement node
*/
private STNode parseRetryStatement() {
startContext(ParserRuleContext.RETRY_STMT);
STNode retryKeyword = parseRetryKeyword();
STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
return retryStmt;
}
private STNode parseRetryKeywordRhs(STNode retryKeyword) {
STToken nextToken = peek();
switch (nextToken.kind) {
case LT_TOKEN:
STNode typeParam = parseTypeParameter();
return parseRetryTypeParamRhs(retryKeyword, typeParam);
case OPEN_PAREN_TOKEN:
case OPEN_BRACE_TOKEN:
case TRANSACTION_KEYWORD:
typeParam = STNodeFactory.createEmptyNode();
return parseRetryTypeParamRhs(retryKeyword, typeParam);
default:
recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
return parseRetryKeywordRhs(retryKeyword);
}
}
    /**
     * Parse the tail of a retry statement after the optional type parameter:
     * an optional parenthesized arg list, the retry body, and an optional
     * on-fail clause.
     *
     * @param retryKeyword Already-consumed `retry` keyword
     * @param typeParam Type parameter node, or empty
     * @return Retry statement node
     */
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No arg list; the retry body starts directly.
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }
        STNode blockStmt = parseRetryBody();
        // Closes the RETRY_STMT context opened in parseRetryStatement().
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }
private STNode parseRetryBody() {
switch (peek().kind) {
case OPEN_BRACE_TOKEN:
return parseBlockNode();
case TRANSACTION_KEYWORD:
return parseTransactionStatement(consume());
default:
recover(peek(), ParserRuleContext.RETRY_BODY);
return parseRetryBody();
}
}
/**
* Parse optional on fail clause.
*
* @return Parsed node
*/
private STNode parseOptionalOnFailClause() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
return parseOnFailClause();
}
if (isEndOfRegularCompoundStmt(nextToken.kind)) {
return STNodeFactory.createEmptyNode();
}
recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
return parseOptionalOnFailClause();
}
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
switch (nodeKind) {
case CLOSE_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case AT_TOKEN:
case EOF_TOKEN:
return true;
default:
return isStatementStartingToken(nodeKind);
}
}
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
switch (nodeKind) {
case FINAL_KEYWORD:
case IF_KEYWORD:
case WHILE_KEYWORD:
case DO_KEYWORD:
case PANIC_KEYWORD:
case CONTINUE_KEYWORD:
case BREAK_KEYWORD:
case RETURN_KEYWORD:
case LOCK_KEYWORD:
case OPEN_BRACE_TOKEN:
case FORK_KEYWORD:
case FOREACH_KEYWORD:
case XMLNS_KEYWORD:
case TRANSACTION_KEYWORD:
case RETRY_KEYWORD:
case ROLLBACK_KEYWORD:
case MATCH_KEYWORD:
case FAIL_KEYWORD:
case CHECK_KEYWORD:
case CHECKPANIC_KEYWORD:
case TRAP_KEYWORD:
case START_KEYWORD:
case FLUSH_KEYWORD:
case LEFT_ARROW_TOKEN:
case WAIT_KEYWORD:
case COMMIT_KEYWORD:
case WORKER_KEYWORD:
case TYPE_KEYWORD:
case CONST_KEYWORD:
return true;
default:
if (isTypeStartingToken(nodeKind)) {
return true;
}
if (isValidExpressionStart(nodeKind, 1)) {
return true;
}
return false;
}
}
/**
* Parse on fail clause.
* <p>
* <code>
* on-fail-clause := on fail typed-binding-pattern statement-block
* </code>
*
* @return On fail clause node
*/
private STNode parseOnFailClause() {
startContext(ParserRuleContext.ON_FAIL_CLAUSE);
STNode onKeyword = parseOnKeyword();
STNode failKeyword = parseFailKeyword();
STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
TypePrecedence.DEFAULT);
STNode identifier = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
STNode blockStatement = parseBlockNode();
endContext();
return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDescriptor, identifier,
blockStatement);
}
/**
* Parse retry keyword.
*
* @return parsed node
*/
private STNode parseRetryKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.RETRY_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.RETRY_KEYWORD);
return parseRetryKeyword();
}
}
/**
* Parse transaction statement.
* <p>
* <code>rollback-stmt := "rollback" [expression] ";"</code>
*
* @return Rollback statement node
*/
private STNode parseRollbackStatement() {
startContext(ParserRuleContext.ROLLBACK_STMT);
STNode rollbackKeyword = parseRollbackKeyword();
STNode expression;
if (peek().kind == SyntaxKind.SEMICOLON_TOKEN) {
expression = STNodeFactory.createEmptyNode();
} else {
expression = parseExpression();
}
STNode semicolon = parseSemicolon();
endContext();
return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
* Parse rollback keyword.
*
* @return Rollback keyword node
*/
private STNode parseRollbackKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ROLLBACK_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ROLLBACK_KEYWORD);
return parseRollbackKeyword();
}
}
/**
* Parse transactional expression.
* <p>
* <code>transactional-expr := "transactional"</code>
*
* @return Transactional expression node
*/
private STNode parseTransactionalExpression() {
STNode transactionalKeyword = parseTransactionalKeyword();
return STNodeFactory.createTransactionalExpressionNode(transactionalKeyword);
}
/**
* Parse transactional keyword.
*
* @return Transactional keyword node
*/
private STNode parseTransactionalKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.TRANSACTIONAL_KEYWORD);
return parseTransactionalKeyword();
}
}
/**
* Parse base16 literal.
* <p>
* <code>
* byte-array-literal := Base16Literal | Base64Literal
* <br/>
* Base16Literal := base16 WS ` HexGroup* WS `
* <br/>
* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
* </code>
*
* @return parsed node
*/
private STNode parseByteArrayLiteral() {
STNode type;
if (peek().kind == SyntaxKind.BASE16_KEYWORD) {
type = parseBase16Keyword();
} else {
type = parseBase64Keyword();
}
STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
if (startingBackTick.isMissing()) {
startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
STNode content = STNodeFactory.createEmptyNode();
STNode byteArrayLiteral =
STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
byteArrayLiteral =
SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
return byteArrayLiteral;
}
STNode content = parseByteArrayContent();
return parseByteArrayLiteral(type, startingBackTick, content);
}
    /**
     * Validate parsed byte array content and assemble the byte array literal node.
     * Invalid content is not kept as the literal's content; instead it is attached
     * to the starting backtick as invalid-node minutiae with a diagnostic.
     *
     * @param typeKeyword keyword token, possible values are `base16` and `base64`
     * @param startingBackTick starting backtick token
     * @param byteArrayContent byte array literal content to be validated
     * @return parsed byte array literal node
     */
    private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
        STNode content = STNodeFactory.createEmptyNode();
        STNode newStartingBackTick = startingBackTick;
        STNodeList items = (STNodeList) byteArrayContent;
        if (items.size() == 1) {
            // Exactly one content item: validate it against the declared base.
            STNode item = items.get(0);
            if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
                newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                        DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
            } else {
                // Valid content: keep it as the literal's content node.
                content = item;
            }
        } else if (items.size() > 1) {
            // Multiple items (e.g. interpolations) are never valid byte array content:
            // fold all of them into the starting backtick, then add one diagnostic.
            STNode clonedStartingBackTick = startingBackTick;
            for (int index = 0; index < items.size(); index++) {
                STNode item = items.get(index);
                clonedStartingBackTick =
                        SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
            }
            newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        }
        STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
        return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
    }
/**
* Parse <code>base16</code> keyword.
*
* @return base16 keyword node
*/
private STNode parseBase16Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE16_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE16_KEYWORD);
return parseBase16Keyword();
}
}
/**
* Parse <code>base64</code> keyword.
*
* @return base64 keyword node
*/
private STNode parseBase64Keyword() {
STToken token = peek();
if (token.kind == SyntaxKind.BASE64_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.BASE64_KEYWORD);
return parseBase64Keyword();
}
}
/**
* Validate and parse byte array literal content.
* An error is reported, if the content is invalid.
*
* @return parsed node
*/
private STNode parseByteArrayContent() {
STToken nextToken = peek();
List<STNode> items = new ArrayList<>();
while (!isEndOfBacktickContent(nextToken.kind)) {
STNode content = parseTemplateItem();
items.add(content);
nextToken = peek();
}
return STNodeFactory.createNodeList(items);
}
/**
* Validate base16 literal content.
* <p>
* <code>
* Base16Literal := base16 WS ` HexGroup* WS `
* <br/>
* HexGroup := WS HexDigit WS HexDigit
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase16LiteralContent(String content) {
char[] charArray = content.toCharArray();
int hexDigitCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
default:
if (isHexDigit(c)) {
hexDigitCount++;
} else {
return false;
}
break;
}
}
return hexDigitCount % 2 == 0;
}
/**
* Validate base64 literal content.
* <p>
* <code>
* Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
* <br/>
* Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
* <br/>
* PaddedBase64Group :=
* WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
* | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
* <br/>
* Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
* <br/>
* PaddingChar := =
* <br/>
* WS := WhiteSpaceChar*
* <br/>
* WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
* </code>
*
* @param content the string surrounded by the backticks
* @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
*/
static boolean isValidBase64LiteralContent(String content) {
char[] charArray = content.toCharArray();
int base64CharCount = 0;
int paddingCharCount = 0;
for (char c : charArray) {
switch (c) {
case LexerTerminals.TAB:
case LexerTerminals.NEWLINE:
case LexerTerminals.CARRIAGE_RETURN:
case LexerTerminals.SPACE:
break;
case LexerTerminals.EQUAL:
paddingCharCount++;
break;
default:
if (isBase64Char(c)) {
if (paddingCharCount == 0) {
base64CharCount++;
} else {
return false;
}
} else {
return false;
}
break;
}
}
if (paddingCharCount > 2) {
return false;
} else if (paddingCharCount == 0) {
return base64CharCount % 4 == 0;
} else {
return base64CharCount % 4 == 4 - paddingCharCount;
}
}
/**
* <p>
* Check whether a given char is a base64 char.
* </p>
* <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
*
* @param c character to check
* @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
*/
static boolean isBase64Char(int c) {
if ('a' <= c && c <= 'z') {
return true;
}
if ('A' <= c && c <= 'Z') {
return true;
}
if (c == '+' || c == '/') {
return true;
}
return isDigit(c);
}
static boolean isHexDigit(int c) {
if ('a' <= c && c <= 'f') {
return true;
}
if ('A' <= c && c <= 'F') {
return true;
}
return isDigit(c);
}
static boolean isDigit(int c) {
return ('0' <= c && c <= '9');
}
/**
* Parse xml filter expression.
* <p>
* <code>xml-filter-expr := expression .< xml-name-pattern ></code>
*
* @param lhsExpr Preceding expression of .< token
* @return Parsed node
*/
private STNode parseXMLFilterExpression(STNode lhsExpr) {
STNode xmlNamePatternChain = parseXMLFilterExpressionRhs();
return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, xmlNamePatternChain);
}
/**
* Parse xml filter expression rhs.
* <p>
* <code>filer-expression-rhs := .< xml-name-pattern ></code>
*
* @return Parsed node
*/
private STNode parseXMLFilterExpressionRhs() {
STNode dotLTToken = parseDotLTToken();
return parseXMLNamePatternChain(dotLTToken);
}
/**
* Parse xml name pattern chain.
* <p>
* <code>
* xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
* <br/>
* filer-expression-rhs := .< xml-name-pattern >
* <br/>
* xml-element-children-step := /< xml-name-pattern >
* <br/>
* xml-element-descendants-step := /**\/<xml-name-pattern >
* </code>
*
* @param startToken Preceding token of xml name pattern
* @return Parsed node
*/
private STNode parseXMLNamePatternChain(STNode startToken) {
startContext(ParserRuleContext.XML_NAME_PATTERN);
STNode xmlNamePattern = parseXMLNamePattern();
STNode gtToken = parseGTToken();
endContext();
startToken = cloneWithDiagnosticIfListEmpty(xmlNamePattern, startToken,
DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
return STNodeFactory.createXMLNamePatternChainingNode(startToken, xmlNamePattern, gtToken);
}
/**
* Parse <code> .< </code> token.
*
* @return Parsed node
*/
private STNode parseDotLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOT_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOT_LT_TOKEN);
return parseDotLTToken();
}
}
/**
* Parse xml name pattern.
* <p>
* <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
*
* @return Parsed node
*/
private STNode parseXMLNamePattern() {
List<STNode> xmlAtomicNamePatternList = new ArrayList<>();
STToken nextToken = peek();
if (isEndOfXMLNamePattern(nextToken.kind)) {
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
STNode xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
STNode separator;
while (!isEndOfXMLNamePattern(peek().kind)) {
separator = parseXMLNamePatternSeparator();
if (separator == null) {
break;
}
xmlAtomicNamePatternList.add(separator);
xmlAtomicNamePattern = parseXMLAtomicNamePattern();
xmlAtomicNamePatternList.add(xmlAtomicNamePattern);
}
return STNodeFactory.createNodeList(xmlAtomicNamePatternList);
}
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
switch (tokenKind) {
case GT_TOKEN:
case EOF_TOKEN:
return true;
case IDENTIFIER_TOKEN:
case ASTERISK_TOKEN:
case COLON_TOKEN:
default:
return false;
}
}
private STNode parseXMLNamePatternSeparator() {
STToken token = peek();
switch (token.kind) {
case PIPE_TOKEN:
return consume();
case GT_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
return parseXMLNamePatternSeparator();
}
}
/**
* Parse xml atomic name pattern.
* <p>
* <code>
* xml-atomic-name-pattern :=
* *
* | identifier
* | xml-namespace-prefix : identifier
* | xml-namespace-prefix : *
* </code>
*
* @return Parsed node
*/
private STNode parseXMLAtomicNamePattern() {
startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
STNode atomicNamePattern = parseXMLAtomicNamePatternBody();
endContext();
return atomicNamePattern;
}
private STNode parseXMLAtomicNamePatternBody() {
STToken token = peek();
STNode identifier;
switch (token.kind) {
case ASTERISK_TOKEN:
return consume();
case IDENTIFIER_TOKEN:
identifier = consume();
break;
default:
recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
return parseXMLAtomicNamePatternBody();
}
return parseXMLAtomicNameIdentifier(identifier);
}
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
STToken token = peek();
if (token.kind == SyntaxKind.COLON_TOKEN) {
STNode colon = consume();
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.ASTERISK_TOKEN) {
STToken endToken = consume();
return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, endToken);
}
}
return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
* Parse xml step expression.
* <p>
* <code>xml-step-expr := expression xml-step-start</code>
*
* @param lhsExpr Preceding expression of /*, /<, or /**\/< token
* @return Parsed node
*/
private STNode parseXMLStepExpression(STNode lhsExpr) {
STNode xmlStepStart = parseXMLStepStart();
return STNodeFactory.createXMLStepExpressionNode(lhsExpr, xmlStepStart);
}
/**
* Parse xml filter expression rhs.
* <p>
* <code>
* xml-step-start :=
* xml-all-children-step
* | xml-element-children-step
* | xml-element-descendants-step
* <br/>
* xml-all-children-step := /*
* </code>
*
* @return Parsed node
*/
private STNode parseXMLStepStart() {
STToken token = peek();
STNode startToken;
switch (token.kind) {
case SLASH_ASTERISK_TOKEN:
return consume();
case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
startToken = parseDoubleSlashDoubleAsteriskLTToken();
break;
case SLASH_LT_TOKEN:
default:
startToken = parseSlashLTToken();
break;
}
return parseXMLNamePatternChain(startToken);
}
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseSlashLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.SLASH_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.SLASH_LT_TOKEN);
return parseSlashLTToken();
}
}
/**
* Parse <code> /< </code> token.
*
* @return Parsed node
*/
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
return consume();
} else {
recover(nextToken, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
return parseDoubleSlashDoubleAsteriskLTToken();
}
}
    /**
     * Parse match statement.
     * <p>
     * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
     *
     * @return Match statement
     */
    private STNode parseMatchStatement() {
        startContext(ParserRuleContext.MATCH_STMT);
        STNode matchKeyword = parseMatchKeyword();
        STNode actionOrExpr = parseActionOrExpression();
        // The braced clause list gets its own context so that recovery inside the
        // body does not unwind the whole match statement.
        startContext(ParserRuleContext.MATCH_BODY);
        STNode openBrace = parseOpenBrace();
        List<STNode> matchClausesList = new ArrayList<>();
        while (!isEndOfMatchClauses(peek().kind)) {
            STNode clause = parseMatchClause();
            matchClausesList.add(clause);
        }
        STNode matchClauses = STNodeFactory.createNodeList(matchClausesList);
        // The grammar requires at least one clause; when none were parsed, the
        // diagnostic is attached to the open brace.
        if (isNodeListEmpty(matchClauses)) {
            openBrace = SyntaxErrors.addDiagnostic(openBrace,
                    DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
        }
        STNode closeBrace = parseCloseBrace();
        // Close MATCH_BODY, then MATCH_STMT.
        endContext();
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createMatchStatementNode(matchKeyword, actionOrExpr, openBrace, matchClauses, closeBrace,
                onFailClause);
    }
/**
* Parse match keyword.
*
* @return Match keyword node
*/
private STNode parseMatchKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.MATCH_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.MATCH_KEYWORD);
return parseMatchKeyword();
}
}
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case TYPE_KEYWORD:
return true;
default:
return isEndOfStatements();
}
}
    /**
     * Parse a single match clause.
     * <p>
     * <code>
     * match-clause := match-pattern-list [match-guard] => block-stmt
     * <br/>
     * match-guard := if expression
     * </code>
     *
     * @return A match clause
     */
    private STNode parseMatchClause() {
        STNode matchPatterns = parseMatchPatternList();
        STNode matchGuard = parseMatchGuard();
        STNode rightDoubleArrow = parseDoubleRightArrow();
        STNode blockStmt = parseBlockNode();
        // A clause needs at least one pattern: synthesize a missing-identifier
        // pattern and attach the diagnostic to the first node that follows it
        // (the guard when present, otherwise the `=>` token).
        if (isNodeListEmpty(matchPatterns)) {
            STToken identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode constantPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
            matchPatterns = STNodeFactory.createNodeList(constantPattern);
            DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
            if (matchGuard != null) {
                matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
            } else {
                rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
            }
        }
        return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
    }
/**
* Parse match guard.
* <p>
* <code>match-guard := if expression</code>
*
* @return Match guard
*/
private STNode parseMatchGuard() {
STToken nextToken = peek();
switch (nextToken.kind) {
case IF_KEYWORD:
STNode ifKeyword = parseIfKeyword();
STNode expr = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
return STNodeFactory.createMatchGuardNode(ifKeyword, expr);
case RIGHT_DOUBLE_ARROW_TOKEN:
return STNodeFactory.createEmptyNode();
default:
recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
return parseMatchGuard();
}
}
/**
* Parse match patterns list.
* <p>
* <code>match-pattern-list := match-pattern (| match-pattern)*</code>
*
* @return Match patterns list
*/
private STNode parseMatchPatternList() {
startContext(ParserRuleContext.MATCH_PATTERN);
List<STNode> matchClauses = new ArrayList<>();
while (!isEndOfMatchPattern(peek().kind)) {
STNode clause = parseMatchPattern();
if (clause == null) {
break;
}
matchClauses.add(clause);
STNode seperator = parseMatchPatternListMemberRhs();
if (seperator == null) {
break;
}
matchClauses.add(seperator);
}
endContext();
return STNodeFactory.createNodeList(matchClauses);
}
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case PIPE_TOKEN:
case IF_KEYWORD:
case RIGHT_DOUBLE_ARROW_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse match pattern.
* <p>
* <code>
* match-pattern := var binding-pattern
* | wildcard-match-pattern
* | const-pattern
* | list-match-pattern
* | mapping-match-pattern
* | error-match-pattern
* </code>
*
* @return Match pattern
*/
private STNode parseMatchPattern() {
STToken nextToken = peek();
if (isPredeclaredIdentifier(nextToken.kind)) {
STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
}
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
case NULL_KEYWORD:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
case PLUS_TOKEN:
case MINUS_TOKEN:
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
case STRING_LITERAL_TOKEN:
return parseSimpleConstExpr();
case VAR_KEYWORD:
return parseVarTypedBindingPattern();
case OPEN_BRACKET_TOKEN:
return parseListMatchPattern();
case OPEN_BRACE_TOKEN:
return parseMappingMatchPattern();
case ERROR_KEYWORD:
return parseErrorMatchPattern();
default:
recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
return parseMatchPattern();
}
}
private STNode parseMatchPatternListMemberRhs() {
STToken nextToken = peek();
switch (nextToken.kind) {
case PIPE_TOKEN:
return parsePipeToken();
case IF_KEYWORD:
case RIGHT_DOUBLE_ARROW_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
return parseMatchPatternListMemberRhs();
}
}
/**
* Parse var typed binding pattern.
* <p>
* <code>var binding-pattern</code>
* </p>
*
* @return Parsed typed binding pattern node
*/
private STNode parseVarTypedBindingPattern() {
STNode varKeyword = parseVarKeyword();
STNode varTypeDesc = createBuiltinSimpleNameReference(varKeyword);
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
}
/**
* Parse var keyword.
*
* @return Var keyword node
*/
private STNode parseVarKeyword() {
STToken nextToken = peek();
if (nextToken.kind == SyntaxKind.VAR_KEYWORD) {
return consume();
} else {
recover(nextToken, ParserRuleContext.VAR_KEYWORD);
return parseVarKeyword();
}
}
    /**
     * Parse list match pattern.
     * <p>
     * <code>
     * list-match-pattern := [ list-member-match-patterns ]
     * list-member-match-patterns :=
     *    match-pattern (, match-pattern)* [, rest-match-pattern]
     *    | [ rest-match-pattern ]
     * </code>
     * </p>
     *
     * @return Parsed list match pattern node
     */
    private STNode parseListMatchPattern() {
        startContext(ParserRuleContext.LIST_MATCH_PATTERN);
        STNode openBracketToken = parseOpenBracket();
        List<STNode> matchPatternList = new ArrayList<>();
        STNode listMatchPatternMemberRhs = null;
        boolean isEndOfFields = false;
        // First pass: collect members and separators. A rest-match-pattern must be
        // the last member, so encountering one switches to invalidation mode.
        while (!isEndOfListMatchPattern()) {
            STNode listMatchPatternMember = parseListMatchPatternMember();
            matchPatternList.add(listMatchPatternMember);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
            if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                isEndOfFields = true;
                break;
            }
            if (listMatchPatternMemberRhs != null) {
                matchPatternList.add(listMatchPatternMemberRhs);
            } else {
                break;
            }
        }
        // Second pass: anything after the rest pattern is invalid; fold the trailing
        // separators and members into the last valid node as invalid minutiae.
        while (isEndOfFields && listMatchPatternMemberRhs != null) {
            updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
            if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
                break;
            }
            STNode invalidField = parseListMatchPatternMember();
            updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        }
        STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
        STNode closeBracketToken = parseCloseBracket();
        endContext();
        return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
    }
public boolean isEndOfListMatchPattern() {
switch (peek().kind) {
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseListMatchPatternMember() {
STNode nextToken = peek();
switch (nextToken.kind) {
case ELLIPSIS_TOKEN:
return parseRestMatchPattern();
default:
return parseMatchPattern();
}
}
/**
* Parse rest match pattern.
* <p>
* <code>
* rest-match-pattern := ... var variable-name
* </code>
* </p>
*
* @return Parsed rest match pattern node
*/
private STNode parseRestMatchPattern() {
startContext(ParserRuleContext.REST_MATCH_PATTERN);
STNode ellipsisToken = parseEllipsis();
STNode varKeywordToken = parseVarKeyword();
STNode variableName = parseVariableName();
endContext();
STSimpleNameReferenceNode simpleNameReferenceNode =
(STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
return STNodeFactory.createRestMatchPatternNode(ellipsisToken, varKeywordToken, simpleNameReferenceNode);
}
private STNode parseListMatchPatternMemberRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACKET_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
return parseListMatchPatternMemberRhs();
}
}
/**
* Parse mapping match pattern.
* <p>
* mapping-match-pattern := { field-match-patterns }
* <br/>
* field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
* | [ rest-match-pattern ]
* <br/>
* field-match-pattern := field-name : match-pattern
* <br/>
* rest-match-pattern := ... var variable-name
* </p>
*
* @return Parsed Node.
*/
private STNode parseMappingMatchPattern() {
startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
STNode openBraceToken = parseOpenBrace();
STNode fieldMatchPatterns = parseFieldMatchPatternList();
STNode closeBraceToken = parseCloseBrace();
endContext();
return STNodeFactory.createMappingMatchPatternNode(openBraceToken, fieldMatchPatterns, closeBraceToken);
}
private STNode parseFieldMatchPatternList() {
List<STNode> fieldMatchPatterns = new ArrayList<>();
STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
if (fieldMatchPatternMember == null) {
return STNodeFactory.createEmptyNodeList();
}
fieldMatchPatterns.add(fieldMatchPatternMember);
if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
return STNodeFactory.createNodeList(fieldMatchPatterns);
}
return parseFieldMatchPatternList(fieldMatchPatterns);
}
    // Continue parsing `(, field-match-pattern)*` after the first member.
    // Members after a rest-match-pattern are invalid and get folded into the
    // preceding node; a missing member after a comma is synthesized.
    private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                // No separator: the field list ends here.
                break;
            }
            fieldMatchPatterns.add(fieldMatchPatternRhs);
            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // Comma with no member after it: synthesize a missing field pattern.
                fieldMatchPatternMember = createMissingFieldMatchPattern();
            }
            fieldMatchPatterns.add(fieldMatchPatternMember);
            if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
                // Rest pattern must be last: invalidate anything that follows.
                invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
                break;
            }
        }
        return STNodeFactory.createNodeList(fieldMatchPatterns);
    }
private STNode createMissingFieldMatchPattern() {
STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
STNode identifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(identifier);
STNode fieldMatchPatternMember = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
fieldMatchPatternMember = SyntaxErrors.addDiagnostic(fieldMatchPatternMember,
DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
return fieldMatchPatternMember;
}
    /**
     * Parse and invalidate all field match pattern members after a rest-match-pattern.
     * Each trailing separator/member pair is folded into the last valid node in the
     * list as invalid-node minutiae so the source text is preserved in the tree.
     *
     * @param fieldMatchPatterns field-match-patterns list
     */
    private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
        while (!isEndOfMappingMatchPattern()) {
            STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
            if (fieldMatchPatternRhs == null) {
                break;
            }
            STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
            if (fieldMatchPatternMember == null) {
                // A dangling separator with no member: report the token itself.
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                        DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
            } else {
                // Separator is attached silently; the member carries the
                // pattern-after-rest diagnostic.
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
                updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
            }
        }
    }
private STNode parseFieldMatchPatternMember() {
STToken nextToken = peek();
switch (nextToken.kind) {
case IDENTIFIER_TOKEN:
return parseFieldMatchPattern();
case ELLIPSIS_TOKEN:
return parseRestMatchPattern();
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
return parseFieldMatchPatternMember();
}
}
/**
* Parse filed match pattern.
* <p>
* field-match-pattern := field-name : match-pattern
* </p>
*
* @return Parsed field match pattern node
*/
public STNode parseFieldMatchPattern() {
STNode fieldNameNode = parseVariableName();
STNode colonToken = parseColon();
STNode matchPattern = parseMatchPattern();
return STNodeFactory.createFieldMatchPatternNode(fieldNameNode, colonToken, matchPattern);
}
public boolean isEndOfMappingMatchPattern() {
switch (peek().kind) {
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
return true;
default:
return false;
}
}
private STNode parseFieldMatchPatternRhs() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
case EOF_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
return parseFieldMatchPatternRhs();
}
}
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
STToken nextToken = peek();
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
ParserRuleContext.ERROR_KEYWORD);
startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
default:
if (isMatchPatternEnd(peek().kind)) {
return typeRefOrConstExpr;
}
recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
}
}
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
switch (tokenKind) {
case RIGHT_DOUBLE_ARROW_TOKEN:
case COMMA_TOKEN:
case CLOSE_BRACE_TOKEN:
case CLOSE_BRACKET_TOKEN:
case CLOSE_PAREN_TOKEN:
case PIPE_TOKEN:
case IF_KEYWORD:
case EOF_TOKEN:
return true;
default:
return false;
}
}
/**
* Parse functional match pattern.
* <p>
* error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
* error-arg-list-match-pattern :=
* error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
* | [error-field-match-patterns]
* error-message-match-pattern := simple-match-pattern
* error-cause-match-pattern := simple-match-pattern | error-match-pattern
* simple-match-pattern :=
* wildcard-match-pattern
* | const-pattern
* | var variable-name
* error-field-match-patterns :=
* named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
* | rest-match-pattern
* named-arg-match-pattern := arg-name = match-pattern
* </p>
*
* @return Parsed functional match pattern node.
*/
private STNode parseErrorMatchPattern() {
startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
STNode errorKeyword = consume();
return parseErrorMatchPattern(errorKeyword);
}
private STNode parseErrorMatchPattern(STNode errorKeyword) {
STToken nextToken = peek();
STNode typeRef;
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
typeRef = STNodeFactory.createEmptyNode();
break;
default:
if (isPredeclaredIdentifier(nextToken.kind)) {
typeRef = parseTypeReference();
break;
}
recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
return parseErrorMatchPattern(errorKeyword);
}
return parseErrorMatchPattern(errorKeyword, typeRef);
}
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
STNode openParenthesisToken = parseOpenParenthesis();
STNode argListMatchPatternNode = parseErrorArgListMatchPatterns();
STNode closeParenthesisToken = parseCloseParenthesis();
endContext();
return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParenthesisToken,
argListMatchPatternNode, closeParenthesisToken);
}
    // Parse the full error-arg-list-match-pattern: an optional message pattern,
    // an optional cause pattern, then named-arg/rest field patterns. Invalid
    // combinations are folded into neighboring nodes as invalid minutiae.
    private STNode parseErrorArgListMatchPatterns() {
        List<STNode> argListMatchPatterns = new ArrayList<>();
        if (isEndOfErrorFieldMatchPatterns()) {
            return STNodeFactory.createNodeList(argListMatchPatterns);
        }
        startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
        STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
        endContext();
        if (isSimpleMatchPattern(firstArg.kind)) {
            // First arg is the error-message pattern; a second positional arg
            // (the cause) may follow, but only certain pattern kinds are legal there.
            argListMatchPatterns.add(firstArg);
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
            if (argEnd != null) {
                STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
                if (isValidSecondArgMatchPattern(secondArg.kind)) {
                    argListMatchPatterns.add(argEnd);
                    argListMatchPatterns.add(secondArg);
                } else {
                    // Invalid cause pattern: attach separator and arg to the
                    // previous node as invalid minutiae.
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                    updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                            DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
                }
            }
        } else {
            // Without a message pattern, only named-arg and rest patterns are
            // allowed as the first argument.
            if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                    firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
                addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            } else {
                argListMatchPatterns.add(firstArg);
            }
        }
        parseErrorFieldMatchPatterns(argListMatchPatterns);
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
switch (matchPatternKind) {
case IDENTIFIER_TOKEN:
case SIMPLE_NAME_REFERENCE:
case QUALIFIED_NAME_REFERENCE:
case NUMERIC_LITERAL:
case STRING_LITERAL:
case NULL_LITERAL:
case NIL_LITERAL:
case BOOLEAN_LITERAL:
case TYPED_BINDING_PATTERN:
case UNARY_EXPRESSION:
return true;
default:
return false;
}
}
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
switch (syntaxKind) {
case ERROR_MATCH_PATTERN:
case NAMED_ARG_MATCH_PATTERN:
case REST_MATCH_PATTERN:
return true;
default:
if (isSimpleMatchPattern(syntaxKind)) {
return true;
}
return false;
}
}
    /**
     * Parse error field match patterns.
     * error-field-match-patterns :=
     *    named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
     *    | rest-match-pattern
     * named-arg-match-pattern := arg-name = match-pattern
     *
     * @param argListMatchPatterns list the parsed patterns are appended to
     */
    private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
        // Track the last accepted kind to enforce ordering (named-args before
        // the rest pattern, nothing after the rest pattern).
        SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
        while (!isEndOfErrorFieldMatchPatterns()) {
            STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
            if (argEnd == null) {
                // No more separators: the arg list ends.
                break;
            }
            STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
            DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
            if (errorCode == null) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(currentArg);
                lastValidArgKind = currentArg.kind;
            } else if (argListMatchPatterns.size() == 0) {
                // Nothing valid to attach to yet: hang the invalid nodes on the
                // next token instead.
                addInvalidNodeToNextToken(argEnd, null);
                addInvalidNodeToNextToken(currentArg, errorCode);
            } else {
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
            }
        }
    }
    // Error-field match patterns and error-field binding patterns share the same
    // terminator set, so delegate to the binding-pattern check.
    private boolean isEndOfErrorFieldMatchPatterns() {
        return isEndOfErrorFieldBindingPatterns();
    }
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
switch (peek().kind) {
case COMMA_TOKEN:
return consume();
case CLOSE_PAREN_TOKEN:
return null;
default:
recover(peek(), currentCtx);
return parseErrorArgListMatchPatternEnd(currentCtx);
}
}
// Parses one member of an error arg-list match pattern: a named-arg, a rest
// pattern, an ordinary match pattern, or a "var x" typed binding pattern.
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Either "name = pattern" (named arg) or a const-reference match pattern.
        return parseNamedArgOrSimpleMatchPattern();
    }
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        // Tokens that can begin an ordinary match pattern.
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseMatchPattern();
        case VAR_KEYWORD:
            // "var x": a typed binding pattern capturing the arg value.
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            // Empty arg position: report a missing match pattern.
            return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}
/**
 * Parse a construct starting with an identifier inside an error match pattern:
 * either a named-arg match pattern or a simple (const-reference) match pattern.
 *
 * @return Named-arg match pattern node or the reference node itself
 */
private STNode parseNamedArgOrSimpleMatchPattern() {
    STNode identifierRef = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
    boolean isQualifiedRef = identifierRef.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE;
    if (isQualifiedRef || peek().kind != SyntaxKind.EQUAL_TOKEN) {
        // Qualified name, or no '=' following: treat it as a simple match pattern.
        return identifierRef;
    }
    return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) identifierRef).name);
}
/**
* Parses the next named arg match pattern.
* <br/>
* <code>named-arg-match-pattern := arg-name = match-pattern</code>
* <br/>
* <br/>
*
* @return Parsed named-arg match pattern node
*/
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode assignOp = parseAssignOp();
    STNode valuePattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, assignOp, valuePattern);
}
/**
 * Validates the ordering of error field match pattern args.
 *
 * @param prevArgKind Kind of the previously accepted arg
 * @param currentArgKind Kind of the current arg
 * @return <code>null</code> when the order is valid, otherwise the diagnostic to report
 */
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN && currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
        // Only named-arg and rest match patterns may appear in error field position.
        return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_MATCH_PATTERN) {
        // Nothing may follow a rest match pattern.
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/**
* Parse markdown documentation.
*
* @return markdown documentation node
*/
private STNode parseMarkdownDocumentation() {
    List<STNode> docLines = new ArrayList<>();
    // Consecutive documentation-string tokens merge into one markdown documentation node.
    while (peek().kind == SyntaxKind.DOCUMENTATION_STRING) {
        STToken docStringToken = consume();
        STNode parsedLines = parseDocumentationString(docStringToken);
        appendParsedDocumentationLines(docLines, parsedLines);
    }
    return STNodeFactory.createMarkdownDocumentationNode(STNodeFactory.createNodeList(docLines));
}
/**
* Parse documentation string.
*
* @return markdown documentation line list node
*/
private STNode parseDocumentationString(STToken documentationStringToken) {
    // Re-lex and re-parse the token text as markdown documentation lines, carrying
    // over its leading trivia and any diagnostics already attached to it.
    List<STNode> leadingTrivia = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>(documentationStringToken.diagnostics());
    CharReader reader = CharReader.from(documentationStringToken.text());
    DocumentationLexer lexer = new DocumentationLexer(reader, leadingTrivia, diagnostics);
    return new DocumentationParser(new TokenReader(lexer)).parse();
}
/**
 * Collects the children of a leading-minutiae node into a list.
 *
 * @param leadingMinutiaeNode Leading minutiae node
 * @return List containing each child bucket of the node, in order
 */
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> triviaList = new ArrayList<>(count);
    for (int bucket = 0; bucket < count; bucket++) {
        triviaList.add(leadingMinutiaeNode.childInBucket(bucket));
    }
    return triviaList;
}
/**
 * Appends each parsed documentation line bucket to the accumulating list.
 *
 * @param markdownDocLineList Accumulator for markdown documentation lines
 * @param parsedDocLines Node whose child buckets are the parsed lines
 */
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    int count = parsedDocLines.bucketCount();
    for (int bucket = 0; bucket < count; bucket++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(bucket));
    }
}
/**
* Parse any statement that starts with a token that has ambiguity between being
* a type-desc or an expression.
*
* @param annots Annotations
* @param qualifiers Preceding type descriptor qualifiers
* @return Statement node
*/
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    // Enter the ambiguous context; it is switched to a concrete one once resolved.
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typedBPOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typedBPOrExpr);
}
/**
 * Parse the rest of a statement whose start was ambiguous, once that start has been
 * resolved into either a typed-binding-pattern or an expression.
 *
 * @param annots Annotations attached to the statement
 * @param typedBindingPatternOrExpr Resolved typed-binding-pattern or expression
 * @return Statement node
 */
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Resolved to a typed binding pattern: continue as a var-decl statement.
        List<STNode> varDeclQualifiers = new ArrayList<>();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
    }
    // Otherwise this is an expression statement.
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));
    return parseStatementStartWithExprRhs(expr);
}
/**
 * Parse a typed-binding-pattern or an expression, with no preceding qualifiers.
 *
 * @param allowAssignment Whether this position is a valid lvalue
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    return parseTypedBindingPatternOrExpr(new ArrayList<>(), allowAssignment);
}
// Parses a construct that may be either a typed-binding-pattern or an
// expression, dispatching on the first token. Qualifiers are only valid
// when attached to a function type/expression.
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            // The only place the collected qualifiers are legal.
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        // Basic literals may start a singleton type or a literal expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Definitely an expression (or action).
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Definitely a type: parse as a typed binding pattern.
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
* Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
* or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
* the expression-rhs.
*
* @param typeOrExpr Type desc or the expression
* @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
* valid lvalue expression
* @return Typed-binding-pattern node or an expression node
*/
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // "T |" / "T &": either a union/intersection type or a binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // Operator immediately followed by '=': stop; the operator is not part
                // of this construct.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // RHS resolved to a typed-BP, so the whole construct is one: merge
                // both sides into a single type descriptor.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // "T | U =": a var-decl that is missing its variable name.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                    pipeOrAndToken, rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // A type-desc followed by ';' is a var-decl missing its binding pattern.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // An identifier or '?' after a possible type means a typed-BP follows.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Array type descriptor (typed-BP) or a member-access expression.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // Mapping/error binding pattern follows: this is a typed-BP.
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            // Unexpected token: pick the recovery rule based on what was parsed so far.
            STToken token = peek();
            SyntaxKind typeOrExprKind = typeOrExpr.kind;
            if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                    typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);
            } else {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);
            }
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
/**
 * Creates a typed-binding-pattern whose type merges the two given sides and whose
 * capture binding pattern has a missing (diagnostic-bearing) variable name.
 *
 * @param lhsType Left-hand side type (or expression convertible to a type)
 * @param separatorToken Union/intersection operator token
 * @param rhsType Right-hand side type (or expression convertible to a type)
 * @return Typed-binding-pattern node
 */
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    STNode mergedTypeDesc = mergeTypes(getTypeDescFromExpr(lhsType), separatorToken, getTypeDescFromExpr(rhsType));
    STNode missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
    return STNodeFactory.createTypedBindingPatternNode(mergedTypeDesc, captureBP);
}
/**
 * Completes a type descriptor that began as an ambiguous node and parses the
 * binding pattern that follows it.
 *
 * @param typeDesc Type descriptor resolved from the ambiguous node
 * @return Typed-binding-pattern node
 */
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    // Pick up any type-desc suffixes (arrays, optionals, etc.) first.
    STNode completedTypeDesc =
            parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    return parseTypedBindingPatternTypeRhs(completedTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse a typed-binding-pattern or expression that starts with an open parenthesis.
 *
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode typeDescOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (!isDefiniteTypeDesc(typeDescOrExpr.kind)) {
        // Still ambiguous (or definitely an expression): resolve with the next tokens.
        return parseTypedBindingPatternOrExprRhs(typeDescOrExpr, false);
    }
    return parseTypeBindingPatternStartsWithAmbiguousNode(typeDescOrExpr);
}
/**
 * Checks whether the given kind is unambiguously a type descriptor.
 * Relies on type-desc kinds being contiguous in {@link SyntaxKind}.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> for a definite type-desc kind
 */
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return SyntaxKind.RECORD_TYPE_DESC.compareTo(kind) <= 0 && SyntaxKind.FUTURE_TYPE_DESC.compareTo(kind) >= 0;
}
/**
 * Checks whether the given kind is unambiguously an expression.
 * Relies on expression kinds being contiguous in {@link SyntaxKind}.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> for a definite expression kind
 */
private boolean isDefiniteExpr(SyntaxKind kind) {
    // Name references stay ambiguous: they can be either a type or an expression.
    if (kind == SyntaxKind.SIMPLE_NAME_REFERENCE || kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        return false;
    }
    return SyntaxKind.BINARY_EXPRESSION.compareTo(kind) <= 0 && SyntaxKind.ERROR_CONSTRUCTOR.compareTo(kind) >= 0;
}
/**
 * Checks whether the given kind is unambiguously an action.
 * Relies on action kinds being contiguous in {@link SyntaxKind}.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> for a definite action kind
 */
private boolean isDefiniteAction(SyntaxKind kind) {
    return SyntaxKind.REMOTE_METHOD_CALL_ACTION.compareTo(kind) <= 0 &&
            SyntaxKind.COMMIT_ACTION.compareTo(kind) >= 0;
}
/**
* Parse type or expression that starts with open parenthesis. Possible options are:
* 1) () - nil type-desc or nil-literal
* 2) (T) - Parenthesized type-desc
* 3) (expr) - Parenthesized expression
* 4) (param, param, ..) - Anon function params
*
* @return Type-desc or expression node
*/
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // "()": nil literal, nil type, or empty anon-func params.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        // "(action)": a braced action.
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // "(expr": braced expression, or the start of anon-func parameters.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // "(T)": a parenthesised type descriptor.
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}
/**
* Parse type-desc or expression. This method does not handle binding patterns.
*
* @return Type-desc node or expression node
*/
private STNode parseTypeDescOrExpr() {
    // Delegate with no preceding qualifiers.
    return parseTypeDescOrExpr(new ArrayList<>());
}
// Parses a construct that may be either a type-desc or an expression,
// dispatching on the first token. Binding patterns are not handled here.
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            // The only position where the collected qualifiers are legal.
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            break;
        // Basic literals may start a singleton type or a literal expression.
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                // Definitely an expression (or action).
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Definitely a type descriptor.
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Ambiguity already resolved to a type: parse any type-desc suffixes.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
/**
 * Checks whether the given kind is an expression kind.
 * Relies on expression kinds being contiguous in {@link SyntaxKind}.
 *
 * @param kind Syntax kind to check
 * @return <code>true</code> for an expression kind
 */
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN ||
            kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    return SyntaxKind.BINARY_EXPRESSION.compareTo(kind) <= 0 &&
            SyntaxKind.XML_ATOMIC_NAME_PATTERN.compareTo(kind) >= 0;
}
/**
* Parse statement that starts with an empty parenthesis. Empty parenthesis can be
* 1) Nil literal
* 2) Nil type-desc
* 3) Anon-function params
*
* @param openParen Open parenthesis
* @param closeParen Close parenthesis
* @return Parsed node
*/
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // "() =>" starts an implicit anonymous function with zero parameters.
        STNode emptyParams = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParams =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, emptyParams, closeParen);
        return parseImplicitAnonFunc(anonFuncParams, false);
    }
    // Otherwise "()" is the nil literal (or nil type, resolved later).
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse an anon-func expression, or a typed-binding-pattern whose type is a
 * function type descriptor.
 *
 * @param qualifiers Qualifiers preceding the function keyword
 * @return Anon-func expression, action, or typed-binding-pattern node
 */
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    boolean resolvedToExprOrAction = isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind);
    if (resolvedToExprOrAction) {
        return exprOrTypeDesc;
    }
    // Function-type-desc: what follows must be the binding pattern of a var-decl.
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
/**
* Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
*
* @param qualifiers Preceding qualifiers
* @return Anon-func-expr or function-type-desc
*/
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // A signature is present: still ambiguous between anon-func and func-type;
        // resolve using the token that follows the signature.
        funcSignature = parseFuncSignature(true);
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);
        qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // No signature: this must be a function type descriptor.
    funcSignature = STNodeFactory.createEmptyNode();
    STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);
    qualifierList = nodes[0];
    functionKeyword = nodes[1];
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        // Outside a bracketed list this becomes a var-decl statement.
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
// Resolves the anon-func vs function-type ambiguity once the signature has been
// parsed, based on the token that follows the signature.
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body follows: this is an anonymous function expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // Deliberate fall-through: an identifier after the signature means a
            // function-type-desc followed by a variable name.
        default:
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
// Parses what follows an ambiguous type-or-expression node, resolving it toward
// a type descriptor or an expression based on the next token.
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // Operator immediately followed by '=': not part of this construct.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        pipeOrAndToken, rhsTypeDescOrExpr);
            }
            // Both sides are types: build a union/intersection type descriptor.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // An identifier or '?' after the node means it was a type descriptor.
            typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr),
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Array type descriptor or a member-access expression.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // "T...": a rest descriptor.
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                // Continue parsing as an expression.
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
/**
 * Checks whether a node parsed in an ambiguous context can still be interpreted
 * as either a type descriptor or an expression (e.g. name references, basic
 * literals, unions/intersections of such, and member-access forms).
 *
 * @param node Node to check
 * @return <code>true</code> if the node remains ambiguous
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // Only '|' (union) and '&' (intersection) keep the type/expr ambiguity.
            // Fix: the previous condition (`!= PIPE || == BITWISE_AND`) returned false
            // for '&' expressions, contradicting the explicit BITWISE_AND check.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&
                    binaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            // Every key (ignoring separator commas) must itself be ambiguous.
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the given node consists solely of basic literals, possibly
 * combined via unions/intersections, braces, bracketed lists, or a '+'/'-' sign.
 *
 * @param node Node to check
 * @return <code>true</code> if every leaf of the node is a basic literal
 */
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // Only '|' and '&' are permitted. Fix: the previous condition
            // (`!= PIPE || == BITWISE_AND`) rejected '&' expressions despite naming
            // BITWISE_AND explicitly.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN &&
                    binaryExpr.operator.kind != SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            // NOTE(review): operands are checked with isAmbiguous(), not
            // isAllBasicLiterals(), so name references are accepted here — confirm
            // whether that is intentional before tightening.
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only signed numeric literals ('+'/'-' prefix) qualify.
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}
/**
 * Checks whether the given node is a numeric literal.
 *
 * @param node Node to check
 * @return <code>true</code> if the node is a numeric literal
 */
private boolean isNumericLiteral(STNode node) {
    return node.kind == SyntaxKind.NUMERIC_LITERAL;
}
/**
* Parse binding-patterns.
* <p>
* <code>
* binding-pattern := capture-binding-pattern
* | wildcard-binding-pattern
* | list-binding-pattern
* | mapping-binding-pattern
* | functional-binding-pattern
* <br/><br/>
* <p>
* capture-binding-pattern := variable-name
* variable-name := identifier
* <br/><br/>
* <p>
* wildcard-binding-pattern := _
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* <p>
* mapping-binding-pattern := { field-binding-patterns }
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/>
* field-binding-pattern := field-name : binding-pattern | variable-name
* <br/>
* rest-binding-pattern := ... variable-name
* <p>
* <br/><br/>
* functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
* <br/>
* arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
* | other-arg-binding-patterns
* <br/>
* positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
* <br/>
* positional-arg-binding-pattern := binding-pattern
* <br/>
* other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
* | [rest-binding-pattern]
* <br/>
* named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
* <br/>
* named-arg-binding-pattern := arg-name = binding-pattern
* </code>
*
* @return binding-pattern node
*/
private STNode parseBindingPattern() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            // Not a valid binding-pattern start: recover and retry.
            recover(nextToken, ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}
// Parses a binding pattern that starts with an identifier: an error binding
// pattern (when '(' follows), otherwise a capture/wildcard binding pattern.
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // "name(": an error binding pattern with the error keyword missing.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A qualified name cannot be a capture binding pattern: replace it with a
        // missing identifier carrying the original node as invalid minutiae.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }
    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}
/**
 * Creates a capture or wildcard binding pattern for the given variable name.
 *
 * @param varName Variable name token
 * @return Wildcard binding pattern when the name is the wildcard, otherwise a
 *         capture binding pattern
 */
private STNode createCaptureOrWildcardBP(STNode varName) {
    if (isWildcardBP(varName)) {
        return getWildcardBindingPattern(varName);
    }
    return STNodeFactory.createCaptureBindingPatternNode(varName);
}
/**
* Parse list-binding-patterns.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return list-binding-pattern node
*/
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    STNode listBindingPattern = parseListBindingPattern(openBracket, new ArrayList<>());
    endContext();
    return listBindingPattern;
}
/**
 * Parse the members of a list binding pattern, given its open bracket.
 *
 * @param openBracket Open bracket token
 * @param bindingPatternsList List to collect the members into
 * @return List-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.isEmpty()) {
        // Empty list binding pattern: "[ ]".
        STNode closeBracket = parseCloseBracket();
        STNode memberList = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, memberList, closeBracket);
    }
    STNode firstMember = parseListBindingPatternMember();
    bindingPatternsList.add(firstMember);
    return parseListBindingPattern(openBracket, firstMember, bindingPatternsList);
}
// Parses the remaining members of a list binding pattern after the first one,
// stopping at the closing bracket or after a rest-binding-pattern member.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
    STNode member = firstMember;
    STToken token = peek();
    STNode listBindingPatternRhs = null;
    // A rest-binding-pattern must be last, so the loop also stops after one.
    while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
        listBindingPatternRhs = parseListBindingPatternMemberRhs();
        if (listBindingPatternRhs == null) {
            // Reached the closing bracket.
            break;
        }
        bindingPatterns.add(listBindingPatternRhs);
        member = parseListBindingPatternMember();
        bindingPatterns.add(member);
        token = peek();
    }
    STNode closeBracket = parseCloseBracket();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
/**
 * Parse the separator after a list binding pattern member.
 *
 * @return Comma token, or <code>null</code> when the closing bracket is reached
 */
private STNode parseListBindingPatternMemberRhs() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            // End of the list binding pattern.
            return null;
        default:
            recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
            return parseListBindingPatternMemberRhs();
    }
}
/**
 * Checks whether the given token kind terminates a list binding pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> for a closing bracket or end-of-file
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
* Parse list-binding-pattern member.
* <p>
* <code>
* list-binding-pattern := [ list-member-binding-patterns ]
* <br/>
* list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* </code>
*
* @return List binding pattern member
*/
private STNode parseListBindingPatternMember() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            // "..." starts a rest binding pattern.
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}
/**
* Parse rest binding pattern.
* <p>
* <code>
* rest-binding-pattern := ... variable-name
* </code>
*
* @return Rest binding pattern node
*/
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varName = parseVariableName();
    endContext();
    // Wrap the variable name in a simple name reference for the rest pattern node.
    STSimpleNameReferenceNode nameRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestBindingPatternNode(ellipsis, nameRef);
}
/**
* Parse Typed-binding-pattern.
* <p>
* <code>
* typed-binding-pattern := inferable-type-descriptor binding-pattern
* <br/><br/>
* inferable-type-descriptor := type-descriptor | var
* </code>
*
* @return Typed binding pattern node
*/
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    // Delegate with no preceding qualifiers.
    return parseTypedBindingPattern(new ArrayList<>(), context);
}
/**
 * Parse a typed binding pattern: a type descriptor followed by a binding pattern.
 *
 * @param qualifiers Qualifiers preceding the type descriptor
 * @param context Enclosing parser rule context
 * @return Typed-binding-pattern node
 */
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers,
            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);
    return parseTypedBindingPatternTypeRhs(typeDesc, context);
}
/**
* Parse mapping-binding-patterns.
* <p>
* <code>
* mapping-binding-pattern := { field-binding-patterns }
* <br/><br/>
* field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
* | [ rest-binding-pattern ]
* <br/><br/>
* field-binding-pattern := field-name : binding-pattern | variable-name
* </code>
*
* @return mapping-binding-pattern node
*/
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken token = peek();
    if (isEndOfMappingBindingPattern(token.kind)) {
        // Empty mapping binding pattern: "{ }".
        STNode closeBrace = parseCloseBrace();
        STNode bindingPatternsNode = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
    }
    List<STNode> bindingPatterns = new ArrayList<>();
    STNode prevMember = parseMappingBindingPatternMember();
    // A rest-binding-pattern is deferred: the overload below appends it exactly
    // once, after confirming it is the last member.
    if (prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    return parseMappingBindingPattern(openBrace, bindingPatterns, prevMember);
}
/**
 * Parse the remaining members of a mapping-binding-pattern, given the open-brace and the
 * first already-parsed member.
 *
 * @param openBrace       Open brace token
 * @param bindingPatterns Members collected so far (a rest-binding-pattern is not yet added)
 * @param prevMember      Most recently parsed member
 * @return Mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
    STToken token = peek();
    while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        // The separator is only needed within an iteration; scope it to the loop
        // (previously it was declared outside the loop with a dead null initializer).
        STNode mappingBindingPatternRhs = parseMappingBindingPatternEnd();
        if (mappingBindingPatternRhs == null) {
            break;
        }
        bindingPatterns.add(mappingBindingPatternRhs);
        prevMember = parseMappingBindingPatternMember();
        if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
            break;
        }
        bindingPatterns.add(prevMember);
        token = peek();
    }
    // A rest-binding-pattern must be the last member; it is added here, never inside the loop.
    if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
        bindingPatterns.add(prevMember);
    }
    STNode closeBrace = parseCloseBrace();
    STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
    endContext();
    return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}
/**
 * Parse a single member of a mapping-binding-pattern: either a rest-binding-pattern
 * or a field-binding-pattern.
 * <p>
 * <code>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 * | variable-name
 * </code>
 *
 * @return Parsed member node
 */
private STNode parseMappingBindingPatternMember() {
    // Only a rest-binding-pattern starts with an ellipsis; everything else is a field BP.
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    return parseFieldBindingPattern();
}
/**
 * Parse the token that separates or ends mapping-binding-pattern members.
 *
 * @return A comma token, or {@code null} when the closing brace is reached
 */
private STNode parseMappingBindingPatternEnd() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACE_TOKEN:
            // End of the mapping-binding-pattern; the caller parses the close-brace.
            return null;
        default:
            // Unexpected token: run error recovery, then retry.
            recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
            return parseMappingBindingPatternEnd();
    }
}
/**
 * Parse field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Field name (or variable name); the overload decides which form this is.
            STNode identifier = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            STNode simpleNameReference = STNodeFactory.createSimpleNameReferenceNode(identifier);
            return parseFieldBindingPattern(simpleNameReference);
        default:
            // Unexpected token: run error recovery, then retry.
            recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
            return parseFieldBindingPattern();
    }
}
/**
 * Parse the remainder of a field-binding-pattern, given the already-parsed field name.
 *
 * @param simpleNameReference Field name wrapped in a simple-name-reference
 * @return field-binding-pattern node
 */
private STNode parseFieldBindingPattern(STNode simpleNameReference) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // `varname` shorthand form: no explicit binding pattern follows.
            return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
        case COLON_TOKEN:
            // Full `field-name : binding-pattern` form.
            STNode colon = parseColon();
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, bindingPattern);
        default:
            recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);
            return parseFieldBindingPattern(simpleNameReference);
    }
}
/**
 * Check whether the given token kind terminates a mapping-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} if the mapping-binding-pattern ends here
 */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return true;
    }
    // Also stop at anything that ends the enclosing module-level construct.
    return isEndOfModuleLevelNode(1);
}
/**
 * Disambiguate a construct starting with the `error` keyword: it is either an
 * error type-desc (variable declaration) or an error-binding-pattern (assignment).
 *
 * @param annots Annotations attached to the statement, if any
 * @return Parsed statement node
 */
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
    STToken nextNextToken = peek(2);
    switch (nextNextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // `error (` can only start an error-binding-pattern.
            return parseAsErrorBindingPattern();
        case LT_TOKEN:
            // `error <` starts an error type parameter, hence a type-desc.
            return parseAsErrorTypeDesc(annots);
        case IDENTIFIER_TOKEN:
            SyntaxKind nextNextNextTokenKind = peek(3).kind;
            if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
                    nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
                // `error id:` or `error id(` implies a binding pattern with an error-type-reference.
                return parseAsErrorBindingPattern();
            }
            // Intentional fall-through: plain `error id ...` is a type-desc.
        default:
            return parseAsErrorTypeDesc(annots);
    }
}
/**
 * Parse the construct as an error-binding-pattern followed by an assignment statement RHS.
 *
 * @return Assignment statement node
 */
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    return parseAssignmentStmtRhs(parseErrorBindingPattern());
}
/**
 * Parse the construct as a variable declaration that starts with an error type-desc.
 *
 * @param annots Annotations attached to the declaration, if any
 * @return Variable declaration node
 */
private STNode parseAsErrorTypeDesc(STNode annots) {
    // No `final` keyword in this position.
    STNode finalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), finalKeyword);
}
/**
 * Parse error binding pattern node.
 * <p>
 * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
 * <br/><br/>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return Error binding pattern node.
 */
private STNode parseErrorBindingPattern() {
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    STNode errorKeyword = parseErrorKeyword();
    // The overload continues with the optional type-reference and the arg list.
    return parseErrorBindingPattern(errorKeyword);
}
/**
 * Parse the optional error-type-reference of an error-binding-pattern, after the `error` keyword.
 *
 * @param errorKeyword The already-parsed `error` keyword
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No type reference: `error ( ... )`.
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            // Unexpected token: recover and retry from the keyword RHS.
            recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorBindingPattern(errorKeyword);
    }
    return parseErrorBindingPattern(errorKeyword, typeRef);
}
/**
 * Parse the parenthesized arg list of an error-binding-pattern, after the keyword
 * and the (possibly empty) type reference.
 *
 * @param errorKeyword The `error` keyword
 * @param typeRef      Error type reference, possibly an empty node
 * @return Error binding pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParenthesis = parseOpenParenthesis();
    STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
    STNode closeParenthesis = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParenthesis,
            argListBindingPatterns, closeParenthesis);
}
/**
 * Parse error arg list binding pattern.
 * <p>
 * <code>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * <p>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * <p>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * <p>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * <p>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * <p>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return Error arg list binding patterns.
 */
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    // Empty arg list: `error()`.
    if (isEndOfErrorFieldBindingPatterns()) {
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    return parseErrorArgListBindingPatterns(argListBindingPatterns);
}
/**
 * Parse the first argument of an error-arg-list-binding-pattern and dispatch on its kind,
 * synthesizing missing nodes when mandatory args were skipped.
 *
 * @param argListBindingPatterns List collecting the parsed args
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
    STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
    if (firstArg == null) {
        // Reached the close-paren without any arg.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    switch (firstArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // First arg is the error-message-binding-pattern.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
        case ERROR_BINDING_PATTERN:
            // The error-cause appeared first: synthesize the missing message BP and comma.
            STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
            missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
                    DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
            STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
            argListBindingPatterns.add(missingErrorMsgBP);
            argListBindingPatterns.add(missingComma);
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            // Straight into the error-field-binding-patterns.
            argListBindingPatterns.add(firstArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
        default:
            // Invalid first arg: attach it to the next token as invalid-node trivia and retry.
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
}
/**
 * Parse the args that may follow the error-message-binding-pattern:
 * the optional cause, then the error-field-binding-patterns.
 *
 * @param argListBindingPatterns Args parsed so far (message BP already added)
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
    STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
    if (argEnd == null) {
        // null separator marks the end of the arg list.
        return STNodeFactory.createNodeList(argListBindingPatterns);
    }
    STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
    assert secondArg != null;
    switch (secondArg.kind) {
        case CAPTURE_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
        case ERROR_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case NAMED_ARG_BINDING_PATTERN:
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(secondArg);
            return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
        default:
            // Invalid second arg: attach the separator and arg to the previous node as trivia, retry.
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
                    DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
            return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
    }
}
/**
 * Parse the remaining error-field-binding-patterns, validating their ordering
 * (named-args first, at most one trailing rest-binding-pattern).
 *
 * @param argListBindingPatterns Args parsed so far
 * @param lastValidArgKind       Kind of the last valid arg, used for order validation
 * @return Error arg list binding patterns node list
 */
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
                                                          SyntaxKind lastValidArgKind) {
    while (!isEndOfErrorFieldBindingPatterns()) {
        STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
        if (argEnd == null) {
            // null separator marks the end of the arg list.
            break;
        }
        STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
        assert currentArg != null;
        DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListBindingPatterns.add(argEnd);
            argListBindingPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListBindingPatterns.size() == 0) {
            // Nothing valid collected yet: attach the invalid nodes to the next token instead.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
        }
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}
/**
 * Check whether the next token ends the error-field-binding-patterns list.
 *
 * @return {@code true} at a close-paren or EOF
 */
private boolean isEndOfErrorFieldBindingPatterns() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the separator between error-binding-pattern args.
 *
 * @param currentCtx Context to recover in when the next token is unexpected
 * @return A comma token, or {@code null} at the closing parenthesis
 */
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return consume();
        case CLOSE_PAREN_TOKEN:
            return null;
        default:
            recover(peek(), currentCtx);
            return parseErrorArgsBindingPatternEnd(currentCtx);
    }
}
/**
 * Parse a single arg of an error-arg-list-binding-pattern.
 *
 * @param context    Context to recover in when the next token is unexpected
 * @param isFirstArg Whether this is the first arg of the list
 * @return Parsed arg node, or {@code null} for an empty first arg
 */
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
    switch (peek().kind) {
        case ELLIPSIS_TOKEN:
            return parseRestBindingPattern();
        case IDENTIFIER_TOKEN:
            // An identifier may be a named-arg name or a simple binding pattern.
            STNode argNameOrSimpleBindingPattern = consume();
            return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        case CLOSE_PAREN_TOKEN:
            if (isFirstArg) {
                // Empty arg list: let the caller handle the close-paren.
                return null;
            }
            // Intentional fall-through: a trailing separator needs recovery.
        default:
            recover(peek(), context);
            return parseErrorArgListBindingPattern(context, isFirstArg);
    }
}
/**
 * Decide between a named-arg binding pattern (<code>name = binding-pattern</code>) and a
 * simple capture/wildcard binding pattern, given the already-consumed identifier.
 *
 * @param argNameOrSimpleBindingPattern The consumed identifier token
 * @return Parsed binding pattern node
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    STToken secondToken = peek();
    switch (secondToken.kind) {
        case EQUAL_TOKEN:
            // `name = binding-pattern` form.
            STNode equal = consume();
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                    equal, bindingPattern);
        case COMMA_TOKEN:
        case CLOSE_PAREN_TOKEN:
        default:
            // No `=` follows: the identifier itself is the binding pattern (`_` becomes wildcard).
            return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
    }
}
/**
 * Validate the ordering of error-field binding patterns: only named-arg and rest binding
 * patterns are allowed, and a rest-binding-pattern must be the last arg.
 *
 * @param prevArgKind    Kind of the previous valid arg
 * @param currentArgKind Kind of the arg being validated
 * @return A diagnostic code describing the violation, or {@code null} when the order is valid
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    boolean isErrorFieldArg = currentArgKind == SyntaxKind.NAMED_ARG_BINDING_PATTERN ||
            currentArgKind == SyntaxKind.REST_BINDING_PATTERN;
    if (!isErrorFieldArg) {
        // Capture/wildcard/error/list/mapping patterns cannot appear among error fields.
        return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
        // Nothing may follow a rest-binding-pattern.
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/*
 * This parses Typed binding patterns and deals with ambiguity between types,
 * and binding patterns. An example is 'T[a]'.
 * The ambiguity lies in between:
 * 1) Array Type
 * 2) List binding pattern
 * 3) Member access expression.
 */
/**
 * Parse the component after the type-desc, of a typed-binding-pattern.
 *
 * @param typeDesc Starting type-desc of the typed-binding-pattern
 * @param context  Parser rule context of the enclosing construct
 * @return Typed-binding pattern
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    // Root-level invocation; see the three-arg overload for the non-root case.
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}
/**
 * Parse the component after the type-desc of a typed-binding-pattern.
 *
 * @param typeDesc Starting type-desc
 * @param context  Parser rule context of the enclosing construct
 * @param isRoot   Whether this is the root of the typed-binding-pattern; when {@code false},
 *                 a list/mapping terminator returns the bare type-desc to the caller
 * @return Typed-binding pattern, or the type-desc itself in the non-root terminator case
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // Capture, mapping, or error binding pattern follows directly.
            STNode bindingPattern = parseBindingPattern();
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case OPEN_BRACKET_TOKEN:
            // `T[...` is ambiguous: array type-desc vs list binding pattern vs member access.
            STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
            assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
            return typedBindingPattern;
        case CLOSE_PAREN_TOKEN:
        case COMMA_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case CLOSE_BRACE_TOKEN:
            if (!isRoot) {
                // Inside a larger construct the terminator ends this branch; return type only.
                return typeDesc;
            }
            // Intentional fall-through: at root these terminators need recovery.
        default:
            recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
            return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
    }
}
/**
 * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
 *
 * @param typeDescOrExpr        Type desc or the expression at the start
 * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
 * @param allowAssignment       Whether the construct may be an lvalue (member access followed by `=`)
 * @param context               Parser rule context of the enclosing construct
 * @return Parsed node
 */
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
                                                      boolean allowAssignment, ParserRuleContext context) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    // `T[]` — must be an array type-desc (empty length member).
    if (isBracketedListEnd(peek().kind)) {
        return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
    }
    STNode member = parseBracketedListMember(isTypedBindingPattern);
    SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
    switch (currentNodeType) {
        case ARRAY_TYPE_DESC:
            STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
            return typedBindingPattern;
        case LIST_BINDING_PATTERN:
            // Definitely a list-binding-pattern, hence the lhs must be a type-desc.
            STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
        case INDEXED_EXPRESSION:
            return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
        case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
            // Still ambiguous; decide after the close-bracket.
            break;
        case NONE:
        default:
            // A following comma resolves the ambiguity: it must be a list-binding-pattern.
            STNode memberEnd = parseBracketedListMemberEnd();
            if (memberEnd != null) {
                List<STNode> memberList = new ArrayList<>();
                memberList.add(getBindingPattern(member));
                memberList.add(memberEnd);
                bindingPattern = parseAsListBindingPattern(openBracket, memberList);
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
            }
    }
    STNode closeBracket = parseCloseBracket();
    endContext();
    // Still ambiguous here; the token after the close-bracket decides.
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Treat the bracketed list as a member-access expression and continue parsing the expression RHS.
 *
 * @param typeNameOrExpr Container expression preceding the open-bracket
 * @param openBracket    Open bracket
 * @param member         Key expression parsed so far
 * @return Parsed expression node
 */
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    // Finish the key expression first (it may itself have a binary/rhs part).
    member = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExpr = STNodeFactory.createNodeList(member);
    STNode memberAccessExpr =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExpr, closeBracket);
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}
/**
 * Check whether the given token kind ends a bracketed list.
 *
 * @param nextTokenKind Kind of the next token
 * @return {@code true} at a close-bracket or EOF
 */
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
/**
 * Parse a member of an ambiguous bracketed list. This member could be:
 * 1) Array length
 * 2) Key expression of a member-access-expr
 * 3) A member-binding pattern of a list-binding-pattern.
 *
 * @param isTypedBindingPattern Is this in a definite typed-binding pattern
 * @return Parsed member node
 */
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN:
        case STRING_LITERAL_TOKEN:
            // Possible array length (or key expression); still ambiguous at this point.
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            return STNodeFactory.createEmptyNode();
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
        case ELLIPSIS_TOKEN:
        case OPEN_BRACKET_TOKEN:
            return parseStatementStartBracketedListMember();
        case IDENTIFIER_TOKEN:
            if (isTypedBindingPattern) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            break;
        default:
            if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
                    isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
                break;
            }
            // Recovery context differs depending on whether we already know this is a typed-BP.
            ParserRuleContext recoverContext =
                    isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
                            : ParserRuleContext.BRACKETED_LIST_MEMBER;
            recover(peek(), recoverContext);
            return parseBracketedListMember(isTypedBindingPattern);
    }
    // Ambiguous path: parse as an expression; `_` becomes a wildcard binding pattern.
    STNode expr = parseExpression();
    if (isWildcardBP(expr)) {
        return getWildcardBindingPattern(expr);
    }
    return expr;
}
/**
 * Treat the current node as an array, and parse the remainder of the binding pattern.
 *
 * @param typeDesc    Type-desc
 * @param openBracket Open bracket
 * @param member      Array length member, possibly empty
 * @param context     Parser rule context of the enclosing construct
 * @return Parsed node
 */
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
    typeDesc = getTypeDescFromExpr(typeDesc);
    // Re-enter type-desc parsing for the array dimension, then unwind both contexts.
    switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode closeBracket = parseCloseBracket();
    endContext();
    endContext();
    return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
            context);
}
/**
 * Parse the separator between bracketed-list members.
 *
 * @return A comma token, or {@code null} at the closing bracket
 */
private STNode parseBracketedListMemberEnd() {
    switch (peek().kind) {
        case COMMA_TOKEN:
            return parseComma();
        case CLOSE_BRACKET_TOKEN:
            return null;
        default:
            recover(peek(), ParserRuleContext.BRACKETED_LIST_MEMBER_END);
            return parseBracketedListMemberEnd();
    }
}
/**
 * We reach here to break ambiguity of T[a]. This could be:
 * 1) Array Type Desc
 * 2) Member access on LHS
 * 3) Typed-binding-pattern
 *
 * @param typeDescOrExpr        Type name or the expr that precede the open-bracket.
 * @param openBracket           Open bracket
 * @param member                Member
 * @param closeBracket          Close bracket
 * @param isTypedBindingPattern Is this is a typed-binding-pattern.
 * @param allowAssignment       Whether the construct may be an lvalue (member access followed by `=`)
 * @param context               Parser rule context of the enclosing construct
 * @return Specific node that matches to T[a], after solving ambiguity.
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                         STNode closeBracket, boolean isTypedBindingPattern,
                                                         boolean allowAssignment, ParserRuleContext context) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // A binding pattern follows, so `T[a]` was an array type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
        case OPEN_BRACKET_TOKEN:
            if (isTypedBindingPattern) {
                // `T[a][` — a multi-dimensional array type-desc.
                typeDesc = getTypeDescFromExpr(typeDescOrExpr);
                arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
                return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
            }
            // Still ambiguous: treat `T[a]` as a member access and continue with the next bracket.
            STNode keyExpr = getKeyExpr(member);
            STNode expr =
                    STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
            return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
        case QUESTION_MARK_TOKEN:
            // `T[a]?` — optional array type-desc.
            typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
            return parseTypedBindingPatternTypeRhs(typeDesc, context);
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // `T[a] | R` or `T[a] & R` — union/intersection type vs binary-expr ambiguity.
            return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
                    isTypedBindingPattern);
        case IN_KEYWORD:
            // `in` only follows a binding pattern in foreach/from/join clauses.
            if (context != ParserRuleContext.FOREACH_STMT &&
                    context != ParserRuleContext.FROM_CLAUSE &&
                    context != ParserRuleContext.JOIN_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case EQUAL_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                // An `=` is not valid here in foreach/from; recover below.
                break;
            }
            if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
                return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
            }
            // `a[n] = ...` — member access used as an lvalue.
            keyExpr = getKeyExpr(member);
            typeDescOrExpr = getExpression(typeDescOrExpr);
            return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
        case SEMICOLON_TOKEN:
            if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
                break;
            }
            return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
        case CLOSE_BRACE_TOKEN:
        case COMMA_TOKEN:
            if (context == ParserRuleContext.AMBIGUOUS_STMT) {
                keyExpr = getKeyExpr(member);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            // Intentional fall-through to the default expression check.
        default:
            if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
                // Anything that continues an expression makes this a member access.
                keyExpr = getKeyExpr(member);
                typeDescOrExpr = getExpression(typeDescOrExpr);
                return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
                        closeBracket);
            }
            break;
    }
    ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
    if (isTypedBindingPattern) {
        recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
    }
    recover(peek(), recoveryCtx);
    return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
            isTypedBindingPattern, allowAssignment, context);
}
/**
 * Wrap the given member as the key-expression list of a member-access-expr,
 * synthesizing a missing identifier when no member was parsed.
 *
 * @param member Parsed key expression, or {@code null} when absent
 * @return Key expression node list
 */
private STNode getKeyExpr(STNode member) {
    if (member != null) {
        return STNodeFactory.createNodeList(member);
    }
    // No key expression was present: create a missing identifier carrying a diagnostic.
    STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
    return STNodeFactory.createNodeList(missingVarRef);
}
/**
 * Create a typed-binding-pattern from an ambiguous <code>T[a]</code>, treating the bracketed part
 * as a list-binding-pattern, or as an array length when the member is a numeric/asterisk literal.
 *
 * @param typeDescOrExpr Type desc or expression preceding the open-bracket
 * @param openBracket    Open bracket
 * @param member         Bracketed member, possibly empty
 * @param closeBracket   Close bracket
 * @return Typed-binding-pattern node
 */
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                         STNode closeBracket) {
    STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
    if (!isEmpty(member)) {
        SyntaxKind memberKind = member.kind;
        if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
            // `T[5]` / `T[*]`: an array type-desc that is missing its variable name.
            STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
            STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
            STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
            STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
            return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
        }
        if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
            // Qualified names cannot be list-binding-pattern members; attach as invalid trivia.
            openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
                    DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
        } else {
            STNode bindingPattern = getBindingPattern(member);
            bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
        }
    }
    STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
    STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
    return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket    Open bracket
 * @param member         Member
 * @param closeBracket   Close bracket
 * @param isTypedBindingPattern Whether this is already known to be a typed-binding-pattern
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
                                                      STNode closeBracket, boolean isTypedBindingPattern) {
    STNode pipeOrAndToken = parseUnionOrIntersectionToken();
    STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        // Rhs is a typed-BP: merge the two type-descs, keeping the rhs binding pattern.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
        STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
        STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
        return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
    }
    if (isTypedBindingPattern) {
        // Definitely a typed-BP, but no binding pattern was found: synthesize a missing var name.
        STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
        lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
        return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
    }
    // Otherwise this is a binary expression with a member-access lhs.
    STNode keyExpr = getExpression(member);
    STNode containerExpr = getExpression(typeDescOrExpr);
    STNode lhsExpr =
            STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
    return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
            typedBindingPatternOrExpr);
}
/**
 * Merges two types separated by <code>|</code> or <code>&</code> into one type, while taking
 * precedence and associativity into account.
 *
 * @param lhsTypeDesc    lhs type
 * @param pipeOrAndToken pipe or bitwise-and token
 * @param rhsTypeDesc    rhs type
 * @return a TypeDescriptorNode
 */
private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
    // Dispatch on the separator token: `|` builds a union, `&` an intersection.
    return pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN
            ? mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc)
            : mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
}
/**
 * Merges two types separated by <code>|</code> into one type, while taking precedence
 * and associativity into account.
 *
 * @param lhsTypeDesc lhs type
 * @param pipeToken   pipe token
 * @param rhsTypeDesc rhs type
 * @return a TypeDescriptorNode
 */
private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
    if (rhsTypeDesc.kind != SyntaxKind.UNION_TYPE_DESC) {
        return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
    }
    // Union is left-associative: splice the lhs into the left-most position of the rhs union.
    return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
}
/**
 * Merges two types separated by <code>&</code> into one type, while taking precedence
 * and associativity into account.
 *
 * @param lhsTypeDesc     lhs type
 * @param bitwiseAndToken bitwise-and token
 * @param rhsTypeDesc     rhs type
 * @return a TypeDescriptorNode
 */
private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // `&` binds tighter than `|`: intersect only the right-most member of the lhs union.
        STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // Intersect lhs-right with the left-most member of the rhs union, then union the rest.
            rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                    lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
        } else {
            rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        }
    }
    if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // Intersect the lhs with the left-most member of the rhs union.
        STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
    } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Left-associativity: splice the lhs into the left-most position of the rhs intersection.
        STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
    } else {
        return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
    }
}
/**
 * Union the given type with the left-most member of the given union type-desc,
 * recursing until the left-most non-union member is reached (preserves left-associativity).
 *
 * @param typeDesc      Type to splice in at the left-most position
 * @param pipeToken     Pipe token joining them
 * @param unionTypeDesc Union type-desc to modify
 * @return New union type-desc
 */
private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                              STUnionTypeDescriptorNode unionTypeDesc) {
    STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftTypeDesc));
    }
    leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
}
/**
 * Intersect the given type with the left-most member of the given union type-desc,
 * honoring the higher precedence of <code>&</code> over <code>|</code>.
 *
 * @param typeDesc        Type to splice in at the left-most position
 * @param bitwiseAndToken Bitwise-and token joining them
 * @param unionTypeDesc   Union type-desc to modify
 * @return New union type-desc
 */
private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                     STUnionTypeDescriptorNode unionTypeDesc) {
    STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STUnionTypeDescriptorNode) leftTypeDesc));
    }
    if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Left-most member is itself an intersection: splice into its left-most position.
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STIntersectionTypeDescriptorNode) leftTypeDesc));
    }
    leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
}
/**
 * Intersect the given type with the left-most member of the given intersection type-desc,
 * recursing until the left-most non-intersection member is reached (preserves left-associativity).
 *
 * @param typeDesc             Type to splice in at the left-most position
 * @param bitwiseAndToken      Bitwise-and token joining them
 * @param intersectionTypeDesc Intersection type-desc to modify
 * @return New intersection type-desc
 */
private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,
                                                            STNode bitwiseAndToken,
                                                            STIntersectionTypeDescriptorNode intersectionTypeDesc) {
    STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STIntersectionTypeDescriptorNode) leftTypeDesc));
    }
    leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
    return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
}
/**
 * Attach an array dimension to a type-desc. Because `[]` binds tighter than both
 * `|` and `&`, when the lhs is a union or an intersection the dimension must be
 * attached to its right-most operand, not to the whole lhs.
 *
 * @param openBracket  Open bracket token of the dimension
 * @param member       Array length node (may be empty)
 * @param closeBracket Close bracket token of the dimension
 * @param lhsTypeDesc  Type desc the dimension follows
 * @return Type desc with the array dimension applied
 */
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // A|B[] --> A | (B[]) : recurse into the right operand.
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // A&B[] --> A & (B[]) : recurse into the right operand.
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        // Simple case: the dimension applies directly to the lhs.
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}
/**
 * Parse a union (`|`) or intersection (`&`) type operator, recovering when
 * the next token is neither.
 *
 * @return Pipe or bitwise-and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            return consume();
        default:
            recover(nextToken, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
            return parseUnionOrIntersectionToken();
    }
}
/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode            Member node
 * @param isTypedBindingPattern Whether the list occurs in a typed-binding-pattern position
 * @return Inferred type of the bracketed list, or {@code NONE} when still ambiguous
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // Empty member keeps the list ambiguous.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        // A member that can only be a type-desc forces a tuple type.
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL:
            // [*] is an inferred array length.
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Any binding pattern forces a list-binding-pattern.
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE: // a qualified-name-ref can only be a type-ref
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL: // member is a const expression. could be array-type or member-access
            if (isTypedBindingPattern) {
                return SyntaxKind.ARRAY_TYPE_DESC;
            }
            return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case SIMPLE_NAME_REFERENCE: // member is a simple type-ref/var-ref
        case BRACKETED_LIST: // member is again ambiguous
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            // error(...) can be an error-binding-pattern (still ambiguous) or an expression.
            if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
        default:
            if (isTypedBindingPattern) {
                return SyntaxKind.NONE;
            }
            // Any other member is treated as an expression, i.e. member-access.
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}
/*
* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
* The ambiguity lies in between:
* 1) Assignment that starts with list binding pattern
* 2) Var-decl statement that starts with tuple type
* 3) Statement that starts with list constructor, such as sync-send, etc.
*/
/**
 * Parse any statement that starts with an open-bracket.
 *
 * @param annots               Annotations attached to the statement.
 * @param possibleMappingField Whether this list could be a mapping-constructor field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    // This entry point is always a statement root (isRoot = true).
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    return parseStatementStartsWithOpenBracket(annots, true, possibleMappingField);
}
/**
 * Parse a bracketed list that occurs as a member of another ambiguous list,
 * i.e. not at the root of a statement (isRoot = false, no annotations).
 *
 * @return Parsed node
 */
private STNode parseMemberBracketedList() {
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStatementStartsWithOpenBracket(annots, false, false);
}
/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param annots               Annotations attached to the statement
 * @param isRoot               Is this the root of the list
 * @param possibleMappingField Whether this list could be a mapping-constructor field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    // Parse members one by one; as soon as a member disambiguates the list,
    // hand the already-parsed members over to the dedicated parse method.
    while (!isBracketedListEnd(peek().kind)) {
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                // If the member type was figured out as a tuple-type-desc member, then parse the
                // remaining members as tuple-type-desc members and be done with it.
                member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case MEMBER_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Still ambiguous: keep collecting members.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }
    // All members parsed without disambiguation; decide based on what follows.
    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
            isRoot, possibleMappingField);
    return bracketedList;
}
/**
 * Parse a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    // Start with an empty qualifier list; qualifiers are collected by the overload.
    List<STNode> typeDescQualifiers = new ArrayList<>();
    return parseStatementStartBracketedListMember(typeDescQualifiers);
}
/**
 * Parse a member of an ambiguous bracketed list, dispatching on the next token.
 * The member itself may remain ambiguous (e.g. a simple name, a nested bracketed list).
 *
 * @param qualifiers Type-desc qualifiers collected so far
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed list: parse it, still ambiguous.
            reportInvalidQualifierList(qualifiers);
            return parseMemberBracketedList();
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (isWildcardBP(identifier)) {
                // `_` is a wildcard binding pattern.
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return getWildcardBindingPattern(varName);
            }
            nextToken = peek();
            if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...` is a rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            // An identifier followed by a type continuation token (other than `[`) must be
            // the start of a complex type-desc.
            if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
            }
            // Otherwise continue as an expression rhs.
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
        case OPEN_BRACE_TOKEN:
            // `{` could start either a mapping binding pattern or a mapping constructor.
            reportInvalidQualifierList(qualifiers);
            return parseMappingBindingPatterOrMappingConstructor();
        case ERROR_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // error(...) or error Foo(...): binding pattern or constructor.
                return parseErrorBindingPatternOrErrorConstructor();
            }
            // Plain `error` is a type-desc.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case ELLIPSIS_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseRestBindingOrSpreadMember();
        case XML_KEYWORD:
        case STRING_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            // A following backtick means a template expression; otherwise a type-desc.
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            // A following `<` means a parameterized type-desc; otherwise an expression.
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr(qualifiers);
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
        case AT_TOKEN:
            return parseMemberDescriptor();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
            return parseStatementStartBracketedListMember(qualifiers);
    }
}
/**
 * Parse an ellipsis-prefixed member: a rest-binding-pattern when the following
 * expression is a simple name reference, a spread-member otherwise.
 *
 * @return Rest-binding-pattern or spread-member node
 */
private STNode parseRestBindingOrSpreadMember() {
    STNode ellipsisToken = parseEllipsis();
    STNode followingExpr = parseExpression();
    boolean isSimpleNameRef = followingExpr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE;
    return isSimpleNameRef
            ? STNodeFactory.createRestBindingPatternNode(ellipsisToken, followingExpr)
            : STNodeFactory.createSpreadMemberNode(ellipsisToken, followingExpr);
}
/**
 * Continue parsing an ambiguous bracketed list as either a tuple-type-desc or a
 * list-constructor, after a member forced that narrower (but still two-way) ambiguity.
 *
 * @param annots      Annotations attached to the construct
 * @param openBracket Open bracket token
 * @param memberList  Members parsed so far
 * @param member      Member that narrowed the ambiguity
 * @param isRoot      Is this the root of the list
 * @return Parsed node
 */
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                     STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();
    STNode tupleTypeDescOrListCons;
    if (memberEnd == null) {
        // No more members: decide based on what follows the close bracket.
        STNode closeBracket = parseCloseBracket();
        tupleTypeDescOrListCons =
                parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    } else {
        // More members follow: keep parsing in the narrowed ambiguous mode.
        memberList.add(memberEnd);
        tupleTypeDescOrListCons = parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
    }
    return tupleTypeDescOrListCons;
}
/**
 * Parse tuple type desc or list constructor.
 *
 * @param annots Annotations attached to the construct
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    // Fresh bracketed list as a member (isRoot = false).
    startContext(ParserRuleContext.BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, false);
}
/**
 * Parse the members of a construct that is known to be either a tuple-type-desc or a
 * list-constructor, bailing out to the dedicated parse method once a member settles it.
 *
 * @param annots      Annotations attached to the construct
 * @param openBracket Open bracket token
 * @param memberList  Members parsed so far
 * @param isRoot      Is this the root of the list
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                   boolean isRoot) {
    STToken nextToken = peek();
    while (!isBracketedListEnd(nextToken.kind)) {
        STNode member = parseTupleTypeDescOrListConstructorMember(annots);
        SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
        switch (currentNodeType) {
            case LIST_CONSTRUCTOR:
                // Member is definitely an expression: the whole list is a list-constructor.
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC:
                // Member is definitely a type-desc: the whole list is a tuple-type-desc.
                member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case MEMBER_TYPE_DESC:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
            default:
                // Still ambiguous: keep collecting members.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
        nextToken = peek();
    }
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
/**
 * Parse a member of a construct that is either a tuple-type-desc or a list-constructor,
 * dispatching on the next token. Unlike the stmt-start variant, binding patterns are
 * not possible here.
 *
 * @param annots Annotations attached to the construct
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested ambiguous list.
            return parseTupleTypeDescOrListConstructor(annots);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // `T...` is a rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
        case OPEN_BRACE_TOKEN:
            // Only a mapping constructor is possible here (no binding patterns).
            return parseMappingConstructorExpr();
        case ERROR_KEYWORD:
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                return parseErrorConstructorExpr(false);
            }
            // Plain `error` is a type-desc.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case XML_KEYWORD:
        case STRING_KEYWORD:
            // A following backtick means a template expression; otherwise a type-desc.
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            // A following `<` means a parameterized type-desc; otherwise an expression.
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        case AT_TOKEN:
            return parseMemberDescriptor();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);
            return parseTupleTypeDescOrListConstructorMember(annots);
    }
}
/**
 * Classify a member of an ambiguous tuple-type-desc/list-constructor.
 * Delegates to the general stmt-start bracketed-list classification.
 *
 * @param memberNode Member node to classify
 * @return Syntax kind the member implies for the enclosing list
 */
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
    return getStmtStartBracketedListType(memberNode);
}
/**
 * Decide between tuple-type-desc and list-constructor after the close bracket,
 * based on the token that follows.
 *
 * @param openBracket  Open bracket token
 * @param members      Parsed members
 * @param closeBracket Close bracket token
 * @param isRoot       Is this the root of the list
 * @return Parsed node (or an ambiguous collection node when still undecidable as a member)
 */
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
                                                      boolean isRoot) {
    STNode tupleTypeOrListConst;
    switch (peek().kind) {
        case COMMA_TOKEN: // [a, b, c],
        case CLOSE_BRACE_TOKEN: // [a, b, c]}
        case CLOSE_BRACKET_TOKEN: // [a, b, c]]
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            if (!isRoot) {
                // As a member of an outer construct this is still ambiguous: bubble up.
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
                        closeBracket);
            }
            // At the root, fall through to the default decision below.
        default:
            if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                    (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                // An expression continuation (or `=` at the root) means a list-constructor.
                members = getExpressionList(members, false);
                STNode memberExpressions = STNodeFactory.createNodeList(members);
                tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                        memberExpressions, closeBracket);
                break;
            }
            // Otherwise treat as a tuple-type-desc and continue as a complex type-desc.
            STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));
            STNode tupleTypeDesc =
                    STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
            tupleTypeOrListConst =
                    parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
    endContext();
    if (!isRoot) {
        return tupleTypeOrListConst;
    }
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
}
    // Classify based on the token after the current lookahead position.
    // NOTE(review): the enclosing method signature is outside this span; presumably this
    // distinguishes a service declaration from other `service`-prefixed constructs — confirm.
    switch (peek(lookahead + 1).kind) {
        case IDENTIFIER_TOKEN:
            SyntaxKind tokenAfterIdentifier = peek(lookahead + 2).kind;
            switch (tokenAfterIdentifier) {
                case ON_KEYWORD:
                case OPEN_BRACE_TOKEN:
                    return true;
                case EQUAL_TOKEN:
                case SEMICOLON_TOKEN:
                case QUESTION_MARK_TOKEN:
                    // Explicit non-decl cases; same result as default, kept for clarity.
                    return false;
                default:
                    return false;
            }
        case ON_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Parse listener declaration, given the qualifier.
 * <p>
 * <code>
 * listener-decl := metadata [public] listener [type-descriptor] variable-name = expression ;
 * </code>
 *
 * @param metadata  Metadata
 * @param qualifier Qualifier that precedes the listener declaration
 * @return Parsed node
 */
private STNode parseListenerDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.LISTENER_DECL);
    STNode listenerKeyword = parseListenerKeyword();
    // An identifier right after `listener` may be either the (optional) type or the
    // variable name; resolve that ambiguity in the dedicated method.
    if (peek().kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode listenerDecl =
                parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, listenerKeyword, true);
        endContext();
        return listenerDecl;
    }
    // Unambiguous path: type-descriptor followed by the variable name.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, listenerKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse the {@code listener} keyword, recovering when it is missing.
 *
 * @return Listener-keyword node
 */
private STNode parseListenerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LISTENER_KEYWORD) {
        recover(nextToken, ParserRuleContext.LISTENER_KEYWORD);
        return parseListenerKeyword();
    }
    return consume();
}
/**
 * Parse constant declaration, given the qualifier.
 * <p>
 * <code>module-const-decl := metadata [public] const [type-descriptor] identifier = const-expr ;</code>
 *
 * @param metadata  Metadata
 * @param qualifier Qualifier that precedes the constant declaration
 * @return Parsed node
 */
private STNode parseConstantDeclaration(STNode metadata, STNode qualifier) {
    startContext(ParserRuleContext.CONSTANT_DECL);
    STNode constKeyword = parseConstantKeyword();
    return parseConstDecl(metadata, qualifier, constKeyword);
}
/**
 * Parse the components that follows after the const keyword of a constant declaration.
 *
 * @param metadata     Metadata
 * @param qualifier    Qualifier that precedes the constant decl
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseConstDecl(STNode metadata, STNode qualifier, STNode constKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ANNOTATION_KEYWORD:
            // `const annotation ...` is an annotation declaration, not a constant.
            endContext();
            return parseAnnotationDeclaration(metadata, qualifier, constKeyword);
        case IDENTIFIER_TOKEN:
            // The identifier may be either the (optional) type or the variable name.
            STNode constantDecl =
                    parseConstantOrListenerDeclWithOptionalType(metadata, qualifier, constKeyword, false);
            endContext();
            return constantDecl;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }
            recover(peek(), ParserRuleContext.CONST_DECL_TYPE);
            return parseConstDecl(metadata, qualifier, constKeyword);
    }
    // Unambiguous path: type-descriptor followed by the variable name.
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_BEFORE_IDENTIFIER);
    STNode variableName = parseVariableName();
    STNode equalsToken = parseAssignOp();
    STNode initializer = parseExpression();
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, constKeyword, typeDesc, variableName,
            equalsToken, initializer, semicolonToken);
}
/**
 * Parse a constant or listener declaration whose type-descriptor may be absent:
 * the identifier after the keyword could be either the type name or the variable name.
 *
 * @param metadata   Metadata
 * @param qualifier  Qualifier that precedes the declaration
 * @param constKeyword Const or listener keyword
 * @param isListener Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                           boolean isListener) {
    STNode varNameOrTypeName = parseStatementStartIdentifier();
    return parseConstantOrListenerDeclRhs(metadata, qualifier, constKeyword, varNameOrTypeName, isListener);
}
/**
 * Parse the component that follows the first identifier in a const decl. The identifier
 * can be either the type-name (a user defined type) or the var-name where the type-name
 * is not present.
 *
 * @param metadata      Metadata
 * @param qualifier     Qualifier that precedes the constant decl
 * @param keyword       Keyword
 * @param typeOrVarName Identifier that follows the const-keyword
 * @param isListener    Whether this is a listener declaration
 * @return Parsed node
 */
private STNode parseConstantOrListenerDeclRhs(STNode metadata, STNode qualifier, STNode keyword,
                                              STNode typeOrVarName, boolean isListener) {
    if (typeOrVarName.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        // A qualified name can only be a type reference; the var name must follow.
        STNode type = typeOrVarName;
        STNode variableName = parseVariableName();
        return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
    }
    STNode type;
    STNode variableName;
    switch (peek().kind) {
        case IDENTIFIER_TOKEN:
            // Two identifiers: the first was the type, the second is the var name.
            type = typeOrVarName;
            variableName = parseVariableName();
            break;
        case EQUAL_TOKEN:
            // Only one identifier: it was the var name; the type is absent.
            variableName = ((STSimpleNameReferenceNode) typeOrVarName).name;
            type = STNodeFactory.createEmptyNode();
            break;
        default:
            recover(peek(), ParserRuleContext.CONST_DECL_RHS);
            return parseConstantOrListenerDeclRhs(metadata, qualifier, keyword, typeOrVarName, isListener);
    }
    return parseListenerOrConstRhs(metadata, qualifier, keyword, isListener, type, variableName);
}
/**
 * Parse the `= expression ;` tail shared by listener and constant declarations,
 * and assemble the appropriate declaration node.
 *
 * @param metadata     Metadata
 * @param qualifier    Qualifier preceding the declaration
 * @param keyword      Listener or const keyword
 * @param isListener   Whether to build a listener declaration
 * @param type         Type descriptor (possibly empty)
 * @param variableName Declared variable name
 * @return Listener or constant declaration node
 */
private STNode parseListenerOrConstRhs(STNode metadata, STNode qualifier, STNode keyword, boolean isListener,
                                       STNode type, STNode variableName) {
    STNode assignOp = parseAssignOp();
    STNode initExpr = parseExpression();
    STNode semicolon = parseSemicolon();
    if (!isListener) {
        return STNodeFactory.createConstantDeclarationNode(metadata, qualifier, keyword, type, variableName,
                assignOp, initExpr, semicolon);
    }
    return STNodeFactory.createListenerDeclarationNode(metadata, qualifier, keyword, type, variableName,
            assignOp, initExpr, semicolon);
}
/**
 * Parse the {@code const} keyword, recovering until it is found or inserted.
 *
 * @return Const-keyword node
 */
private STNode parseConstantKeyword() {
    STToken nextToken = peek();
    while (nextToken.kind != SyntaxKind.CONST_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONST_KEYWORD);
        nextToken = peek();
    }
    return consume();
}
/**
 * Parse typeof expression.
 * <p>
 * <code>
 * typeof-expr := typeof expression
 * </code>
 *
 * @param isRhsExpr           Whether this is a rhs expression
 * @param isInConditionalExpr Whether this occurs inside a conditional expression
 * @return Typeof expression node
 */
private STNode parseTypeofExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode typeofKeyword = parseTypeofKeyword();
    // The operand binds with unary precedence.
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createTypeofExpressionNode(typeofKeyword, expr);
}
/**
 * Parse the {@code typeof} keyword, recovering when it is missing.
 *
 * @return Typeof-keyword node
 */
private STNode parseTypeofKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case TYPEOF_KEYWORD:
            return consume();
        default:
            recover(nextToken, ParserRuleContext.TYPEOF_KEYWORD);
            return parseTypeofKeyword();
    }
}
/**
 * Parse optional type descriptor given the type.
 * <p>
 * <code>optional-type-descriptor := type-descriptor `?`</code>
 * </p>
 *
 * @param typeDescriptorNode Preceding type descriptor
 * @return Parsed node
 */
private STNode parseOptionalTypeDescriptor(STNode typeDescriptorNode) {
    startContext(ParserRuleContext.OPTIONAL_TYPE_DESCRIPTOR);
    STNode questionMarkToken = parseQuestionMark();
    endContext();
    // Attachment to unions/intersections is handled by createOptionalTypeDesc.
    return createOptionalTypeDesc(typeDescriptorNode, questionMarkToken);
}
/**
 * Create an optional-type-desc, attaching the `?` to the right-most operand when the
 * preceding type-desc is a union or intersection, since `?` binds tighter than `|`/`&`.
 *
 * @param typeDescNode      Preceding type descriptor
 * @param questionMarkToken Question mark token
 * @return Type desc with the optional applied
 */
private STNode createOptionalTypeDesc(STNode typeDescNode, STNode questionMarkToken) {
    if (typeDescNode.kind == SyntaxKind.UNION_TYPE_DESC) {
        // A|B? --> A | (B?)
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) typeDescNode;
        STNode middleTypeDesc = createOptionalTypeDesc(unionTypeDesc.rightTypeDesc, questionMarkToken);
        typeDescNode = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (typeDescNode.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // A&B? --> A & (B?)
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) typeDescNode;
        STNode middleTypeDesc = createOptionalTypeDesc(intersectionTypeDesc.rightTypeDesc, questionMarkToken);
        typeDescNode = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        typeDescNode = validateForUsageOfVar(typeDescNode);
        typeDescNode = STNodeFactory.createOptionalTypeDescriptorNode(typeDescNode, questionMarkToken);
    }
    return typeDescNode;
}
/**
 * Parse unary expression.
 * <p>
 * <code>
 * unary-expr := + expression | - expression | ~ expression | ! expression
 * </code>
 *
 * @param isRhsExpr           Whether this is a rhs expression
 * @param isInConditionalExpr Whether this occurs inside a conditional expression
 * @return Unary expression node
 */
private STNode parseUnaryExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    STNode unaryOperator = parseUnaryOperator();
    // The operand binds with unary precedence.
    STNode expr = parseExpression(OperatorPrecedence.UNARY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createUnaryExpressionNode(unaryOperator, expr);
}
/**
 * Parse a unary operator.
 * <code>UnaryOperator := + | - | ~ | !</code>
 *
 * @return Unary operator token
 */
private STNode parseUnaryOperator() {
    STToken nextToken = peek();
    if (!isUnaryOperator(nextToken.kind)) {
        recover(nextToken, ParserRuleContext.UNARY_OPERATOR);
        return parseUnaryOperator();
    }
    return consume();
}
/**
 * Check whether the given token kind is a unary operator (`+`, `-`, `~`, `!`).
 *
 * @param kind STToken kind
 * @return <code>true</code> if the token kind refers to a unary operator. <code>false</code> otherwise
 */
private boolean isUnaryOperator(SyntaxKind kind) {
    return kind == SyntaxKind.PLUS_TOKEN
            || kind == SyntaxKind.MINUS_TOKEN
            || kind == SyntaxKind.NEGATION_TOKEN
            || kind == SyntaxKind.EXCLAMATION_MARK_TOKEN;
}
/**
 * Parse array type descriptor.
 * <p>
 * <code>
 * array-type-descriptor := array-member-type-descriptor [ [ array-length ] ]
 * array-member-type-descriptor := type-descriptor
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * inferred-array-length := *
 * </code>
 * </p>
 *
 * @param memberTypeDesc Member type of the array
 * @return Parsed Node
 */
private STNode parseArrayTypeDescriptor(STNode memberTypeDesc) {
    startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
    STNode openBracketToken = parseOpenBracket();
    STNode arrayLengthNode = parseArrayLength();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    // Validation of the length and flattening of nested dimensions happen in createArrayTypeDesc.
    return createArrayTypeDesc(memberTypeDesc, openBracketToken, arrayLengthNode, closeBracketToken);
}
/**
 * Create an array-type-desc node, validating the array length and flattening nested
 * array dimensions (e.g. {@code T[a][b]}) into a single dimension list.
 * <p>
 * Only integer literals (decimal/hex), {@code *}, and constant references are valid
 * lengths; any other length node is attached to the open-bracket token as invalid-node
 * minutiae with an {@code ERROR_INVALID_ARRAY_LENGTH} diagnostic.
 *
 * @param memberTypeDesc    Member type of the array
 * @param openBracketToken  Open bracket token of the new dimension
 * @param arrayLengthNode   Array length node, or {@code null} when absent
 * @param closeBracketToken Close bracket token of the new dimension
 * @return Array-type-desc node
 */
private STNode createArrayTypeDesc(STNode memberTypeDesc, STNode openBracketToken, STNode arrayLengthNode,
                                   STNode closeBracketToken) {
    memberTypeDesc = validateForUsageOfVar(memberTypeDesc);
    if (arrayLengthNode != null) {
        switch (arrayLengthNode.kind) {
            case ASTERISK_LITERAL:
            case SIMPLE_NAME_REFERENCE:
            case QUALIFIED_NAME_REFERENCE:
                break;
            case NUMERIC_LITERAL:
                SyntaxKind numericLiteralKind = arrayLengthNode.childInBucket(0).kind;
                if (numericLiteralKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                        numericLiteralKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                    break;
                }
                // fall through: non-integer numeric literals are invalid array lengths
            default:
                openBracketToken = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracketToken,
                        arrayLengthNode, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
                arrayLengthNode = STNodeFactory.createEmptyNode();
        }
    }
    // Use the diamond operator; the original raw `new ArrayList()` produced an unchecked warning.
    List<STNode> arrayDimensions = new ArrayList<>();
    if (memberTypeDesc.kind == SyntaxKind.ARRAY_TYPE_DESC) {
        // Flatten: copy the inner array's dimensions so all dimensions live in one node list.
        STArrayTypeDescriptorNode innerArrayType = (STArrayTypeDescriptorNode) memberTypeDesc;
        STNode innerArrayDimensions = innerArrayType.dimensions;
        int dimensionCount = innerArrayDimensions.bucketCount();
        for (int i = 0; i < dimensionCount; i++) {
            arrayDimensions.add(innerArrayDimensions.childInBucket(i));
        }
        memberTypeDesc = innerArrayType.memberTypeDesc;
    }
    STNode arrayDimension = STNodeFactory.createArrayDimensionNode(openBracketToken, arrayLengthNode,
            closeBracketToken);
    arrayDimensions.add(arrayDimension);
    STNode arrayDimensionNodeList = STNodeFactory.createNodeList(arrayDimensions);
    return STNodeFactory.createArrayTypeDescriptorNode(memberTypeDesc, arrayDimensionNodeList);
}
/**
 * Parse array length.
 * <p>
 * <code>
 * array-length :=
 *    int-literal
 *    | constant-reference-expr
 *    | inferred-array-length
 * constant-reference-expr := variable-reference-expr
 * </code>
 * </p>
 *
 * @return Parsed array length, or an empty node when the length is absent
 */
private STNode parseArrayLength() {
    STToken token = peek();
    switch (token.kind) {
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case ASTERISK_TOKEN: // inferred array length `[*]`
            return parseBasicLiteral();
        case CLOSE_BRACKET_TOKEN:
            // `[]` — no length specified.
            return STNodeFactory.createEmptyNode();
        case IDENTIFIER_TOKEN:
            // Constant reference, possibly qualified.
            return parseQualifiedIdentifier(ParserRuleContext.ARRAY_LENGTH);
        default:
            recover(token, ParserRuleContext.ARRAY_LENGTH);
            return parseArrayLength();
    }
}
/**
 * Parse a possibly-empty annotation list.
 * <p>
 * <i>Note: although the spec defines annots as one-or-more annotations with optional
 * usage, for tree consistency this method accepts zero-or-more annotations and its
 * usage sites treat the list as non-optional.</i>
 * <p>
 * <code>annots := annotation*</code>
 *
 * @return Node list of annotations (possibly empty)
 */
private STNode parseOptionalAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    while (peek().kind == SyntaxKind.AT_TOKEN) {
        annotations.add(parseAnnotation());
    }
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse an annotation list containing at least one annotation.
 *
 * @return Node list of annotations
 */
private STNode parseAnnotations() {
    startContext(ParserRuleContext.ANNOTATIONS);
    List<STNode> annotations = new ArrayList<>();
    // The first annotation is mandatory; further ones are driven by the `@` lookahead.
    do {
        annotations.add(parseAnnotation());
    } while (peek().kind == SyntaxKind.AT_TOKEN);
    endContext();
    return STNodeFactory.createNodeList(annotations);
}
/**
 * Parse annotation attachment.
 * <p>
 * <code>annotation := @ annot-tag-reference annot-value</code>
 *
 * @return Parsed node
 */
private STNode parseAnnotation() {
    STNode atToken = parseAtToken();
    STNode annotReference;
    if (isPredeclaredIdentifier(peek().kind)) {
        annotReference = parseQualifiedIdentifier(ParserRuleContext.ANNOT_REFERENCE);
    } else {
        // No usable reference after `@`: insert a missing identifier token.
        annotReference = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        annotReference = STNodeFactory.createSimpleNameReferenceNode(annotReference);
    }
    STNode annotValue;
    if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
        // Optional annot-value is a mapping constructor.
        annotValue = parseMappingConstructorExpr();
    } else {
        annotValue = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createAnnotationNode(atToken, annotReference, annotValue);
}
/**
 * Parse an {@code @} token, recovering when it is missing.
 *
 * @return At-token node
 */
private STNode parseAtToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.AT_TOKEN) {
        recover(nextToken, ParserRuleContext.AT);
        return parseAtToken();
    }
    return consume();
}
/**
 * Parse metadata. Metadata consists of an optional doc string and
 * an annotations list.
 * <p>
 * <code>metadata := [DocumentationString] annots</code>
 *
 * @return Parsed node, or an empty node when neither is present
 */
private STNode parseMetaData() {
    STNode docString;
    STNode annotations;
    switch (peek().kind) {
        case DOCUMENTATION_STRING:
            // Doc string first, then any annotations.
            docString = parseMarkdownDocumentation();
            annotations = parseOptionalAnnotations();
            break;
        case AT_TOKEN:
            // Annotations without a doc string.
            docString = STNodeFactory.createEmptyNode();
            annotations = parseOptionalAnnotations();
            break;
        default:
            // No metadata at all.
            return STNodeFactory.createEmptyNode();
    }
    return createMetadata(docString, annotations);
}
/**
 * Create a metadata node from the given doc string and annotations, or an empty
 * node when both are absent.
 *
 * @param docString   Documentation string, possibly null
 * @param annotations Annotation list, possibly null
 * @return A metadata node, or an empty node
 */
private STNode createMetadata(STNode docString, STNode annotations) {
    boolean hasNoContent = docString == null && annotations == null;
    return hasNoContent
            ? STNodeFactory.createEmptyNode()
            : STNodeFactory.createMetadataNode(docString, annotations);
}
/**
 * Parse type test expression.
 * <code>
 * type-test-expr := expression (is | !is) type-descriptor
 * </code>
 *
 * @param lhsExpr             Preceding expression of the is expression
 * @param isInConditionalExpr Whether this occurs inside a conditional expression
 * @return Is expression node
 */
private STNode parseTypeTestExpression(STNode lhsExpr, boolean isInConditionalExpr) {
    STNode isOrNotIsKeyword = parseIsOrNotIsKeyword();
    STNode typeDescriptor = parseTypeDescriptorInExpression(isInConditionalExpr);
    return STNodeFactory.createTypeTestExpressionNode(lhsExpr, isOrNotIsKeyword, typeDescriptor);
}
/**
 * Parse the {@code is} keyword or the {@code !is} keyword, recovering when
 * neither is present.
 *
 * @return is-keyword or not-is-keyword node
 */
private STNode parseIsOrNotIsKeyword() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IS_KEYWORD:
        case NOT_IS_KEYWORD:
            return consume();
        default:
            recover(nextToken, ParserRuleContext.IS_KEYWORD);
            return parseIsOrNotIsKeyword();
    }
}
/**
 * Parse local type definition statement.
 * <code>local-type-defn-stmt := [annots] type identifier type-descriptor ;</code>
 *
 * @param annots Annotations attached to the statement
 * @return Local type definition statement node
 */
private STNode parseLocalTypeDefinitionStatement(STNode annots) {
    startContext(ParserRuleContext.LOCAL_TYPE_DEFINITION_STMT);
    STNode typeKeyword = parseTypeKeyword();
    STNode typeName = parseTypeName();
    STNode typeDescriptor = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_DEF);
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createLocalTypeDefinitionStatementNode(annots, typeKeyword, typeName, typeDescriptor,
            semicolon);
}
/**
 * Parse statement which only consists of an action or expression.
 *
 * @param annots Annotations
 * @return Statement node
 */
private STNode parseExpressionStatement(STNode annots) {
    startContext(ParserRuleContext.EXPRESSION_STATEMENT);
    STNode expression = parseActionOrExpressionInLhs(annots);
    // getExpressionAsStatement consumes the trailing semicolon and ends the context.
    return getExpressionAsStatement(expression);
}
/**
 * Parse statements that start with an expression.
 *
 * @param annots Annotations attached to the statement
 * @return Statement node
 */
private STNode parseStatementStartWithExpr(STNode annots) {
    // The statement kind is still ambiguous until the token after the expression is seen.
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode expr = parseActionOrExpressionInLhs(annots);
    return parseStatementStartWithExprRhs(expr);
}
/**
 * Parse the component followed by the expression, at the beginning of a statement.
 *
 * @param expression Action or expression in LHS
 * @return Statement node
 */
private STNode parseStatementStartWithExprRhs(STNode expression) {
    SyntaxKind nextTokenKind = peek().kind;
    // Actions and `expr ;` are complete statements on their own.
    if (isAction(expression) || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN) {
        return getExpressionAsStatement(expression);
    }
    switch (nextTokenKind) {
        case EQUAL_TOKEN:
            // `lhs = ...` — an assignment statement.
            switchContext(ParserRuleContext.ASSIGNMENT_STMT);
            return parseAssignmentStmtRhs(expression);
        case IDENTIFIER_TOKEN:
            // Intentional fall-through: an identifier here is handled the same as
            // any other unexpected token (compound-assignment check, then recovery).
        default:
            if (isCompoundAssignment(nextTokenKind)) {
                return parseCompoundAssignmentStmtRhs(expression);
            }
            // Choose a recovery context depending on whether the expression could
            // already stand alone as a statement.
            ParserRuleContext context;
            if (isPossibleExpressionStatement(expression)) {
                context = ParserRuleContext.EXPR_STMT_RHS;
            } else {
                context = ParserRuleContext.STMT_START_WITH_EXPR_RHS;
            }
            recover(peek(), context);
            return parseStatementStartWithExprRhs(expression);
    }
}
/**
 * Check whether the given expression kind can stand on its own as an
 * expression-statement (i.e. a call expression or an action).
 *
 * @param expression Expression to check
 * @return <code>true</code> if it can be an expression-statement. <code>false</code> otherwise
 */
private boolean isPossibleExpressionStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
        case CHECK_EXPRESSION:
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return true;
        default:
            return false;
    }
}
/**
 * Convert an already-parsed expression/action into the corresponding statement node.
 * Expressions that are not valid as statements get wrapped in an
 * invalid-expression-statement node with a diagnostic attached.
 *
 * @param expression Parsed expression or action
 * @return Statement node
 */
private STNode getExpressionAsStatement(STNode expression) {
    switch (expression.kind) {
        case METHOD_CALL:
        case FUNCTION_CALL:
            return parseCallStatement(expression);
        case CHECK_EXPRESSION:
            return parseCheckStatement(expression);
        case REMOTE_METHOD_CALL_ACTION:
        case CHECK_ACTION:
        case BRACED_ACTION:
        case START_ACTION:
        case TRAP_ACTION:
        case FLUSH_ACTION:
        case ASYNC_SEND_ACTION:
        case SYNC_SEND_ACTION:
        case RECEIVE_ACTION:
        case WAIT_ACTION:
        case QUERY_ACTION:
        case COMMIT_ACTION:
            return parseActionStatement(expression);
        default:
            // Not a valid expression-statement: still consume the semicolon, then
            // attach an error diagnostic to the resulting node.
            STNode semicolon = parseSemicolon();
            endContext();
            expression = getExpression(expression);
            STNode exprStmt = STNodeFactory.createExpressionStatementNode(SyntaxKind.INVALID_EXPRESSION_STATEMENT,
                    expression, semicolon);
            exprStmt = SyntaxErrors.addDiagnostic(exprStmt, DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_STATEMENT);
            return exprStmt;
    }
}
/**
 * Create an array type descriptor out of an indexed expression, validating the
 * length expression found between the brackets. Invalid length expressions are
 * attached to the open bracket as invalid-node minutiae with a diagnostic.
 *
 * @param indexedExpr Indexed expression to re-interpret as an array type descriptor
 * @return Array type descriptor node
 */
private STNode parseArrayTypeDescriptorNode(STIndexedExpressionNode indexedExpr) {
    STNode memberTypeDesc = getTypeDescFromExpr(indexedExpr.containerExpression);
    STNodeList lengthExprs = (STNodeList) indexedExpr.keyExpression;
    // No length expression: an open array type (e.g. T[]).
    if (lengthExprs.isEmpty()) {
        return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                indexedExpr.closeBracket);
    }
    STNode lengthExpr = lengthExprs.get(0);
    switch (lengthExpr.kind) {
        case SIMPLE_NAME_REFERENCE:
            // A missing name means there was no real length expression.
            STSimpleNameReferenceNode nameRef = (STSimpleNameReferenceNode) lengthExpr;
            if (nameRef.name.isMissing()) {
                return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, STNodeFactory.createEmptyNode(),
                        indexedExpr.closeBracket);
            }
            break;
        case ASTERISK_LITERAL:
        case QUALIFIED_NAME_REFERENCE:
            break;
        case NUMERIC_LITERAL:
            // Only integer literals (decimal/hex) are valid array lengths.
            SyntaxKind innerChildKind = lengthExpr.childInBucket(0).kind;
            if (innerChildKind == SyntaxKind.DECIMAL_INTEGER_LITERAL_TOKEN ||
                    innerChildKind == SyntaxKind.HEX_INTEGER_LITERAL_TOKEN) {
                break;
            }
            // fall through: non-integer numeric literal is invalid
        default:
            // Invalid length: attach it to the open bracket as invalid minutiae and
            // proceed with an empty length expression.
            STNode newOpenBracketWithDiagnostics = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(
                    indexedExpr.openBracket, lengthExpr, DiagnosticErrorCode.ERROR_INVALID_ARRAY_LENGTH);
            indexedExpr = indexedExpr.replace(indexedExpr.openBracket, newOpenBracketWithDiagnostics);
            lengthExpr = STNodeFactory.createEmptyNode();
    }
    return createArrayTypeDesc(memberTypeDesc, indexedExpr.openBracket, lengthExpr, indexedExpr.closeBracket);
}
/**
 * <p>
 * Parse call statement, given the already-parsed call expression.
 * </p>
 * <code>
 * call-stmt := call-expr ;
 * <br/>
 * call-expr := function-call-expr | method-call-expr | checking-keyword call-expr
 * </code>
 *
 * @param expression Call expression associated with the call statement
 * @return Call statement node
 */
private STNode parseCallStatement(STNode expression) {
    // Semicolon handling and context ending are shared with check statements.
    return parseCallStatementOrCheckStatement(expression);
}
/**
 * <p>
 * Parse checking statement, given the already-parsed checking expression.
 * </p>
 * <code>
 * checking-stmt := checking-expr ;
 * <br/>
 * checking-expr := checking-keyword expr ;
 * </code>
 *
 * @param expression Checking expression associated with the checking statement
 * @return Checking statement node
 */
private STNode parseCheckStatement(STNode expression) {
    // Semicolon handling and context ending are shared with call statements.
    return parseCallStatementOrCheckStatement(expression);
}
/**
 * Create a call-statement node out of an already-parsed call or checking expression,
 * consuming the trailing semicolon.
 *
 * @param expression Call or checking expression
 * @return Call statement node
 */
private STNode parseCallStatementOrCheckStatement(STNode expression) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.CALL_STATEMENT, expression, semicolonToken);
}
/**
 * Create an action-statement node out of an already-parsed action, consuming the
 * trailing semicolon.
 *
 * @param action Parsed action
 * @return Action statement node
 */
private STNode parseActionStatement(STNode action) {
    STNode semicolonToken = parseSemicolon();
    endContext();
    return STNodeFactory.createExpressionStatementNode(SyntaxKind.ACTION_STATEMENT, action, semicolonToken);
}
/**
 * Parse remote method call action or async send action, given the starting expression.
 * <p>
 * <code>
 * remote-method-call-action := expression -> method-name ( arg-list )
 * <br/>
 * async-send-action := expression -> peer-worker ;
 * </code>
 *
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @return Parsed node
 */
private STNode parseRemoteMethodCallOrAsyncSendAction(STNode expression, boolean isRhsExpr) {
    return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, parseRightArrow());
}
/**
 * Parse the portion that follows the right-arrow of a remote method call or
 * async send action.
 *
 * @param expression LHS expression
 * @param isRhsExpr Is this an RHS action
 * @param rightArrow Right arrow token
 * @return Parsed node
 */
private STNode parseRemoteCallOrAsyncSendActionRhs(STNode expression, boolean isRhsExpr, STNode rightArrow) {
    STNode name;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case FUNCTION_KEYWORD:
            // `-> function` is always an async send (to the default worker).
            STNode functionKeyword = consume();
            name = STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
            return parseAsyncSendAction(expression, rightArrow, name);
        case IDENTIFIER_TOKEN:
            name = STNodeFactory.createSimpleNameReferenceNode(parseFunctionName());
            break;
        case CONTINUE_KEYWORD:
        case COMMIT_KEYWORD:
            // These keywords may act as names here.
            name = getKeywordAsSimpleNameRef();
            break;
        default:
            STToken token = peek();
            recover(token, ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_RHS);
            return parseRemoteCallOrAsyncSendActionRhs(expression, isRhsExpr, rightArrow);
    }
    return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
}
/**
 * Disambiguate between a remote method call and an async send, based on the token
 * that follows the name: an open parenthesis means a remote method call, a
 * semicolon means an async send.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param name Method or peer-worker name
 * @return Parsed node
 */
private STNode parseRemoteCallOrAsyncSendEnd(STNode expression, STNode rightArrow, STNode name) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            return parseRemoteMethodCallAction(expression, rightArrow, name);
        case SEMICOLON_TOKEN:
            return parseAsyncSendAction(expression, rightArrow, name);
        default:
            recover(peek(), ParserRuleContext.REMOTE_CALL_OR_ASYNC_SEND_END);
            return parseRemoteCallOrAsyncSendEnd(expression, rightArrow, name);
    }
}
/**
 * Create an async send action node from its already-parsed components.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param peerWorker Peer worker name
 * @return Async send action node
 */
private STNode parseAsyncSendAction(STNode expression, STNode rightArrow, STNode peerWorker) {
    return STNodeFactory.createAsyncSendActionNode(expression, rightArrow, peerWorker);
}
/**
 * Parse the argument list of a remote method call action and assemble the node.
 *
 * @param expression LHS expression
 * @param rightArrow Right arrow token
 * @param name Method name
 * @return Remote method call action node
 */
private STNode parseRemoteMethodCallAction(STNode expression, STNode rightArrow, STNode name) {
    STNode openParen = parseArgListOpenParenthesis();
    STNode args = parseArgsList();
    STNode closeParen = parseArgListCloseParenthesis();
    return STNodeFactory.createRemoteMethodCallActionNode(expression, rightArrow, name, openParen, args,
            closeParen);
}
/**
 * Parse right arrow (<code>-></code>) token.
 *
 * @return Parsed node
 */
private STNode parseRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.RIGHT_ARROW);
        return parseRightArrow();
    }
    return consume();
}
/**
 * Parse map type descriptor.
 * <p>
 * <code>map-type-descriptor := `map` type-parameter</code>
 *
 * @param mapKeyword Already-consumed `map` keyword
 * @return Map type descriptor node
 */
private STNode parseMapTypeDescriptor(STNode mapKeyword) {
    return STNodeFactory.createMapTypeDescriptorNode(mapKeyword, parseTypeParameter());
}
/**
 * Parse parameterized type descriptor.
 * <p>
 * <code>parameterized-type-descriptor := `typedesc` [type-parameter]
 * <br/> | `future` [type-parameter]
 * <br/> | `xml` [type-parameter]
 * <br/> | `error` [type-parameter]</code>
 *
 * @param keywordToken Already-consumed keyword token
 * @return Parsed node
 */
private STNode parseParameterizedTypeDescriptor(STNode keywordToken) {
    // The type parameter is optional: only parse it when a `<` follows.
    STNode typeParam;
    if (peek().kind == SyntaxKind.LT_TOKEN) {
        typeParam = parseTypeParameter();
    } else {
        typeParam = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createParameterizedTypeDescriptorNode(getParameterizedTypeDescKind(keywordToken),
            keywordToken, typeParam);
}
/**
 * Get the type descriptor syntax kind for a given parameterized-type keyword token.
 * Unrecognized keywords map to <code>ERROR_TYPE_DESC</code>, matching the
 * error-keyword case.
 *
 * @param keywordToken typedesc/future/xml/error keyword token
 * @return Corresponding type descriptor kind
 */
private SyntaxKind getParameterizedTypeDescKind(STNode keywordToken) {
    SyntaxKind keywordKind = keywordToken.kind;
    if (keywordKind == SyntaxKind.TYPEDESC_KEYWORD) {
        return SyntaxKind.TYPEDESC_TYPE_DESC;
    }
    if (keywordKind == SyntaxKind.FUTURE_KEYWORD) {
        return SyntaxKind.FUTURE_TYPE_DESC;
    }
    if (keywordKind == SyntaxKind.XML_KEYWORD) {
        return SyntaxKind.XML_TYPE_DESC;
    }
    return SyntaxKind.ERROR_TYPE_DESC;
}
/**
 * Parse <code> > </code> token.
 *
 * @return Parsed GT token
 */
private STNode parseGTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.GT_TOKEN) {
        recover(nextToken, ParserRuleContext.GT);
        return parseGTToken();
    }
    return consume();
}
/**
 * Parse <code> < </code> token.
 *
 * @return Parsed LT token
 */
private STNode parseLTToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LT_TOKEN) {
        recover(nextToken, ParserRuleContext.LT);
        return parseLTToken();
    }
    return consume();
}
/**
 * Parse nil literal. Here the nil literal is only referred to as <code>( )</code>.
 *
 * @return Nil literal node
 */
private STNode parseNilLiteral() {
    startContext(ParserRuleContext.NIL_LITERAL);
    STNode openParen = parseOpenParenthesis();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse annotation declaration, given the qualifier.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclaration(STNode metadata, STNode qualifier, STNode constKeyword) {
    startContext(ParserRuleContext.ANNOTATION_DECL);
    STNode annotationKeyword = parseAnnotationKeyword();
    STNode annotationDecl = parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    endContext();
    return annotationDecl;
}
/**
 * Parse annotation keyword.
 *
 * @return Parsed node
 */
private STNode parseAnnotationKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ANNOTATION_KEYWORD) {
        recover(nextToken, ParserRuleContext.ANNOTATION_KEYWORD);
        return parseAnnotationKeyword();
    }
    return consume();
}
/**
 * Parse the components that follows after the annotation keyword of a annotation declaration.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclFromType(STNode metadata, STNode qualifier, STNode constKeyword,
                                           STNode annotationKeyword) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // An identifier is ambiguous: it may start a type descriptor or be the
            // annot-tag itself. Handled separately.
            return parseAnnotationDeclWithOptionalType(metadata, qualifier, constKeyword, annotationKeyword);
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                break;
            }
            recover(peek(), ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
            return parseAnnotationDeclFromType(metadata, qualifier, constKeyword, annotationKeyword);
    }
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL);
    STNode annotTag = parseAnnotationTag();
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse annotation tag.
 * <p>
 * <code>annot-tag := identifier</code>
 *
 * @return Parsed annot-tag node
 */
private STNode parseAnnotationTag() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.ANNOTATION_TAG);
        return parseAnnotationTag();
    }
    return consume();
}
/**
 * Parse an annotation declaration whose type descriptor is optional, starting from the
 * identifier that follows the annotation keyword. That identifier may be the start of
 * a type descriptor, or the annot-tag itself.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclWithOptionalType(STNode metadata, STNode qualifier, STNode constKeyword,
                                                   STNode annotationKeyword) {
    STNode typeDescOrAnnotTag = parseQualifiedIdentifier(ParserRuleContext.ANNOT_DECL_OPTIONAL_TYPE);
    // A qualified name (mod:name) can only be a type; the annot-tag must follow.
    if (typeDescOrAnnotTag.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword,
                typeDescOrAnnotTag, annotTag);
    }
    // Another identifier or a type-continuation token next means the parsed name
    // starts a (possibly complex) type descriptor.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || isValidTypeContinuationToken(nextToken)) {
        STNode typeDesc = parseComplexTypeDescriptor(typeDescOrAnnotTag,
                ParserRuleContext.TYPE_DESC_IN_ANNOTATION_DECL, false);
        STNode annotTag = parseAnnotationTag();
        return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                annotTag);
    }
    // Otherwise the parsed identifier is the annot-tag itself.
    STNode annotTag = ((STSimpleNameReferenceNode) typeDescOrAnnotTag).name;
    return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, annotTag);
}
/**
 * Parse the component that follows the first identifier in an annotation decl. The identifier
 * can be either the type-name (a user defined type) or the annot-tag, where the type-name
 * is not present.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation decl
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDescOrAnnotTag Identifier that follows the annotation-keyword
 * @return Parsed node
 */
private STNode parseAnnotationDeclRhs(STNode metadata, STNode qualifier, STNode constKeyword,
                                      STNode annotationKeyword, STNode typeDescOrAnnotTag) {
    STToken nextToken = peek();
    STNode typeDesc;
    STNode annotTag;
    switch (nextToken.kind) {
        case IDENTIFIER_TOKEN:
            // Another identifier follows: the first one was the type, this is the tag.
            typeDesc = typeDescOrAnnotTag;
            annotTag = parseAnnotationTag();
            break;
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
            // Declaration ends or attach points start: there was no type descriptor.
            typeDesc = STNodeFactory.createEmptyNode();
            annotTag = typeDescOrAnnotTag;
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_DECL_RHS);
            return parseAnnotationDeclRhs(metadata, qualifier, constKeyword, annotationKeyword, typeDescOrAnnotTag);
    }
    return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
            annotTag);
}
/**
 * Parse the optional <code>on attach-points</code> portion of an annotation declaration
 * and assemble the complete annotation declaration node.
 *
 * @param metadata Metadata
 * @param qualifier Qualifier that precedes the annotation declaration
 * @param constKeyword Const keyword
 * @param annotationKeyword Annotation keyword
 * @param typeDesc Type descriptor (may be empty)
 * @param annotTag Annotation tag
 * @return Annotation declaration node
 */
private STNode parseAnnotationDeclAttachPoints(STNode metadata, STNode qualifier, STNode constKeyword,
                                               STNode annotationKeyword, STNode typeDesc, STNode annotTag) {
    STNode onKeyword;
    STNode attachPoints;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case SEMICOLON_TOKEN:
            // No attach points.
            onKeyword = STNodeFactory.createEmptyNode();
            attachPoints = STNodeFactory.createEmptyNodeList();
            break;
        case ON_KEYWORD:
            onKeyword = parseOnKeyword();
            attachPoints = parseAnnotationAttachPoints();
            // `on` with no attach points gets a diagnostic on the keyword.
            onKeyword = cloneWithDiagnosticIfListEmpty(attachPoints, onKeyword,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            break;
        default:
            recover(peek(), ParserRuleContext.ANNOT_OPTIONAL_ATTACH_POINTS);
            return parseAnnotationDeclAttachPoints(metadata, qualifier, constKeyword, annotationKeyword, typeDesc,
                    annotTag);
    }
    STNode semicolonToken = parseSemicolon();
    return STNodeFactory.createAnnotationDeclarationNode(metadata, qualifier, constKeyword, annotationKeyword,
            typeDesc, annotTag, onKeyword, attachPoints, semicolonToken);
}
/**
 * Parse annotation attach points.
 * <p>
 * <code>
 * annot-attach-points := annot-attach-point (, annot-attach-point)*
 * <br/><br/>
 * annot-attach-point := dual-attach-point | source-only-attach-point
 * <br/><br/>
 * dual-attach-point := [source] dual-attach-point-ident
 * <br/><br/>
 * dual-attach-point-ident :=
 * type
 * | class
 * | [object|service remote] function
 * | parameter
 * | return
 * | service
 * | [object|record] field
 * <br/><br/>
 * source-only-attach-point := source source-only-attach-point-ident
 * <br/><br/>
 * source-only-attach-point-ident :=
 * annotation
 * | external
 * | var
 * | const
 * | listener
 * | worker
 * </code>
 *
 * @return Parsed node
 */
private STNode parseAnnotationAttachPoints() {
    startContext(ParserRuleContext.ANNOT_ATTACH_POINTS_LIST);
    List<STNode> attachPoints = new ArrayList<>();
    STToken nextToken = peek();
    // List may legitimately be empty (caller attaches a diagnostic in that case).
    if (isEndAnnotAttachPointList(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    STNode attachPoint = parseAnnotationAttachPoint();
    attachPoints.add(attachPoint);
    nextToken = peek();
    STNode leadingComma;
    while (!isEndAnnotAttachPointList(nextToken.kind)) {
        leadingComma = parseAttachPointEnd();
        // null comma means the list terminator (semicolon) was reached.
        if (leadingComma == null) {
            break;
        }
        attachPoints.add(leadingComma);
        attachPoint = parseAnnotationAttachPoint();
        // null attach point means EOF: synthesize a missing `type` attach point
        // with a diagnostic, and stop.
        if (attachPoint == null) {
            STToken missingAttachPointIdent = SyntaxErrors.createMissingToken(SyntaxKind.TYPE_KEYWORD);
            STNode identList = STNodeFactory.createNodeList(missingAttachPointIdent);
            attachPoint = STNodeFactory.createAnnotationAttachPointNode(STNodeFactory.createEmptyNode(), identList);
            attachPoint = SyntaxErrors.addDiagnostic(attachPoint,
                    DiagnosticErrorCode.ERROR_MISSING_ANNOTATION_ATTACH_POINT);
            attachPoints.add(attachPoint);
            break;
        }
        attachPoints.add(attachPoint);
        nextToken = peek();
    }
    // If the last attach-point ident was recovered as missing and an identifier
    // remains on the same line, consume it as an invalid token (presumably a
    // misspelled attach-point ident).
    if (attachPoint.lastToken().isMissing() && this.tokenReader.peek().kind == SyntaxKind.IDENTIFIER_TOKEN &&
            !this.tokenReader.head().hasTrailingNewline()) {
        STToken nextNonVirtualToken = this.tokenReader.read();
        updateLastNodeInListWithInvalidNode(attachPoints, nextNonVirtualToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextNonVirtualToken.text());
    }
    endContext();
    return STNodeFactory.createNodeList(attachPoints);
}
/**
 * Parse annotation attach point end.
 *
 * @return Comma token, or {@code null} when the attach-point list has ended
 *         (a semicolon follows)
 */
private STNode parseAttachPointEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.SEMICOLON_TOKEN) {
        // End of the attach-point list.
        return null;
    }
    recover(nextToken, ParserRuleContext.ATTACH_POINT_END);
    return parseAttachPointEnd();
}
/**
 * Check whether the given token kind terminates an annotation attach-point list.
 *
 * @param tokenKind Token kind to check
 * @return <code>true</code> if the list has ended. <code>false</code> otherwise
 */
private boolean isEndAnnotAttachPointList(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse annotation attach point.
 *
 * @return Parsed node, or {@code null} at EOF
 */
private STNode parseAnnotationAttachPoint() {
    switch (peek().kind) {
        case EOF_TOKEN:
            return null;
        // Source-only attach-point idents: these require a preceding `source`
        // keyword, so parseSourceKeyword() is invoked first (it recovers when the
        // keyword is absent).
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
        case SOURCE_KEYWORD:
            STNode sourceKeyword = parseSourceKeyword();
            return parseAttachPointIdent(sourceKeyword);
        // Dual attach-point idents: `source` is optional, so the ident is consumed
        // directly with an empty source keyword.
        case OBJECT_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case RECORD_KEYWORD:
        case CLASS_KEYWORD:
            sourceKeyword = STNodeFactory.createEmptyNode();
            STNode firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT);
            return parseAnnotationAttachPoint();
    }
}
/**
 * Parse source keyword.
 *
 * @return Parsed node
 */
private STNode parseSourceKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SOURCE_KEYWORD) {
        recover(nextToken, ParserRuleContext.SOURCE_KEYWORD);
        return parseSourceKeyword();
    }
    return consume();
}
/**
 * Parse attach point ident given.
 * <p>
 * <code>
 * source-only-attach-point-ident := annotation | external | var | const | listener | worker
 * <br/><br/>
 * dual-attach-point-ident := type | class | [object|service remote] function | parameter
 * | return | service | [object|record] field
 * </code>
 *
 * @param sourceKeyword Source keyword
 * @return Parsed node
 */
private STNode parseAttachPointIdent(STNode sourceKeyword) {
    switch (peek().kind) {
        // Source-only idents: single-ident attach point.
        case ANNOTATION_KEYWORD:
        case EXTERNAL_KEYWORD:
        case VAR_KEYWORD:
        case CONST_KEYWORD:
        case LISTENER_KEYWORD:
        case WORKER_KEYWORD:
            STNode firstIdent = consume();
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        // Dual idents: may be followed by a second ident (e.g. `object function`).
        case OBJECT_KEYWORD:
        case RESOURCE_KEYWORD:
        case RECORD_KEYWORD:
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case SERVICE_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
            firstIdent = consume();
            return parseDualAttachPointIdent(sourceKeyword, firstIdent);
        default:
            recover(peek(), ParserRuleContext.ATTACH_POINT_IDENT);
            return parseAttachPointIdent(sourceKeyword);
    }
}
/**
 * Parse dual-attach-point ident.
 *
 * @param sourceKeyword Source keyword
 * @param firstIdent first part of the dual attach-point
 * @return Parsed node
 */
private STNode parseDualAttachPointIdent(STNode sourceKeyword, STNode firstIdent) {
    STNode secondIdent;
    switch (firstIdent.kind) {
        case OBJECT_KEYWORD:
            // `object function` or `object field`
            secondIdent = parseIdentAfterObjectIdent();
            break;
        case RESOURCE_KEYWORD:
            // `resource function`
            secondIdent = parseFunctionIdent();
            break;
        case RECORD_KEYWORD:
            // `record field`
            secondIdent = parseFieldIdent();
            break;
        case SERVICE_KEYWORD:
            // `service` alone, or `service remote function` — handled separately.
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
        case TYPE_KEYWORD:
        case FUNCTION_KEYWORD:
        case PARAMETER_KEYWORD:
        case RETURN_KEYWORD:
        case FIELD_KEYWORD:
        case CLASS_KEYWORD:
        default:
            // Single-ident attach point.
            STNode identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
    }
    STNode identList = STNodeFactory.createNodeList(firstIdent, secondIdent);
    return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
}
/**
 * Parse remote ident.
 *
 * @return Parsed node
 */
private STNode parseRemoteIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.REMOTE_KEYWORD) {
        recover(nextToken, ParserRuleContext.REMOTE_IDENT);
        return parseRemoteIdent();
    }
    return consume();
}
/**
 * Parse service attach point.
 * <code>service-attach-point := service | service remote function</code>
 *
 * @param sourceKeyword Source keyword (may be empty)
 * @param firstIdent Already-consumed `service` ident
 * @return Parsed node
 */
private STNode parseServiceAttachPoint(STNode sourceKeyword, STNode firstIdent) {
    STNode identList;
    STToken token = peek();
    switch (token.kind) {
        case REMOTE_KEYWORD:
            // `service remote function`
            STNode secondIdent = parseRemoteIdent();
            STNode thirdIdent = parseFunctionIdent();
            identList = STNodeFactory.createNodeList(firstIdent, secondIdent, thirdIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        case COMMA_TOKEN:
        case SEMICOLON_TOKEN:
            // `service` alone.
            identList = STNodeFactory.createNodeList(firstIdent);
            return STNodeFactory.createAnnotationAttachPointNode(sourceKeyword, identList);
        default:
            recover(token, ParserRuleContext.SERVICE_IDENT_RHS);
            return parseServiceAttachPoint(sourceKeyword, firstIdent);
    }
}
/**
 * Parse the idents that are supported after object-ident.
 *
 * @return Parsed node
 */
private STNode parseIdentAfterObjectIdent() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.FUNCTION_KEYWORD || nextToken.kind == SyntaxKind.FIELD_KEYWORD) {
        return consume();
    }
    recover(nextToken, ParserRuleContext.IDENT_AFTER_OBJECT_IDENT);
    return parseIdentAfterObjectIdent();
}
/**
 * Parse function ident.
 *
 * @return Parsed node
 */
private STNode parseFunctionIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FUNCTION_KEYWORD) {
        recover(nextToken, ParserRuleContext.FUNCTION_IDENT);
        return parseFunctionIdent();
    }
    return consume();
}
/**
 * Parse field ident.
 *
 * @return Parsed node
 */
private STNode parseFieldIdent() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FIELD_KEYWORD) {
        recover(nextToken, ParserRuleContext.FIELD_IDENT);
        return parseFieldIdent();
    }
    return consume();
}
/**
 * Parse XML namespace declaration.
 * <p>
 * <code>xmlns-decl := xmlns xml-namespace-uri [ as xml-namespace-prefix ] ;
 * <br/>
 * xml-namespace-uri := simple-const-expr
 * <br/>
 * xml-namespace-prefix := identifier
 * </code>
 *
 * @param isModuleVar Whether this is a module-level declaration
 * @return Parsed node
 */
private STNode parseXMLNamespaceDeclaration(boolean isModuleVar) {
    startContext(ParserRuleContext.XML_NAMESPACE_DECLARATION);
    STNode xmlnsKeyword = parseXMLNSKeyword();
    STNode namespaceUri = parseSimpleConstExpr();
    // Skip over invalid URI expressions, attaching each to the xmlns keyword as
    // invalid-node minutiae, until a valid URI is found.
    while (!isValidXMLNameSpaceURI(namespaceUri)) {
        xmlnsKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(xmlnsKeyword, namespaceUri,
                DiagnosticErrorCode.ERROR_INVALID_XML_NAMESPACE_URI);
        namespaceUri = parseSimpleConstExpr();
    }
    STNode xmlnsDecl = parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    endContext();
    return xmlnsDecl;
}
/**
 * Parse xmlns keyword.
 *
 * @return Parsed node
 */
private STNode parseXMLNSKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XMLNS_KEYWORD) {
        recover(nextToken, ParserRuleContext.XMLNS_KEYWORD);
        return parseXMLNSKeyword();
    }
    return consume();
}
/**
 * Check whether the given expression is valid as an XML namespace URI.
 *
 * @param expr Expression to check
 * @return <code>true</code> for a valid namespace URI. <code>false</code> otherwise
 */
private boolean isValidXMLNameSpaceURI(STNode expr) {
    SyntaxKind exprKind = expr.kind;
    return exprKind == SyntaxKind.STRING_LITERAL || exprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            exprKind == SyntaxKind.SIMPLE_NAME_REFERENCE;
}
/**
 * Parse a simple constant expression, inside a constant-expression context.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExpr() {
    startContext(ParserRuleContext.CONSTANT_EXPRESSION);
    STNode constExpr = parseSimpleConstExprInternal();
    endContext();
    return constExpr;
}
/**
 * Parse simple constants expr.
 *
 * @return Parsed node
 */
private STNode parseSimpleConstExprInternal() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case STRING_LITERAL_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case NULL_KEYWORD:
            return parseBasicLiteral();
        case PLUS_TOKEN:
        case MINUS_TOKEN:
            // Signed numeric literal.
            return parseSignedIntOrFloat();
        case OPEN_PAREN_TOKEN:
            // `( )` — nil literal.
            return parseNilLiteral();
        default:
            // Identifiers (possibly qualified) are valid const-expr references.
            if (isPredeclaredIdentifier(nextToken.kind)) {
                return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            }
            recover(nextToken, ParserRuleContext.CONSTANT_EXPRESSION_START);
            return parseSimpleConstExprInternal();
    }
}
/**
 * Parse the portion after the namespace-uri of an XML declaration.
 *
 * @param xmlnsKeyword XMLNS keyword
 * @param namespaceUri Namespace URI
 * @param isModuleVar Whether this is a module-level declaration
 * @return Parsed node
 */
private STNode parseXMLDeclRhs(STNode xmlnsKeyword, STNode namespaceUri, boolean isModuleVar) {
    STNode asKeyword = STNodeFactory.createEmptyNode();
    STNode namespacePrefix = STNodeFactory.createEmptyNode();
    switch (peek().kind) {
        case AS_KEYWORD:
            // Optional `as prefix` portion.
            asKeyword = parseAsKeyword();
            namespacePrefix = parseNamespacePrefix();
            break;
        case SEMICOLON_TOKEN:
            break;
        default:
            recover(peek(), ParserRuleContext.XML_NAMESPACE_PREFIX_DECL);
            return parseXMLDeclRhs(xmlnsKeyword, namespaceUri, isModuleVar);
    }
    STNode semicolon = parseSemicolon();
    if (isModuleVar) {
        return STNodeFactory.createModuleXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword,
                namespacePrefix, semicolon);
    }
    return STNodeFactory.createXMLNamespaceDeclarationNode(xmlnsKeyword, namespaceUri, asKeyword, namespacePrefix,
            semicolon);
}
/**
 * Parse XML namespace prefix.
 *
 * @return Parsed node
 */
private STNode parseNamespacePrefix() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.NAMESPACE_PREFIX);
        return parseNamespacePrefix();
    }
    return consume();
}
/**
 * Parse named worker declaration.
 * <p>
 * <code>named-worker-decl := [annots] [transactional] worker worker-name return-type-descriptor { sequence-stmt }
 * </code>
 *
 * @param annots Annotations attached to the worker decl
 * @param qualifiers Preceding qualifiers in a list (only `transactional` is valid here)
 * @return Parsed node
 */
private STNode parseNamedWorkerDeclaration(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.NAMED_WORKER_DECL);
    // Invalid/duplicate qualifiers are reduced to at most one transactional keyword,
    // with diagnostics attached.
    STNode transactionalKeyword = getTransactionalKeyword(qualifiers);
    STNode workerKeyword = parseWorkerKeyword();
    STNode workerName = parseWorkerName();
    STNode returnTypeDesc = parseReturnTypeDescriptor();
    STNode workerBody = parseBlockNode();
    endContext();
    return STNodeFactory.createNamedWorkerDeclarationNode(annots, transactionalKeyword, workerKeyword, workerName,
            returnTypeDesc, workerBody);
}
/**
 * Validate the qualifier list preceding a named-worker declaration and extract the
 * <code>transactional</code> keyword, if present. Duplicate or disallowed qualifiers
 * are attached to a neighboring node as invalid-node minutiae with a diagnostic.
 *
 * @param qualifierList Qualifiers preceding the worker keyword
 * @return The transactional keyword, or an empty node if not present
 */
private STNode getTransactionalKeyword(List<STNode> qualifierList) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate qualifier: attach to the previously accepted one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (qualifier.kind == SyntaxKind.TRANSACTIONAL_KEYWORD) {
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last qualifier in the list: attach to the next real token.
            addInvalidNodeToNextToken(qualifier, DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED,
                    ((STToken) qualifier).text());
        } else {
            // Attach to the qualifier that follows it.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode transactionalKeyword;
    if (validatedList.isEmpty()) {
        transactionalKeyword = STNodeFactory.createEmptyNode();
    } else {
        transactionalKeyword = validatedList.get(0);
    }
    return transactionalKeyword;
}
/**
 * Parse an optional return type descriptor.
 * <p>
 * <code>return-type-descriptor := returns [annots] type-descriptor</code>
 *
 * @return Return type descriptor node, or an empty node when there is no
 *         <code>returns</code> keyword
 */
private STNode parseReturnTypeDescriptor() {
    if (peek().kind != SyntaxKind.RETURNS_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    STNode returnsKeyword = consume();
    STNode annotations = parseOptionalAnnotations();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_RETURN_TYPE_DESC);
    return STNodeFactory.createReturnTypeDescriptorNode(returnsKeyword, annotations, typeDesc);
}
/**
 * Parse worker keyword.
 *
 * @return Parsed node
 */
private STNode parseWorkerKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WORKER_KEYWORD) {
        recover(peek(), ParserRuleContext.WORKER_KEYWORD);
        return parseWorkerKeyword();
    }
    return consume();
}
/**
 * Parse worker name.
 * <p>
 * <code>worker-name := identifier</code>
 *
 * @return Parsed node
 */
private STNode parseWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IDENTIFIER_TOKEN) {
        recover(peek(), ParserRuleContext.WORKER_NAME);
        return parseWorkerName();
    }
    return consume();
}
/**
 * Parse lock statement.
 * <code>lock-stmt := lock block-stmt [on-fail-clause]</code>
 *
 * @return Lock statement node
 */
private STNode parseLockStatement() {
    startContext(ParserRuleContext.LOCK_STMT);
    STNode lockKeyword = parseLockKeyword();
    STNode lockBody = parseBlockNode();
    endContext();
    // The optional on-fail clause is parsed outside the lock-stmt context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createLockStatementNode(lockKeyword, lockBody, onFailClause);
}
/**
 * Parse lock-keyword.
 *
 * @return lock-keyword node
 */
private STNode parseLockKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LOCK_KEYWORD) {
        recover(nextToken, ParserRuleContext.LOCK_KEYWORD);
        return parseLockKeyword();
    }
    return consume();
}
/**
 * Parse union type descriptor.
 * union-type-descriptor := type-descriptor | type-descriptor
 *
 * @param leftTypeDesc Type desc in the LHS os the union type desc.
 * @param context Current context.
 * @param isTypedBindingPattern Whether this is within a typed binding pattern
 * @return parsed union type desc node
 */
private STNode parseUnionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                        boolean isTypedBindingPattern) {
    // The next token is expected to be the pipe; it is consumed directly.
    STNode pipeToken = consume();
    // Parse the RHS at union precedence, then merge respecting precedence rules.
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.UNION);
    return mergeTypesWithUnion(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
 * Creates a union type descriptor after validating lhs and rhs types.
 * <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when types are unknown.
 * <br/>
 * Call {@link #parseUnionTypeDescriptor} instead, which handles precedence.</i>
 *
 * @param leftTypeDesc lhs type
 * @param pipeToken pipe token
 * @param rightTypeDesc rhs type
 * @return a UnionTypeDescriptorNode
 */
private STNode createUnionTypeDesc(STNode leftTypeDesc, STNode pipeToken, STNode rightTypeDesc) {
    leftTypeDesc = validateForUsageOfVar(leftTypeDesc);
    rightTypeDesc = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createUnionTypeDescriptorNode(leftTypeDesc, pipeToken, rightTypeDesc);
}
/**
 * Parse pipe token.
 *
 * @return parsed pipe token node
 */
private STNode parsePipeToken() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.PIPE_TOKEN) {
        recover(nextToken, ParserRuleContext.PIPE);
        return parsePipeToken();
    }
    return consume();
}
/**
 * Check whether the given token kind can start a type descriptor, using the
 * token-after-next for disambiguation.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> if it can start a type. <code>false</code> otherwise
 */
private boolean isTypeStartingToken(SyntaxKind nodeKind) {
    STToken nextNextToken = getNextNextToken();
    return isTypeStartingToken(nodeKind, nextNextToken);
}
/**
 * Check whether the given token can start a type descriptor.
 * <p>
 * Covers keyword-started types (record, object, map, stream, table, function, ...),
 * parenthesized/tuple starts, parameterized types, singleton type descriptors and
 * simple built-in types.
 *
 * @param nextTokenKind kind of the token being checked
 * @param nextNextToken the token after it (used only for singleton type-desc detection)
 * @return {@code true} if a type descriptor can start with {@code nextTokenKind}
 */
private static boolean isTypeStartingToken(SyntaxKind nextTokenKind, STToken nextNextToken) {
    switch (nextTokenKind) {
        case IDENTIFIER_TOKEN:
        case SERVICE_KEYWORD:
        case RECORD_KEYWORD:
        case OBJECT_KEYWORD:
        case ABSTRACT_KEYWORD:
        case CLIENT_KEYWORD:
        case OPEN_PAREN_TOKEN: // nil type descriptor '()' or parenthesised type
        case MAP_KEYWORD: // map type desc
        case STREAM_KEYWORD: // stream type desc
        case TABLE_KEYWORD: // table type
        case FUNCTION_KEYWORD:
        case OPEN_BRACKET_TOKEN: // tuple type desc
        case DISTINCT_KEYWORD:
        case ISOLATED_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case TRANSACTION_KEYWORD:
            return true;
        default:
            // Fall back to the broader categories: parameterized types (e.g. typedesc<...>),
            // singleton type descriptors (literal-started), and simple built-in types.
            if (isParameterizedTypeToken(nextTokenKind)) {
                return true;
            }
            if (isSingletonTypeDescStart(nextTokenKind, nextNextToken)) {
                return true;
            }
            return isSimpleType(nextTokenKind);
    }
}
/**
* Check if the token kind is a type descriptor in terminal expression.
* <p>
* simple-type-in-expr :=
* boolean | int | byte | float | decimal | string | handle | json | anydata | any | never
*
* @param nodeKind token kind to check
* @return <code>true</code> for simple type token in expression. <code>false</code> otherwise.
*/
private boolean isSimpleTypeInExpression(SyntaxKind nodeKind) {
    // `var` and `readonly` count as simple types elsewhere, but are not valid
    // in expression position.
    if (nodeKind == SyntaxKind.VAR_KEYWORD || nodeKind == SyntaxKind.READONLY_KEYWORD) {
        return false;
    }
    return isSimpleType(nodeKind);
}
/**
 * Check whether the given token kind is a simple (single-keyword) type.
 * <p>
 * Note: unlike {@code isSimpleTypeInExpression}, this includes {@code var} and
 * {@code readonly}.
 *
 * @param nodeKind token kind to check
 * @return {@code true} for a simple built-in type keyword, {@code false} otherwise
 */
static boolean isSimpleType(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case INT_KEYWORD:
        case FLOAT_KEYWORD:
        case DECIMAL_KEYWORD:
        case BOOLEAN_KEYWORD:
        case STRING_KEYWORD:
        case BYTE_KEYWORD:
        case JSON_KEYWORD:
        case HANDLE_KEYWORD:
        case ANY_KEYWORD:
        case ANYDATA_KEYWORD:
        case NEVER_KEYWORD:
        case VAR_KEYWORD:
        case READONLY_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Check whether the given token kind is a predeclared module prefix
 * (e.g. the {@code int} in {@code int:MAX_VALUE}).
 *
 * @param nodeKind token kind to check
 * @return {@code true} if the keyword is a predeclared prefix, {@code false} otherwise
 */
static boolean isPredeclaredPrefix(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case BOOLEAN_KEYWORD:
        case DECIMAL_KEYWORD:
        case ERROR_KEYWORD:
        case FLOAT_KEYWORD:
        case FUTURE_KEYWORD:
        case INT_KEYWORD:
        case MAP_KEYWORD:
        case OBJECT_KEYWORD:
        case STREAM_KEYWORD:
        case STRING_KEYWORD:
        case TABLE_KEYWORD:
        case TRANSACTION_KEYWORD:
        case TYPEDESC_KEYWORD:
        case XML_KEYWORD:
            return true;
        default:
            return false;
    }
}
private boolean isQualifiedIdentifierPredeclaredPrefix(SyntaxKind nodeKind) {
    // A predeclared prefix only forms a qualified identifier when followed by a colon.
    if (!isPredeclaredPrefix(nodeKind)) {
        return false;
    }
    return getNextNextToken().kind == SyntaxKind.COLON_TOKEN;
}
/**
 * Map a built-in type keyword to its corresponding type-descriptor syntax kind.
 *
 * @param typeKeyword keyword token kind; must be one of the built-in type keywords
 * @return the matching {@code *_TYPE_DESC} kind; {@code TYPE_REFERENCE} only on the
 *         (assertion-guarded) unreachable default path
 */
private static SyntaxKind getBuiltinTypeSyntaxKind(SyntaxKind typeKeyword) {
    switch (typeKeyword) {
        case INT_KEYWORD:
            return SyntaxKind.INT_TYPE_DESC;
        case FLOAT_KEYWORD:
            return SyntaxKind.FLOAT_TYPE_DESC;
        case DECIMAL_KEYWORD:
            return SyntaxKind.DECIMAL_TYPE_DESC;
        case BOOLEAN_KEYWORD:
            return SyntaxKind.BOOLEAN_TYPE_DESC;
        case STRING_KEYWORD:
            return SyntaxKind.STRING_TYPE_DESC;
        case BYTE_KEYWORD:
            return SyntaxKind.BYTE_TYPE_DESC;
        case JSON_KEYWORD:
            return SyntaxKind.JSON_TYPE_DESC;
        case HANDLE_KEYWORD:
            return SyntaxKind.HANDLE_TYPE_DESC;
        case ANY_KEYWORD:
            return SyntaxKind.ANY_TYPE_DESC;
        case ANYDATA_KEYWORD:
            return SyntaxKind.ANYDATA_TYPE_DESC;
        case NEVER_KEYWORD:
            return SyntaxKind.NEVER_TYPE_DESC;
        case VAR_KEYWORD:
            return SyntaxKind.VAR_TYPE_DESC;
        case READONLY_KEYWORD:
            return SyntaxKind.READONLY_TYPE_DESC;
        default:
            // Callers must pass a built-in type keyword; trip the assertion in debug builds.
            assert false : typeKeyword + " is not a built-in type";
            return SyntaxKind.TYPE_REFERENCE;
    }
}
/**
* Parse fork-keyword.
*
* @return Fork-keyword node
*/
private STNode parseForkKeyword() {
    // Recover and retry until a `fork` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FORK_KEYWORD) {
        recover(nextToken, ParserRuleContext.FORK_KEYWORD);
        return parseForkKeyword();
    }
    return consume();
}
/**
* Parse fork statement.
* <code>fork-stmt := fork { named-worker-decl+ }</code>
*
* @return Fork statement
*/
private STNode parseForkStatement() {
    startContext(ParserRuleContext.FORK_STMT);
    STNode forkKeyword = parseForkKeyword();
    STNode openBrace = parseOpenBrace();
    // Only named-worker declarations are permitted inside a fork block. Anything else
    // is attached to a neighbouring node as invalid minutiae with a diagnostic.
    ArrayList<STNode> workers = new ArrayList<>();
    while (!isEndOfStatements()) {
        STNode stmt = parseStatement();
        if (stmt == null) {
            break;
        }
        // validateStatement() == true means the statement was already consumed/reported.
        if (validateStatement(stmt)) {
            continue;
        }
        switch (stmt.kind) {
            case NAMED_WORKER_DECLARATION:
                workers.add(stmt);
                break;
            default:
                // Attach the invalid statement to the open brace if no worker has been
                // collected yet; otherwise to the last collected worker.
                if (workers.isEmpty()) {
                    openBrace = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBrace, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                } else {
                    updateLastNodeInListWithInvalidNode(workers, stmt,
                            DiagnosticErrorCode.ERROR_ONLY_NAMED_WORKERS_ALLOWED_HERE);
                }
        }
    }
    STNode namedWorkerDeclarations = STNodeFactory.createNodeList(workers);
    STNode closeBrace = parseCloseBrace();
    endContext();
    STNode forkStmt =
            STNodeFactory.createForkStatementNode(forkKeyword, openBrace, namedWorkerDeclarations, closeBrace);
    // A fork statement must contain at least one named worker declaration.
    if (isNodeListEmpty(namedWorkerDeclarations)) {
        return SyntaxErrors.addDiagnostic(forkStmt,
                DiagnosticErrorCode.ERROR_MISSING_NAMED_WORKER_DECLARATION_IN_FORK_STMT);
    }
    return forkStmt;
}
/**
* Parse trap expression.
* <p>
* <code>
* trap-expr := trap expression
* </code>
*
* @param allowActions Allow actions
* @param isRhsExpr Whether this is a RHS expression or not
* @return Trap expression node
*/
private STNode parseTrapExpression(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    STNode trapKeyword = parseTrapKeyword();
    STNode trappedExpr = parseExpression(OperatorPrecedence.TRAP, isRhsExpr, allowActions, isInConditionalExpr);
    // Trapping an action yields a trap-action node; trapping an expression yields
    // a trap-expression node.
    SyntaxKind nodeKind = isAction(trappedExpr) ? SyntaxKind.TRAP_ACTION : SyntaxKind.TRAP_EXPRESSION;
    return STNodeFactory.createTrapExpressionNode(nodeKind, trapKeyword, trappedExpr);
}
/**
* Parse trap-keyword.
*
* @return Trap-keyword node
*/
private STNode parseTrapKeyword() {
    // Recover and retry until a `trap` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRAP_KEYWORD) {
        recover(nextToken, ParserRuleContext.TRAP_KEYWORD);
        return parseTrapKeyword();
    }
    return consume();
}
/**
* Parse list constructor expression.
* <p>
* <code>
* list-constructor-expr := [ [ list-members ] ]
* <br/>
* list-members := list-member (, list-member)*
* <br/>
* list-member := expression | spread-member
* <br/>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseListConstructorExpr() {
    // list-constructor-expr := [ [ list-members ] ]
    startContext(ParserRuleContext.LIST_CONSTRUCTOR);
    STNode openBracketToken = parseOpenBracket();
    STNode memberList = parseListMembers();
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return STNodeFactory.createListConstructorExpressionNode(openBracketToken, memberList, closeBracketToken);
}
/**
* Parse optional list member list.
*
* @return Parsed node
*/
private STNode parseListMembers() {
    // An immediately-closing bracket means an empty member list.
    if (isEndOfListConstructor(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    // Parse the first member, then hand off to the loop that handles the rest.
    List<STNode> members = new ArrayList<>();
    members.add(parseListMember());
    return parseListMembers(members);
}
private STNode parseListMembers(List<STNode> listMembers) {
    // Alternate separator/member until the closing bracket (or EOF) is reached.
    while (!isEndOfListConstructor(peek().kind)) {
        STNode memberEnd = parseListConstructorMemberEnd();
        if (memberEnd == null) {
            break;
        }
        listMembers.add(memberEnd);
        listMembers.add(parseListMember());
    }
    return STNodeFactory.createNodeList(listMembers);
}
/**
* Parse list member.
* <p>
* <code>
* list-member := expression | spread-member
* </code>
*
* @return Parsed node
*/
private STNode parseListMember() {
    // list-member := expression | spread-member
    return peek().kind == SyntaxKind.ELLIPSIS_TOKEN ? parseSpreadMember() : parseExpression();
}
/**
* Parse spread member.
* <p>
* <code>
* spread-member := ... expression
* </code>
*
* @return Parsed node
*/
private STNode parseSpreadMember() {
    // spread-member := ... expression
    STNode ellipsisToken = parseEllipsis();
    STNode spreadExpr = parseExpression();
    return STNodeFactory.createSpreadMemberNode(ellipsisToken, spreadExpr);
}
private boolean isEndOfListConstructor(SyntaxKind tokenKind) {
    // The member list ends at the closing bracket, or at EOF during recovery.
    return tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
private STNode parseListConstructorMemberEnd() {
    // Returns the comma separator, or null when the list has ended.
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.LIST_CONSTRUCTOR_MEMBER_END);
    return parseListConstructorMemberEnd();
}
/**
* Parse foreach statement.
* <code>foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]</code>
*
* @return foreach statement
*/
private STNode parseForEachStatement() {
    // foreach-stmt := foreach typed-binding-pattern in action-or-expr block-stmt [on-fail-clause]
    startContext(ParserRuleContext.FOREACH_STMT);
    STNode foreachKeyword = parseForEachKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FOREACH_STMT);
    STNode inKeyword = parseInKeyword();
    STNode iterable = parseActionOrExpression();
    STNode body = parseBlockNode();
    endContext();
    // The optional on-fail clause is parsed outside the foreach context.
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createForEachStatementNode(foreachKeyword, bindingPattern, inKeyword, iterable,
            body, onFailClause);
}
/**
* Parse foreach-keyword.
*
* @return ForEach-keyword node
*/
private STNode parseForEachKeyword() {
    // Recover and retry until a `foreach` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FOREACH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FOREACH_KEYWORD);
        return parseForEachKeyword();
    }
    return consume();
}
/**
* Parse in-keyword.
*
* @return In-keyword node
*/
private STNode parseInKeyword() {
    // Recover and retry until an `in` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.IN_KEYWORD) {
        recover(nextToken, ParserRuleContext.IN_KEYWORD);
        return parseInKeyword();
    }
    return consume();
}
/**
* Parse type cast expression.
* <p>
* <code>
* type-cast-expr := < type-cast-param > expression
* <br/>
* type-cast-param := [annots] type-descriptor | annots
* </code>
*
* @return Parsed node
*/
private STNode parseTypeCastExpr(boolean isRhsExpr, boolean allowActions, boolean isInConditionalExpr) {
    // type-cast-expr := < type-cast-param > expression
    startContext(ParserRuleContext.TYPE_CAST);
    STNode ltToken = parseLTToken();
    return parseTypeCastExpr(ltToken, isRhsExpr, allowActions, isInConditionalExpr);
}
private STNode parseTypeCastExpr(STNode ltToken, boolean isRhsExpr, boolean allowActions,
                                 boolean isInConditionalExpr) {
    STNode castParam = parseTypeCastParam();
    STNode gtToken = parseGTToken();
    // The TYPE_CAST context ends with the closing `>`; the operand is parsed outside it.
    endContext();
    STNode operand =
            parseExpression(OperatorPrecedence.EXPRESSION_ACTION, isRhsExpr, allowActions, isInConditionalExpr);
    return STNodeFactory.createTypeCastExpressionNode(ltToken, castParam, gtToken, operand);
}
private STNode parseTypeCastParam() {
    // type-cast-param := [annots] type-descriptor | annots
    STNode annot;
    STNode type;
    if (peek().kind == SyntaxKind.AT_TOKEN) {
        annot = parseOptionalAnnotations();
        // After annotations, the type descriptor itself is optional.
        if (isTypeStartingToken(peek().kind)) {
            type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
        } else {
            type = STNodeFactory.createEmptyNode();
        }
    } else {
        // No annotations: the type descriptor is mandatory.
        annot = STNodeFactory.createEmptyNode();
        type = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    }
    return STNodeFactory.createTypeCastParamNode(getAnnotations(annot), type);
}
/**
* Parse table constructor expression.
* <p>
* <code>
* table-constructor-expr-rhs := [ [row-list] ]
* </code>
*
* @param tableKeyword tableKeyword that precedes this rhs
* @param keySpecifier keySpecifier that precedes this rhs
* @return Parsed node
*/
private STNode parseTableConstructorExprRhs(STNode tableKeyword, STNode keySpecifier) {
    // table-constructor-expr-rhs := [ [row-list] ]
    switchContext(ParserRuleContext.TABLE_CONSTRUCTOR);
    STNode openBracketToken = parseOpenBracket();
    STNode rows = parseRowList();
    STNode closeBracketToken = parseCloseBracket();
    return STNodeFactory.createTableConstructorExpressionNode(tableKeyword, keySpecifier, openBracketToken, rows,
            closeBracketToken);
}
/**
* Parse table-keyword.
*
* @return Table-keyword node
*/
private STNode parseTableKeyword() {
    // Recover and retry until a `table` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TABLE_KEYWORD) {
        recover(nextToken, ParserRuleContext.TABLE_KEYWORD);
        return parseTableKeyword();
    }
    return consume();
}
/**
* Parse table rows.
* <p>
* <code>row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]</code>
*
* @return Parsed node
*/
private STNode parseRowList() {
    // row-list := [ mapping-constructor-expr (, mapping-constructor-expr)* ]
    if (isEndOfTableRowList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> rows = new ArrayList<>();
    rows.add(parseMappingConstructorExpr());
    // Alternate separator/row until the row list ends.
    while (!isEndOfTableRowList(peek().kind)) {
        STNode rowEnd = parseTableRowEnd();
        if (rowEnd == null) {
            break;
        }
        rows.add(rowEnd);
        rows.add(parseMappingConstructorExpr());
    }
    return STNodeFactory.createNodeList(rows);
}
private boolean isEndOfTableRowList(SyntaxKind tokenKind) {
    // Definite terminators.
    if (tokenKind == SyntaxKind.EOF_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return true;
    }
    // A comma (separator) or open brace (next mapping constructor) continues the list.
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.OPEN_BRACE_TOKEN) {
        return false;
    }
    return isEndOfMappingConstructor(tokenKind);
}
private STNode parseTableRowEnd() {
    // Returns the comma separator, or null when the row list has ended.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.TABLE_ROW_END);
    return parseTableRowEnd();
}
/**
* Parse key specifier.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier() {
    // key-specifier := key ( [ field-name (, field-name)* ] )
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode keyKeywordToken = parseKeyKeyword();
    STNode openParenToken = parseOpenParenthesis();
    STNode fieldNameList = parseFieldNames();
    STNode closeParenToken = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParenToken, fieldNameList, closeParenToken);
}
/**
* Parse key-keyword.
*
* @return Key-keyword node
*/
private STNode parseKeyKeyword() {
    STToken token = peek();
    if (token.kind == SyntaxKind.KEY_KEYWORD) {
        return consume();
    }
    // `key` is a contextual keyword: the lexer may deliver it as a plain identifier,
    // in which case it is re-tokenized as a KEY_KEYWORD.
    if (isKeyKeyword(token)) {
        return getKeyKeyword(consume());
    }
    recover(token, ParserRuleContext.KEY_KEYWORD);
    return parseKeyKeyword();
}
/**
 * Check whether the given token is an identifier spelling the contextual keyword {@code key}.
 *
 * @param token token to check
 * @return {@code true} if the token is an identifier with text "key"
 */
static boolean isKeyKeyword(STToken token) {
    return token.kind == SyntaxKind.IDENTIFIER_TOKEN && LexerTerminals.KEY.equals(token.text());
}
/**
 * Re-create the given token as a {@code KEY_KEYWORD} token, preserving its
 * minutiae and diagnostics.
 *
 * @param token identifier token spelling "key"
 * @return an equivalent KEY_KEYWORD token
 */
private STNode getKeyKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.KEY_KEYWORD, token.leadingMinutiae(), token.trailingMinutiae(),
            token.diagnostics());
}
/**
 * Re-create the given token as an {@code UNDERSCORE_KEYWORD} token, preserving its
 * minutiae and diagnostics.
 *
 * @param token token to convert
 * @return an equivalent UNDERSCORE_KEYWORD token
 */
private STToken getUnderscoreKeyword(STToken token) {
    return STNodeFactory.createToken(SyntaxKind.UNDERSCORE_KEYWORD, token.leadingMinutiae(),
            token.trailingMinutiae(), token.diagnostics());
}
/**
* Parse field names.
* <p>
* <code>field-name-list := [ field-name (, field-name)* ]</code>
*
* @return Parsed node
*/
private STNode parseFieldNames() {
    // field-name-list := [ field-name (, field-name)* ]
    if (isEndOfFieldNamesList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> fieldNames = new ArrayList<>();
    fieldNames.add(parseVariableName());
    // Alternate comma/name until the list ends.
    while (!isEndOfFieldNamesList(peek().kind)) {
        fieldNames.add(parseComma());
        fieldNames.add(parseVariableName());
    }
    return STNodeFactory.createNodeList(fieldNames);
}
private boolean isEndOfFieldNamesList(SyntaxKind tokenKind) {
    // Only commas and identifiers continue a field-name list.
    return tokenKind != SyntaxKind.COMMA_TOKEN && tokenKind != SyntaxKind.IDENTIFIER_TOKEN;
}
/**
* Parse error-keyword.
*
* @return Parsed error-keyword node
*/
private STNode parseErrorKeyword() {
    // Recover and retry until an `error` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ERROR_KEYWORD) {
        recover(nextToken, ParserRuleContext.ERROR_KEYWORD);
        return parseErrorKeyword();
    }
    return consume();
}
/**
* Parse stream type descriptor.
* <p>
* stream-type-descriptor := stream [stream-type-parameters]
* <br/>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type descriptor node
*/
private STNode parseStreamTypeDescriptor(STNode streamKeywordToken) {
    // Type parameters are optional: bare `stream` is a valid type descriptor.
    STNode typeParams = peek().kind == SyntaxKind.LT_TOKEN ? parseStreamTypeParamsNode()
            : STNodeFactory.createEmptyNode();
    return STNodeFactory.createStreamTypeDescriptorNode(streamKeywordToken, typeParams);
}
/**
* Parse stream type params node.
* <p>
* stream-type-parameters := < type-descriptor [, type-descriptor]>
* </p>
*
* @return Parsed stream type params node
*/
private STNode parseStreamTypeParamsNode() {
    // stream-type-parameters := < type-descriptor [, type-descriptor] >
    STNode ltToken = parseLTToken();
    startContext(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode firstTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    STNode typeParams = parseStreamTypeParamsNode(ltToken, firstTypeDesc);
    endContext();
    return typeParams;
}
private STNode parseStreamTypeParamsNode(STNode ltToken, STNode leftTypeDescNode) {
    STNode commaToken;
    STNode rightTypeDescNode;
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        // Second type parameter present: `stream<T, C>`.
        commaToken = parseComma();
        rightTypeDescNode = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_STREAM_TYPE_DESC);
    } else if (nextTokenKind == SyntaxKind.GT_TOKEN) {
        // Single type parameter: `stream<T>`.
        commaToken = STNodeFactory.createEmptyNode();
        rightTypeDescNode = STNodeFactory.createEmptyNode();
    } else {
        recover(peek(), ParserRuleContext.STREAM_TYPE_FIRST_PARAM_RHS);
        return parseStreamTypeParamsNode(ltToken, leftTypeDescNode);
    }
    STNode gtToken = parseGTToken();
    return STNodeFactory.createStreamTypeParamsNode(ltToken, leftTypeDescNode, commaToken, rightTypeDescNode,
            gtToken);
}
/**
* Parse stream-keyword.
*
* @return Parsed stream-keyword node
*/
private STNode parseStreamKeyword() {
    // Recover and retry until a `stream` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STREAM_KEYWORD) {
        recover(nextToken, ParserRuleContext.STREAM_KEYWORD);
        return parseStreamKeyword();
    }
    return consume();
}
/**
* Parse let expression.
* <p>
* <code>
* let-expr := let let-var-decl [, let-var-decl]* in expression
* </code>
*
* @return Parsed node
*/
private STNode parseLetExpression(boolean isRhsExpr, boolean isInConditionalExpr) {
    // let-expr := let let-var-decl [, let-var-decl]* in expression
    STNode letKeyword = parseLetKeyword();
    STNode letVarDecls = parseLetVarDeclarations(ParserRuleContext.LET_EXPR_LET_VAR_DECL, isRhsExpr);
    STNode inKeyword = parseInKeyword();
    // At least one let-var declaration is required; attach a diagnostic otherwise.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    STNode inExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false, isInConditionalExpr);
    return STNodeFactory.createLetExpressionNode(letKeyword, letVarDecls, inKeyword, inExpr);
}
/**
* Parse let-keyword.
*
* @return Let-keyword node
*/
private STNode parseLetKeyword() {
    // Recover and retry until a `let` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LET_KEYWORD) {
        recover(nextToken, ParserRuleContext.LET_KEYWORD);
        return parseLetKeyword();
    }
    return consume();
}
/**
* Parse let variable declarations.
* <p>
* <code>let-var-decl-list := let-var-decl [, let-var-decl]*</code>
*
* @return Parsed node
*/
private STNode parseLetVarDeclarations(ParserRuleContext context, boolean isRhsExpr) {
    // let-var-decl-list := let-var-decl [, let-var-decl]*
    startContext(context);
    if (isEndOfLetVarDeclarations(peek().kind, getNextNextToken())) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    List<STNode> varDecls = new ArrayList<>();
    varDecls.add(parseLetVarDecl(isRhsExpr));
    // Alternate comma/declaration until the list ends.
    while (!isEndOfLetVarDeclarations(peek().kind, getNextNextToken())) {
        varDecls.add(parseComma());
        varDecls.add(parseLetVarDecl(isRhsExpr));
    }
    endContext();
    return STNodeFactory.createNodeList(varDecls);
}
static boolean isEndOfLetVarDeclarations(SyntaxKind tokenKind, STToken nextNextToken) {
    // A comma (separator) or `@` (annotation start) continues the list.
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.AT_TOKEN) {
        return false;
    }
    // `in` always terminates the let-var declarations.
    if (tokenKind == SyntaxKind.IN_KEYWORD) {
        return true;
    }
    // Otherwise the list continues only if another typed binding pattern can start here.
    return !isTypeStartingToken(tokenKind, nextNextToken);
}
/**
* Parse let variable declaration.
* <p>
* <code>let-var-decl := [annots] typed-binding-pattern = expression</code>
*
* @return Parsed node
*/
private STNode parseLetVarDecl(boolean isRhsExpr) {
    // let-var-decl := [annots] typed-binding-pattern = expression
    STNode annots = parseOptionalAnnotations();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.LET_EXPR_LET_VAR_DECL);
    STNode assignToken = parseAssignOp();
    STNode initExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, isRhsExpr, false);
    return STNodeFactory.createLetVariableDeclarationNode(annots, bindingPattern, assignToken, initExpr);
}
/**
* Parse raw backtick string template expression.
* <p>
* <code>BacktickString := `expression`</code>
*
* @return Template expression node
*/
private STNode parseTemplateExpression() {
    // BacktickString := `expression`
    // A raw template has no type keyword prefix.
    STNode type = STNodeFactory.createEmptyNode();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content = parseTemplateContent();
    // Fix: the closing backtick must be recovered in the TEMPLATE_END context, matching
    // parseStringTemplateExpression and parseXMLTemplateExpression. Using TEMPLATE_START
    // here steered error recovery for a missing closing backtick down the wrong rule.
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.RAW_TEMPLATE_EXPRESSION, type, startingBackTick,
            content, endingBackTick);
}
private STNode parseTemplateContent() {
    // Collect literal chunks and interpolations until the closing backtick (or EOF).
    List<STNode> contentItems = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        contentItems.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(contentItems);
}
private boolean isEndOfBacktickContent(SyntaxKind kind) {
    // Template content ends at the closing backtick, or at EOF during recovery.
    return kind == SyntaxKind.EOF_TOKEN || kind == SyntaxKind.BACKTICK_TOKEN;
}
private STNode parseTemplateItem() {
    // An item is either an `${...}` interpolation or a literal template token.
    return peek().kind == SyntaxKind.INTERPOLATION_START_TOKEN ? parseInterpolation() : consume();
}
/**
* Parse string template expression.
* <p>
* <code>string-template-expr := string ` expression `</code>
*
* @return String template expression node
*/
private STNode parseStringTemplateExpression() {
    // string-template-expr := string ` expression `
    STNode stringKeyword = parseStringKeyword();
    STNode openBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode templateContent = parseTemplateContent();
    STNode closeBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.STRING_TEMPLATE_EXPRESSION, stringKeyword,
            openBackTick, templateContent, closeBackTick);
}
/**
* Parse <code>string</code> keyword.
*
* @return string keyword node
*/
private STNode parseStringKeyword() {
    // Recover and retry until a `string` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.STRING_KEYWORD) {
        recover(nextToken, ParserRuleContext.STRING_KEYWORD);
        return parseStringKeyword();
    }
    return consume();
}
/**
* Parse XML template expression.
* <p>
* <code>xml-template-expr := xml BacktickString</code>
*
* @return XML template expression
*/
private STNode parseXMLTemplateExpression() {
    STNode xmlKeyword = parseXMLKeyword();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    STNode content;
    STNode endingBackTick;
    // If the opening backtick could not be recovered, do not attempt to read template
    // content at all: synthesize an empty template with both backticks missing and a
    // single missing-backtick-string diagnostic on the whole expression.
    if (startingBackTick.isMissing()) {
        startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
        content = STNodeFactory.createEmptyNodeList();
        STNode templateExpr = STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION,
                xmlKeyword, startingBackTick, content, endingBackTick);
        templateExpr = SyntaxErrors.addDiagnostic(templateExpr, DiagnosticErrorCode.ERROR_MISSING_BACKTICK_STRING);
        return templateExpr;
    }
    // Normal path: re-parse the backtick content with the XML parser.
    content = parseTemplateContentAsXML();
    endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createTemplateExpressionNode(SyntaxKind.XML_TEMPLATE_EXPRESSION, xmlKeyword,
            startingBackTick, content, endingBackTick);
}
/**
* Parse <code>xml</code> keyword.
*
* @return xml keyword node
*/
private STNode parseXMLKeyword() {
    // Recover and retry until an `xml` keyword can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.XML_KEYWORD) {
        recover(nextToken, ParserRuleContext.XML_KEYWORD);
        return parseXMLKeyword();
    }
    return consume();
}
/**
* Parse the content of the template string as XML. This method first read the
* input in the same way as the raw-backtick-template (BacktickString). Then
* it parses the content as XML.
*
* @return XML node
*/
private STNode parseTemplateContentAsXML() {
    // Phase 1: read the backtick content like a raw template, flattening literal chunks
    // into one string. Each interpolation is replaced with the placeholder "${}" in the
    // string and queued; queue order must match placeholder order, since the XML parser
    // re-associates them positionally.
    ArrayDeque<STNode> expressions = new ArrayDeque<>();
    StringBuilder xmlStringBuilder = new StringBuilder();
    STToken nextToken = peek();
    while (!isEndOfBacktickContent(nextToken.kind)) {
        STNode contentItem = parseTemplateItem();
        if (contentItem.kind == SyntaxKind.TEMPLATE_STRING) {
            xmlStringBuilder.append(((STToken) contentItem).text());
        } else {
            xmlStringBuilder.append("${}");
            expressions.add(contentItem);
        }
        nextToken = peek();
    }
    // Phase 2: lex and parse the flattened string as XML, handing over the queued
    // interpolation nodes.
    CharReader charReader = CharReader.from(xmlStringBuilder.toString());
    AbstractTokenReader tokenReader = new TokenReader(new XMLLexer(charReader));
    XMLParser xmlParser = new XMLParser(tokenReader, expressions);
    return xmlParser.parse();
}
/**
* Parse interpolation of a back-tick string.
* <p>
* <code>
* interpolation := ${ expression }
* </code>
*
* @return Interpolation node
*/
private STNode parseInterpolation() {
    startContext(ParserRuleContext.INTERPOLATION);
    STNode interpolStart = parseInterpolationStart();
    STNode expr = parseExpression();
    // Any tokens left between the expression and the closing brace are consumed and
    // attached to the expression as invalid minutiae, each with an invalid-token error.
    while (!isEndOfInterpolation()) {
        STToken nextToken = consume();
        expr = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(expr, nextToken,
                DiagnosticErrorCode.ERROR_INVALID_TOKEN, nextToken.text());
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createInterpolationNode(interpolStart, expr, closeBrace);
}
/**
 * Check whether the current token ends an `${...}` interpolation.
 * <p>
 * EOF and a backtick always end it. A `}` ends it only when the lexer is no longer in an
 * interpolation mode — i.e. the brace is the interpolation's own closing brace rather
 * than one belonging to an expression nested inside the interpolation.
 *
 * @return {@code true} if the interpolation ends at the current token
 */
private boolean isEndOfInterpolation() {
    SyntaxKind nextTokenKind = peek().kind;
    switch (nextTokenKind) {
        case EOF_TOKEN:
        case BACKTICK_TOKEN:
            return true;
        default:
            ParserMode currentLexerMode = this.tokenReader.getCurrentMode();
            return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN && currentLexerMode != ParserMode.INTERPOLATION &&
                    currentLexerMode != ParserMode.INTERPOLATION_BRACED_CONTENT;
    }
}
/**
* Parse interpolation start token.
* <p>
* <code>interpolation-start := ${</code>
*
* @return Interpolation start token
*/
private STNode parseInterpolationStart() {
    // Recover and retry until an interpolation-start token (`${`) can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.INTERPOLATION_START_TOKEN) {
        recover(nextToken, ParserRuleContext.INTERPOLATION_START_TOKEN);
        return parseInterpolationStart();
    }
    return consume();
}
/**
* Parse back-tick token.
*
* @return Back-tick token
*/
private STNode parseBacktickToken(ParserRuleContext ctx) {
    // Recover in the caller-supplied context (template start vs end) and retry
    // until a backtick can be consumed.
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BACKTICK_TOKEN) {
        recover(nextToken, ctx);
        return parseBacktickToken(ctx);
    }
    return consume();
}
/**
* Parse table type descriptor.
* <p>
* table-type-descriptor := table row-type-parameter [key-constraint]
* row-type-parameter := type-parameter
* key-constraint := key-specifier | key-type-constraint
* key-specifier := key ( [ field-name (, field-name)* ] )
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed table type desc node.
*/
private STNode parseTableTypeDescriptor(STNode tableKeywordToken) {
    // table-type-descriptor := table row-type-parameter [key-constraint]
    STNode rowTypeParam = parseRowTypeParameter();
    STNode keyConstraint;
    // `key` is contextual; it may arrive as an identifier token.
    if (isKeyKeyword(peek())) {
        STNode keyKeywordToken = getKeyKeyword(consume());
        keyConstraint = parseKeyConstraint(keyKeywordToken);
    } else {
        keyConstraint = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createTableTypeDescriptorNode(tableKeywordToken, rowTypeParam, keyConstraint);
}
/**
* Parse row type parameter node.
* <p>
* row-type-parameter := type-parameter
* </p>
*
* @return Parsed node.
*/
private STNode parseRowTypeParameter() {
    // row-type-parameter := type-parameter, parsed inside the ROW_TYPE_PARAM context.
    startContext(ParserRuleContext.ROW_TYPE_PARAM);
    STNode rowTypeParam = parseTypeParameter();
    endContext();
    return rowTypeParam;
}
/**
* Parse type parameter node.
* <p>
* type-parameter := < type-descriptor >
* </p>
*
* @return Parsed node
*/
private STNode parseTypeParameter() {
    // type-parameter := < type-descriptor >
    STNode openAngle = parseLTToken();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_ANGLE_BRACKETS);
    STNode closeAngle = parseGTToken();
    return STNodeFactory.createTypeParameterNode(openAngle, typeDesc, closeAngle);
}
/**
* Parse key constraint.
* <p>
* key-constraint := key-specifier | key-type-constraint
* </p>
*
* @return Parsed node.
*/
private STNode parseKeyConstraint(STNode keyKeywordToken) {
    // key-constraint := key-specifier | key-type-constraint, disambiguated by
    // the token following `key`: `(` vs `<`.
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
        return parseKeySpecifier(keyKeywordToken);
    }
    if (nextTokenKind == SyntaxKind.LT_TOKEN) {
        return parseKeyTypeConstraint(keyKeywordToken);
    }
    recover(peek(), ParserRuleContext.KEY_CONSTRAINTS_RHS);
    return parseKeyConstraint(keyKeywordToken);
}
/**
* Parse key specifier given parsed key keyword token.
* <p>
* <code>key-specifier := key ( [ field-name (, field-name)* ] )</code>
*
* @return Parsed node
*/
private STNode parseKeySpecifier(STNode keyKeywordToken) {
    // key-specifier := key ( [ field-name (, field-name)* ] ); `key` already consumed.
    startContext(ParserRuleContext.KEY_SPECIFIER);
    STNode openParen = parseOpenParenthesis();
    STNode fieldNameList = parseFieldNames();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createKeySpecifierNode(keyKeywordToken, openParen, fieldNameList, closeParen);
}
/**
* Parse key type constraint.
* <p>
* key-type-constraint := key type-parameter
* </p>
*
* @return Parsed node
*/
private STNode parseKeyTypeConstraint(STNode keyKeywordToken) {
    // key-type-constraint := key type-parameter; `key` already consumed.
    STNode typeParam = parseTypeParameter();
    return STNodeFactory.createKeyTypeConstraintNode(keyKeywordToken, typeParam);
}
/**
* Parse function type descriptor.
* <p>
* <code>
* function-type-descriptor := function-quals function function-signature
* <br/> | [isolated] function
* <br/>
* function-quals := (transactional | isolated)*
* </code>
*
* @param qualifiers Preceding type descriptor qualifiers
* @return Function type descriptor node
*/
private STNode parseFunctionTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC);
    STNode functionKeyword = parseFunctionKeyword();
    boolean hasFuncSignature = false;
    STNode signature = STNodeFactory.createEmptyNode();
    // The signature is parsed when one syntactically follows (open paren), or when a
    // `transactional` qualifier was seen — `transactional` is only valid with a signature.
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN ||
            isSyntaxKindInList(qualifiers, SyntaxKind.TRANSACTIONAL_KEYWORD)) {
        signature = parseFuncSignature(true);
        hasFuncSignature = true;
    }
    // Validate qualifiers against the (possibly absent) signature; invalid ones are
    // attached to the function keyword as invalid minutiae.
    STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, hasFuncSignature);
    STNode qualifierList = nodes[0];
    functionKeyword = nodes[1];
    endContext();
    return STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword, signature);
}
private STNode getLastNodeInList(List<STNode> nodeList) {
    // Convenience accessor for the final element; assumes a non-empty list.
    int lastIndex = nodeList.size() - 1;
    return nodeList.get(lastIndex);
}
/**
 * Validate the qualifier list of a function type descriptor.
 * <p>
 * Valid qualifiers are kept; duplicates and qualifiers not allowed in this position are
 * attached as invalid minutiae — to the previously validated qualifier, to the next
 * pending qualifier, or (for the last pending one) to the function keyword itself.
 *
 * @param qualifierList    raw qualifiers collected before the `function` keyword
 * @param functionKeyword  the `function` keyword (may be cloned with invalid minutiae)
 * @param hasFuncSignature whether a function signature follows (permits `transactional`)
 * @return a two-element array: [validated qualifier node-list, possibly-updated function keyword]
 */
private STNode[] createFuncTypeQualNodeList(List<STNode> qualifierList, STNode functionKeyword,
                                            boolean hasFuncSignature) {
    List<STNode> validatedList = new ArrayList<>();
    for (int i = 0; i < qualifierList.size(); i++) {
        STNode qualifier = qualifierList.get(i);
        int nextIndex = i + 1;
        if (isSyntaxKindInList(validatedList, qualifier.kind)) {
            // Duplicate qualifier: attach it to the last validated one.
            updateLastNodeInListWithInvalidNode(validatedList, qualifier,
                    DiagnosticErrorCode.ERROR_DUPLICATE_QUALIFIER, ((STToken) qualifier).text());
        } else if (hasFuncSignature && isRegularFuncQual(qualifier.kind)) {
            validatedList.add(qualifier);
        } else if (qualifier.kind == SyntaxKind.ISOLATED_KEYWORD) {
            // Without a signature, only `isolated` remains valid.
            validatedList.add(qualifier);
        } else if (qualifierList.size() == nextIndex) {
            // Last pending qualifier: attach it to the function keyword.
            functionKeyword = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(functionKeyword, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        } else {
            // Otherwise attach it to the next qualifier in the pending list.
            updateANodeInListWithLeadingInvalidNode(qualifierList, nextIndex, qualifier,
                    DiagnosticErrorCode.ERROR_QUALIFIER_NOT_ALLOWED, ((STToken) qualifier).text());
        }
    }
    STNode nodeList = STNodeFactory.createNodeList(validatedList);
    return new STNode[]{ nodeList, functionKeyword };
}
private boolean isRegularFuncQual(SyntaxKind tokenKind) {
    // Qualifiers permitted on a function type desc that has a signature.
    return tokenKind == SyntaxKind.ISOLATED_KEYWORD || tokenKind == SyntaxKind.TRANSACTIONAL_KEYWORD;
}
/**
 * Parse an explicit anonymous function expression.
 * <p>
 * <code>explicit-anonymous-function-expr :=
 * [annots] (isolated| transactional) function function-signature anon-func-body</code>
 *
 * @param annots     Annotations preceding the expression
 * @param qualifiers Qualifiers preceding the `function` keyword
 * @param isRhsExpr  Whether the expression occurs in an rhs context
 * @return Anonymous function expression node
 */
private STNode parseExplicitFunctionExpression(STNode annots, List<STNode> qualifiers, boolean isRhsExpr) {
    startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
    STNode functionKeyword = parseFunctionKeyword();
    // Validate qualifiers; invalid ones get folded into the keyword as minutiae.
    STNode[] validated = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);
    STNode qualifierNodeList = validated[0];
    functionKeyword = validated[1];
    STNode signature = parseFuncSignature(false);
    STNode body = parseAnonFuncBody(isRhsExpr);
    return STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierNodeList, functionKeyword,
            signature, body);
}
/**
 * Parse an anonymous function body.
 * <p>
 * <code>anon-func-body := block-function-body | expr-function-body</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Anon function body node
 */
private STNode parseAnonFuncBody(boolean isRhsExpr) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.OPEN_BRACE_TOKEN || nextToken.kind == SyntaxKind.EOF_TOKEN) {
        STNode blockBody = parseFunctionBodyBlock(true);
        endContext();
        return blockBody;
    }
    if (nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        endContext();
        return parseExpressionFuncBody(true, isRhsExpr);
    }
    recover(nextToken, ParserRuleContext.ANON_FUNC_BODY);
    return parseAnonFuncBody(isRhsExpr);
}
/**
 * Parse an expression function body.
 * <p>
 * <code>expr-function-body := => expression</code>
 *
 * @param isAnon    Whether this body belongs to an anonymous function
 * @param isRhsExpr Whether the expression occurs in an rhs context
 * @return Expression function body node
 */
private STNode parseExpressionFuncBody(boolean isAnon, boolean isRhsExpr) {
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    // Only a named (non-anonymous) expression-bodied function is terminated by `;`.
    STNode semiColon = isAnon ? STNodeFactory.createEmptyNode() : parseSemicolon();
    return STNodeFactory.createExpressionFunctionBodyNode(rightDoubleArrow, expression, semiColon);
}
/**
 * Parse a '=>' token, recovering if the next token is something else.
 *
 * @return Double right arrow token
 */
private STNode parseDoubleRightArrow() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        recover(nextToken, ParserRuleContext.EXPR_FUNC_BODY_START);
        return parseDoubleRightArrow();
    }
    return consume();
}
/**
 * Parse the remainder of an implicit anonymous function, given its already-parsed
 * parameter part. The parameter node may have been parsed as a different expression
 * kind and is re-interpreted here.
 *
 * @param params    Node parsed for the parameter position
 * @param isRhsExpr Whether the expression occurs in an rhs context
 * @return Implicit anonymous function expression node
 */
private STNode parseImplicitAnonFunc(STNode params, boolean isRhsExpr) {
    switch (params.kind) {
        case SIMPLE_NAME_REFERENCE:
        case INFER_PARAM_LIST:
            // Already in a valid parameter form; nothing to normalize.
            break;
        case BRACED_EXPRESSION:
            // `(x)` was parsed as a braced expression; re-interpret as a param list.
            params = getAnonFuncParam((STBracedExpressionNode) params);
            break;
        case NIL_LITERAL:
            // `()` was parsed as a nil literal; re-interpret as an empty param list.
            STNilLiteralNode nilLiteralNode = (STNilLiteralNode) params;
            params = STNodeFactory.createImplicitAnonymousFunctionParameters(nilLiteralNode.openParenToken,
                    STNodeFactory.createNodeList(new ArrayList<>()), nilLiteralNode.closeParenToken);
            break;
        default:
            // Invalid parameter shape: substitute a synthetic identifier and attach the
            // original node as invalid minutiae so no tokens are lost.
            STToken syntheticParam = STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            syntheticParam = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(syntheticParam, params,
                    DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
            params = STNodeFactory.createSimpleNameReferenceNode(syntheticParam);
    }
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode expression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createImplicitAnonymousFunctionExpressionNode(params, rightDoubleArrow, expression);
}
/**
 * Create a new anon-func-param node from a braced expression.
 * Valid only when the inner expression is a simple name reference; anything else is
 * attached to the open-paren as invalid minutiae, yielding an empty param list.
 *
 * @param bracedExpression Braced expression
 * @return Anon-func param node
 */
private STNode getAnonFuncParam(STBracedExpressionNode bracedExpression) {
    List<STNode> paramList = new ArrayList<>();
    STNode innerExpression = bracedExpression.expression;
    STNode openParen = bracedExpression.openParen;
    if (innerExpression.kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        paramList.add(innerExpression);
    } else {
        // Not a plain identifier: keep the tokens as invalid minutiae on `(`.
        openParen = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openParen, innerExpression,
                DiagnosticErrorCode.ERROR_INVALID_PARAM_LIST_IN_INFER_ANONYMOUS_FUNCTION_EXPR);
    }
    return STNodeFactory.createImplicitAnonymousFunctionParameters(openParen,
            STNodeFactory.createNodeList(paramList), bracedExpression.closeParen);
}
/**
 * Parse implicit anon function expression, continuing a parenthesized parameter list
 * whose open-paren and first parameter were already consumed.
 *
 * @param openParen  Open parenthesis token
 * @param firstParam First parameter
 * @param isRhsExpr  Is expression in rhs context
 * @return Implicit anon function expression node
 */
private STNode parseImplicitAnonFunc(STNode openParen, STNode firstParam, boolean isRhsExpr) {
    List<STNode> paramList = new ArrayList<>();
    paramList.add(firstParam);
    STToken nextToken = peek();
    STNode paramEnd;
    STNode param;
    // Alternate separator/param until `)` (or a recovery-determined end) is reached.
    while (!isEndOfAnonFuncParametersList(nextToken.kind)) {
        paramEnd = parseImplicitAnonFuncParamEnd();
        if (paramEnd == null) {
            // null signals the close-paren was found during separator parsing.
            break;
        }
        paramList.add(paramEnd);
        param = parseIdentifier(ParserRuleContext.IMPLICIT_ANON_FUNC_PARAM);
        param = STNodeFactory.createSimpleNameReferenceNode(param);
        paramList.add(param);
        nextToken = peek();
    }
    STNode params = STNodeFactory.createNodeList(paramList);
    STNode closeParen = parseCloseParenthesis();
    endContext();
    STNode inferedParams = STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
    return parseImplicitAnonFunc(inferedParams, isRhsExpr);
}
/**
 * Parse the separator after an implicit anon-func parameter.
 *
 * @return Comma token, or <code>null</code> when the close-paren ends the list
 */
private STNode parseImplicitAnonFuncParamEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.ANON_FUNC_PARAM_RHS);
    return parseImplicitAnonFuncParamEnd();
}
/**
 * Check whether a token terminates an implicit anon-func parameter list. Besides the
 * natural closers, several statement/definition starters are treated as terminators so
 * recovery does not run past an unclosed list.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if the token ends the parameter list
 */
private boolean isEndOfAnonFuncParametersList(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case SEMICOLON_TOKEN:
        case RETURNS_KEYWORD:
        case TYPE_KEYWORD:
        case LISTENER_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            return true;
        default:
            return false;
    }
}
/**
 * Parse a tuple type descriptor.
 * <p>
 * <code>tuple-type-descriptor := [ tuple-member-type-descriptors ]
 * <br/><br/>
 * tuple-member-type-descriptors := member-type-descriptor (, member-type-descriptor)* [, tuple-rest-descriptor]
 * | [ tuple-rest-descriptor ]
 * <br/><br/>
 * member-type-descriptor := [annots] type-descriptor
 * tuple-rest-descriptor := type-descriptor ...
 * </code>
 *
 * @return Tuple type descriptor node
 */
private STNode parseTupleTypeDesc() {
    STNode openBracket = parseOpenBracket();
    startContext(ParserRuleContext.TUPLE_MEMBERS);
    STNode members = parseTupleMemberTypeDescList();
    STNode closeBracket = parseCloseBracket();
    endContext();
    // An empty member list is invalid; flag the open bracket.
    openBracket = cloneWithDiagnosticIfListEmpty(members, openBracket,
            DiagnosticErrorCode.ERROR_MISSING_TYPE_DESC);
    return STNodeFactory.createTupleTypeDescriptorNode(openBracket, members, closeBracket);
}
/**
 * Parse tuple member type descriptors.
 *
 * @return Node list of tuple members (possibly empty)
 */
private STNode parseTupleMemberTypeDescList() {
    if (isEndOfTypeList(peek().kind)) {
        return STNodeFactory.createEmptyNodeList();
    }
    STNode firstMember = parseTupleMember();
    return parseTupleTypeMembers(firstMember, new ArrayList<>());
}
/**
 * Parse the remaining tuple members after the first one. A rest descriptor must be the
 * last member; any members after it are invalidated and attached as minutiae.
 *
 * @param firstMember First (already parsed) member
 * @param memberList  Accumulator for members and separators
 * @return Node list of tuple members
 */
private STNode parseTupleTypeMembers(STNode firstMember, List<STNode> memberList) {
    STNode tupleMemberRhs;
    while (!isEndOfTypeList(peek().kind)) {
        if (firstMember.kind == SyntaxKind.REST_TYPE) {
            // Rest descriptor must be last: fold everything after it into minutiae.
            firstMember = invalidateTypeDescAfterRestDesc(firstMember);
            break;
        }
        tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        memberList.add(firstMember);
        memberList.add(tupleMemberRhs);
        firstMember = parseTupleMember();
    }
    // The member currently held in `firstMember` has not been added yet.
    memberList.add(firstMember);
    return STNodeFactory.createNodeList(memberList);
}
/**
 * Parse a single tuple member: optional annotations followed by a type descriptor,
 * which may turn out to be a rest descriptor.
 *
 * @return Member or rest descriptor node
 */
private STNode parseTupleMember() {
    STNode annotations = parseOptionalAnnotations();
    STNode memberTypeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
    return createMemberOrRestNode(annotations, memberTypeDesc);
}
/**
 * Decide whether the parsed type-desc is a regular member or a rest descriptor
 * (followed by `...`). Annotations are not allowed on a rest descriptor and are
 * attached as invalid minutiae in that case.
 *
 * @param annot    Optional annotations node-list
 * @param typeDesc Parsed type descriptor
 * @return Member-type-descriptor or rest-descriptor node
 */
private STNode createMemberOrRestNode(STNode annot, STNode typeDesc) {
    STNode tupleMemberRhs = parseTypeDescInTupleRhs();
    if (tupleMemberRhs != null) {
        // `...` follows: this is a rest descriptor.
        if (!((STNodeList) annot).isEmpty()) {
            typeDesc = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(typeDesc, annot,
                    DiagnosticErrorCode.ERROR_ANNOTATIONS_NOT_ALLOWED_FOR_TUPLE_REST_DESCRIPTOR);
        }
        return STNodeFactory.createRestDescriptorNode(typeDesc, tupleMemberRhs);
    }
    return STNodeFactory.createMemberTypeDescriptorNode(annot, typeDesc);
}
/**
 * Consume and invalidate any tuple members appearing after a rest descriptor,
 * attaching their tokens to the rest descriptor as trailing invalid minutiae.
 *
 * @param restDescriptor The rest descriptor that must remain last
 * @return Rest descriptor with the extra members folded in as minutiae
 */
private STNode invalidateTypeDescAfterRestDesc(STNode restDescriptor) {
    while (!isEndOfTypeList(peek().kind)) {
        STNode tupleMemberRhs = parseTupleMemberRhs();
        if (tupleMemberRhs == null) {
            break;
        }
        // Attach the separator without a diagnostic, then the member with one.
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, tupleMemberRhs, null);
        restDescriptor = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(restDescriptor, parseTupleMember(),
                DiagnosticErrorCode.ERROR_TYPE_DESC_AFTER_REST_DESCRIPTOR);
    }
    return restDescriptor;
}
/**
 * Parse the separator after a tuple member.
 *
 * @return Comma token, or <code>null</code> when `]` ends the member list
 */
private STNode parseTupleMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.TUPLE_TYPE_MEMBER_RHS);
    return parseTupleMemberRhs();
}
/**
 * Look for a `...` after a tuple-member type-desc, marking it a rest descriptor.
 *
 * @return Ellipsis token, or <code>null</code> for a regular member
 */
private STNode parseTypeDescInTupleRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN || nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseEllipsis();
    }
    recover(nextToken, ParserRuleContext.TYPE_DESC_IN_TUPLE_RHS);
    return parseTypeDescInTupleRhs();
}
/**
 * Check whether a token terminates a type-descriptor list.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> if the token ends the type list
 */
private boolean isEndOfTypeList(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN
            || nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN
            || nextTokenKind == SyntaxKind.EOF_TOKEN
            || nextTokenKind == SyntaxKind.EQUAL_TOKEN
            || nextTokenKind == SyntaxKind.SEMICOLON_TOKEN;
}
/**
 * Parse a table constructor or query expression.
 * <p>
 * <code>
 * table-constructor-or-query-expr := table-constructor-expr | query-expr
 * <br/>
 * table-constructor-expr := table [key-specifier] [ [row-list] ]
 * <br/>
 * query-expr := [query-construct-type] query-pipeline select-clause
 * [query-construct-type] query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-construct-type := table key-specifier | stream
 * </code>
 *
 * @param isRhsExpr Whether the expression occurs in an rhs context
 * @return Table constructor or query expression node
 */
private STNode parseTableConstructorOrQuery(boolean isRhsExpr) {
    startContext(ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_EXPRESSION);
    STNode result = parseTableConstructorOrQueryInternal(isRhsExpr);
    endContext();
    return result;
}
/**
 * Dispatch on the leading token to decide between a plain query expression, a
 * stream-typed query, or a table constructor/table-typed query.
 *
 * @param isRhsExpr Whether the expression occurs in an rhs context
 * @return Table constructor or query expression node
 */
private STNode parseTableConstructorOrQueryInternal(boolean isRhsExpr) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.FROM_KEYWORD) {
        // No construct type: plain query expression.
        return parseQueryExprRhs(STNodeFactory.createEmptyNode(), isRhsExpr);
    }
    if (nextToken.kind == SyntaxKind.STREAM_KEYWORD) {
        return parseQueryExprRhs(parseQueryConstructType(parseStreamKeyword(), null), isRhsExpr);
    }
    if (nextToken.kind == SyntaxKind.TABLE_KEYWORD) {
        return parseTableConstructorOrQuery(parseTableKeyword(), isRhsExpr);
    }
    recover(nextToken, ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_START);
    return parseTableConstructorOrQueryInternal(isRhsExpr);
}
/**
 * Continue after a `table` keyword: either a table constructor (`[` next, no key
 * specifier) or a key specifier followed by a constructor or query. `key` may arrive
 * either as its own keyword or as a plain identifier (soft keyword).
 *
 * @param tableKeyword Already-consumed `table` keyword
 * @param isRhsExpr    Whether the expression occurs in an rhs context
 * @return Table constructor or query expression node
 */
private STNode parseTableConstructorOrQuery(STNode tableKeyword, boolean isRhsExpr) {
    STNode keySpecifier;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // `table [` — constructor without a key specifier.
            keySpecifier = STNodeFactory.createEmptyNode();
            return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
        case KEY_KEYWORD:
            keySpecifier = parseKeySpecifier();
            return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
        case IDENTIFIER_TOKEN:
            // `key` can be lexed as an identifier; check its text.
            if (isKeyKeyword(nextToken)) {
                keySpecifier = parseKeySpecifier();
                return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
            }
            break;
        default:
            break;
    }
    // Fall-through: nothing matched, recover and retry.
    recover(peek(), ParserRuleContext.TABLE_KEYWORD_RHS);
    return parseTableConstructorOrQuery(tableKeyword, isRhsExpr);
}
/**
 * After `table key-specifier`, decide between a query expression (`from` next)
 * and a table constructor (`[` next).
 *
 * @param tableKeyword `table` keyword
 * @param keySpecifier Parsed key specifier
 * @param isRhsExpr    Whether the expression occurs in an rhs context
 * @return Query expression or table constructor node
 */
private STNode parseTableConstructorOrQueryRhs(STNode tableKeyword, STNode keySpecifier, boolean isRhsExpr) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.FROM_KEYWORD) {
        STNode constructType = parseQueryConstructType(tableKeyword, keySpecifier);
        return parseQueryExprRhs(constructType, isRhsExpr);
    }
    if (nextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN) {
        return parseTableConstructorExprRhs(tableKeyword, keySpecifier);
    }
    recover(nextToken, ParserRuleContext.TABLE_CONSTRUCTOR_OR_QUERY_RHS);
    return parseTableConstructorOrQueryRhs(tableKeyword, keySpecifier, isRhsExpr);
}
/**
 * Parse query construct type.
 * <p>
 * <code>query-construct-type := table key-specifier | stream</code>
 *
 * @param keyword      `table` or `stream` keyword
 * @param keySpecifier Key specifier for `table`; may be <code>null</code> for `stream`
 * @return Query construct type node
 */
private STNode parseQueryConstructType(STNode keyword, STNode keySpecifier) {
    return STNodeFactory.createQueryConstructTypeNode(keyword, keySpecifier);
}
/**
 * Parse query action or expression.
 * <p>
 * <code>
 * query-expr-rhs := query-pipeline select-clause
 * query-pipeline select-clause on-conflict-clause?
 * <br/>
 * query-pipeline := from-clause intermediate-clause*
 * </code>
 *
 * @param queryConstructType queryConstructType that precedes this rhs
 * @param isRhsExpr          Whether the expression occurs in an rhs context
 * @return Query expression or query action node
 */
private STNode parseQueryExprRhs(STNode queryConstructType, boolean isRhsExpr) {
    switchContext(ParserRuleContext.QUERY_EXPRESSION);
    STNode fromClause = parseFromClause(isRhsExpr);
    List<STNode> clauses = new ArrayList<>();
    STNode intermediateClause;
    STNode selectClause = null;
    while (!isEndOfIntermediateClause(peek().kind)) {
        intermediateClause = parseIntermediateClause(isRhsExpr);
        if (intermediateClause == null) {
            break;
        }
        // Once a select clause is seen, any further clause is invalid and gets
        // attached to the select clause as invalid minutiae.
        if (selectClause != null) {
            selectClause = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(selectClause, intermediateClause,
                    DiagnosticErrorCode.ERROR_MORE_CLAUSES_AFTER_SELECT_CLAUSE);
            continue;
        }
        if (intermediateClause.kind != SyntaxKind.SELECT_CLAUSE) {
            clauses.add(intermediateClause);
            continue;
        }
        selectClause = intermediateClause;
        // In a nested query, or when no further query clause can start, stop at select.
        if (isNestedQueryExpr() || !isValidIntermediateQueryStart(peek().kind)) {
            break;
        }
    }
    if (peek().kind == SyntaxKind.DO_KEYWORD) {
        // `do` after the pipeline makes this a query action instead of an expression.
        STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
        STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
        return parseQueryAction(queryConstructType, queryPipeline, selectClause);
    }
    if (selectClause == null) {
        // Select is mandatory: synthesize a missing one and attach the diagnostic to
        // the last clause (or the from-clause when there are none).
        STNode selectKeyword = SyntaxErrors.createMissingToken(SyntaxKind.SELECT_KEYWORD);
        STNode expr = STNodeFactory
                .createSimpleNameReferenceNode(SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
        selectClause = STNodeFactory.createSelectClauseNode(selectKeyword, expr);
        if (clauses.isEmpty()) {
            fromClause = SyntaxErrors.addDiagnostic(fromClause, DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
        } else {
            int lastIndex = clauses.size() - 1;
            STNode intClauseWithDiagnostic = SyntaxErrors.addDiagnostic(clauses.get(lastIndex),
                    DiagnosticErrorCode.ERROR_MISSING_SELECT_CLAUSE);
            clauses.set(lastIndex, intClauseWithDiagnostic);
        }
    }
    STNode intermediateClauses = STNodeFactory.createNodeList(clauses);
    STNode queryPipeline = STNodeFactory.createQueryPipelineNode(fromClause, intermediateClauses);
    STNode onConflictClause = parseOnConflictClause(isRhsExpr);
    return STNodeFactory.createQueryExpressionNode(queryConstructType, queryPipeline, selectClause,
            onConflictClause);
}
/**
 * Check whether the query-expr currently being parsed is nested inside another one.
 *
 * @return <code>true</code> if currently parsing a nested query-expr, <code>false</code> otherwise
 */
private boolean isNestedQueryExpr() {
    int queryCtxCount =
            Collections.frequency(this.errorHandler.getContextStack(), ParserRuleContext.QUERY_EXPRESSION);
    return queryCtxCount > 1;
}
/**
 * Check whether a token can begin an intermediate query clause (or a clause-internal
 * keyword such as `by`/`ascending`/`descending` that implies another clause follows).
 *
 * @param syntaxKind Kind of the next token
 * @return <code>true</code> if a further query clause may start here
 */
private boolean isValidIntermediateQueryStart(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case FROM_KEYWORD:
        case WHERE_KEYWORD:
        case LET_KEYWORD:
        case SELECT_KEYWORD:
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
        case LIMIT_KEYWORD:
            return true;
        default:
            return false;
    }
}
/**
 * Parse an intermediate clause.
 * <p>
 * <code>
 * intermediate-clause := from-clause | where-clause | let-clause | join-clause | limit-clause | order-by-clause
 * </code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Parsed clause node, or <code>null</code> when the pipeline has ended
 */
private STNode parseIntermediateClause(boolean isRhsExpr) {
    switch (peek().kind) {
        case FROM_KEYWORD:
            return parseFromClause(isRhsExpr);
        case WHERE_KEYWORD:
            return parseWhereClause(isRhsExpr);
        case LET_KEYWORD:
            return parseLetClause(isRhsExpr);
        case SELECT_KEYWORD:
            return parseSelectClause(isRhsExpr);
        case JOIN_KEYWORD:
        case OUTER_KEYWORD:
            return parseJoinClause(isRhsExpr);
        case ORDER_KEYWORD:
        case BY_KEYWORD:
        case ASCENDING_KEYWORD:
        case DESCENDING_KEYWORD:
            // Any order-by-related keyword starts (or recovers into) an order-by clause.
            return parseOrderByClause(isRhsExpr);
        case LIMIT_KEYWORD:
            return parseLimitClause(isRhsExpr);
        case DO_KEYWORD:
        case SEMICOLON_TOKEN:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            // These terminate the pipeline; the caller handles what follows.
            return null;
        default:
            recover(peek(), ParserRuleContext.QUERY_PIPELINE_RHS);
            return parseIntermediateClause(isRhsExpr);
    }
}
/**
 * Parse the `join` keyword, recovering if the next token is something else.
 *
 * @return Join-keyword node
 */
private STNode parseJoinKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.JOIN_KEYWORD) {
        recover(nextToken, ParserRuleContext.JOIN_KEYWORD);
        return parseJoinKeyword();
    }
    return consume();
}
/**
 * Parse the `equals` keyword, recovering if the next token is something else.
 *
 * @return Equals-keyword node
 */
private STNode parseEqualsKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.EQUALS_KEYWORD) {
        recover(nextToken, ParserRuleContext.EQUALS_KEYWORD);
        return parseEqualsKeyword();
    }
    return consume();
}
/**
 * Check whether a token terminates the intermediate-clause loop of a query pipeline.
 * Covers closers, statement/definition starters, query-action/on-conflict starters,
 * and falls back to expression-rhs starters.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if the token ends the clause list
 */
private boolean isEndOfIntermediateClause(SyntaxKind tokenKind) {
    switch (tokenKind) {
        case CLOSE_BRACE_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case SEMICOLON_TOKEN:
        case PUBLIC_KEYWORD:
        case FUNCTION_KEYWORD:
        case EOF_TOKEN:
        case RESOURCE_KEYWORD:
        case LISTENER_KEYWORD:
        case DOCUMENTATION_STRING:
        case PRIVATE_KEYWORD:
        case RETURNS_KEYWORD:
        case SERVICE_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
        case FINAL_KEYWORD:
        case DO_KEYWORD:
        case ON_KEYWORD:
        case CONFLICT_KEYWORD:
            return true;
        default:
            // e.g. a binary operator following the query also ends the pipeline.
            return isValidExprRhsStart(tokenKind, SyntaxKind.NONE);
    }
}
/**
 * Parse a from clause.
 * <p>
 * <code>from-clause := from typed-binding-pattern in expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return From clause node
 */
private STNode parseFromClause(boolean isRhsExpr) {
    STNode fromKeyword = parseFromKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.FROM_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createFromClauseNode(fromKeyword, bindingPattern, inKeyword, collectionExpr);
}
/**
 * Parse the `from` keyword, recovering if the next token is something else.
 *
 * @return From-keyword node
 */
private STNode parseFromKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FROM_KEYWORD) {
        recover(nextToken, ParserRuleContext.FROM_KEYWORD);
        return parseFromKeyword();
    }
    return consume();
}
/**
 * Parse a where clause.
 * <p>
 * <code>where-clause := where expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Where clause node
 */
private STNode parseWhereClause(boolean isRhsExpr) {
    STNode whereKeyword = parseWhereKeyword();
    STNode condition = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createWhereClauseNode(whereKeyword, condition);
}
/**
 * Parse the `where` keyword, recovering if the next token is something else.
 *
 * @return Where-keyword node
 */
private STNode parseWhereKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.WHERE_KEYWORD) {
        recover(nextToken, ParserRuleContext.WHERE_KEYWORD);
        return parseWhereKeyword();
    }
    return consume();
}
/**
 * Parse the `limit` keyword, recovering if the next token is something else.
 *
 * @return Limit-keyword node
 */
private STNode parseLimitKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.LIMIT_KEYWORD) {
        recover(nextToken, ParserRuleContext.LIMIT_KEYWORD);
        return parseLimitKeyword();
    }
    return consume();
}
/**
 * Parse a let clause.
 * <p>
 * <code>let-clause := let let-var-decl [, let-var-decl]* </code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Let clause node
 */
private STNode parseLetClause(boolean isRhsExpr) {
    STNode letKeyword = parseLetKeyword();
    STNode letVarDecls = parseLetVarDeclarations(ParserRuleContext.LET_CLAUSE_LET_VAR_DECL, isRhsExpr);
    // At least one declaration is required; otherwise flag the keyword itself.
    letKeyword = cloneWithDiagnosticIfListEmpty(letVarDecls, letKeyword,
            DiagnosticErrorCode.ERROR_MISSING_LET_VARIABLE_DECLARATION);
    return STNodeFactory.createLetClauseNode(letKeyword, letVarDecls);
}
/**
 * Parse the `order` keyword, recovering if the next token is something else.
 *
 * @return Order-keyword node
 */
private STNode parseOrderKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ORDER_KEYWORD) {
        recover(nextToken, ParserRuleContext.ORDER_KEYWORD);
        return parseOrderKeyword();
    }
    return consume();
}
/**
 * Parse the `by` keyword, recovering if the next token is something else.
 *
 * @return By-keyword node
 */
private STNode parseByKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BY_KEYWORD) {
        recover(nextToken, ParserRuleContext.BY_KEYWORD);
        return parseByKeyword();
    }
    return consume();
}
/**
 * Parse an order-by clause.
 * <p>
 * <code>order-by-clause := order by order-key-list</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Order-by clause node
 */
private STNode parseOrderByClause(boolean isRhsExpr) {
    STNode orderKeyword = parseOrderKeyword();
    STNode byKeyword = parseByKeyword();
    STNode orderKeys = parseOrderKeyList(isRhsExpr);
    // At least one order key is required; otherwise flag the `by` keyword.
    byKeyword = cloneWithDiagnosticIfListEmpty(orderKeys, byKeyword, DiagnosticErrorCode.ERROR_MISSING_ORDER_KEY);
    return STNodeFactory.createOrderByClauseNode(orderKeyword, byKeyword, orderKeys);
}
/**
 * Parse order key.
 * <p>
 * <code>order-key-list := order-key [, order-key]*</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Node list of order keys and separators (possibly empty)
 */
private STNode parseOrderKeyList(boolean isRhsExpr) {
    startContext(ParserRuleContext.ORDER_KEY_LIST);
    List<STNode> orderKeys = new ArrayList<>();
    STToken nextToken = peek();
    if (isEndOfOrderKeys(nextToken.kind)) {
        endContext();
        return STNodeFactory.createEmptyNodeList();
    }
    STNode orderKey = parseOrderKey(isRhsExpr);
    orderKeys.add(orderKey);
    nextToken = peek();
    STNode orderKeyListMemberEnd;
    // Alternate separator/key until a clause-ending token is reached.
    while (!isEndOfOrderKeys(nextToken.kind)) {
        orderKeyListMemberEnd = parseOrderKeyListMemberEnd();
        if (orderKeyListMemberEnd == null) {
            // null signals the list ended during separator parsing.
            break;
        }
        orderKeys.add(orderKeyListMemberEnd);
        orderKey = parseOrderKey(isRhsExpr);
        orderKeys.add(orderKey);
        nextToken = peek();
    }
    endContext();
    return STNodeFactory.createNodeList(orderKeys);
}
/**
 * Check whether a token terminates an order-key list.
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if the token ends the order-key list
 */
private boolean isEndOfOrderKeys(SyntaxKind tokenKind) {
    // Separators and direction keywords keep the list going.
    if (tokenKind == SyntaxKind.COMMA_TOKEN || tokenKind == SyntaxKind.ASCENDING_KEYWORD
            || tokenKind == SyntaxKind.DESCENDING_KEYWORD) {
        return false;
    }
    if (tokenKind == SyntaxKind.SEMICOLON_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN) {
        return true;
    }
    // The start of the next query clause also ends the list.
    return isQueryClauseStartToken(tokenKind);
}
/**
 * Check whether a token can begin a query clause (including `do` and `outer`).
 *
 * @param tokenKind Kind of the next token
 * @return <code>true</code> if a query clause may start with this token
 */
private boolean isQueryClauseStartToken(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.SELECT_KEYWORD
            || tokenKind == SyntaxKind.LET_KEYWORD
            || tokenKind == SyntaxKind.WHERE_KEYWORD
            || tokenKind == SyntaxKind.OUTER_KEYWORD
            || tokenKind == SyntaxKind.JOIN_KEYWORD
            || tokenKind == SyntaxKind.ORDER_KEYWORD
            || tokenKind == SyntaxKind.DO_KEYWORD
            || tokenKind == SyntaxKind.FROM_KEYWORD
            || tokenKind == SyntaxKind.LIMIT_KEYWORD;
}
/**
 * Parse the separator after an order key.
 *
 * @return Comma token, or <code>null</code> when the list has ended
 */
private STNode parseOrderKeyListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    // EOF or the start of the next query clause ends the list.
    if (nextToken.kind == SyntaxKind.EOF_TOKEN || isQueryClauseStartToken(nextToken.kind)) {
        return null;
    }
    recover(peek(), ParserRuleContext.ORDER_KEY_LIST_END);
    return parseOrderKeyListMemberEnd();
}
/**
 * Parse a single order key.
 * <p>
 * <code>order-key := expression (ascending | descending)?</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Order key node
 */
private STNode parseOrderKey(boolean isRhsExpr) {
    STNode keyExpression = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STToken nextToken = peek();
    STNode orderDirection;
    if (nextToken.kind == SyntaxKind.ASCENDING_KEYWORD || nextToken.kind == SyntaxKind.DESCENDING_KEYWORD) {
        orderDirection = consume();
    } else {
        // Direction is optional; default is represented by an empty node.
        orderDirection = STNodeFactory.createEmptyNode();
    }
    return STNodeFactory.createOrderKeyNode(keyExpression, orderDirection);
}
/**
 * Parse a select clause.
 * <p>
 * <code>select-clause := select expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Select clause node
 */
private STNode parseSelectClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.SELECT_CLAUSE);
    STNode selectKeyword = parseSelectKeyword();
    STNode selectExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    return STNodeFactory.createSelectClauseNode(selectKeyword, selectExpr);
}
/**
 * Parse the `select` keyword, recovering if the next token is something else.
 *
 * @return Select-keyword node
 */
private STNode parseSelectKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.SELECT_KEYWORD) {
        recover(nextToken, ParserRuleContext.SELECT_KEYWORD);
        return parseSelectKeyword();
    }
    return consume();
}
/**
 * Parse an optional on-conflict clause.
 * <p>
 * <code>onConflictClause := on conflict expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return On-conflict clause node, or an empty node when absent
 */
private STNode parseOnConflictClause(boolean isRhsExpr) {
    STToken nextToken = peek();
    // The clause is optional: only parse when `on` or `conflict` is next.
    if (nextToken.kind != SyntaxKind.ON_KEYWORD && nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        return STNodeFactory.createEmptyNode();
    }
    startContext(ParserRuleContext.ON_CONFLICT_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode conflictKeyword = parseConflictKeyword();
    endContext();
    STNode conflictExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnConflictClauseNode(onKeyword, conflictKeyword, conflictExpr);
}
/**
 * Parse the `conflict` keyword, recovering if the next token is something else.
 *
 * @return Conflict-keyword node
 */
private STNode parseConflictKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.CONFLICT_KEYWORD) {
        recover(nextToken, ParserRuleContext.CONFLICT_KEYWORD);
        return parseConflictKeyword();
    }
    return consume();
}
/**
 * Parse a limit clause.
 * <p>
 * <code>limitClause := limit expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Limit clause node
 */
private STNode parseLimitClause(boolean isRhsExpr) {
    STNode limitKeyword = parseLimitKeyword();
    STNode limitExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createLimitClauseNode(limitKeyword, limitExpr);
}
/**
 * Parse a join clause.
 * <p>
 * <code>
 * join-clause := (join-var-decl | outer-join-var-decl) in expression on-clause
 * <br/>
 * join-var-decl := join (typeName | var) bindingPattern
 * <br/>
 * outer-join-var-decl := outer join var binding-pattern
 * </code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return Join clause node
 */
private STNode parseJoinClause(boolean isRhsExpr) {
    startContext(ParserRuleContext.JOIN_CLAUSE);
    // `outer` is optional; an empty node marks its absence.
    STNode outerKeyword =
            peek().kind == SyntaxKind.OUTER_KEYWORD ? consume() : STNodeFactory.createEmptyNode();
    STNode joinKeyword = parseJoinKeyword();
    STNode bindingPattern = parseTypedBindingPattern(ParserRuleContext.JOIN_CLAUSE);
    STNode inKeyword = parseInKeyword();
    STNode collectionExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    endContext();
    STNode onCondition = parseOnClause(isRhsExpr);
    return STNodeFactory.createJoinClauseNode(outerKeyword, joinKeyword, bindingPattern, inKeyword, collectionExpr,
            onCondition);
}
/**
 * Parse the on clause of a join.
 * <p>
 * <code>on clause := `on` expression `equals` expression</code>
 *
 * @param isRhsExpr Whether the enclosing expression is in an rhs context
 * @return On clause node
 */
private STNode parseOnClause(boolean isRhsExpr) {
    // If the next query clause already starts here, the on-clause is missing.
    if (isQueryClauseStartToken(peek().kind)) {
        return createMissingOnClauseNode();
    }
    startContext(ParserRuleContext.ON_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode lhsExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    STNode equalsKeyword = parseEqualsKeyword();
    endContext();
    STNode rhsExpr = parseExpression(OperatorPrecedence.QUERY, isRhsExpr, false);
    return STNodeFactory.createOnClauseNode(onKeyword, lhsExpr, equalsKeyword, rhsExpr);
}
/**
 * Build an on-clause made entirely of missing tokens, used when a join clause has no
 * `on` condition. Both sides reference the same missing identifier token.
 *
 * @return Synthetic on clause node carrying missing-token diagnostics
 */
private STNode createMissingOnClauseNode() {
    STNode missingOn = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ON_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_ON_KEYWORD);
    STNode missingIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
    STNode missingEquals = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.EQUALS_KEYWORD,
            DiagnosticErrorCode.ERROR_MISSING_EQUALS_KEYWORD);
    STNode lhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    STNode rhs = STNodeFactory.createSimpleNameReferenceNode(missingIdentifier);
    return STNodeFactory.createOnClauseNode(missingOn, lhs, missingEquals, rhs);
}
/**
 * Parse start action.
 * <p>
 * <code>start-action := [annots] start (function-call-expr|method-call-expr|remote-method-call-action)</code>
 * The expression after `start` is normalized into one of the allowed call forms;
 * anything else is replaced by a synthetic call with the original tokens kept as
 * invalid minutiae.
 *
 * @param annots Annotations preceding the action
 * @return Start action node
 */
private STNode parseStartAction(STNode annots) {
    STNode startKeyword = parseStartKeyword();
    STNode expr = parseActionOrExpression();
    switch (expr.kind) {
        case FUNCTION_CALL:
        case METHOD_CALL:
        case REMOTE_METHOD_CALL_ACTION:
            // Already a valid start-action target.
            break;
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case FIELD_ACCESS:
        case ASYNC_SEND_ACTION:
            // Reshape into a call by appending missing parens.
            expr = generateValidExprForStartAction(expr);
            break;
        default:
            // Invalid target: keep its tokens on the `start` keyword and substitute a
            // fully synthetic function call.
            startKeyword = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startKeyword, expr,
                    DiagnosticErrorCode.ERROR_INVALID_EXPRESSION_IN_START_ACTION);
            STNode funcName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
            funcName = STNodeFactory.createSimpleNameReferenceNode(funcName);
            STNode openParenToken = SyntaxErrors.createMissingToken(SyntaxKind.OPEN_PAREN_TOKEN);
            STNode closeParenToken = SyntaxErrors.createMissingToken(SyntaxKind.CLOSE_PAREN_TOKEN);
            expr = STNodeFactory.createFunctionCallExpressionNode(funcName, openParenToken,
                    STNodeFactory.createEmptyNodeList(), closeParenToken);
            break;
    }
    return STNodeFactory.createStartActionNode(getAnnotations(annots), startKeyword, expr);
}
/**
 * Turn a non-call expression following `start` into a call form by appending missing
 * `(` and `)` tokens (each carrying a missing-token diagnostic) and an empty arg list.
 *
 * @param expr Name reference, field access, or async-send action
 * @return Method call, remote method call action, or function call node
 */
private STNode generateValidExprForStartAction(STNode expr) {
    STNode openParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.OPEN_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_OPEN_PAREN_TOKEN);
    STNode arguments = STNodeFactory.createEmptyNodeList();
    STNode closeParenToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.CLOSE_PAREN_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_CLOSE_PAREN_TOKEN);
    switch (expr.kind) {
        case FIELD_ACCESS:
            // `a.b` becomes the method call `a.b()`.
            STFieldAccessExpressionNode fieldAccessExpr = (STFieldAccessExpressionNode) expr;
            return STNodeFactory.createMethodCallExpressionNode(fieldAccessExpr.expression,
                    fieldAccessExpr.dotToken, fieldAccessExpr.fieldName, openParenToken, arguments,
                    closeParenToken);
        case ASYNC_SEND_ACTION:
            // `a->w` becomes the remote call `a->w()`.
            STAsyncSendActionNode asyncSendAction = (STAsyncSendActionNode) expr;
            return STNodeFactory.createRemoteMethodCallActionNode(asyncSendAction.expression,
                    asyncSendAction.rightArrowToken, asyncSendAction.peerWorker, openParenToken, arguments,
                    closeParenToken);
        default:
            // Name references become the function call `name()`.
            return STNodeFactory.createFunctionCallExpressionNode(expr, openParenToken, arguments, closeParenToken);
    }
}
/**
 * Parse the `start` keyword, recovering if the next token is something else.
 *
 * @return Start-keyword node
 */
private STNode parseStartKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.START_KEYWORD) {
        recover(nextToken, ParserRuleContext.START_KEYWORD);
        return parseStartKeyword();
    }
    return consume();
}
/**
 * Parse a flush action.
 * <p>
 * <code>flush-action := flush [peer-worker]</code>
 *
 * @return Flush action node
 */
private STNode parseFlushAction() {
    STNode flushKeyword = parseFlushKeyword();
    STNode peerWorkerName = parseOptionalPeerWorkerName();
    return STNodeFactory.createFlushActionNode(flushKeyword, peerWorkerName);
}
/**
 * Parse the `flush` keyword, recovering if the next token is something else.
 *
 * @return Flush-keyword node
 */
private STNode parseFlushKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.FLUSH_KEYWORD) {
        recover(nextToken, ParserRuleContext.FLUSH_KEYWORD);
        return parseFlushKeyword();
    }
    return consume();
}
/**
 * Parse an optional peer worker name.
 * <p>
 * <code>peer-worker := worker-name | function</code>
 *
 * @return Peer worker name reference, or an empty node when absent
 */
private STNode parseOptionalPeerWorkerName() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN || nextToken.kind == SyntaxKind.FUNCTION_KEYWORD) {
        return STNodeFactory.createSimpleNameReferenceNode(consume());
    }
    return STNodeFactory.createEmptyNode();
}
/**
 * Parse an intersection type descriptor.
 * <p>
 * intersection-type-descriptor := type-descriptor & type-descriptor
 * </p>
 *
 * @param leftTypeDesc          Already-parsed lhs type
 * @param context               Current type-desc parsing context
 * @param isTypedBindingPattern Whether parsing inside a typed binding pattern
 * @return Intersection type descriptor node
 */
private STNode parseIntersectionTypeDescriptor(STNode leftTypeDesc, ParserRuleContext context,
                                               boolean isTypedBindingPattern) {
    // The caller has already verified the next token is `&`.
    STNode ampersandToken = consume();
    STNode rightTypeDesc = parseTypeDescriptorInternal(new ArrayList<>(), context, isTypedBindingPattern, false,
            TypePrecedence.INTERSECTION);
    return mergeTypesWithIntersection(leftTypeDesc, ampersandToken, rightTypeDesc);
}
/**
 * Creates an intersection type descriptor after validating lhs and rhs types
 * (neither side may be `var`).
 * <p>
 * <i>Note: Since type precedence and associativity are not taken into account here,
 * this method should not be called directly when types are unknown.</i>
 *
 * @param leftTypeDesc    lhs type
 * @param bitwiseAndToken bitwise-and token
 * @param rightTypeDesc   rhs type
 * @return an IntersectionTypeDescriptorNode
 */
private STNode createIntersectionTypeDesc(STNode leftTypeDesc, STNode bitwiseAndToken, STNode rightTypeDesc) {
    STNode validatedLhs = validateForUsageOfVar(leftTypeDesc);
    STNode validatedRhs = validateForUsageOfVar(rightTypeDesc);
    return STNodeFactory.createIntersectionTypeDescriptorNode(validatedLhs, bitwiseAndToken, validatedRhs);
}
/**
* Parse singleton type descriptor.
* <p>
* singleton-type-descriptor := simple-const-expr
* simple-const-expr :=
* nil-literal
* | boolean-literal
* | [Sign] int-literal
* | [Sign] floating-point-literal
* | string-literal
* | constant-reference-expr
* </p>
*/
private STNode parseSingletonTypeDesc() {
STNode simpleContExpr = parseSimpleConstExpr();
return STNodeFactory.createSingletonTypeDescriptorNode(simpleContExpr);
}
private STNode parseSignedIntOrFloat() {
STNode operator = parseUnaryOperator();
STNode literal;
STToken nextToken = peek();
switch (nextToken.kind) {
case HEX_INTEGER_LITERAL_TOKEN:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
literal = parseBasicLiteral();
break;
default:
literal = parseDecimalIntLiteral(ParserRuleContext.DECIMAL_INTEGER_LITERAL_TOKEN);
literal = STNodeFactory.createBasicLiteralNode(SyntaxKind.NUMERIC_LITERAL, literal);
}
return STNodeFactory.createUnaryExpressionNode(operator, literal);
}
private static boolean isSingletonTypeDescStart(SyntaxKind tokenKind, STToken nextNextToken) {
switch (tokenKind) {
case STRING_LITERAL_TOKEN:
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
case TRUE_KEYWORD:
case FALSE_KEYWORD:
case NULL_KEYWORD:
return true;
case PLUS_TOKEN:
case MINUS_TOKEN:
return isIntOrFloat(nextNextToken);
default:
return false;
}
}
static boolean isIntOrFloat(STToken token) {
switch (token.kind) {
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
case HEX_FLOATING_POINT_LITERAL_TOKEN:
return true;
default:
return false;
}
}
    /**
     * Check whether the parser reached a valid expression start, using bounded lookahead.
     * <p>
     * This is a predictive check only: it peeks at tokens without consuming any.
     *
     * @param nextTokenKind Kind of the next immediate token.
     * @param nextTokenIndex Index to the next token.
     * @return <code>true</code> if this is a start of a valid expression. <code>false</code> otherwise
     */
    private boolean isValidExpressionStart(SyntaxKind nextTokenKind, int nextTokenIndex) {
        // Advance past the token being classified; subsequent peeks look one further ahead.
        nextTokenIndex++;
        switch (nextTokenKind) {
            case DECIMAL_INTEGER_LITERAL_TOKEN:
            case HEX_INTEGER_LITERAL_TOKEN:
            case STRING_LITERAL_TOKEN:
            case NULL_KEYWORD:
            case TRUE_KEYWORD:
            case FALSE_KEYWORD:
            case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
            case HEX_FLOATING_POINT_LITERAL_TOKEN:
                SyntaxKind nextNextTokenKind = peek(nextTokenIndex).kind;
                // A literal followed by `|` or `&` could be a union/intersection of
                // singleton types or a binary expression; recurse past the operator.
                if (nextNextTokenKind == SyntaxKind.PIPE_TOKEN || nextNextTokenKind == SyntaxKind.BITWISE_AND_TOKEN) {
                    nextTokenIndex++;
                    return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
                }
                return nextNextTokenKind == SyntaxKind.SEMICOLON_TOKEN ||
                        nextNextTokenKind == SyntaxKind.COMMA_TOKEN ||
                        nextNextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
                        isValidExprRhsStart(nextNextTokenKind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case IDENTIFIER_TOKEN:
                // An identifier starts an expression only if what follows can continue one.
                return isValidExprRhsStart(peek(nextTokenIndex).kind, SyntaxKind.SIMPLE_NAME_REFERENCE);
            case OPEN_PAREN_TOKEN:
            case CHECK_KEYWORD:
            case CHECKPANIC_KEYWORD:
            case OPEN_BRACE_TOKEN:
            case TYPEOF_KEYWORD:
            case NEGATION_TOKEN:
            case EXCLAMATION_MARK_TOKEN:
            case TRAP_KEYWORD:
            case OPEN_BRACKET_TOKEN:
            case LT_TOKEN:
            case FROM_KEYWORD:
            case LET_KEYWORD:
            case BACKTICK_TOKEN:
            case NEW_KEYWORD:
            case LEFT_ARROW_TOKEN:
            case FUNCTION_KEYWORD:
            case TRANSACTIONAL_KEYWORD:
            case ISOLATED_KEYWORD:
            case BASE16_KEYWORD:
            case BASE64_KEYWORD:
                return true;
            case PLUS_TOKEN:
            case MINUS_TOKEN:
                // Unary sign: valid iff what follows is itself a valid expression start.
                return isValidExpressionStart(peek(nextTokenIndex).kind, nextTokenIndex);
            case TABLE_KEYWORD:
                // `table from ...` query expression.
                return peek(nextTokenIndex).kind == SyntaxKind.FROM_KEYWORD;
            case STREAM_KEYWORD:
                STToken nextNextToken = peek(nextTokenIndex);
                return nextNextToken.kind == SyntaxKind.KEY_KEYWORD ||
                        nextNextToken.kind == SyntaxKind.OPEN_BRACKET_TOKEN ||
                        nextNextToken.kind == SyntaxKind.FROM_KEYWORD;
            case ERROR_KEYWORD:
                // Only an error-constructor call `error(...)` counts as an expression start.
                return peek(nextTokenIndex).kind == SyntaxKind.OPEN_PAREN_TOKEN;
            case XML_KEYWORD:
            case STRING_KEYWORD:
                // Template expressions: `xml` / `string` followed by a backtick.
                return peek(nextTokenIndex).kind == SyntaxKind.BACKTICK_TOKEN;
            case START_KEYWORD:
            case FLUSH_KEYWORD:
            case WAIT_KEYWORD:
            default:
                // Action starts (start/flush/wait) are intentionally excluded here.
                return false;
        }
    }
/**
* Parse sync send action.
* <p>
* <code>sync-send-action := expression ->> peer-worker</code>
*
* @param expression LHS expression of the sync send action
* @return Sync send action node
*/
private STNode parseSyncSendAction(STNode expression) {
STNode syncSendToken = parseSyncSendToken();
STNode peerWorker = parsePeerWorkerName();
return STNodeFactory.createSyncSendActionNode(expression, syncSendToken, peerWorker);
}
/**
* Parse peer worker.
* <p>
* <code>peer-worker := worker-name | function</code>
*
* @return peer worker name node
*/
private STNode parsePeerWorkerName() {
STToken token = peek();
switch (token.kind) {
case IDENTIFIER_TOKEN:
case FUNCTION_KEYWORD:
return STNodeFactory.createSimpleNameReferenceNode(consume());
default:
recover(token, ParserRuleContext.PEER_WORKER_NAME);
return parsePeerWorkerName();
}
}
/**
* Parse sync send token.
* <p>
* <code>sync-send-token := ->> </code>
*
* @return sync send token
*/
private STNode parseSyncSendToken() {
STToken token = peek();
if (token.kind == SyntaxKind.SYNC_SEND_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.SYNC_SEND_TOKEN);
return parseSyncSendToken();
}
}
/**
* Parse receive action.
* <p>
* <code>receive-action := single-receive-action | multiple-receive-action</code>
*
* @return Receive action
*/
private STNode parseReceiveAction() {
STNode leftArrow = parseLeftArrowToken();
STNode receiveWorkers = parseReceiveWorkers();
return STNodeFactory.createReceiveActionNode(leftArrow, receiveWorkers);
}
private STNode parseReceiveWorkers() {
switch (peek().kind) {
case FUNCTION_KEYWORD:
case IDENTIFIER_TOKEN:
return parsePeerWorkerName();
case OPEN_BRACE_TOKEN:
return parseMultipleReceiveWorkers();
default:
recover(peek(), ParserRuleContext.RECEIVE_WORKERS);
return parseReceiveWorkers();
}
}
    /**
     * Parse multiple worker receivers.
     * <p>
     * <code>{ receive-field (, receive-field)* }</code>
     *
     * @return Multiple worker receiver node
     */
    private STNode parseMultipleReceiveWorkers() {
        startContext(ParserRuleContext.MULTI_RECEIVE_WORKERS);
        STNode openBrace = parseOpenBrace();
        STNode receiveFields = parseReceiveFields();
        STNode closeBrace = parseCloseBrace();
        endContext();
        // An empty receive-field list is invalid; the diagnostic is attached to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(receiveFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_RECEIVE_FIELD_IN_RECEIVE_ACTION);
        return STNodeFactory.createReceiveFieldsNode(openBrace, receiveFields, closeBrace);
    }
    /**
     * Parse the comma-separated list of receive fields inside a multiple-receive action.
     * Returns an empty node list when the next token already ends the list.
     *
     * @return Node list of receive fields and their separating commas
     */
    private STNode parseReceiveFields() {
        List<STNode> receiveFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfReceiveFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }
        // First field has no leading comma.
        STNode receiveField = parseReceiveField();
        receiveFields.add(receiveField);
        nextToken = peek();
        STNode recieveFieldEnd;
        while (!isEndOfReceiveFields(nextToken.kind)) {
            // A null end means the list terminator was reached (e.g. `}`).
            recieveFieldEnd = parseReceiveFieldEnd();
            if (recieveFieldEnd == null) {
                break;
            }
            // Separators are stored in the same flat list as the fields.
            receiveFields.add(recieveFieldEnd);
            receiveField = parseReceiveField();
            receiveFields.add(receiveField);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(receiveFields);
    }
private boolean isEndOfReceiveFields(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return true;
default:
return false;
}
}
private STNode parseReceiveFieldEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.RECEIVE_FIELD_END);
return parseReceiveFieldEnd();
}
}
/**
* Parse receive field.
* <p>
* <code>receive-field := peer-worker | field-name : peer-worker</code>
*
* @return Receiver field node
*/
private STNode parseReceiveField() {
switch (peek().kind) {
case FUNCTION_KEYWORD:
STNode functionKeyword = consume();
return STNodeFactory.createSimpleNameReferenceNode(functionKeyword);
case IDENTIFIER_TOKEN:
STNode identifier = parseIdentifier(ParserRuleContext.RECEIVE_FIELD_NAME);
return createQualifiedReceiveField(identifier);
default:
recover(peek(), ParserRuleContext.RECEIVE_FIELD);
return parseReceiveField();
}
}
private STNode createQualifiedReceiveField(STNode identifier) {
if (peek().kind != SyntaxKind.COLON_TOKEN) {
return identifier;
}
STNode colon = parseColon();
STNode peerWorker = parsePeerWorkerName();
return createQualifiedNameReferenceNode(identifier, colon, peerWorker);
}
/**
* Parse left arrow (<-) token.
*
* @return left arrow token
*/
private STNode parseLeftArrowToken() {
STToken token = peek();
if (token.kind == SyntaxKind.LEFT_ARROW_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.LEFT_ARROW_TOKEN);
return parseLeftArrowToken();
}
}
    /**
     * Parse signed right shift token (>>).
     * This method should only be called by seeing a `DOUBLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by a `GT_TOKEN`
     * <p>
     * When two separate `>` tokens are merged, any whitespace between them is
     * reported as an error, since `> >` is not a valid shift operator.
     *
     * @return Parsed node
     */
    private STNode parseSignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.DOUBLE_GT_TOKEN) {
            return firstToken;
        }
        // Merge two `>` tokens into one `>>`, keeping the outer minutiae.
        STToken endLGToken = consume();
        STNode doubleGTToken = STNodeFactory.createToken(SyntaxKind.DOUBLE_GT_TOKEN, firstToken.leadingMinutiae(),
                endLGToken.trailingMinutiae());
        // Trailing minutiae on the first `>` means whitespace sat between the two tokens.
        if (hasTrailingMinutiae(firstToken)) {
            doubleGTToken = SyntaxErrors.addDiagnostic(doubleGTToken,
                    DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_RIGHT_SHIFT_OP);
        }
        return doubleGTToken;
    }
    /**
     * Parse unsigned right shift token (>>>).
     * This method should only be called by seeing a `TRIPPLE_GT_TOKEN` or
     * by seeing a `GT_TOKEN` followed by two `GT_TOKEN`s
     * <p>
     * When three separate `>` tokens are merged, any whitespace between them is
     * reported as an error, since `> > >` is not a valid shift operator.
     *
     * @return Parsed node
     */
    private STNode parseUnsignedRightShiftToken() {
        STNode firstToken = consume();
        if (firstToken.kind == SyntaxKind.TRIPPLE_GT_TOKEN) {
            return firstToken;
        }
        // Merge three `>` tokens into one `>>>`, keeping the outer minutiae.
        STNode middleGTToken = consume();
        STNode endLGToken = consume();
        STNode unsignedRightShiftToken = STNodeFactory.createToken(SyntaxKind.TRIPPLE_GT_TOKEN,
                firstToken.leadingMinutiae(), endLGToken.trailingMinutiae());
        // Trailing minutiae on either of the first two tokens means whitespace
        // separated the `>` tokens.
        boolean validOpenGTToken = !hasTrailingMinutiae(firstToken);
        boolean validMiddleGTToken = !hasTrailingMinutiae(middleGTToken);
        if (validOpenGTToken && validMiddleGTToken) {
            return unsignedRightShiftToken;
        }
        unsignedRightShiftToken = SyntaxErrors.addDiagnostic(unsignedRightShiftToken,
                DiagnosticErrorCode.ERROR_NO_WHITESPACES_ALLOWED_IN_UNSIGNED_RIGHT_SHIFT_OP);
        return unsignedRightShiftToken;
    }
/**
* Parse wait action.
* <p>
* <code>wait-action := single-wait-action | multiple-wait-action | alternate-wait-action </code>
*
* @return Wait action node
*/
private STNode parseWaitAction() {
STNode waitKeyword = parseWaitKeyword();
if (peek().kind == SyntaxKind.OPEN_BRACE_TOKEN) {
return parseMultiWaitAction(waitKeyword);
}
return parseSingleOrAlternateWaitAction(waitKeyword);
}
/**
* Parse wait keyword.
*
* @return wait keyword
*/
private STNode parseWaitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.WAIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.WAIT_KEYWORD);
return parseWaitKeyword();
}
}
    /**
     * Parse single or alternate wait actions.
     * <p>
     * <code>
     * alternate-or-single-wait-action := wait wait-future-expr (| wait-future-expr)+
     * <br/>
     * wait-future-expr := expression but not mapping-constructor-expr
     * </code>
     *
     * @param waitKeyword wait keyword
     * @return Single or alternate wait action node
     */
    private STNode parseSingleOrAlternateWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.ALTERNATE_WAIT_EXPRS);
        STToken nextToken = peek();
        // `wait` immediately followed by a terminator: synthesize a missing
        // future expression so the tree stays well-formed.
        if (isEndOfWaitFutureExprList(nextToken.kind)) {
            endContext();
            STNode waitFutureExprs = STNodeFactory
                    .createSimpleNameReferenceNode(STNodeFactory.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
            waitFutureExprs = SyntaxErrors.addDiagnostic(waitFutureExprs,
                    DiagnosticErrorCode.ERROR_MISSING_WAIT_FUTURE_EXPRESSION);
            return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprs);
        }
        // Parse the first future-expr, then any `| expr` alternates.
        List<STNode> waitFutureExprList = new ArrayList<>();
        STNode waitField = parseWaitFutureExpr();
        waitFutureExprList.add(waitField);
        nextToken = peek();
        STNode waitFutureExprEnd;
        while (!isEndOfWaitFutureExprList(nextToken.kind)) {
            waitFutureExprEnd = parseWaitFutureExprEnd();
            if (waitFutureExprEnd == null) {
                break;
            }
            waitFutureExprList.add(waitFutureExprEnd);
            waitField = parseWaitFutureExpr();
            waitFutureExprList.add(waitField);
            nextToken = peek();
        }
        endContext();
        // NOTE(review): only the first parsed future-expr reaches the resulting
        // node; the pipe-separated alternates are parsed but not included here.
        // Confirm this is the intended representation of alternate-wait-action.
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFutureExprList.get(0));
    }
private boolean isEndOfWaitFutureExprList(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case OPEN_BRACE_TOKEN:
return true;
case PIPE_TOKEN:
default:
return false;
}
}
private STNode parseWaitFutureExpr() {
STNode waitFutureExpr = parseActionOrExpression();
if (waitFutureExpr.kind == SyntaxKind.MAPPING_CONSTRUCTOR) {
waitFutureExpr = SyntaxErrors.addDiagnostic(waitFutureExpr,
DiagnosticErrorCode.ERROR_MAPPING_CONSTRUCTOR_EXPR_AS_A_WAIT_EXPR);
} else if (isAction(waitFutureExpr)) {
waitFutureExpr =
SyntaxErrors.addDiagnostic(waitFutureExpr, DiagnosticErrorCode.ERROR_ACTION_AS_A_WAIT_EXPR);
}
return waitFutureExpr;
}
    /**
     * Parse the token between two wait-future-exprs: a pipe separator, or
     * <code>null</code> when the list has ended.
     *
     * @return Pipe token, or <code>null</code> at the end of the list
     */
    private STNode parseWaitFutureExprEnd() {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case PIPE_TOKEN:
                return parsePipeToken();
            default:
                // Stop (rather than recover) when the list has clearly ended or
                // when the next token cannot start another expression.
                if (isEndOfWaitFutureExprList(nextToken.kind) || !isValidExpressionStart(nextToken.kind, 1)) {
                    return null;
                }
                recover(peek(), ParserRuleContext.WAIT_FUTURE_EXPR_END);
                return parseWaitFutureExprEnd();
        }
    }
    /**
     * Parse multiple wait action.
     * <p>
     * <code>multiple-wait-action := wait { wait-field (, wait-field)* }</code>
     *
     * @param waitKeyword Wait keyword
     * @return Multiple wait action node
     */
    private STNode parseMultiWaitAction(STNode waitKeyword) {
        startContext(ParserRuleContext.MULTI_WAIT_FIELDS);
        STNode openBrace = parseOpenBrace();
        STNode waitFields = parseWaitFields();
        STNode closeBrace = parseCloseBrace();
        endContext();
        // An empty wait-field list is invalid; the diagnostic is attached to the open brace.
        openBrace = cloneWithDiagnosticIfListEmpty(waitFields, openBrace,
                DiagnosticErrorCode.ERROR_MISSING_WAIT_FIELD_IN_WAIT_ACTION);
        STNode waitFieldsNode = STNodeFactory.createWaitFieldsListNode(openBrace, waitFields, closeBrace);
        return STNodeFactory.createWaitActionNode(waitKeyword, waitFieldsNode);
    }
    /**
     * Parse the comma-separated list of wait fields inside a multiple-wait action.
     * Returns an empty node list when the next token already ends the list.
     *
     * @return Node list of wait fields and their separating commas
     */
    private STNode parseWaitFields() {
        List<STNode> waitFields = new ArrayList<>();
        STToken nextToken = peek();
        if (isEndOfWaitFields(nextToken.kind)) {
            return STNodeFactory.createEmptyNodeList();
        }
        // First field has no leading comma.
        STNode waitField = parseWaitField();
        waitFields.add(waitField);
        nextToken = peek();
        STNode waitFieldEnd;
        while (!isEndOfWaitFields(nextToken.kind)) {
            // A null end means the list terminator was reached (e.g. `}`).
            waitFieldEnd = parseWaitFieldEnd();
            if (waitFieldEnd == null) {
                break;
            }
            // Separators are stored in the same flat list as the fields.
            waitFields.add(waitFieldEnd);
            waitField = parseWaitField();
            waitFields.add(waitField);
            nextToken = peek();
        }
        return STNodeFactory.createNodeList(waitFields);
    }
private boolean isEndOfWaitFields(SyntaxKind nextTokenKind) {
switch (nextTokenKind) {
case EOF_TOKEN:
case CLOSE_BRACE_TOKEN:
return true;
default:
return false;
}
}
private STNode parseWaitFieldEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.WAIT_FIELD_END);
return parseWaitFieldEnd();
}
}
/**
* Parse wait field.
* <p>
* <code>wait-field := variable-name | field-name : wait-future-expr</code>
*
* @return Receiver field node
*/
private STNode parseWaitField() {
switch (peek().kind) {
case IDENTIFIER_TOKEN:
STNode identifier = parseIdentifier(ParserRuleContext.WAIT_FIELD_NAME);
identifier = STNodeFactory.createSimpleNameReferenceNode(identifier);
return createQualifiedWaitField(identifier);
default:
recover(peek(), ParserRuleContext.WAIT_FIELD_NAME);
return parseWaitField();
}
}
private STNode createQualifiedWaitField(STNode identifier) {
if (peek().kind != SyntaxKind.COLON_TOKEN) {
return identifier;
}
STNode colon = parseColon();
STNode waitFutureExpr = parseWaitFutureExpr();
return STNodeFactory.createWaitFieldNode(identifier, colon, waitFutureExpr);
}
/**
* Parse annot access expression.
* <p>
* <code>
* annot-access-expr := expression .@ annot-tag-reference
* <br/>
* annot-tag-reference := qualified-identifier | identifier
* </code>
*
* @param lhsExpr Preceding expression of the annot access access
* @return Parsed node
*/
private STNode parseAnnotAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode annotAccessToken = parseAnnotChainingToken();
STNode annotTagReference = parseFieldAccessIdentifier(isInConditionalExpr);
return STNodeFactory.createAnnotAccessExpressionNode(lhsExpr, annotAccessToken, annotTagReference);
}
/**
* Parse annot-chaining-token.
*
* @return Parsed node
*/
private STNode parseAnnotChainingToken() {
STToken token = peek();
if (token.kind == SyntaxKind.ANNOT_CHAINING_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.ANNOT_CHAINING_TOKEN);
return parseAnnotChainingToken();
}
}
/**
* Parse field access identifier.
* <p>
* <code>field-access-identifier := qualified-identifier | identifier</code>
*
* @return Parsed node
*/
private STNode parseFieldAccessIdentifier(boolean isInConditionalExpr) {
STToken nextToken = peek();
if (!isPredeclaredIdentifier(nextToken.kind)) {
STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_IDENTIFIER);
return parseQualifiedIdentifier(identifier, isInConditionalExpr);
}
return parseQualifiedIdentifier(ParserRuleContext.FIELD_ACCESS_IDENTIFIER, isInConditionalExpr);
}
    /**
     * Parse query action.
     * <p>
     * <code>query-action := query-pipeline do-clause
     * <br/>
     * do-clause := do block-stmt
     * </code>
     *
     * @param queryConstructType Query construct type. This is only for validation
     * @param queryPipeline Query pipeline
     * @param selectClause Select clause if any This is only for validation.
     * @return Query action node
     */
    private STNode parseQueryAction(STNode queryConstructType, STNode queryPipeline, STNode selectClause) {
        // A construct type (e.g. `table`/`stream`) is not allowed on a query
        // action; fold it into the pipeline as invalid-node minutiae.
        if (queryConstructType != null) {
            queryPipeline = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(queryPipeline, queryConstructType,
                    DiagnosticErrorCode.ERROR_QUERY_CONSTRUCT_TYPE_IN_QUERY_ACTION);
        }
        // Likewise, a select clause is not allowed on a query action.
        if (selectClause != null) {
            queryPipeline = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(queryPipeline, selectClause,
                    DiagnosticErrorCode.ERROR_SELECT_CLAUSE_IN_QUERY_ACTION);
        }
        startContext(ParserRuleContext.DO_CLAUSE);
        STNode doKeyword = parseDoKeyword();
        STNode blockStmt = parseBlockNode();
        endContext();
        return STNodeFactory.createQueryActionNode(queryPipeline, doKeyword, blockStmt);
    }
/**
* Parse 'do' keyword.
*
* @return do keyword node
*/
private STNode parseDoKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.DO_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.DO_KEYWORD);
return parseDoKeyword();
}
}
/**
* Parse optional field access or xml optional attribute access expression.
* <p>
* <code>
* optional-field-access-expr := expression ?. field-name
* <br/>
* xml-optional-attribute-access-expr := expression ?. xml-attribute-name
* <br/>
* xml-attribute-name := xml-qualified-name | qualified-identifier | identifier
* <br/>
* xml-qualified-name := xml-namespace-prefix : identifier
* <br/>
* xml-namespace-prefix := identifier
* </code>
*
* @param lhsExpr Preceding expression of the optional access
* @return Parsed node
*/
private STNode parseOptionalFieldAccessExpression(STNode lhsExpr, boolean isInConditionalExpr) {
STNode optionalFieldAccessToken = parseOptionalChainingToken();
STNode fieldName = parseFieldAccessIdentifier(isInConditionalExpr);
return STNodeFactory.createOptionalFieldAccessExpressionNode(lhsExpr, optionalFieldAccessToken, fieldName);
}
/**
* Parse optional chaining token.
*
* @return parsed node
*/
private STNode parseOptionalChainingToken() {
STToken token = peek();
if (token.kind == SyntaxKind.OPTIONAL_CHAINING_TOKEN) {
return consume();
} else {
recover(token, ParserRuleContext.OPTIONAL_CHAINING_TOKEN);
return parseOptionalChainingToken();
}
}
    /**
     * Parse conditional expression.
     * <p>
     * <code>conditional-expr := expression ? expression : expression</code>
     * <p>
     * The tricky part is the missing-colon case: `a ? b : c` can lex as the
     * qualified name `b:c` inside the middle expression. When no explicit colon
     * follows, this method looks for a qualified-name-reference inside the
     * middle expression and re-interprets its colon as the conditional's colon.
     *
     * @param lhsExpr Preceding expression of the question mark
     * @param isInConditionalExpr whether calling from a conditional-expr
     * @return Parsed node
     */
    private STNode parseConditionalExpression(STNode lhsExpr, boolean isInConditionalExpr) {
        startContext(ParserRuleContext.CONDITIONAL_EXPRESSION);
        STNode questionMark = parseQuestionMark();
        // Middle expression is parsed with isInConditionalExpr=true so that a
        // qualified name can be recognized for colon splitting below.
        STNode middleExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false, true);
        if (peek().kind != SyntaxKind.COLON_TOKEN) {
            // Nested conditional: try to split a qualified name inside the inner
            // conditional's middle expression first.
            if (middleExpr.kind == SyntaxKind.CONDITIONAL_EXPRESSION) {
                STConditionalExpressionNode innerConditionalExpr = (STConditionalExpressionNode) middleExpr;
                STNode innerMiddleExpr = innerConditionalExpr.middleExpression;
                STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, false);
                if (rightMostQNameRef != null) {
                    middleExpr = generateConditionalExprForRightMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, rightMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
                STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(innerMiddleExpr, true);
                if (leftMostQNameRef != null) {
                    middleExpr = generateConditionalExprForLeftMost(innerConditionalExpr.lhsExpression,
                            innerConditionalExpr.questionMarkToken, innerMiddleExpr, leftMostQNameRef);
                    endContext();
                    return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr,
                            innerConditionalExpr.colonToken, innerConditionalExpr.endExpression);
                }
            }
            // Non-nested case: split the right-most (preferred) or left-most
            // qualified name found in the middle expression.
            STNode rightMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, false);
            if (rightMostQNameRef != null) {
                endContext();
                return generateConditionalExprForRightMost(lhsExpr, questionMark, middleExpr, rightMostQNameRef);
            }
            STNode leftMostQNameRef = ConditionalExprResolver.getQualifiedNameRefNode(middleExpr, true);
            if (leftMostQNameRef != null) {
                endContext();
                return generateConditionalExprForLeftMost(lhsExpr, questionMark, middleExpr, leftMostQNameRef);
            }
        }
        // Explicit colon (or no splittable qualified name): parse the end expression.
        return parseConditionalExprRhs(lhsExpr, questionMark, middleExpr, isInConditionalExpr);
    }
    /**
     * Build a conditional expression by splitting the right-most qualified name
     * `prefix:ident` in the middle expression: the prefix stays in the middle
     * expression, the colon becomes the conditional's colon, and the identifier
     * becomes the end expression.
     *
     * @param lhsExpr                   lhs of the conditional
     * @param questionMark              question-mark token
     * @param middleExpr                middle expression containing the qualified name
     * @param rightMostQualifiedNameRef the qualified name to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForRightMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                       STNode rightMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef =
                (STQualifiedNameReferenceNode) rightMostQualifiedNameRef;
        STNode endExpr = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        STNode simpleNameRef =
                ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        // Replace the qualified name inside the middle expression with just its prefix.
        middleExpr = middleExpr.replace(rightMostQualifiedNameRef, simpleNameRef);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }
    /**
     * Build a conditional expression by splitting the left-most qualified name
     * `prefix:ident` in the middle expression: the prefix becomes the middle
     * expression, the colon becomes the conditional's colon, and the remainder
     * (with the identifier substituted in) becomes the end expression.
     *
     * @param lhsExpr                  lhs of the conditional
     * @param questionMark             question-mark token
     * @param middleExpr               middle expression containing the qualified name
     * @param leftMostQualifiedNameRef the qualified name to split
     * @return Conditional expression node
     */
    private STNode generateConditionalExprForLeftMost(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                                      STNode leftMostQualifiedNameRef) {
        STQualifiedNameReferenceNode qualifiedNameRef = (STQualifiedNameReferenceNode) leftMostQualifiedNameRef;
        STNode simpleNameRef = STNodeFactory.createSimpleNameReferenceNode(qualifiedNameRef.identifier);
        // The tail of the original middle expression becomes the end expression.
        STNode endExpr = middleExpr.replace(leftMostQualifiedNameRef, simpleNameRef);
        middleExpr = ConditionalExprResolver.getSimpleNameRefNode(qualifiedNameRef.modulePrefix);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, qualifiedNameRef.colon,
                endExpr);
    }
    /**
     * Parse the colon and end expression of a conditional expression.
     *
     * @param lhsExpr             lhs of the conditional
     * @param questionMark        question-mark token
     * @param middleExpr          already-parsed middle expression
     * @param isInConditionalExpr whether the enclosing context is itself a conditional-expr
     * @return Conditional expression node
     */
    private STNode parseConditionalExprRhs(STNode lhsExpr, STNode questionMark, STNode middleExpr,
                                           boolean isInConditionalExpr) {
        STNode colon = parseColon();
        // Close the CONDITIONAL_EXPRESSION context before parsing the end
        // expression, which is parsed in the enclosing context.
        endContext();
        STNode endExpr = parseExpression(OperatorPrecedence.ANON_FUNC_OR_LET, true, false,
                isInConditionalExpr);
        return STNodeFactory.createConditionalExpressionNode(lhsExpr, questionMark, middleExpr, colon, endExpr);
    }
    /**
     * Parse enum declaration.
     * <p>
     * module-enum-decl :=
     * metadata
     * [public] enum identifier { enum-member (, enum-member)* }
     * enum-member := metadata identifier [= const-expr]
     * </p>
     *
     * @param metadata  Metadata (annotations/doc) preceding the declaration, may be empty
     * @param qualifier Optional `public` qualifier, may be empty
     * @return Parsed enum node.
     */
    private STNode parseEnumDeclaration(STNode metadata, STNode qualifier) {
        startContext(ParserRuleContext.MODULE_ENUM_DECLARATION);
        STNode enumKeywordToken = parseEnumKeyword();
        STNode identifier = parseIdentifier(ParserRuleContext.MODULE_ENUM_NAME);
        STNode openBraceToken = parseOpenBrace();
        STNode enumMemberList = parseEnumMemberList();
        STNode closeBraceToken = parseCloseBrace();
        endContext();
        // An enum must declare at least one member; attach the diagnostic to the open brace.
        openBraceToken = cloneWithDiagnosticIfListEmpty(enumMemberList, openBraceToken,
                DiagnosticErrorCode.ERROR_MISSING_ENUM_MEMBER);
        return STNodeFactory.createEnumDeclarationNode(metadata, qualifier, enumKeywordToken, identifier,
                openBraceToken, enumMemberList, closeBraceToken);
    }
/**
* Parse 'enum' keyword.
*
* @return enum keyword node
*/
private STNode parseEnumKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.ENUM_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.ENUM_KEYWORD);
return parseEnumKeyword();
}
}
/**
* Parse enum member list.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return enum member list node.
*/
private STNode parseEnumMemberList() {
startContext(ParserRuleContext.ENUM_MEMBER_LIST);
if (peek().kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
return STNodeFactory.createEmptyNodeList();
}
List<STNode> enumMemberList = new ArrayList<>();
STNode enumMember = parseEnumMember();
STNode enumMemberRhs;
while (peek().kind != SyntaxKind.CLOSE_BRACE_TOKEN) {
enumMemberRhs = parseEnumMemberEnd();
if (enumMemberRhs == null) {
break;
}
enumMemberList.add(enumMember);
enumMemberList.add(enumMemberRhs);
enumMember = parseEnumMember();
}
enumMemberList.add(enumMember);
endContext();
return STNodeFactory.createNodeList(enumMemberList);
}
/**
* Parse enum member.
* <p>
* enum-member := metadata identifier [= const-expr]
* </p>
*
* @return Parsed enum member node.
*/
private STNode parseEnumMember() {
STNode metadata;
switch (peek().kind) {
case DOCUMENTATION_STRING:
case AT_TOKEN:
metadata = parseMetaData();
break;
default:
metadata = STNodeFactory.createEmptyNode();
}
STNode identifierNode = parseIdentifier(ParserRuleContext.ENUM_MEMBER_NAME);
return parseEnumMemberRhs(metadata, identifierNode);
}
    /**
     * Parse the optional `= const-expr` part of an enum member.
     *
     * @param metadata       Metadata preceding the member, may be empty
     * @param identifierNode Member name
     * @return Enum member node
     */
    private STNode parseEnumMemberRhs(STNode metadata, STNode identifierNode) {
        STNode equalToken, constExprNode;
        switch (peek().kind) {
            case EQUAL_TOKEN:
                // Explicit member value: `identifier = const-expr`.
                equalToken = parseAssignOp();
                constExprNode = parseExpression();
                break;
            case COMMA_TOKEN:
            case CLOSE_BRACE_TOKEN:
                // No explicit value; both parts are represented by empty nodes.
                equalToken = STNodeFactory.createEmptyNode();
                constExprNode = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.ENUM_MEMBER_RHS);
                return parseEnumMemberRhs(metadata, identifierNode);
        }
        return STNodeFactory.createEnumMemberNode(metadata, identifierNode, equalToken, constExprNode);
    }
private STNode parseEnumMemberEnd() {
switch (peek().kind) {
case COMMA_TOKEN:
return parseComma();
case CLOSE_BRACE_TOKEN:
return null;
default:
recover(peek(), ParserRuleContext.ENUM_MEMBER_END);
return parseEnumMemberEnd();
}
}
    /**
     * Disambiguate between a transaction statement and a variable declaration
     * whose type starts with the predeclared prefix `transaction:`.
     *
     * @param annots             Annotations preceding the statement
     * @param qualifiers         Qualifiers preceding the statement
     * @param transactionKeyword Already-consumed `transaction` keyword
     * @return Transaction statement or variable declaration node
     */
    private STNode parseTransactionStmtOrVarDecl(STNode annots, List<STNode> qualifiers, STToken transactionKeyword) {
        switch (peek().kind) {
            case OPEN_BRACE_TOKEN:
                // `transaction {` — a transaction statement; annots/qualifiers are invalid here.
                reportInvalidStatementAnnots(annots, qualifiers);
                reportInvalidQualifierList(qualifiers);
                return parseTransactionStatement(transactionKeyword);
            case COLON_TOKEN:
                // `transaction:Ident ...` — a qualified type reference in a var decl.
                if (getNextNextToken().kind == SyntaxKind.IDENTIFIER_TOKEN) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                // Intentional fall-through: a colon NOT followed by an identifier
                // is handled by the recovery logic below.
            default:
                Solution solution = recover(peek(), ParserRuleContext.TRANSACTION_STMT_RHS_OR_TYPE_REF);
                // If recovery decided to keep the token or insert a colon, treat
                // it as the qualified-type-reference (var decl) alternative.
                if (solution.action == Action.KEEP ||
                        (solution.action == Action.INSERT && solution.tokenKind == SyntaxKind.COLON_TOKEN)) {
                    STNode typeDesc = parseQualifiedIdentifierWithPredeclPrefix(transactionKeyword, false);
                    return parseVarDeclTypeDescRhs(typeDesc, annots, qualifiers, true, false);
                }
                return parseTransactionStmtOrVarDecl(annots, qualifiers, transactionKeyword);
        }
    }
/**
* Parse transaction statement.
* <p>
* <code>transaction-stmt := `transaction` block-stmt [on-fail-clause]</code>
*
* @return Transaction statement node
*/
private STNode parseTransactionStatement(STNode transactionKeyword) {
startContext(ParserRuleContext.TRANSACTION_STMT);
STNode blockStmt = parseBlockNode();
endContext();
STNode onFailClause = parseOptionalOnFailClause();
return STNodeFactory.createTransactionStatementNode(transactionKeyword, blockStmt, onFailClause);
}
/**
* Parse commit action.
* <p>
* <code>commit-action := "commit"</code>
*
* @return Commit action node
*/
private STNode parseCommitAction() {
STNode commitKeyword = parseCommitKeyword();
return STNodeFactory.createCommitActionNode(commitKeyword);
}
/**
* Parse commit keyword.
*
* @return parsed node
*/
private STNode parseCommitKeyword() {
STToken token = peek();
if (token.kind == SyntaxKind.COMMIT_KEYWORD) {
return consume();
} else {
recover(token, ParserRuleContext.COMMIT_KEYWORD);
return parseCommitKeyword();
}
}
    /**
     * Parse retry statement.
     * <p>
     * <code>
     * retry-stmt := `retry` retry-spec block-stmt [on-fail-clause]
     * <br/>
     * retry-spec := [type-parameter] [ `(` arg-list `)` ]
     * </code>
     *
     * @return Retry statement node
     */
    private STNode parseRetryStatement() {
        // Note: the RETRY_STMT context opened here is closed inside
        // parseRetryTypeParamRhs (reached via parseRetryKeywordRhs).
        startContext(ParserRuleContext.RETRY_STMT);
        STNode retryKeyword = parseRetryKeyword();
        STNode retryStmt = parseRetryKeywordRhs(retryKeyword);
        return retryStmt;
    }
    /**
     * Parse what follows the `retry` keyword: an optional type parameter, then
     * the rest of the retry statement.
     *
     * @param retryKeyword Already-consumed `retry` keyword
     * @return Retry statement node
     */
    private STNode parseRetryKeywordRhs(STNode retryKeyword) {
        STToken nextToken = peek();
        switch (nextToken.kind) {
            case LT_TOKEN:
                // `retry<Type>` — explicit retry-manager type parameter.
                STNode typeParam = parseTypeParameter();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            case OPEN_PAREN_TOKEN:
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                // No type parameter. `typeParam` is declared in the LT_TOKEN case
                // above; switch cases share one scope, so plain assignment is valid.
                typeParam = STNodeFactory.createEmptyNode();
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
            default:
                recover(peek(), ParserRuleContext.RETRY_KEYWORD_RHS);
                return parseRetryKeywordRhs(retryKeyword);
        }
    }
    /**
     * Parse the optional argument list and body of a retry statement, then the
     * optional on-fail clause.
     *
     * @param retryKeyword Already-consumed `retry` keyword
     * @param typeParam    Optional type parameter (may be an empty node)
     * @return Retry statement node
     */
    private STNode parseRetryTypeParamRhs(STNode retryKeyword, STNode typeParam) {
        STNode args;
        switch (peek().kind) {
            case OPEN_PAREN_TOKEN:
                // `retry (...)` — explicit retry-manager constructor arguments.
                args = parseParenthesizedArgList();
                break;
            case OPEN_BRACE_TOKEN:
            case TRANSACTION_KEYWORD:
                args = STNodeFactory.createEmptyNode();
                break;
            default:
                recover(peek(), ParserRuleContext.RETRY_TYPE_PARAM_RHS);
                return parseRetryTypeParamRhs(retryKeyword, typeParam);
        }
        STNode blockStmt = parseRetryBody();
        // Closes the RETRY_STMT context opened in parseRetryStatement.
        endContext();
        STNode onFailClause = parseOptionalOnFailClause();
        return STNodeFactory.createRetryStatementNode(retryKeyword, typeParam, args, blockStmt, onFailClause);
    }
private STNode parseRetryBody() {
switch (peek().kind) {
case OPEN_BRACE_TOKEN:
return parseBlockNode();
case TRANSACTION_KEYWORD:
return parseTransactionStatement(consume());
default:
recover(peek(), ParserRuleContext.RETRY_BODY);
return parseRetryBody();
}
}
    /**
     * Parse optional on fail clause.
     *
     * @return On-fail clause node, or an empty node when no clause is present
     */
    private STNode parseOptionalOnFailClause() {
        STToken nextToken = peek();
        if (nextToken.kind == SyntaxKind.ON_KEYWORD) {
            return parseOnFailClause();
        }
        // Anything that ends a compound statement means the clause is absent.
        if (isEndOfRegularCompoundStmt(nextToken.kind)) {
            return STNodeFactory.createEmptyNode();
        }
        recover(nextToken, ParserRuleContext.REGULAR_COMPOUND_STMT_RHS);
        return parseOptionalOnFailClause();
    }
private boolean isEndOfRegularCompoundStmt(SyntaxKind nodeKind) {
switch (nodeKind) {
case CLOSE_BRACE_TOKEN:
case SEMICOLON_TOKEN:
case AT_TOKEN:
case EOF_TOKEN:
return true;
default:
return isStatementStartingToken(nodeKind);
}
}
/**
 * Check whether the given token kind can be the first token of a statement.
 *
 * @param nodeKind Token kind to check
 * @return <code>true</code> if a statement can start with this token kind
 */
private boolean isStatementStartingToken(SyntaxKind nodeKind) {
    switch (nodeKind) {
        case FINAL_KEYWORD:
        case IF_KEYWORD:
        case WHILE_KEYWORD:
        case DO_KEYWORD:
        case PANIC_KEYWORD:
        case CONTINUE_KEYWORD:
        case BREAK_KEYWORD:
        case RETURN_KEYWORD:
        case LOCK_KEYWORD:
        case OPEN_BRACE_TOKEN:
        case FORK_KEYWORD:
        case FOREACH_KEYWORD:
        case XMLNS_KEYWORD:
        case TRANSACTION_KEYWORD:
        case RETRY_KEYWORD:
        case ROLLBACK_KEYWORD:
        case MATCH_KEYWORD:
        case FAIL_KEYWORD:
        case CHECK_KEYWORD:
        case CHECKPANIC_KEYWORD:
        case TRAP_KEYWORD:
        case START_KEYWORD:
        case FLUSH_KEYWORD:
        case LEFT_ARROW_TOKEN:
        case WAIT_KEYWORD:
        case COMMIT_KEYWORD:
        case WORKER_KEYWORD:
        case TYPE_KEYWORD:
        case CONST_KEYWORD:
            return true;
        default:
            // A local variable declaration starts with a type descriptor.
            if (isTypeStartingToken(nodeKind)) {
                return true;
            }
            // An expression/action statement starts with any expression-start token.
            if (isValidExpressionStart(nodeKind, 1)) {
                return true;
            }
            return false;
    }
}
/**
 * Parse on fail clause.
 * <p>
 * <code>
 * on-fail-clause := on fail typed-binding-pattern statement-block
 * </code>
 *
 * @return On fail clause node
 */
private STNode parseOnFailClause() {
    startContext(ParserRuleContext.ON_FAIL_CLAUSE);
    STNode onKeyword = parseOnKeyword();
    STNode failKeyword = parseFailKeyword();
    STNode typeDesc = parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false,
            TypePrecedence.DEFAULT);
    STNode varName = parseIdentifier(ParserRuleContext.VARIABLE_NAME);
    STNode blockStmt = parseBlockNode();
    endContext();
    return STNodeFactory.createOnFailClauseNode(onKeyword, failKeyword, typeDesc, varName, blockStmt);
}
/**
 * Parse retry keyword.
 *
 * @return Retry keyword node
 */
private STNode parseRetryKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.RETRY_KEYWORD) {
        recover(nextToken, ParserRuleContext.RETRY_KEYWORD);
        return parseRetryKeyword();
    }
    return consume();
}
/**
 * Parse rollback statement.
 * <p>
 * <code>rollback-stmt := "rollback" [expression] ";"</code>
 *
 * @return Rollback statement node
 */
private STNode parseRollbackStatement() {
    startContext(ParserRuleContext.ROLLBACK_STMT);
    STNode rollbackKeyword = parseRollbackKeyword();
    // The expression is optional: `rollback;` is valid.
    STNode expression = peek().kind == SyntaxKind.SEMICOLON_TOKEN ? STNodeFactory.createEmptyNode()
            : parseExpression();
    STNode semicolon = parseSemicolon();
    endContext();
    return STNodeFactory.createRollbackStatementNode(rollbackKeyword, expression, semicolon);
}
/**
 * Parse rollback keyword.
 *
 * @return Rollback keyword node
 */
private STNode parseRollbackKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.ROLLBACK_KEYWORD) {
        recover(nextToken, ParserRuleContext.ROLLBACK_KEYWORD);
        return parseRollbackKeyword();
    }
    return consume();
}
/**
 * Parse transactional expression.
 * <p>
 * <code>transactional-expr := "transactional"</code>
 *
 * @return Transactional expression node
 */
private STNode parseTransactionalExpression() {
    return STNodeFactory.createTransactionalExpressionNode(parseTransactionalKeyword());
}
/**
 * Parse transactional keyword.
 *
 * @return Transactional keyword node
 */
private STNode parseTransactionalKeyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.TRANSACTIONAL_KEYWORD) {
        recover(nextToken, ParserRuleContext.TRANSACTIONAL_KEYWORD);
        return parseTransactionalKeyword();
    }
    return consume();
}
/**
 * Parse base16 or base64 byte array literal.
 * <p>
 * <code>
 * byte-array-literal := Base16Literal | Base64Literal
 * <br/>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * </code>
 *
 * @return parsed node
 */
private STNode parseByteArrayLiteral() {
    STNode type = peek().kind == SyntaxKind.BASE16_KEYWORD ? parseBase16Keyword() : parseBase64Keyword();
    STNode startingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_START);
    if (!startingBackTick.isMissing()) {
        STNode content = parseByteArrayContent();
        return parseByteArrayLiteral(type, startingBackTick, content);
    }
    // The opening backtick could not be recovered: fabricate an empty literal and flag it.
    startingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode endingBackTick = SyntaxErrors.createMissingToken(SyntaxKind.BACKTICK_TOKEN);
    STNode content = STNodeFactory.createEmptyNode();
    STNode byteArrayLiteral =
            STNodeFactory.createByteArrayLiteralNode(type, startingBackTick, content, endingBackTick);
    return SyntaxErrors.addDiagnostic(byteArrayLiteral, DiagnosticErrorCode.ERROR_MISSING_BYTE_ARRAY_CONTENT);
}
/**
 * Parse byte array literal.
 *
 * @param typeKeyword keyword token, possible values are `base16` and `base64`
 * @param startingBackTick starting backtick token
 * @param byteArrayContent byte array literal content to be validated
 * @return parsed byte array literal node
 */
private STNode parseByteArrayLiteral(STNode typeKeyword, STNode startingBackTick, STNode byteArrayContent) {
    STNode content = STNodeFactory.createEmptyNode();
    STNode newStartingBackTick = startingBackTick;
    STNodeList items = (STNodeList) byteArrayContent;
    if (items.size() == 1) {
        // Exactly one template item: validate it against the declared base16/base64 kind.
        STNode item = items.get(0);
        if (typeKeyword.kind == SyntaxKind.BASE16_KEYWORD && !isValidBase16LiteralContent(item.toString())) {
            // Invalid content is attached to the opening backtick as trailing invalid minutiae.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE16_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (typeKeyword.kind == SyntaxKind.BASE64_KEYWORD && !isValidBase64LiteralContent(item.toString())) {
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_BASE64_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else if (item.kind != SyntaxKind.TEMPLATE_STRING) {
            // A non-string item (e.g. an interpolation) is not allowed inside a byte array literal.
            newStartingBackTick = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(startingBackTick, item,
                    DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
        } else {
            content = item;
        }
    } else if (items.size() > 1) {
        // Multiple items imply interpolations were present: the whole content is invalid.
        STNode clonedStartingBackTick = startingBackTick;
        for (int index = 0; index < items.size(); index++) {
            STNode item = items.get(index);
            clonedStartingBackTick =
                    SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(clonedStartingBackTick, item);
        }
        newStartingBackTick = SyntaxErrors.addDiagnostic(clonedStartingBackTick,
                DiagnosticErrorCode.ERROR_INVALID_CONTENT_IN_BYTE_ARRAY_LITERAL);
    }
    STNode endingBackTick = parseBacktickToken(ParserRuleContext.TEMPLATE_END);
    return STNodeFactory.createByteArrayLiteralNode(typeKeyword, newStartingBackTick, content, endingBackTick);
}
/**
 * Parse <code>base16</code> keyword.
 *
 * @return base16 keyword node
 */
private STNode parseBase16Keyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BASE16_KEYWORD) {
        recover(nextToken, ParserRuleContext.BASE16_KEYWORD);
        return parseBase16Keyword();
    }
    return consume();
}
/**
 * Parse <code>base64</code> keyword.
 *
 * @return base64 keyword node
 */
private STNode parseBase64Keyword() {
    STToken nextToken = peek();
    if (nextToken.kind != SyntaxKind.BASE64_KEYWORD) {
        recover(nextToken, ParserRuleContext.BASE64_KEYWORD);
        return parseBase64Keyword();
    }
    return consume();
}
/**
 * Parse byte array literal content (everything between the backticks).
 * Validation of the collected items happens later.
 *
 * @return parsed node list of template items
 */
private STNode parseByteArrayContent() {
    List<STNode> items = new ArrayList<>();
    while (!isEndOfBacktickContent(peek().kind)) {
        items.add(parseTemplateItem());
    }
    return STNodeFactory.createNodeList(items);
}
/**
 * Validate base16 literal content.
 * <p>
 * <code>
 * Base16Literal := base16 WS ` HexGroup* WS `
 * <br/>
 * HexGroup := WS HexDigit WS HexDigit
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase16LiteralContent(String content) {
    int hexDigitCount = 0;
    for (int i = 0; i < content.length(); i++) {
        char c = content.charAt(i);
        // Whitespace may appear anywhere between digits.
        if (c == LexerTerminals.TAB || c == LexerTerminals.NEWLINE || c == LexerTerminals.CARRIAGE_RETURN ||
                c == LexerTerminals.SPACE) {
            continue;
        }
        if (!isHexDigit(c)) {
            return false;
        }
        hexDigitCount++;
    }
    // Hex digits must pair up into whole bytes.
    return hexDigitCount % 2 == 0;
}
/**
 * Validate base64 literal content.
 * <p>
 * <code>
 * Base64Literal := base64 WS ` Base64Group* [PaddedBase64Group] WS `
 * <br/>
 * Base64Group := WS Base64Char WS Base64Char WS Base64Char WS Base64Char
 * <br/>
 * PaddedBase64Group :=
 * WS Base64Char WS Base64Char WS Base64Char WS PaddingChar
 * | WS Base64Char WS Base64Char WS PaddingChar WS PaddingChar
 * <br/>
 * Base64Char := A .. Z | a .. z | 0 .. 9 | + | /
 * <br/>
 * PaddingChar := =
 * <br/>
 * WS := WhiteSpaceChar*
 * <br/>
 * WhiteSpaceChar := 0x9 | 0xA | 0xD | 0x20
 * </code>
 *
 * @param content the string surrounded by the backticks
 * @return <code>true</code>, if the string content is valid. <code>false</code> otherwise.
 */
static boolean isValidBase64LiteralContent(String content) {
    int base64CharCount = 0;
    int paddingCharCount = 0;
    for (int i = 0; i < content.length(); i++) {
        char c = content.charAt(i);
        // Whitespace may appear anywhere between characters.
        if (c == LexerTerminals.TAB || c == LexerTerminals.NEWLINE || c == LexerTerminals.CARRIAGE_RETURN ||
                c == LexerTerminals.SPACE) {
            continue;
        }
        if (c == LexerTerminals.EQUAL) {
            paddingCharCount++;
            continue;
        }
        // Reject invalid characters, and base64 chars appearing after padding started.
        if (!isBase64Char(c) || paddingCharCount > 0) {
            return false;
        }
        base64CharCount++;
    }
    if (paddingCharCount > 2) {
        return false;
    }
    if (paddingCharCount == 0) {
        return base64CharCount % 4 == 0;
    }
    // With padding, the last group must be completed to exactly four characters.
    return base64CharCount % 4 == 4 - paddingCharCount;
}
/**
 * <p>
 * Check whether a given char is a base64 char.
 * </p>
 * <code>Base64Char := A .. Z | a .. z | 0 .. 9 | + | /</code>
 *
 * @param c character to check
 * @return <code>true</code>, if the character represents a base64 char. <code>false</code> otherwise.
 */
static boolean isBase64Char(int c) {
    return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '+' || c == '/' || isDigit(c);
}
/**
 * Check whether a given char is a hexadecimal digit ([0-9a-fA-F]).
 *
 * @param c character to check
 * @return <code>true</code>, if the character is a hex digit. <code>false</code> otherwise.
 */
static boolean isHexDigit(int c) {
    return ('a' <= c && c <= 'f') || ('A' <= c && c <= 'F') || isDigit(c);
}
/**
 * Check whether a given char is a decimal digit ([0-9]).
 *
 * @param c character to check
 * @return <code>true</code>, if the character is a digit. <code>false</code> otherwise.
 */
static boolean isDigit(int c) {
    return c >= '0' && c <= '9';
}
/**
 * Parse xml filter expression.
 * <p>
 * <code>xml-filter-expr := expression .&lt; xml-name-pattern &gt;</code>
 *
 * @param lhsExpr Preceding expression of .&lt; token
 * @return Parsed node
 */
private STNode parseXMLFilterExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLFilterExpressionNode(lhsExpr, parseXMLFilterExpressionRhs());
}
/**
 * Parse xml filter expression rhs.
 * <p>
 * <code>filer-expression-rhs := .&lt; xml-name-pattern &gt;</code>
 *
 * @return Parsed node
 */
private STNode parseXMLFilterExpressionRhs() {
    return parseXMLNamePatternChain(parseDotLTToken());
}
/**
 * Parse xml name pattern chain.
 * <p>
 * <code>
 * xml-name-pattern-chain := filer-expression-rhs | xml-element-children-step | xml-element-descendants-step
 * <br/>
 * filer-expression-rhs := .&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-children-step := /&lt; xml-name-pattern &gt;
 * <br/>
 * xml-element-descendants-step := /**\/&lt;xml-name-pattern &gt;
 * </code>
 *
 * @param startToken Preceding token of xml name pattern
 * @return Parsed node
 */
private STNode parseXMLNamePatternChain(STNode startToken) {
    startContext(ParserRuleContext.XML_NAME_PATTERN);
    STNode namePattern = parseXMLNamePattern();
    STNode gtToken = parseGTToken();
    endContext();
    // An empty name pattern is an error; attach the diagnostic to the start token.
    startToken = cloneWithDiagnosticIfListEmpty(namePattern, startToken,
            DiagnosticErrorCode.ERROR_MISSING_XML_ATOMIC_NAME_PATTERN);
    return STNodeFactory.createXMLNamePatternChainingNode(startToken, namePattern, gtToken);
}
/**
 * Parse <code> .&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseDotLTToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.DOT_LT_TOKEN) {
        recover(token, ParserRuleContext.DOT_LT_TOKEN);
        return parseDotLTToken();
    }
    return consume();
}
/**
 * Parse xml name pattern.
 * <p>
 * <code>xml-name-pattern := xml-atomic-name-pattern [| xml-atomic-name-pattern]*</code>
 *
 * @return Parsed node
 */
private STNode parseXMLNamePattern() {
    List<STNode> patternList = new ArrayList<>();
    if (isEndOfXMLNamePattern(peek().kind)) {
        // An empty pattern list; the caller reports the diagnostic.
        return STNodeFactory.createNodeList(patternList);
    }
    patternList.add(parseXMLAtomicNamePattern());
    while (!isEndOfXMLNamePattern(peek().kind)) {
        STNode separator = parseXMLNamePatternSeparator();
        if (separator == null) {
            break;
        }
        patternList.add(separator);
        patternList.add(parseXMLAtomicNamePattern());
    }
    return STNodeFactory.createNodeList(patternList);
}
/**
 * Check whether the given token kind terminates an xml name pattern.
 * Everything other than <code>&gt;</code> and EOF continues the pattern.
 *
 * @param tokenKind Token kind to check
 * @return <code>true</code> if the name pattern ends here
 */
private boolean isEndOfXMLNamePattern(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.GT_TOKEN || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the <code>|</code> separator between xml atomic name patterns.
 *
 * @return Separator token, or <code>null</code> when the pattern list ends
 */
private STNode parseXMLNamePatternSeparator() {
    STToken token = peek();
    if (token.kind == SyntaxKind.PIPE_TOKEN) {
        return consume();
    }
    if (token.kind == SyntaxKind.GT_TOKEN || token.kind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(token, ParserRuleContext.XML_NAME_PATTERN_RHS);
    return parseXMLNamePatternSeparator();
}
/**
 * Parse xml atomic name pattern.
 * <p>
 * <code>
 * xml-atomic-name-pattern :=
 * *
 * | identifier
 * | xml-namespace-prefix : identifier
 * | xml-namespace-prefix : *
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePattern() {
    startContext(ParserRuleContext.XML_ATOMIC_NAME_PATTERN);
    STNode pattern = parseXMLAtomicNamePatternBody();
    endContext();
    return pattern;
}
/**
 * Parse the body of an xml atomic name pattern: either <code>*</code> or an
 * identifier, optionally qualified by a namespace prefix.
 *
 * @return Parsed node
 */
private STNode parseXMLAtomicNamePatternBody() {
    STToken token = peek();
    if (token.kind == SyntaxKind.ASTERISK_TOKEN) {
        return consume();
    }
    if (token.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseXMLAtomicNameIdentifier(consume());
    }
    recover(token, ParserRuleContext.XML_ATOMIC_NAME_PATTERN_START);
    return parseXMLAtomicNamePatternBody();
}
/**
 * Given a parsed identifier, parse an optional <code>: identifier</code> or
 * <code>: *</code> suffix to form a qualified xml atomic name pattern.
 *
 * @param identifier Already-consumed identifier (possibly a namespace prefix)
 * @return Qualified name pattern node, or a simple name reference
 */
private STNode parseXMLAtomicNameIdentifier(STNode identifier) {
    if (peek().kind == SyntaxKind.COLON_TOKEN) {
        STNode colon = consume();
        SyntaxKind nextKind = peek().kind;
        if (nextKind == SyntaxKind.IDENTIFIER_TOKEN || nextKind == SyntaxKind.ASTERISK_TOKEN) {
            return STNodeFactory.createXMLAtomicNamePatternNode(identifier, colon, consume());
        }
        // NOTE(review): a colon not followed by an identifier/asterisk is consumed
        // and silently dropped here — confirm this is intentional.
    }
    return STNodeFactory.createSimpleNameReferenceNode(identifier);
}
/**
 * Parse xml step expression.
 * <p>
 * <code>xml-step-expr := expression xml-step-start</code>
 *
 * @param lhsExpr Preceding expression of /*, /&lt;, or /**\/&lt; token
 * @return Parsed node
 */
private STNode parseXMLStepExpression(STNode lhsExpr) {
    return STNodeFactory.createXMLStepExpressionNode(lhsExpr, parseXMLStepStart());
}
/**
 * Parse xml step start.
 * <p>
 * <code>
 * xml-step-start :=
 * xml-all-children-step
 * | xml-element-children-step
 * | xml-element-descendants-step
 * <br/>
 * xml-all-children-step := /*
 * </code>
 *
 * @return Parsed node
 */
private STNode parseXMLStepStart() {
    STNode startToken;
    switch (peek().kind) {
        case SLASH_ASTERISK_TOKEN:
            // /* has no name pattern following it.
            return consume();
        case DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN:
            startToken = parseDoubleSlashDoubleAsteriskLTToken();
            break;
        case SLASH_LT_TOKEN:
        default:
            // Recover towards `/<` for anything unexpected.
            startToken = parseSlashLTToken();
            break;
    }
    return parseXMLNamePatternChain(startToken);
}
/**
 * Parse <code> /&lt; </code> token.
 *
 * @return Parsed node
 */
private STNode parseSlashLTToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.SLASH_LT_TOKEN) {
        recover(token, ParserRuleContext.SLASH_LT_TOKEN);
        return parseSlashLTToken();
    }
    return consume();
}
/**
 * Parse <code> /**\/&lt; </code> token (xml element descendants step start).
 *
 * @return Parsed node
 */
private STNode parseDoubleSlashDoubleAsteriskLTToken() {
    STToken token = peek();
    if (token.kind != SyntaxKind.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN) {
        recover(token, ParserRuleContext.DOUBLE_SLASH_DOUBLE_ASTERISK_LT_TOKEN);
        return parseDoubleSlashDoubleAsteriskLTToken();
    }
    return consume();
}
/**
 * Parse match statement.
 * <p>
 * <code>match-stmt := match action-or-expr { match-clause+ } [on-fail-clause]</code>
 *
 * @return Match statement
 */
private STNode parseMatchStatement() {
    startContext(ParserRuleContext.MATCH_STMT);
    STNode matchKeyword = parseMatchKeyword();
    STNode condition = parseActionOrExpression();
    startContext(ParserRuleContext.MATCH_BODY);
    STNode openBrace = parseOpenBrace();
    List<STNode> clauses = new ArrayList<>();
    while (!isEndOfMatchClauses(peek().kind)) {
        clauses.add(parseMatchClause());
    }
    STNode matchClauses = STNodeFactory.createNodeList(clauses);
    if (isNodeListEmpty(matchClauses)) {
        // A match statement requires at least one clause.
        openBrace = SyntaxErrors.addDiagnostic(openBrace,
                DiagnosticErrorCode.ERROR_MATCH_STATEMENT_SHOULD_HAVE_ONE_OR_MORE_MATCH_CLAUSES);
    }
    STNode closeBrace = parseCloseBrace();
    endContext();
    endContext();
    STNode onFailClause = parseOptionalOnFailClause();
    return STNodeFactory.createMatchStatementNode(matchKeyword, condition, openBrace, matchClauses, closeBrace,
            onFailClause);
}
/**
 * Parse match keyword.
 *
 * @return Match keyword node
 */
private STNode parseMatchKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.MATCH_KEYWORD) {
        recover(token, ParserRuleContext.MATCH_KEYWORD);
        return parseMatchKeyword();
    }
    return consume();
}
/**
 * Check whether the match clause list ends at the given token kind.
 *
 * @param nextTokenKind Token kind to check
 * @return <code>true</code> if no more match clauses follow
 */
private boolean isEndOfMatchClauses(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN ||
            nextTokenKind == SyntaxKind.TYPE_KEYWORD) {
        return true;
    }
    return isEndOfStatements();
}
/**
 * Parse a single match clause.
 * <p>
 * <code>
 * match-clause := match-pattern-list [match-guard] =&gt; block-stmt
 * <br/>
 * match-guard := if expression
 * </code>
 *
 * @return A match clause
 */
private STNode parseMatchClause() {
    STNode matchPatterns = parseMatchPatternList();
    STNode matchGuard = parseMatchGuard();
    STNode rightDoubleArrow = parseDoubleRightArrow();
    STNode blockStmt = parseBlockNode();
    if (isNodeListEmpty(matchPatterns)) {
        // Fabricate a missing constant pattern and attach the diagnostic to the
        // nearest following node (the guard if present, else the `=>` token).
        STToken missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
        matchPatterns =
                STNodeFactory.createNodeList(STNodeFactory.createSimpleNameReferenceNode(missingIdentifier));
        DiagnosticErrorCode errorCode = DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN;
        if (matchGuard != null) {
            matchGuard = SyntaxErrors.addDiagnostic(matchGuard, errorCode);
        } else {
            rightDoubleArrow = SyntaxErrors.addDiagnostic(rightDoubleArrow, errorCode);
        }
    }
    return STNodeFactory.createMatchClauseNode(matchPatterns, matchGuard, rightDoubleArrow, blockStmt);
}
/**
 * Parse match guard.
 * <p>
 * <code>match-guard := if expression</code>
 *
 * @return Match guard, or an empty node if absent
 */
private STNode parseMatchGuard() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IF_KEYWORD) {
        STNode ifKeyword = parseIfKeyword();
        STNode condition = parseExpression(DEFAULT_OP_PRECEDENCE, true, false, true, false);
        return STNodeFactory.createMatchGuardNode(ifKeyword, condition);
    }
    if (nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // No guard present.
        return STNodeFactory.createEmptyNode();
    }
    recover(nextToken, ParserRuleContext.OPTIONAL_MATCH_GUARD);
    return parseMatchGuard();
}
/**
 * Parse match patterns list.
 * <p>
 * <code>match-pattern-list := match-pattern (| match-pattern)*</code>
 *
 * @return Match patterns list
 */
private STNode parseMatchPatternList() {
    startContext(ParserRuleContext.MATCH_PATTERN);
    List<STNode> patterns = new ArrayList<>();
    while (!isEndOfMatchPattern(peek().kind)) {
        STNode pattern = parseMatchPattern();
        if (pattern == null) {
            break;
        }
        patterns.add(pattern);
        STNode separator = parseMatchPatternListMemberRhs();
        if (separator == null) {
            break;
        }
        patterns.add(separator);
    }
    endContext();
    return STNodeFactory.createNodeList(patterns);
}
/**
 * Check whether the match pattern list ends at the given token kind.
 *
 * @param nextTokenKind Token kind to check
 * @return <code>true</code> if no more match patterns follow
 */
private boolean isEndOfMatchPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.PIPE_TOKEN || nextTokenKind == SyntaxKind.IF_KEYWORD ||
            nextTokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN;
}
/**
 * Parse match pattern.
 * <p>
 * <code>
 * match-pattern := var binding-pattern
 * | wildcard-match-pattern
 * | const-pattern
 * | list-match-pattern
 * | mapping-match-pattern
 * | error-match-pattern
 * </code>
 *
 * @return Match pattern
 */
private STNode parseMatchPattern() {
    STToken nextToken = peek();
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Could be a const-pattern reference, or the type-ref of an error match
        // pattern; disambiguated by the token that follows.
        STNode typeRefOrConstExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
        return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
    }
    switch (nextToken.kind) {
        // Literals and signed numerics form const-patterns.
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
            return parseSimpleConstExpr();
        case VAR_KEYWORD:
            return parseVarTypedBindingPattern();
        case OPEN_BRACKET_TOKEN:
            return parseListMatchPattern();
        case OPEN_BRACE_TOKEN:
            return parseMappingMatchPattern();
        case ERROR_KEYWORD:
            return parseErrorMatchPattern();
        default:
            recover(nextToken, ParserRuleContext.MATCH_PATTERN_START);
            return parseMatchPattern();
    }
}
/**
 * Parse the <code>|</code> separator between match patterns.
 *
 * @return Pipe token, or <code>null</code> when the pattern list ends
 */
private STNode parseMatchPatternListMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.PIPE_TOKEN) {
        return parsePipeToken();
    }
    if (nextToken.kind == SyntaxKind.IF_KEYWORD || nextToken.kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.MATCH_PATTERN_LIST_MEMBER_RHS);
    return parseMatchPatternListMemberRhs();
}
/**
 * Parse var typed binding pattern.
 * <p>
 * <code>var binding-pattern</code>
 * </p>
 *
 * @return Parsed typed binding pattern node
 */
private STNode parseVarTypedBindingPattern() {
    STNode varTypeDesc = createBuiltinSimpleNameReference(parseVarKeyword());
    STNode bindingPattern = parseBindingPattern();
    return STNodeFactory.createTypedBindingPatternNode(varTypeDesc, bindingPattern);
}
/**
 * Parse var keyword.
 *
 * @return Var keyword node
 */
private STNode parseVarKeyword() {
    STToken token = peek();
    if (token.kind != SyntaxKind.VAR_KEYWORD) {
        recover(token, ParserRuleContext.VAR_KEYWORD);
        return parseVarKeyword();
    }
    return consume();
}
/**
 * Parse list match pattern.
 * <p>
 * <code>
 * list-match-pattern := [ list-member-match-patterns ]
 * list-member-match-patterns :=
 * match-pattern (, match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * </code>
 * </p>
 *
 * @return Parsed list match pattern node
 */
private STNode parseListMatchPattern() {
    startContext(ParserRuleContext.LIST_MATCH_PATTERN);
    STNode openBracketToken = parseOpenBracket();
    List<STNode> matchPatternList = new ArrayList<>();
    STNode listMatchPatternMemberRhs = null;
    boolean isEndOfFields = false;
    // First pass: collect members until the rest-match-pattern (if any) or close bracket.
    while (!isEndOfListMatchPattern()) {
        STNode listMatchPatternMember = parseListMatchPatternMember();
        matchPatternList.add(listMatchPatternMember);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
        if (listMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // The rest pattern must be last; switch to invalidation mode below.
            isEndOfFields = true;
            break;
        }
        if (listMatchPatternMemberRhs != null) {
            matchPatternList.add(listMatchPatternMemberRhs);
        } else {
            break;
        }
    }
    // Second pass: any members after the rest pattern are parsed but attached to
    // the previous node as invalid minutiae, with a diagnostic.
    while (isEndOfFields && listMatchPatternMemberRhs != null) {
        updateLastNodeInListWithInvalidNode(matchPatternList, listMatchPatternMemberRhs, null);
        if (peek().kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
            break;
        }
        STNode invalidField = parseListMatchPatternMember();
        updateLastNodeInListWithInvalidNode(matchPatternList, invalidField,
                DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        listMatchPatternMemberRhs = parseListMatchPatternMemberRhs();
    }
    STNode matchPatternListNode = STNodeFactory.createNodeList(matchPatternList);
    STNode closeBracketToken = parseCloseBracket();
    endContext();
    return STNodeFactory.createListMatchPatternNode(openBracketToken, matchPatternListNode, closeBracketToken);
}
/**
 * Check whether the list match pattern ends at the current token.
 *
 * @return <code>true</code> if the next token closes the list match pattern
 */
public boolean isEndOfListMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse a single member of a list match pattern: either a rest-match-pattern
 * or any other match pattern.
 *
 * @return Parsed member node
 */
private STNode parseListMatchPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    return parseMatchPattern();
}
/**
 * Parse rest match pattern.
 * <p>
 * <code>
 * rest-match-pattern := ... var variable-name
 * </code>
 * </p>
 *
 * @return Parsed rest match pattern node
 */
private STNode parseRestMatchPattern() {
    startContext(ParserRuleContext.REST_MATCH_PATTERN);
    STNode ellipsis = parseEllipsis();
    STNode varKeyword = parseVarKeyword();
    STNode varName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode nameRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(varName);
    return STNodeFactory.createRestMatchPatternNode(ellipsis, varKeyword, nameRef);
}
/**
 * Parse the comma separator between list match pattern members.
 *
 * @return Comma token, or <code>null</code> when the member list ends
 */
private STNode parseListMatchPatternMemberRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.LIST_MATCH_PATTERN_MEMBER_RHS);
    return parseListMatchPatternMemberRhs();
}
/**
 * Parse mapping match pattern.
 * <p>
 * mapping-match-pattern := { field-match-patterns }
 * <br/>
 * field-match-patterns := field-match-pattern (, field-match-pattern)* [, rest-match-pattern]
 * | [ rest-match-pattern ]
 * <br/>
 * field-match-pattern := field-name : match-pattern
 * <br/>
 * rest-match-pattern := ... var variable-name
 * </p>
 *
 * @return Parsed Node.
 */
private STNode parseMappingMatchPattern() {
    startContext(ParserRuleContext.MAPPING_MATCH_PATTERN);
    STNode openBrace = parseOpenBrace();
    STNode fieldPatterns = parseFieldMatchPatternList();
    STNode closeBrace = parseCloseBrace();
    endContext();
    return STNodeFactory.createMappingMatchPatternNode(openBrace, fieldPatterns, closeBrace);
}
/**
 * Parse the field match pattern list of a mapping match pattern, starting
 * with the first member.
 *
 * @return Parsed field-match-patterns node list
 */
private STNode parseFieldMatchPatternList() {
    List<STNode> fieldMatchPatterns = new ArrayList<>();
    STNode firstMember = parseFieldMatchPatternMember();
    if (firstMember == null) {
        return STNodeFactory.createEmptyNodeList();
    }
    fieldMatchPatterns.add(firstMember);
    if (firstMember.kind != SyntaxKind.REST_MATCH_PATTERN) {
        return parseFieldMatchPatternList(fieldMatchPatterns);
    }
    // The rest pattern must be last: anything following it is invalidated.
    invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
    return STNodeFactory.createNodeList(fieldMatchPatterns);
}
/**
 * Parse the remaining field match patterns, given a list already containing
 * the first member. Members after a rest-match-pattern are invalidated.
 *
 * @param fieldMatchPatterns list holding the first member; extended in place
 * @return Parsed field-match-patterns node list
 */
private STNode parseFieldMatchPatternList(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            // No separator: the member list ends here.
            break;
        }
        fieldMatchPatterns.add(fieldMatchPatternRhs);
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // A separator with no member after it: fabricate a missing member.
            fieldMatchPatternMember = createMissingFieldMatchPattern();
        }
        fieldMatchPatterns.add(fieldMatchPatternMember);
        if (fieldMatchPatternMember.kind == SyntaxKind.REST_MATCH_PATTERN) {
            // The rest pattern must be last; invalidate anything that follows.
            invalidateExtraFieldMatchPatterns(fieldMatchPatterns);
            break;
        }
    }
    return STNodeFactory.createNodeList(fieldMatchPatterns);
}
/**
 * Create a field match pattern consisting entirely of missing tokens, flagged
 * with a missing-member diagnostic. Used when a separator is not followed by
 * a member.
 *
 * @return Missing field match pattern node
 */
private STNode createMissingFieldMatchPattern() {
    STNode fieldName = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
    STNode colon = SyntaxErrors.createMissingToken(SyntaxKind.COLON_TOKEN);
    STNode matchPattern = STNodeFactory.createSimpleNameReferenceNode(
            SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN));
    STNode fieldMatchPattern = STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
    return SyntaxErrors.addDiagnostic(fieldMatchPattern,
            DiagnosticErrorCode.ERROR_MISSING_FIELD_MATCH_PATTERN_MEMBER);
}
/**
 * Parse and invalidate all field match pattern members after a rest-match-pattern.
 *
 * @param fieldMatchPatterns field-match-patterns list
 */
private void invalidateExtraFieldMatchPatterns(List<STNode> fieldMatchPatterns) {
    while (!isEndOfMappingMatchPattern()) {
        STNode fieldMatchPatternRhs = parseFieldMatchPatternRhs();
        if (fieldMatchPatternRhs == null) {
            // No separator: nothing more to invalidate.
            break;
        }
        STNode fieldMatchPatternMember = parseFieldMatchPatternMember();
        if (fieldMatchPatternMember == null) {
            // A dangling separator: attach it as an invalid-token diagnostic.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs,
                    DiagnosticErrorCode.ERROR_INVALID_TOKEN, ((STToken) fieldMatchPatternRhs).text());
        } else {
            // Attach both separator and member to the last valid node as invalid minutiae.
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternRhs, null);
            updateLastNodeInListWithInvalidNode(fieldMatchPatterns, fieldMatchPatternMember,
                    DiagnosticErrorCode.ERROR_MATCH_PATTERN_AFTER_REST_MATCH_PATTERN);
        }
    }
}
/**
 * Parse a single member of a mapping match pattern: a field match pattern or
 * a rest match pattern.
 *
 * @return Parsed member, or <code>null</code> when the member list ends
 */
private STNode parseFieldMatchPatternMember() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        return parseFieldMatchPattern();
    }
    if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestMatchPattern();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN || nextToken.kind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.FIELD_MATCH_PATTERNS_START);
    return parseFieldMatchPatternMember();
}
/**
 * Parse field match pattern.
 * <p>
 * field-match-pattern := field-name : match-pattern
 * </p>
 *
 * @return Parsed field match pattern node
 */
public STNode parseFieldMatchPattern() {
    STNode fieldName = parseVariableName();
    STNode colon = parseColon();
    STNode matchPattern = parseMatchPattern();
    return STNodeFactory.createFieldMatchPatternNode(fieldName, colon, matchPattern);
}
/**
 * Check whether the mapping match pattern ends at the current token.
 *
 * @return <code>true</code> if the next token closes the mapping match pattern
 */
public boolean isEndOfMappingMatchPattern() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the comma separator between field match pattern members.
 *
 * @return Comma token, or <code>null</code> when the member list ends
 */
private STNode parseFieldMatchPatternRhs() {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN) {
        return null;
    }
    recover(peek(), ParserRuleContext.FIELD_MATCH_PATTERN_MEMBER_RHS);
    return parseFieldMatchPatternRhs();
}
/**
 * Disambiguate a parsed qualified identifier: if an open-paren follows it is
 * the type reference of an error match pattern (with a missing {@code error}
 * keyword); otherwise it is a const-pattern.
 *
 * @param typeRefOrConstExpr Already-parsed qualified identifier
 * @return Error match pattern or const pattern node
 */
private STNode parseErrorMatchPatternOrConsPattern(STNode typeRefOrConstExpr) {
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `T (...)` : treat T as an error-type reference and flag the missing keyword.
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
        return parseErrorMatchPattern(errorKeyword, typeRefOrConstExpr);
    }
    if (isMatchPatternEnd(peek().kind)) {
        return typeRefOrConstExpr;
    }
    recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_OR_CONST_PATTERN);
    return parseErrorMatchPatternOrConsPattern(typeRefOrConstExpr);
}
/**
 * Check whether the given token kind terminates a match pattern.
 *
 * @param tokenKind Token kind to check
 * @return <code>true</code> if the match pattern ends here
 */
private boolean isMatchPatternEnd(SyntaxKind tokenKind) {
    return tokenKind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN || tokenKind == SyntaxKind.COMMA_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_BRACE_TOKEN || tokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN ||
            tokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || tokenKind == SyntaxKind.PIPE_TOKEN ||
            tokenKind == SyntaxKind.IF_KEYWORD || tokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse error match pattern.
 * <p>
 * error-match-pattern := error [error-type-reference] ( error-arg-list-match-pattern )
 * error-arg-list-match-pattern :=
 * error-message-match-pattern [, error-cause-match-pattern] [, error-field-match-patterns]
 * | [error-field-match-patterns]
 * error-message-match-pattern := simple-match-pattern
 * error-cause-match-pattern := simple-match-pattern | error-match-pattern
 * simple-match-pattern :=
 * wildcard-match-pattern
 * | const-pattern
 * | var variable-name
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 * </p>
 *
 * @return Parsed error match pattern node.
 */
private STNode parseErrorMatchPattern() {
    startContext(ParserRuleContext.ERROR_MATCH_PATTERN);
    // The `error` keyword has already been peeked by the caller.
    return parseErrorMatchPattern(consume());
}
/**
 * Parse the portion of an error match pattern following the `error` keyword:
 * an optional error-type-reference, then the parenthesized arg list.
 *
 * @param errorKeyword Already-parsed `error` keyword
 * @return Error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword) {
    STToken nextToken = peek();
    STNode typeRef;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            // No type reference present; the arg list starts immediately.
            typeRef = STNodeFactory.createEmptyNode();
            break;
        default:
            if (isPredeclaredIdentifier(nextToken.kind)) {
                typeRef = parseTypeReference();
                break;
            }
            recover(peek(), ParserRuleContext.ERROR_MATCH_PATTERN_ERROR_KEYWORD_RHS);
            return parseErrorMatchPattern(errorKeyword);
    }
    return parseErrorMatchPattern(errorKeyword, typeRef);
}
/**
 * Parse the parenthesized arg-list portion of an error match pattern and assemble
 * the complete node.
 *
 * @param errorKeyword `error` keyword
 * @param typeRef Optional error-type-reference (may be an empty node)
 * @return Error match pattern node
 */
private STNode parseErrorMatchPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argListMatchPatterns = parseErrorArgListMatchPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorMatchPatternNode(errorKeyword, typeRef, openParen, argListMatchPatterns,
            closeParen);
}
/**
 * Parse the error-arg-list match patterns inside the parentheses of an error match
 * pattern: an optional message pattern, an optional second arg (cause/named-arg/rest),
 * then the remaining error-field patterns.
 *
 * @return Node list of arg-list match patterns (members interleaved with commas)
 */
private STNode parseErrorArgListMatchPatterns() {
    List<STNode> argListMatchPatterns = new ArrayList<>();
    if (isEndOfErrorFieldMatchPatterns()) {
        // Empty arg list: `error()`.
        return STNodeFactory.createNodeList(argListMatchPatterns);
    }
    startContext(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_FIRST_ARG);
    STNode firstArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_ARG_LIST_MATCH_PATTERN_START);
    endContext();
    if (isSimpleMatchPattern(firstArg.kind)) {
        // First arg is the error-message pattern; a second arg (cause, named-arg,
        // or rest pattern) may follow.
        argListMatchPatterns.add(firstArg);
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_END);
        if (argEnd != null) {
            STNode secondArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_MESSAGE_MATCH_PATTERN_RHS);
            if (isValidSecondArgMatchPattern(secondArg.kind)) {
                argListMatchPatterns.add(argEnd);
                argListMatchPatterns.add(secondArg);
            } else {
                // Invalid second arg: attach both the separator and the arg to the
                // previous node as invalid-node minutiae.
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
                updateLastNodeInListWithInvalidNode(argListMatchPatterns, secondArg,
                        DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
            }
        }
    } else {
        // First arg is not a simple pattern: only named-arg or rest patterns are
        // allowed here; anything else is attached to the next token as invalid.
        if (firstArg.kind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
                firstArg.kind != SyntaxKind.REST_MATCH_PATTERN) {
            addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED);
        } else {
            argListMatchPatterns.add(firstArg);
        }
    }
    parseErrorFieldMatchPatterns(argListMatchPatterns);
    return STNodeFactory.createNodeList(argListMatchPatterns);
}
/**
 * Checks whether the given node kind is a simple match pattern, i.e. a wildcard,
 * const pattern, or `var` binding usable as the error-message/cause position.
 *
 * @param matchPatternKind Kind of the parsed match pattern
 * @return {@code true} if it is a simple match pattern, {@code false} otherwise
 */
private boolean isSimpleMatchPattern(SyntaxKind matchPatternKind) {
    return matchPatternKind == SyntaxKind.IDENTIFIER_TOKEN ||
            matchPatternKind == SyntaxKind.SIMPLE_NAME_REFERENCE ||
            matchPatternKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
            matchPatternKind == SyntaxKind.NUMERIC_LITERAL ||
            matchPatternKind == SyntaxKind.STRING_LITERAL ||
            matchPatternKind == SyntaxKind.NULL_LITERAL ||
            matchPatternKind == SyntaxKind.NIL_LITERAL ||
            matchPatternKind == SyntaxKind.BOOLEAN_LITERAL ||
            matchPatternKind == SyntaxKind.TYPED_BINDING_PATTERN ||
            matchPatternKind == SyntaxKind.UNARY_EXPRESSION;
}
/**
 * Checks whether the given node kind is valid as the second argument of an
 * error-arg-list match pattern (the error-cause position, or the start of the
 * error fields).
 *
 * @param syntaxKind Kind of the parsed second argument
 * @return {@code true} if valid as the second arg, {@code false} otherwise
 */
private boolean isValidSecondArgMatchPattern(SyntaxKind syntaxKind) {
    switch (syntaxKind) {
        case ERROR_MATCH_PATTERN:
        case NAMED_ARG_MATCH_PATTERN:
        case REST_MATCH_PATTERN:
            return true;
        default:
            // Any simple match pattern is also allowed as the error-cause.
            // (Idiom fix: collapse `if (x) return true; return false;`.)
            return isSimpleMatchPattern(syntaxKind);
    }
}
/**
 * Parse error field match patterns.
 * error-field-match-patterns :=
 * named-arg-match-pattern (, named-arg-match-pattern)* [, rest-match-pattern]
 * | rest-match-pattern
 * named-arg-match-pattern := arg-name = match-pattern
 *
 * @param argListMatchPatterns List to which parsed patterns (and their comma
 *                             separators) are appended in place
 */
private void parseErrorFieldMatchPatterns(List<STNode> argListMatchPatterns) {
    // Track the last accepted kind so a rest pattern is only allowed in the
    // final position.
    SyntaxKind lastValidArgKind = SyntaxKind.NAMED_ARG_MATCH_PATTERN;
    while (!isEndOfErrorFieldMatchPatterns()) {
        STNode argEnd = parseErrorArgListMatchPatternEnd(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN_RHS);
        if (argEnd == null) {
            // Reached the close paren.
            break;
        }
        STNode currentArg = parseErrorArgListMatchPattern(ParserRuleContext.ERROR_FIELD_MATCH_PATTERN);
        DiagnosticErrorCode errorCode = validateErrorFieldMatchPatternOrder(lastValidArgKind, currentArg.kind);
        if (errorCode == null) {
            argListMatchPatterns.add(argEnd);
            argListMatchPatterns.add(currentArg);
            lastValidArgKind = currentArg.kind;
        } else if (argListMatchPatterns.size() == 0) {
            // No valid node to attach to yet: attach the invalid nodes to the
            // next token instead.
            addInvalidNodeToNextToken(argEnd, null);
            addInvalidNodeToNextToken(currentArg, errorCode);
        } else {
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, argEnd, null);
            updateLastNodeInListWithInvalidNode(argListMatchPatterns, currentArg, errorCode);
        }
    }
}
/**
 * Checks whether the parser has reached the end of the error-field match patterns.
 * Delegates to the binding-pattern equivalent since the terminators are identical.
 */
private boolean isEndOfErrorFieldMatchPatterns() {
    return isEndOfErrorFieldBindingPatterns();
}
/**
 * Parse the separator after an error-arg-list match pattern.
 *
 * @param currentCtx Context to recover in, if the next token is unexpected
 * @return The comma token, or {@code null} when the arg list has ended
 */
private STNode parseErrorArgListMatchPatternEnd(ParserRuleContext currentCtx) {
    SyntaxKind nextTokenKind = peek().kind;
    if (nextTokenKind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // End of the arg list.
        return null;
    }
    recover(peek(), currentCtx);
    return parseErrorArgListMatchPatternEnd(currentCtx);
}
/**
 * Parse a single argument of an error-arg-list match pattern: a named-arg,
 * a rest pattern, a simple/const pattern, or a nested error match pattern.
 *
 * @param context Context to recover in, if the next token is unexpected
 * @return Parsed arg match pattern node
 */
private STNode parseErrorArgListMatchPattern(ParserRuleContext context) {
    STToken nextToken = peek();
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Could be `name = pattern` (named-arg) or a plain const pattern.
        return parseNamedArgOrSimpleMatchPattern();
    }
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            return parseRestMatchPattern();
        case OPEN_PAREN_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case PLUS_TOKEN:
        case MINUS_TOKEN:
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case OPEN_BRACKET_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // Any ordinary match pattern is allowed in this position.
            return parseMatchPattern();
        case VAR_KEYWORD:
            // `var x` — a typed binding pattern with the `var` type.
            STNode varType = createBuiltinSimpleNameReference(consume());
            STNode variableName = createCaptureOrWildcardBP(parseVariableName());
            return STNodeFactory.createTypedBindingPatternNode(varType, variableName);
        case CLOSE_PAREN_TOKEN:
            // Trailing comma before `)`: report a missing match pattern.
            return SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                    DiagnosticErrorCode.ERROR_MISSING_MATCH_PATTERN);
        default:
            recover(nextToken, context);
            return parseErrorArgListMatchPattern(context);
    }
}
/**
 * Parse either a named-arg match pattern (`name = pattern`) or a simple const
 * pattern, disambiguating on the token that follows the identifier.
 *
 * @return Named-arg match pattern, or a (possibly qualified) name reference
 */
private STNode parseNamedArgOrSimpleMatchPattern() {
    STNode constRefExpr = parseQualifiedIdentifier(ParserRuleContext.MATCH_PATTERN);
    // A qualified name can only be a const pattern. A simple name followed by `=`
    // starts a named-arg match pattern.
    if (constRefExpr.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || peek().kind != SyntaxKind.EQUAL_TOKEN) {
        return constRefExpr;
    }
    return parseNamedArgMatchPattern(((STSimpleNameReferenceNode) constRefExpr).name);
}
/**
 * Parses a named-arg match pattern, given its already-parsed arg-name.
 * <br/>
 * <code>named-arg-match-pattern := arg-name = match-pattern</code>
 *
 * @param identifier The arg-name identifier preceding the `=`
 * @return Named-arg match pattern node
 */
private STNode parseNamedArgMatchPattern(STNode identifier) {
    startContext(ParserRuleContext.NAMED_ARG_MATCH_PATTERN);
    STNode equal = parseAssignOp();
    STNode pattern = parseMatchPattern();
    endContext();
    return STNodeFactory.createNamedArgMatchPatternNode(identifier, equal, pattern);
}
/**
 * Validates the ordering of error-field match patterns: only named-arg and rest
 * patterns are allowed, and a rest pattern must be the last argument.
 *
 * @param prevArgKind Kind of the previously accepted argument
 * @param currentArgKind Kind of the argument being validated
 * @return {@code null} if valid, otherwise the diagnostic code to report
 */
private DiagnosticErrorCode validateErrorFieldMatchPatternOrder(SyntaxKind prevArgKind, SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_MATCH_PATTERN &&
            currentArgKind != SyntaxKind.REST_MATCH_PATTERN) {
        return DiagnosticErrorCode.ERROR_MATCH_PATTERN_NOT_ALLOWED;
    }
    // Nothing may follow a rest pattern.
    return prevArgKind == SyntaxKind.REST_MATCH_PATTERN
            ? DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG
            : null;
}
/**
 * Parse markdown documentation: consecutive documentation-string tokens are lexed
 * line-by-line and combined into a single markdown documentation node.
 *
 * @return Markdown documentation node
 */
private STNode parseMarkdownDocumentation() {
    List<STNode> docLines = new ArrayList<>();
    for (STToken next = peek(); next.kind == SyntaxKind.DOCUMENTATION_STRING; next = peek()) {
        STToken documentationString = consume();
        appendParsedDocumentationLines(docLines, parseDocumentationString(documentationString));
    }
    return STNodeFactory.createMarkdownDocumentationNode(STNodeFactory.createNodeList(docLines));
}
/**
 * Re-lex and re-parse a single documentation-string token into a list of markdown
 * documentation lines, carrying over the token's leading trivia and diagnostics.
 *
 * @param documentationStringToken The raw documentation-string token
 * @return Markdown documentation line list node
 */
private STNode parseDocumentationString(STToken documentationStringToken) {
    List<STNode> leadingTrivia = getLeadingTriviaList(documentationStringToken.leadingMinutiae());
    Collection<STNodeDiagnostic> diagnostics = new ArrayList<>(documentationStringToken.diagnostics());
    CharReader reader = CharReader.from(documentationStringToken.text());
    DocumentationLexer lexer = new DocumentationLexer(reader, leadingTrivia, diagnostics);
    return new DocumentationParser(new TokenReader(lexer)).parse();
}
/**
 * Collects the children of a leading-minutiae node into a list.
 *
 * @param leadingMinutiaeNode Leading minutiae of a token
 * @return List of the individual trivia nodes
 */
private List<STNode> getLeadingTriviaList(STNode leadingMinutiaeNode) {
    int count = leadingMinutiaeNode.bucketCount();
    List<STNode> trivia = new ArrayList<>(count);
    for (int idx = 0; idx < count; idx++) {
        trivia.add(leadingMinutiaeNode.childInBucket(idx));
    }
    return trivia;
}
/**
 * Appends every child of the parsed doc-lines node to the accumulating list.
 *
 * @param markdownDocLineList Accumulator for markdown documentation lines
 * @param parsedDocLines Node whose children are the freshly parsed lines
 */
private void appendParsedDocumentationLines(List<STNode> markdownDocLineList, STNode parsedDocLines) {
    for (int idx = 0, count = parsedDocLines.bucketCount(); idx < count; idx++) {
        markdownDocLineList.add(parsedDocLines.childInBucket(idx));
    }
}
/**
 * Parse any statement that starts with a token that has ambiguity between being
 * a type-desc or an expression.
 *
 * @param annots Annotations
 * @param qualifiers Preceding type-descriptor qualifiers
 * @return Statement node
 */
private STNode parseStmtStartsWithTypeOrExpr(STNode annots, List<STNode> qualifiers) {
    startContext(ParserRuleContext.AMBIGUOUS_STMT);
    STNode typeOrExpr = parseTypedBindingPatternOrExpr(qualifiers, true);
    return parseStmtStartsWithTypedBPOrExprRhs(annots, typeOrExpr);
}
/**
 * Parse the rest of the statement, given a node that resolved to either a
 * typed-binding-pattern (var-decl statement) or an expression (expr statement).
 *
 * @param annots Annotations
 * @param typedBindingPatternOrExpr The ambiguous node parsed so far
 * @return Statement node
 */
private STNode parseStmtStartsWithTypedBPOrExprRhs(STNode annots, STNode typedBindingPatternOrExpr) {
    if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
        List<STNode> varDeclQualifiers = new ArrayList<>();
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseVarDeclRhs(annots, varDeclQualifiers, typedBindingPatternOrExpr, false);
    }
    // Resolved to an expression: finish parsing it and continue as an
    // expression statement.
    STNode expr = getExpression(typedBindingPatternOrExpr);
    expr = getExpression(parseExpressionRhs(DEFAULT_OP_PRECEDENCE, expr, false, true));
    return parseStatementStartWithExprRhs(expr);
}
/**
 * Parse a typed-binding-pattern or an expression, starting with no qualifiers.
 *
 * @param allowAssignment Whether this position is a valid lvalue
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(boolean allowAssignment) {
    return parseTypedBindingPatternOrExpr(new ArrayList<>(), allowAssignment);
}
/**
 * Parse a node that is ambiguous between a typed-binding-pattern and an
 * expression, dispatching on the first token.
 *
 * @param qualifiers Preceding type-descriptor qualifiers
 * @param allowAssignment Whether this position is a valid lvalue
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBindingPatternOrExpr(List<STNode> qualifiers, boolean allowAssignment) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    if (isPredeclaredIdentifier(nextToken.kind)) {
        // Qualifiers cannot precede a name reference.
        reportInvalidQualifierList(qualifiers);
        typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
        return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseTypedBPOrExprStartsWithOpenParenthesis();
        case FUNCTION_KEYWORD:
            // Could be an anon-func expression or a function-type-desc;
            // qualifiers (e.g. `isolated`) are valid here.
            return parseAnonFuncExprOrTypedBPWithFuncType(qualifiers);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // Literals may still start a singleton type-desc, so stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypedBindingPatternOrExprRhs(basicLiteral, allowAssignment);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Definitely a type-desc: parse as a typed-binding-pattern.
            return parseTypedBindingPattern(qualifiers, ParserRuleContext.VAR_DECL_STMT);
    }
}
/**
 * Parse the component after the ambiguous starting node. Ambiguous node could be either an expr
 * or a type-desc. The component followed by this ambiguous node could be the binding-pattern or
 * the expression-rhs.
 *
 * @param typeOrExpr Type desc or the expression
 * @param allowAssignment Flag indicating whether to allow assignment. i.e.: whether this is a
 *            valid lvalue expression
 * @return Typed-binding-pattern node or an expression node
 */
private STNode parseTypedBindingPatternOrExprRhs(STNode typeOrExpr, boolean allowAssignment) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // Could be a union/intersection type-desc or a binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `x | =` — the ambiguous node is the lvalue; stop here.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypedBPOrExpr = parseTypedBindingPatternOrExpr(allowAssignment);
            if (rhsTypedBPOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
                // RHS resolved to a binding pattern, so the LHS must be a
                // type-desc; merge into a union/intersection type.
                STTypedBindingPatternNode typedBP = (STTypedBindingPatternNode) rhsTypedBPOrExpr;
                typeOrExpr = getTypeDescFromExpr(typeOrExpr);
                STNode newTypeDesc = mergeTypes(typeOrExpr, pipeOrAndToken, typedBP.typeDescriptor);
                return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, typedBP.bindingPattern);
            }
            if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
                // `T | U =` — a var-decl whose variable name is missing.
                return createCaptureBPWithMissingVarName(typeOrExpr, pipeOrAndToken, rhsTypedBPOrExpr);
            }
            return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                    pipeOrAndToken, rhsTypedBPOrExpr);
        case SEMICOLON_TOKEN:
            if (isDefiniteExpr(typeOrExpr.kind)) {
                return typeOrExpr;
            }
            if (isDefiniteTypeDesc(typeOrExpr.kind) || !isAllBasicLiterals(typeOrExpr)) {
                // A type-desc followed by `;` means a var-decl with a missing
                // variable name.
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            if (isAmbiguous(typeOrExpr) || isDefiniteTypeDesc(typeOrExpr.kind)) {
                // An identifier or `?` after a type-ish node: treat as a
                // typed-binding-pattern (`T x` or `T? ...`).
                STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
                return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
            }
            return typeOrExpr;
        case EQUAL_TOKEN:
            // Assignment statement; the ambiguous node is the lvalue.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            // Could be a member-access expression or an array-type binding pattern.
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, allowAssignment,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            // `{` or `error` starts a binding pattern, so this is a type-desc.
            STNode typeDesc = getTypeDescFromExpr(typeOrExpr);
            return parseTypeBindingPatternStartsWithAmbiguousNode(typeDesc);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return typeOrExpr;
            }
            STToken token = peek();
            SyntaxKind typeOrExprKind = typeOrExpr.kind;
            if (typeOrExprKind == SyntaxKind.QUALIFIED_NAME_REFERENCE ||
                    typeOrExprKind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_VAR_REF_RHS);
            } else {
                recover(token, ParserRuleContext.BINDING_PATTERN_OR_EXPR_RHS);
            }
            return parseTypedBindingPatternOrExprRhs(typeOrExpr, allowAssignment);
    }
}
/**
 * Builds a typed-binding-pattern for a var-decl whose variable name is missing,
 * merging the two type halves around the `|`/`&amp;` separator and inserting a
 * missing identifier as the capture binding pattern.
 *
 * @param lhsType LHS ambiguous node (converted to a type-desc)
 * @param separatorToken The `|` or `&amp;` token between the two types
 * @param rhsType RHS ambiguous node (converted to a type-desc)
 * @return Typed-binding-pattern node with a missing variable name
 */
private STNode createCaptureBPWithMissingVarName(STNode lhsType, STNode separatorToken, STNode rhsType) {
    STNode mergedTypeDesc =
            mergeTypes(getTypeDescFromExpr(lhsType), separatorToken, getTypeDescFromExpr(rhsType));
    STNode missingVarName = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            ParserRuleContext.VARIABLE_NAME);
    STNode captureBP = STNodeFactory.createCaptureBindingPatternNode(missingVarName);
    return STNodeFactory.createTypedBindingPatternNode(mergedTypeDesc, captureBP);
}
/**
 * Treat an ambiguous node as a type-desc, finish parsing any complex type that
 * follows it, and then parse the binding pattern of the var-decl.
 *
 * @param typeDesc Type-desc resolved from the ambiguous node
 * @return Typed-binding-pattern node
 */
private STNode parseTypeBindingPatternStartsWithAmbiguousNode(STNode typeDesc) {
    typeDesc = parseComplexTypeDescriptor(typeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    return parseTypedBindingPatternTypeRhs(typeDesc, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse a typed-binding-pattern or an expression that starts with `(`, resolving
 * the parenthesized-type vs parenthesized-expression ambiguity first.
 *
 * @return Typed-binding-pattern node or expression node
 */
private STNode parseTypedBPOrExprStartsWithOpenParenthesis() {
    STNode exprOrTypeDesc = parseTypedDescOrExprStartsWithOpenParenthesis();
    if (isDefiniteTypeDesc(exprOrTypeDesc.kind)) {
        return parseTypeBindingPatternStartsWithAmbiguousNode(exprOrTypeDesc);
    }
    return parseTypedBindingPatternOrExprRhs(exprOrTypeDesc, false);
}
/**
 * Checks whether the given kind is unambiguously a type-descriptor.
 * Relies on the type-desc kinds being declared contiguously in {@code SyntaxKind},
 * between RECORD_TYPE_DESC and FUTURE_TYPE_DESC.
 */
private boolean isDefiniteTypeDesc(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.RECORD_TYPE_DESC) >= 0 && kind.compareTo(SyntaxKind.FUTURE_TYPE_DESC) <= 0;
}
/**
 * Checks whether the given kind is unambiguously an expression. Name references
 * are excluded since they may still resolve to type references. Relies on the
 * expression kinds being declared contiguously in {@code SyntaxKind}.
 */
private boolean isDefiniteExpr(SyntaxKind kind) {
    if (kind == SyntaxKind.QUALIFIED_NAME_REFERENCE || kind == SyntaxKind.SIMPLE_NAME_REFERENCE) {
        return false;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 && kind.compareTo(SyntaxKind.ERROR_CONSTRUCTOR) <= 0;
}
/**
 * Checks whether the given kind is unambiguously an action. Relies on the action
 * kinds being declared contiguously in {@code SyntaxKind}, between
 * REMOTE_METHOD_CALL_ACTION and COMMIT_ACTION.
 */
private boolean isDefiniteAction(SyntaxKind kind) {
    return kind.compareTo(SyntaxKind.REMOTE_METHOD_CALL_ACTION) >= 0 &&
            kind.compareTo(SyntaxKind.COMMIT_ACTION) <= 0;
}
/**
 * Parse type or expression that starts with open parenthesis. Possible options are:
 * 1) () - nil type-desc or nil-literal
 * 2) (T) - Parenthesized type-desc
 * 3) (expr) - Parenthesized expression
 * 4) (param, param, ..) - Anon function params
 *
 * @return Type-desc or expression node
 */
private STNode parseTypedDescOrExprStartsWithOpenParenthesis() {
    STNode openParen = parseOpenParenthesis();
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        // `()` — nil literal, nil type, or empty anon-func param list.
        STNode closeParen = parseCloseParenthesis();
        return parseTypeOrExprStartWithEmptyParenthesis(openParen, closeParen);
    }
    STNode typeOrExpr = parseTypeDescOrExpr();
    if (isAction(typeOrExpr)) {
        STNode closeParen = parseCloseParenthesis();
        return STNodeFactory.createBracedExpressionNode(SyntaxKind.BRACED_ACTION, openParen, typeOrExpr,
                closeParen);
    }
    if (isExpression(typeOrExpr.kind)) {
        // Still ambiguous between `(expr)` and `(param, ...)` anon-func params.
        startContext(ParserRuleContext.BRACED_EXPR_OR_ANON_FUNC_PARAMS);
        return parseBracedExprOrAnonFuncParamRhs(openParen, typeOrExpr, false);
    }
    // Resolved to a type-desc: finish it and wrap in a parenthesised type.
    STNode typeDescNode = getTypeDescFromExpr(typeOrExpr);
    typeDescNode = parseComplexTypeDescriptor(typeDescNode, ParserRuleContext.TYPE_DESC_IN_PARENTHESIS, false);
    STNode closeParen = parseCloseParenthesis();
    return STNodeFactory.createParenthesisedTypeDescriptorNode(openParen, typeDescNode, closeParen);
}
/**
 * Parse type-desc or expression, starting with no qualifiers. This method does
 * not handle binding patterns.
 *
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr() {
    return parseTypeDescOrExpr(new ArrayList<>());
}
/**
 * Parse a node that is ambiguous between a type-desc and an expression,
 * dispatching on the first token. Binding patterns are not handled here.
 *
 * @param qualifiers Preceding type-descriptor qualifiers
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExpr(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    STNode typeOrExpr;
    switch (nextToken.kind) {
        case OPEN_PAREN_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTypedDescOrExprStartsWithOpenParenthesis();
            break;
        case FUNCTION_KEYWORD:
            typeOrExpr = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
            break;
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseQualifiedIdentifier(ParserRuleContext.TYPE_NAME_OR_VAR_NAME);
            return parseTypeDescOrExprRhs(typeOrExpr);
        case OPEN_BRACKET_TOKEN:
            reportInvalidQualifierList(qualifiers);
            typeOrExpr = parseTupleTypeDescOrListConstructor(STNodeFactory.createEmptyNodeList());
            break;
        case DECIMAL_INTEGER_LITERAL_TOKEN:
        case HEX_INTEGER_LITERAL_TOKEN:
        case STRING_LITERAL_TOKEN:
        case NULL_KEYWORD:
        case TRUE_KEYWORD:
        case FALSE_KEYWORD:
        case DECIMAL_FLOATING_POINT_LITERAL_TOKEN:
        case HEX_FLOATING_POINT_LITERAL_TOKEN:
            // Literals may still start a singleton type-desc, so stay ambiguous.
            reportInvalidQualifierList(qualifiers);
            STNode basicLiteral = parseBasicLiteral();
            return parseTypeDescOrExprRhs(basicLiteral);
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseActionOrExpressionInLhs(STNodeFactory.createEmptyNodeList());
            }
            // Definitely a type-desc.
            return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
    }
    if (isDefiniteTypeDesc(typeOrExpr.kind)) {
        // Resolved to a type-desc: allow any trailing complex-type parts.
        return parseComplexTypeDescriptor(typeOrExpr, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseTypeDescOrExprRhs(typeOrExpr);
}
/**
 * Checks whether the given kind denotes an expression. Literal kinds are matched
 * explicitly; all other expression kinds are covered by the contiguous range of
 * expression values in {@code SyntaxKind}.
 *
 * @param kind Node kind to test
 * @return {@code true} if the kind is an expression, {@code false} otherwise
 */
private boolean isExpression(SyntaxKind kind) {
    if (kind == SyntaxKind.NUMERIC_LITERAL || kind == SyntaxKind.STRING_LITERAL_TOKEN ||
            kind == SyntaxKind.NIL_LITERAL || kind == SyntaxKind.NULL_LITERAL ||
            kind == SyntaxKind.BOOLEAN_LITERAL) {
        return true;
    }
    return kind.compareTo(SyntaxKind.BINARY_EXPRESSION) >= 0 &&
            kind.compareTo(SyntaxKind.XML_ATOMIC_NAME_PATTERN) <= 0;
}
/**
 * Parse a node that starts with an empty parenthesis pair. It can be:
 * 1) a nil literal,
 * 2) a nil type-desc, or
 * 3) an empty anon-function parameter list (when `=>` follows).
 *
 * @param openParen Open parenthesis
 * @param closeParen Close parenthesis
 * @return Parsed node
 */
private STNode parseTypeOrExprStartWithEmptyParenthesis(STNode openParen, STNode closeParen) {
    if (peek().kind == SyntaxKind.RIGHT_DOUBLE_ARROW_TOKEN) {
        // `() =>` — implicit anonymous function with no parameters.
        STNode params = STNodeFactory.createEmptyNodeList();
        STNode anonFuncParam =
                STNodeFactory.createImplicitAnonymousFunctionParameters(openParen, params, closeParen);
        return parseImplicitAnonFunc(anonFuncParam, false);
    }
    return STNodeFactory.createNilLiteralNode(openParen, closeParen);
}
/**
 * Parse either an anon-func expression, or a typed-binding-pattern whose type is
 * a function-type-desc, depending on how the ambiguity resolves.
 *
 * @param qualifiers Preceding qualifiers (e.g. `isolated`)
 * @return Anon-func expression, action, or typed-binding-pattern node
 */
private STNode parseAnonFuncExprOrTypedBPWithFuncType(List<STNode> qualifiers) {
    STNode exprOrTypeDesc = parseAnonFuncExprOrFuncTypeDesc(qualifiers);
    if (isAction(exprOrTypeDesc) || isExpression(exprOrTypeDesc.kind)) {
        return exprOrTypeDesc;
    }
    // Resolved to a function-type-desc: parse the binding pattern that follows.
    return parseTypedBindingPatternTypeRhs(exprOrTypeDesc, ParserRuleContext.VAR_DECL_STMT);
}
/**
 * Parse anon-func-expr or function-type-desc, by resolving the ambiguity.
 *
 * @param qualifiers Preceding qualifiers
 * @return Anon-func-expr or function-type-desc
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(List<STNode> qualifiers) {
    startContext(ParserRuleContext.FUNC_TYPE_DESC_OR_ANON_FUNC);
    STNode qualifierList;
    STNode functionKeyword = parseFunctionKeyword();
    STNode funcSignature;
    if (peek().kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // A signature is present; what follows it decides expr vs type-desc.
        funcSignature = parseFuncSignature(true);
        STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, true);
        qualifierList = nodes[0];
        functionKeyword = nodes[1];
        endContext();
        return parseAnonFuncExprOrFuncTypeDesc(qualifierList, functionKeyword, funcSignature);
    }
    // No signature: this can only be a function-type-desc (e.g. `function f;`).
    funcSignature = STNodeFactory.createEmptyNode();
    STNode[] nodes = createFuncTypeQualNodeList(qualifiers, functionKeyword, false);
    qualifierList = nodes[0];
    functionKeyword = nodes[1];
    STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
            funcSignature);
    if (getCurrentContext() != ParserRuleContext.STMT_START_BRACKETED_LIST) {
        switchContext(ParserRuleContext.VAR_DECL_STMT);
        return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
    }
    return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
}
/**
 * Resolve the anon-func-expr vs function-type-desc ambiguity after the signature
 * has been parsed, based on the token that follows.
 *
 * @param qualifierList Function-type qualifiers
 * @param functionKeyword `function` keyword
 * @param funcSignature Parsed function signature
 * @return Anon-func expression or function-type-desc node
 */
private STNode parseAnonFuncExprOrFuncTypeDesc(STNode qualifierList, STNode functionKeyword, STNode funcSignature) {
    ParserRuleContext currentCtx = getCurrentContext();
    switch (peek().kind) {
        case OPEN_BRACE_TOKEN:
        case RIGHT_DOUBLE_ARROW_TOKEN:
            // A function body or `=>` follows: this is an anon-func expression.
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.EXPRESSION_STATEMENT);
            }
            startContext(ParserRuleContext.ANON_FUNC_EXPRESSION);
            // Signature params were parsed leniently; re-validate them as
            // anon-func parameters.
            funcSignature = validateAndGetFuncParams((STFunctionSignatureNode) funcSignature);
            STNode funcBody = parseAnonFuncBody(false);
            STNode annots = STNodeFactory.createEmptyNodeList();
            STNode anonFunc = STNodeFactory.createExplicitAnonymousFunctionExpressionNode(annots, qualifierList,
                    functionKeyword, funcSignature, funcBody);
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, anonFunc, false, true);
        case IDENTIFIER_TOKEN:
            // An identifier (a variable name) falls through to the type-desc case.
        default:
            STNode funcTypeDesc = STNodeFactory.createFunctionTypeDescriptorNode(qualifierList, functionKeyword,
                    funcSignature);
            if (currentCtx != ParserRuleContext.STMT_START_BRACKETED_LIST) {
                switchContext(ParserRuleContext.VAR_DECL_STMT);
                return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN,
                        true);
            }
            return parseComplexTypeDescriptor(funcTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
}
/**
 * Parse what follows an ambiguous type-desc-or-expression node, resolving the
 * ambiguity where the following token makes it possible.
 *
 * @param typeOrExpr The ambiguous node parsed so far
 * @return Type-desc node or expression node
 */
private STNode parseTypeDescOrExprRhs(STNode typeOrExpr) {
    STToken nextToken = peek();
    STNode typeDesc;
    switch (nextToken.kind) {
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            // Could be a union/intersection type or a binary expression.
            STToken nextNextToken = peek(2);
            if (nextNextToken.kind == SyntaxKind.EQUAL_TOKEN) {
                // `x | =` — the node is an lvalue; stop here.
                return typeOrExpr;
            }
            STNode pipeOrAndToken = parseBinaryOperator();
            STNode rhsTypeDescOrExpr = parseTypeDescOrExpr();
            if (isExpression(rhsTypeDescOrExpr.kind)) {
                return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, typeOrExpr,
                        pipeOrAndToken, rhsTypeDescOrExpr);
            }
            // RHS is a type-desc, so the whole thing is a union/intersection type.
            typeDesc = getTypeDescFromExpr(typeOrExpr);
            rhsTypeDescOrExpr = getTypeDescFromExpr(rhsTypeDescOrExpr);
            return mergeTypes(typeDesc, pipeOrAndToken, rhsTypeDescOrExpr);
        case IDENTIFIER_TOKEN:
        case QUESTION_MARK_TOKEN:
            // `T x` or `T?` — resolves to a type-desc.
            typeDesc = parseComplexTypeDescriptor(getTypeDescFromExpr(typeOrExpr),
                    ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, false);
            return typeDesc;
        case SEMICOLON_TOKEN:
            return getTypeDescFromExpr(typeOrExpr);
        case EQUAL_TOKEN:
        case CLOSE_PAREN_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case EOF_TOKEN:
        case COMMA_TOKEN:
            // Terminators: leave the node ambiguous for the caller to resolve.
            return typeOrExpr;
        case OPEN_BRACKET_TOKEN:
            return parseTypedBindingPatternOrMemberAccess(typeOrExpr, false, true,
                    ParserRuleContext.AMBIGUOUS_STMT);
        case ELLIPSIS_TOKEN:
            // `T...` — a rest descriptor, so this is a type-desc.
            STNode ellipsis = parseEllipsis();
            typeOrExpr = getTypeDescFromExpr(typeOrExpr);
            return STNodeFactory.createRestDescriptorNode(typeOrExpr, ellipsis);
        default:
            if (isCompoundAssignment(nextToken.kind)) {
                return typeOrExpr;
            }
            if (isValidExprRhsStart(nextToken.kind, typeOrExpr.kind)) {
                return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, typeOrExpr, false, false, false, false);
            }
            recover(peek(), ParserRuleContext.TYPE_DESC_OR_EXPR_RHS);
            return parseTypeDescOrExprRhs(typeOrExpr);
    }
}
/**
 * Checks whether the given node could still be interpreted as either a type-desc
 * or an expression (e.g. a name reference, a literal usable as a singleton type,
 * or a composition of such nodes).
 *
 * @param node Node to test
 * @return {@code true} if the node remains ambiguous, {@code false} otherwise
 */
private boolean isAmbiguous(STNode node) {
    switch (node.kind) {
        case SIMPLE_NAME_REFERENCE:
        case QUALIFIED_NAME_REFERENCE:
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
        case BRACKETED_LIST:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): the second disjunct is dead — when the operator is
            // BITWISE_AND, the first disjunct is already true, so `&` exprs are
            // always treated as unambiguous. Possibly the intent was
            // `!= PIPE && != BITWISE_AND` (allowing both `|` and `&` as type
            // separators) — confirm before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case INDEXED_EXPRESSION:
            // `x[...]` is ambiguous only if the container and every key are.
            STIndexedExpressionNode indexExpr = (STIndexedExpressionNode) node;
            if (!isAmbiguous(indexExpr.containerExpression)) {
                return false;
            }
            STNode keys = indexExpr.keyExpression;
            for (int i = 0; i < keys.bucketCount(); i++) {
                STNode item = keys.childInBucket(i);
                if (item.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAmbiguous(item)) {
                    return false;
                }
            }
            return true;
        default:
            return false;
    }
}
/**
 * Checks whether the given node is composed entirely of basic literals (possibly
 * grouped, listed, combined with `|`, or signed with a unary +/-).
 *
 * @param node Node to test
 * @return {@code true} if the node consists only of basic literals
 */
private boolean isAllBasicLiterals(STNode node) {
    switch (node.kind) {
        case NIL_LITERAL:
        case NULL_LITERAL:
        case NUMERIC_LITERAL:
        case STRING_LITERAL:
        case BOOLEAN_LITERAL:
            return true;
        case BINARY_EXPRESSION:
            STBinaryExpressionNode binaryExpr = (STBinaryExpressionNode) node;
            // NOTE(review): same suspicious condition as in isAmbiguous() — the
            // `== BITWISE_AND` disjunct is dead. Also, this case and the
            // BRACED_EXPRESSION case recurse into isAmbiguous() rather than
            // isAllBasicLiterals(); looks like a copy-paste — confirm intent
            // before changing.
            if (binaryExpr.operator.kind != SyntaxKind.PIPE_TOKEN ||
                    binaryExpr.operator.kind == SyntaxKind.BITWISE_AND_TOKEN) {
                return false;
            }
            return isAmbiguous(binaryExpr.lhsExpr) && isAmbiguous(binaryExpr.rhsExpr);
        case BRACED_EXPRESSION:
            return isAmbiguous(((STBracedExpressionNode) node).expression);
        case BRACKETED_LIST:
            STAmbiguousCollectionNode list = (STAmbiguousCollectionNode) node;
            for (STNode member : list.members) {
                if (member.kind == SyntaxKind.COMMA_TOKEN) {
                    continue;
                }
                if (!isAllBasicLiterals(member)) {
                    return false;
                }
            }
            return true;
        case UNARY_EXPRESSION:
            // Only signed numeric literals count (`+1`, `-2.0`).
            STUnaryExpressionNode unaryExpr = (STUnaryExpressionNode) node;
            if (unaryExpr.unaryOperator.kind != SyntaxKind.PLUS_TOKEN &&
                    unaryExpr.unaryOperator.kind != SyntaxKind.MINUS_TOKEN) {
                return false;
            }
            return isNumericLiteral(unaryExpr.expression);
        default:
            return false;
    }
}
/**
 * Checks whether the given node is a numeric literal.
 *
 * @param node Node to test
 * @return {@code true} if the node is a numeric literal
 */
private boolean isNumericLiteral(STNode node) {
    return node.kind == SyntaxKind.NUMERIC_LITERAL;
}
/**
 * Parse binding-patterns.
 * <p>
 * <code>
 * binding-pattern := capture-binding-pattern
 * | wildcard-binding-pattern
 * | list-binding-pattern
 * | mapping-binding-pattern
 * | functional-binding-pattern
 * <br/><br/>
 * <p>
 * capture-binding-pattern := variable-name
 * variable-name := identifier
 * <br/><br/>
 * <p>
 * wildcard-binding-pattern := _
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * <p>
 * mapping-binding-pattern := { field-binding-patterns }
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * <br/>
 * rest-binding-pattern := ... variable-name
 * <p>
 * <br/><br/>
 * functional-binding-pattern := functionally-constructible-type-reference ( arg-list-binding-pattern )
 * <br/>
 * arg-list-binding-pattern := positional-arg-binding-patterns [, other-arg-binding-patterns]
 * | other-arg-binding-patterns
 * <br/>
 * positional-arg-binding-patterns := positional-arg-binding-pattern (, positional-arg-binding-pattern)*
 * <br/>
 * positional-arg-binding-pattern := binding-pattern
 * <br/>
 * other-arg-binding-patterns := named-arg-binding-patterns [, rest-binding-pattern]
 * | [rest-binding-pattern]
 * <br/>
 * named-arg-binding-patterns := named-arg-binding-pattern (, named-arg-binding-pattern)*
 * <br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return binding-pattern node
 */
private STNode parseBindingPattern() {
    // Dispatch on the first token of the binding pattern.
    switch (peek().kind) {
        case OPEN_BRACKET_TOKEN:
            return parseListBindingPattern();
        case IDENTIFIER_TOKEN:
            return parseBindingPatternStartsWithIdentifier();
        case OPEN_BRACE_TOKEN:
            return parseMappingBindingPattern();
        case ERROR_KEYWORD:
            return parseErrorBindingPattern();
        default:
            recover(peek(), ParserRuleContext.BINDING_PATTERN);
            return parseBindingPattern();
    }
}
/**
 * Parse a binding pattern that starts with an identifier: a capture or wildcard
 * binding pattern, or an error binding pattern whose `error` keyword is missing.
 *
 * @return Binding pattern node
 */
private STNode parseBindingPatternStartsWithIdentifier() {
    STNode argNameOrBindingPattern =
            parseQualifiedIdentifier(ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
    STToken secondToken = peek();
    if (secondToken.kind == SyntaxKind.OPEN_PAREN_TOKEN) {
        // `T(...)` — an error binding pattern with a missing `error` keyword.
        startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
        STNode errorKeyword = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.ERROR_KEYWORD,
                ParserRuleContext.ERROR_KEYWORD);
        return parseErrorBindingPattern(errorKeyword, argNameOrBindingPattern);
    }
    if (argNameOrBindingPattern.kind != SyntaxKind.SIMPLE_NAME_REFERENCE) {
        // A qualified name cannot be captured: replace it with a missing
        // identifier carrying the original as invalid minutiae.
        STNode identifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
                ParserRuleContext.BINDING_PATTERN_STARTING_IDENTIFIER);
        identifier = SyntaxErrors.cloneWithLeadingInvalidNodeMinutiae(identifier, argNameOrBindingPattern);
        return createCaptureOrWildcardBP(identifier);
    }
    return createCaptureOrWildcardBP(((STSimpleNameReferenceNode) argNameOrBindingPattern).name);
}
/**
 * Wraps a variable name into a wildcard binding pattern (for `_`) or a capture
 * binding pattern (for any other name).
 *
 * @param varName Variable-name token
 * @return Wildcard or capture binding pattern node
 */
private STNode createCaptureOrWildcardBP(STNode varName) {
    return isWildcardBP(varName)
            ? getWildcardBindingPattern(varName)
            : STNodeFactory.createCaptureBindingPatternNode(varName);
}
/**
 * Parse list-binding-patterns.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return list-binding-pattern node
 */
private STNode parseListBindingPattern() {
    startContext(ParserRuleContext.LIST_BINDING_PATTERN);
    STNode openBracket = parseOpenBracket();
    STNode result = parseListBindingPattern(openBracket, new ArrayList<>());
    endContext();
    return result;
}
/**
 * Parse the members of a list-binding-pattern after the open bracket.
 *
 * @param openBracket Open bracket token
 * @param bindingPatternsList Accumulator for the member binding patterns
 * @return List-binding-pattern node
 */
private STNode parseListBindingPattern(STNode openBracket, List<STNode> bindingPatternsList) {
    if (isEndOfListBindingPattern(peek().kind) && bindingPatternsList.size() == 0) {
        // Empty list binding pattern: `[]`.
        STNode closeBracket = parseCloseBracket();
        STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatternsList);
        return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
    }
    STNode listBindingPatternMember = parseListBindingPatternMember();
    bindingPatternsList.add(listBindingPatternMember);
    STNode listBindingPattern = parseListBindingPattern(openBracket, listBindingPatternMember, bindingPatternsList);
    return listBindingPattern;
}
// Parses the remainder of a list-binding-pattern, after the first member has already been
// consumed and added to `bindingPatterns`. Keeps consuming `, member` pairs until the
// closing bracket (or EOF) is reached, or until a rest-binding-pattern is seen — a rest
// pattern must be the last member of the list.
private STNode parseListBindingPattern(STNode openBracket, STNode firstMember, List<STNode> bindingPatterns) {
STNode member = firstMember;
STToken token = peek();
STNode listBindingPatternRhs = null;
while (!isEndOfListBindingPattern(token.kind) && member.kind != SyntaxKind.REST_BINDING_PATTERN) {
// Separator between members; null means the close bracket follows.
listBindingPatternRhs = parseListBindingPatternMemberRhs();
if (listBindingPatternRhs == null) {
break;
}
// Separators are kept in the same node list as the members.
bindingPatterns.add(listBindingPatternRhs);
member = parseListBindingPatternMember();
bindingPatterns.add(member);
token = peek();
}
STNode closeBracket = parseCloseBracket();
STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
return STNodeFactory.createListBindingPatternNode(openBracket, bindingPatternsNode, closeBracket);
}
/**
 * Parse the token that follows a list-binding-pattern member.
 *
 * @return Comma token if another member follows; <code>null</code> at the closing bracket
 */
private STNode parseListBindingPatternMemberRhs() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    // Unexpected token: recover and retry.
    recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER_END);
    return parseListBindingPatternMemberRhs();
}
/**
 * Check whether the given token kind terminates a list-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> for a close bracket or EOF; <code>false</code> otherwise
 */
private boolean isEndOfListBindingPattern(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse a single member of a list-binding-pattern.
 * <p>
 * <code>
 * list-binding-pattern := [ list-member-binding-patterns ]
 * <br/>
 * list-member-binding-patterns := binding-pattern (, binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * </code>
 *
 * @return Parsed list-binding-pattern member
 */
private STNode parseListBindingPatternMember() {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case ELLIPSIS_TOKEN:
            // `...x` — rest member
            return parseRestBindingPattern();
        case OPEN_BRACKET_TOKEN:
        case IDENTIFIER_TOKEN:
        case OPEN_BRACE_TOKEN:
        case ERROR_KEYWORD:
            return parseBindingPattern();
        default:
            recover(nextToken, ParserRuleContext.LIST_BINDING_PATTERN_MEMBER);
            return parseListBindingPatternMember();
    }
}
/**
 * Parse a rest-binding-pattern.
 * <p>
 * <code>
 * rest-binding-pattern := ... variable-name
 * </code>
 *
 * @return Parsed rest-binding-pattern node
 */
private STNode parseRestBindingPattern() {
    startContext(ParserRuleContext.REST_BINDING_PATTERN);
    STNode ellipsisToken = parseEllipsis();
    STNode variableName = parseVariableName();
    endContext();
    STSimpleNameReferenceNode nameRef =
            (STSimpleNameReferenceNode) STNodeFactory.createSimpleNameReferenceNode(variableName);
    return STNodeFactory.createRestBindingPatternNode(ellipsisToken, nameRef);
}
/**
 * Parse a typed-binding-pattern.
 * <p>
 * <code>
 * typed-binding-pattern := inferable-type-descriptor binding-pattern
 * <br/><br/>
 * inferable-type-descriptor := type-descriptor | var
 * </code>
 *
 * @param context Parsing context in which this typed-binding-pattern appears
 * @return Parsed typed-binding-pattern node
 */
private STNode parseTypedBindingPattern(ParserRuleContext context) {
    // No type-desc qualifiers have been consumed at this point.
    return parseTypedBindingPattern(new ArrayList<>(), context);
}
/**
 * Parse a typed-binding-pattern given any already-consumed type-descriptor qualifiers.
 *
 * @param qualifiers Type-descriptor qualifiers consumed so far
 * @param context    Parsing context in which this typed-binding-pattern appears
 * @return Parsed typed-binding-pattern node
 */
private STNode parseTypedBindingPattern(List<STNode> qualifiers, ParserRuleContext context) {
    STNode typeDesc = parseTypeDescriptor(qualifiers,
            ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true, false, TypePrecedence.DEFAULT);
    return parseTypedBindingPatternTypeRhs(typeDesc, context);
}
/**
 * Parse a mapping-binding-pattern.
 * <p>
 * <code>
 * mapping-binding-pattern := { field-binding-patterns }
 * <br/><br/>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern | variable-name
 * </code>
 *
 * @return Parsed mapping-binding-pattern node
 */
private STNode parseMappingBindingPattern() {
    startContext(ParserRuleContext.MAPPING_BINDING_PATTERN);
    STNode openBrace = parseOpenBrace();
    STToken nextToken = peek();
    // Empty mapping-binding-pattern: `{ }`.
    if (isEndOfMappingBindingPattern(nextToken.kind)) {
        STNode closeBrace = parseCloseBrace();
        STNode emptyMembers = STNodeFactory.createEmptyNodeList();
        endContext();
        return STNodeFactory.createMappingBindingPatternNode(openBrace, emptyMembers, closeBrace);
    }
    List<STNode> members = new ArrayList<>();
    STNode firstMember = parseMappingBindingPatternMember();
    // A rest member is appended later, by the overload, so it always ends up last.
    if (firstMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
        members.add(firstMember);
    }
    return parseMappingBindingPattern(openBrace, members, firstMember);
}
// Parses the remaining members of a mapping-binding-pattern, after the first member has been
// consumed. `prevMember` is the most recently parsed member; a rest-binding-pattern ends the
// member list since it must be last. Note: this method closes the MAPPING_BINDING_PATTERN
// context opened by its caller.
private STNode parseMappingBindingPattern(STNode openBrace, List<STNode> bindingPatterns, STNode prevMember) {
STToken token = peek();
STNode mappingBindingPatternRhs = null;
while (!isEndOfMappingBindingPattern(token.kind) && prevMember.kind != SyntaxKind.REST_BINDING_PATTERN) {
// Separator between members; null means the close brace follows.
mappingBindingPatternRhs = parseMappingBindingPatternEnd();
if (mappingBindingPatternRhs == null) {
break;
}
bindingPatterns.add(mappingBindingPatternRhs);
prevMember = parseMappingBindingPatternMember();
// Defer adding a rest member to after the loop, so it is only added once.
if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
break;
}
bindingPatterns.add(prevMember);
token = peek();
}
// A rest member (either the first member, or the one that broke the loop) goes in last.
if (prevMember.kind == SyntaxKind.REST_BINDING_PATTERN) {
bindingPatterns.add(prevMember);
}
STNode closeBrace = parseCloseBrace();
STNode bindingPatternsNode = STNodeFactory.createNodeList(bindingPatterns);
endContext();
return STNodeFactory.createMappingBindingPatternNode(openBrace, bindingPatternsNode, closeBrace);
}
/**
 * Parse a single member of a mapping-binding-pattern: either a rest-binding-pattern
 * (<code>...x</code>) or a field-binding-pattern.
 * <p>
 * <code>
 * field-binding-patterns := field-binding-pattern (, field-binding-pattern)* [, rest-binding-pattern]
 * | [ rest-binding-pattern ]
 * <br/><br/>
 * field-binding-pattern := field-name : binding-pattern
 * | variable-name
 * </code>
 *
 * @return Parsed mapping-binding-pattern member
 */
private STNode parseMappingBindingPatternMember() {
    if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
        return parseRestBindingPattern();
    }
    return parseFieldBindingPattern();
}
/**
 * Parse the token that follows a mapping-binding-pattern member.
 *
 * @return Comma token if another member follows; <code>null</code> at the closing brace
 */
private STNode parseMappingBindingPatternEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return null;
    }
    // Unexpected token: recover and retry.
    recover(nextToken, ParserRuleContext.MAPPING_BINDING_PATTERN_END);
    return parseMappingBindingPatternEnd();
}
/**
 * Parse a field-binding-pattern.
 * <code>field-binding-pattern := field-name : binding-pattern | varname</code>
 *
 * @return Parsed field-binding-pattern node
 */
private STNode parseFieldBindingPattern() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
        STNode fieldName = parseIdentifier(ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
        STNode fieldNameRef = STNodeFactory.createSimpleNameReferenceNode(fieldName);
        return parseFieldBindingPattern(fieldNameRef);
    }
    // Field name must be an identifier: recover and retry.
    recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_NAME);
    return parseFieldBindingPattern();
}
/**
 * Parse the rest of a field-binding-pattern, given the already-parsed field name.
 * A following colon means the full <code>field-name : binding-pattern</code> form;
 * a comma or closing brace means the short var-name form.
 *
 * @param simpleNameReference Parsed field name as a simple-name-reference
 * @return Parsed field-binding-pattern node
 */
private STNode parseFieldBindingPattern(STNode simpleNameReference) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case COLON_TOKEN:
            STNode colon = parseColon();
            STNode pattern = parseBindingPattern();
            return STNodeFactory.createFieldBindingPatternFullNode(simpleNameReference, colon, pattern);
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
            return STNodeFactory.createFieldBindingPatternVarnameNode(simpleNameReference);
        default:
            recover(nextToken, ParserRuleContext.FIELD_BINDING_PATTERN_END);
            return parseFieldBindingPattern(simpleNameReference);
    }
}
/**
 * Check whether the given token kind terminates a mapping-binding-pattern.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> for a close brace, or any token that ends a module-level node
 */
private boolean isEndOfMappingBindingPattern(SyntaxKind nextTokenKind) {
    if (nextTokenKind == SyntaxKind.CLOSE_BRACE_TOKEN) {
        return true;
    }
    return isEndOfModuleLevelNode(1);
}
// Resolves the ambiguity after an `error` keyword: it can start either an error-binding-pattern
// (`error(...)` / `error Type(...)`) or an error type descriptor (`error<...>` etc.).
// Decides by looking two (and sometimes three) tokens ahead.
private STNode parseErrorTypeDescOrErrorBP(STNode annots) {
STToken nextNextToken = peek(2);
switch (nextNextToken.kind) {
case OPEN_PAREN_TOKEN:
// `error (` — must be a binding pattern.
return parseAsErrorBindingPattern();
case LT_TOKEN:
// `error <` — error type parameter, so a type descriptor.
return parseAsErrorTypeDesc(annots);
case IDENTIFIER_TOKEN:
// `error X :` or `error X (` — qualified type-ref / error-type-reference form
// of a binding pattern. Otherwise deliberately falls through to the default.
SyntaxKind nextNextNextTokenKind = peek(3).kind;
if (nextNextNextTokenKind == SyntaxKind.COLON_TOKEN ||
nextNextNextTokenKind == SyntaxKind.OPEN_PAREN_TOKEN) {
return parseAsErrorBindingPattern();
}
// intentional fall-through
default:
return parseAsErrorTypeDesc(annots);
}
}
/**
 * Parse the current position as an error-binding-pattern followed by an assignment statement rhs.
 *
 * @return Parsed assignment statement node
 */
private STNode parseAsErrorBindingPattern() {
    startContext(ParserRuleContext.ASSIGNMENT_STMT);
    STNode errorBindingPattern = parseErrorBindingPattern();
    return parseAssignmentStmtRhs(errorBindingPattern);
}
/**
 * Parse the current position as a variable declaration starting with an error type descriptor.
 *
 * @param annots Annotations preceding the declaration
 * @return Parsed variable declaration node
 */
private STNode parseAsErrorTypeDesc(STNode annots) {
    STNode emptyFinalKeyword = STNodeFactory.createEmptyNode();
    return parseVariableDecl(getAnnotations(annots), emptyFinalKeyword);
}
/**
 * Parse an error-binding-pattern.
 * <p>
 * <code>error-binding-pattern := error [error-type-reference] ( error-arg-list-binding-pattern )</code>
 * <br/><br/>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 *
 * @return Parsed error-binding-pattern node
 */
private STNode parseErrorBindingPattern() {
    startContext(ParserRuleContext.ERROR_BINDING_PATTERN);
    STNode errorKeyword = parseErrorKeyword();
    return parseErrorBindingPattern(errorKeyword);
}
// Parses the optional error-type-reference after the `error` keyword of an
// error-binding-pattern. An open paren means there is no type reference; a predeclared
// identifier starts one; anything else triggers recovery and a retry.
private STNode parseErrorBindingPattern(STNode errorKeyword) {
STToken nextToken = peek();
STNode typeRef;
switch (nextToken.kind) {
case OPEN_PAREN_TOKEN:
// `error(` — no type reference present.
typeRef = STNodeFactory.createEmptyNode();
break;
default:
if (isPredeclaredIdentifier(nextToken.kind)) {
typeRef = parseTypeReference();
break;
}
recover(peek(), ParserRuleContext.ERROR_BINDING_PATTERN_ERROR_KEYWORD_RHS);
return parseErrorBindingPattern(errorKeyword);
}
return parseErrorBindingPattern(errorKeyword, typeRef);
}
/**
 * Parse the parenthesized arg-list portion of an error-binding-pattern.
 * Closes the ERROR_BINDING_PATTERN context opened by the caller.
 *
 * @param errorKeyword Error keyword token
 * @param typeRef      Optional error-type-reference (may be an empty node)
 * @return Parsed error-binding-pattern node
 */
private STNode parseErrorBindingPattern(STNode errorKeyword, STNode typeRef) {
    STNode openParen = parseOpenParenthesis();
    STNode argListBindingPatterns = parseErrorArgListBindingPatterns();
    STNode closeParen = parseCloseParenthesis();
    endContext();
    return STNodeFactory.createErrorBindingPatternNode(errorKeyword, typeRef, openParen,
            argListBindingPatterns, closeParen);
}
/**
 * Parse an error-arg-list-binding-pattern.
 * <p>
 * <code>
 * error-arg-list-binding-pattern :=
 * error-message-binding-pattern [, error-cause-binding-pattern] [, error-field-binding-patterns]
 * | [error-field-binding-patterns]
 * <br/><br/>
 * error-message-binding-pattern := simple-binding-pattern
 * <br/><br/>
 * error-cause-binding-pattern := simple-binding-pattern | error-binding-pattern
 * <br/><br/>
 * simple-binding-pattern := capture-binding-pattern | wildcard-binding-pattern
 * <br/><br/>
 * error-field-binding-patterns :=
 * named-arg-binding-pattern (, named-arg-binding-pattern)* [, rest-binding-pattern]
 * | rest-binding-pattern
 * <br/><br/>
 * named-arg-binding-pattern := arg-name = binding-pattern
 * </code>
 *
 * @return Node list of error arg-list binding patterns
 */
private STNode parseErrorArgListBindingPatterns() {
    List<STNode> argListBindingPatterns = new ArrayList<>();
    // Empty arg list: `error( )`.
    if (!isEndOfErrorFieldBindingPatterns()) {
        return parseErrorArgListBindingPatterns(argListBindingPatterns);
    }
    return STNodeFactory.createNodeList(argListBindingPatterns);
}
// Parses a non-empty error arg-list, validating the ordering rules: the first arg should be
// the error-message binding pattern (a simple binding pattern); an error-binding-pattern
// first arg is treated as the cause, so a missing message (and comma) is synthesized before
// it; rest/named-arg first args skip straight to the field-binding-pattern phase.
private STNode parseErrorArgListBindingPatterns(List<STNode> argListBindingPatterns) {
STNode firstArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_ARG_LIST_BINDING_PATTERN_START, true);
if (firstArg == null) {
// Close paren reached immediately — empty arg list.
return STNodeFactory.createNodeList(argListBindingPatterns);
}
switch (firstArg.kind) {
case CAPTURE_BINDING_PATTERN:
case WILDCARD_BINDING_PATTERN:
// Valid error-message binding pattern.
argListBindingPatterns.add(firstArg);
return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
case ERROR_BINDING_PATTERN:
// This can only be the cause; synthesize the missing message and comma before it.
STNode missingIdentifier = SyntaxErrors.createMissingToken(SyntaxKind.IDENTIFIER_TOKEN);
STNode missingErrorMsgBP = STNodeFactory.createCaptureBindingPatternNode(missingIdentifier);
missingErrorMsgBP = SyntaxErrors.addDiagnostic(missingErrorMsgBP,
DiagnosticErrorCode.ERROR_MISSING_ERROR_MESSAGE_BINDING_PATTERN);
STNode missingComma = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.COMMA_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_COMMA_TOKEN);
argListBindingPatterns.add(missingErrorMsgBP);
argListBindingPatterns.add(missingComma);
argListBindingPatterns.add(firstArg);
return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
case REST_BINDING_PATTERN:
case NAMED_ARG_BINDING_PATTERN:
// Straight into the error-field-binding-patterns phase.
argListBindingPatterns.add(firstArg);
return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, firstArg.kind);
default:
// Invalid first arg: attach it to the next token as invalid minutiae and retry.
addInvalidNodeToNextToken(firstArg, DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
return parseErrorArgListBindingPatterns(argListBindingPatterns);
}
}
// Parses the second arg of an error arg-list (the error-cause position), after a valid
// error-message binding pattern has been consumed. Invalid second args are attached to the
// message node as invalid minutiae, then this phase retries.
private STNode parseErrorArgListBPWithoutErrorMsg(List<STNode> argListBindingPatterns) {
STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_END);
if (argEnd == null) {
// Close paren reached — only the message arg was present.
return STNodeFactory.createNodeList(argListBindingPatterns);
}
STNode secondArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_MESSAGE_BINDING_PATTERN_RHS, false);
// isFirstArg=false, so parseErrorArgListBindingPattern never returns null here.
assert secondArg != null;
switch (secondArg.kind) {
case CAPTURE_BINDING_PATTERN:
case WILDCARD_BINDING_PATTERN:
case ERROR_BINDING_PATTERN:
case REST_BINDING_PATTERN:
case NAMED_ARG_BINDING_PATTERN:
argListBindingPatterns.add(argEnd);
argListBindingPatterns.add(secondArg);
return parseErrorArgListBPWithoutErrorMsgAndCause(argListBindingPatterns, secondArg.kind);
default:
// Invalid cause arg: fold the separator and the arg into the previous node, retry.
updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
updateLastNodeInListWithInvalidNode(argListBindingPatterns, secondArg,
DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED);
return parseErrorArgListBPWithoutErrorMsg(argListBindingPatterns);
}
}
// Parses the error-field-binding-patterns phase: only named-arg and rest binding patterns
// are allowed from here on, with a rest pattern required to be last. Ordering violations are
// reported via validateErrorFieldBindingPatternOrder and the offending nodes are folded into
// the tree as invalid minutiae.
private STNode parseErrorArgListBPWithoutErrorMsgAndCause(List<STNode> argListBindingPatterns,
SyntaxKind lastValidArgKind) {
while (!isEndOfErrorFieldBindingPatterns()) {
STNode argEnd = parseErrorArgsBindingPatternEnd(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN_END);
if (argEnd == null) {
// Close paren reached.
break;
}
STNode currentArg = parseErrorArgListBindingPattern(ParserRuleContext.ERROR_FIELD_BINDING_PATTERN, false);
// isFirstArg=false, so parseErrorArgListBindingPattern never returns null here.
assert currentArg != null;
DiagnosticErrorCode errorCode = validateErrorFieldBindingPatternOrder(lastValidArgKind, currentArg.kind);
if (errorCode == null) {
argListBindingPatterns.add(argEnd);
argListBindingPatterns.add(currentArg);
lastValidArgKind = currentArg.kind;
} else if (argListBindingPatterns.size() == 0) {
// Nothing valid collected yet: attach the invalid nodes to the next token instead.
addInvalidNodeToNextToken(argEnd, null);
addInvalidNodeToNextToken(currentArg, errorCode);
} else {
updateLastNodeInListWithInvalidNode(argListBindingPatterns, argEnd, null);
updateLastNodeInListWithInvalidNode(argListBindingPatterns, currentArg, errorCode);
}
}
return STNodeFactory.createNodeList(argListBindingPatterns);
}
/**
 * Check whether the next token terminates the error-field-binding-patterns list.
 *
 * @return <code>true</code> for a close paren or EOF; <code>false</code> otherwise
 */
private boolean isEndOfErrorFieldBindingPatterns() {
    SyntaxKind nextTokenKind = peek().kind;
    return nextTokenKind == SyntaxKind.CLOSE_PAREN_TOKEN || nextTokenKind == SyntaxKind.EOF_TOKEN;
}
/**
 * Parse the token that follows an error arg-list binding pattern.
 *
 * @param currentCtx Recovery context to use on an unexpected token
 * @return Comma token if another arg follows; <code>null</code> at the closing paren
 */
private STNode parseErrorArgsBindingPatternEnd(ParserRuleContext currentCtx) {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return consume();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_PAREN_TOKEN) {
        return null;
    }
    recover(nextToken, currentCtx);
    return parseErrorArgsBindingPatternEnd(currentCtx);
}
// Parses a single error arg-list binding pattern: a rest pattern, a named-arg or simple
// binding pattern (disambiguated after the identifier), or any other binding pattern.
// Returns null only when `isFirstArg` is true and the close paren is already next.
private STNode parseErrorArgListBindingPattern(ParserRuleContext context, boolean isFirstArg) {
switch (peek().kind) {
case ELLIPSIS_TOKEN:
return parseRestBindingPattern();
case IDENTIFIER_TOKEN:
// Error arg can be a named arg (`name = bp`) or a simple binding pattern.
STNode argNameOrSimpleBindingPattern = consume();
return parseNamedOrSimpleArgBindingPattern(argNameOrSimpleBindingPattern);
case OPEN_BRACKET_TOKEN:
case OPEN_BRACE_TOKEN:
case ERROR_KEYWORD:
return parseBindingPattern();
case CLOSE_PAREN_TOKEN:
if (isFirstArg) {
// Empty arg list.
return null;
}
// A close paren mid-list is unexpected: deliberately falls through to recovery.
default:
recover(peek(), context);
return parseErrorArgListBindingPattern(context, isFirstArg);
}
}
/**
 * Disambiguate between a named-arg binding pattern (<code>name = binding-pattern</code>)
 * and a simple (capture/wildcard) binding pattern, after the identifier has been consumed.
 *
 * @param argNameOrSimpleBindingPattern Consumed identifier token
 * @return Named-arg binding pattern if an equal token follows; otherwise a capture/wildcard pattern
 */
private STNode parseNamedOrSimpleArgBindingPattern(STNode argNameOrSimpleBindingPattern) {
    if (peek().kind == SyntaxKind.EQUAL_TOKEN) {
        STNode equal = consume();
        STNode bindingPattern = parseBindingPattern();
        return STNodeFactory.createNamedArgBindingPatternNode(argNameOrSimpleBindingPattern,
                equal, bindingPattern);
    }
    // Anything else (comma, close paren, ...) means a simple binding pattern.
    return createCaptureOrWildcardBP(argNameOrSimpleBindingPattern);
}
/**
 * Validate the ordering of error-field-binding-patterns: only named-arg and rest patterns
 * are allowed in this phase, and nothing may follow a rest pattern.
 *
 * @param prevArgKind    Kind of the previous (last valid) arg
 * @param currentArgKind Kind of the current arg
 * @return <code>null</code> if the ordering is valid, otherwise the diagnostic to report
 */
private DiagnosticErrorCode validateErrorFieldBindingPatternOrder(SyntaxKind prevArgKind,
                                                                  SyntaxKind currentArgKind) {
    if (currentArgKind != SyntaxKind.NAMED_ARG_BINDING_PATTERN &&
            currentArgKind != SyntaxKind.REST_BINDING_PATTERN) {
        // Capture/wildcard/error/list/mapping patterns are not permitted here.
        return DiagnosticErrorCode.ERROR_BINDING_PATTERN_NOT_ALLOWED;
    }
    if (prevArgKind == SyntaxKind.REST_BINDING_PATTERN) {
        return DiagnosticErrorCode.ERROR_REST_ARG_FOLLOWED_BY_ANOTHER_ARG;
    }
    return null;
}
/*
 * The following methods parse typed binding patterns while resolving the ambiguity between
 * types and binding patterns. For example, in 'T[a]' the bracketed suffix could be:
 * 1) an array type,
 * 2) a list binding pattern, or
 * 3) a member access expression.
 */
/**
 * Parse the component that follows the type-desc of a typed-binding-pattern.
 *
 * @param typeDesc Starting type-desc of the typed-binding-pattern
 * @param context  Parsing context
 * @return Parsed typed-binding-pattern node
 */
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context) {
    return parseTypedBindingPatternTypeRhs(typeDesc, context, true);
}
// Parses the binding pattern that follows a type-desc. When `isRoot` is false this method is
// being used speculatively from an ambiguous context, and an end token (close paren/bracket/
// brace, comma) simply returns the bare type-desc instead of forcing a binding pattern.
private STNode parseTypedBindingPatternTypeRhs(STNode typeDesc, ParserRuleContext context, boolean isRoot) {
STToken nextToken = peek();
switch (nextToken.kind) {
case IDENTIFIER_TOKEN:
case OPEN_BRACE_TOKEN:
case ERROR_KEYWORD:
// Capture/wildcard, mapping, or error binding pattern follows.
STNode bindingPattern = parseBindingPattern();
return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
case OPEN_BRACKET_TOKEN:
// Ambiguous `T[...]` — resolved by the dedicated helper.
STNode typedBindingPattern = parseTypedBindingPatternOrMemberAccess(typeDesc, true, true, context);
assert typedBindingPattern.kind == SyntaxKind.TYPED_BINDING_PATTERN;
return typedBindingPattern;
case CLOSE_PAREN_TOKEN:
case COMMA_TOKEN:
case CLOSE_BRACKET_TOKEN:
case CLOSE_BRACE_TOKEN:
if (!isRoot) {
return typeDesc;
}
// At the root a binding pattern is mandatory: falls through to recovery.
default:
recover(nextToken, ParserRuleContext.TYPED_BINDING_PATTERN_TYPE_RHS);
return parseTypedBindingPatternTypeRhs(typeDesc, context, isRoot);
}
}
/**
 * Parse typed-binding pattern with list, array-type-desc, or member-access-expr.
 *
 * @param typeDescOrExpr Type desc or the expression at the start
 * @param isTypedBindingPattern Is this is a typed-binding-pattern. If this is `false`, then it's still ambiguous
 * @return Parsed node
 */
private STNode parseTypedBindingPatternOrMemberAccess(STNode typeDescOrExpr, boolean isTypedBindingPattern,
boolean allowAssignment, ParserRuleContext context) {
// Open a BRACKETED_LIST context until the `[...]` is classified.
startContext(ParserRuleContext.BRACKETED_LIST);
STNode openBracket = parseOpenBracket();
// `T[]` — an empty bracket pair can only be an array type desc.
if (isBracketedListEnd(peek().kind)) {
return parseAsArrayTypeDesc(typeDescOrExpr, openBracket, STNodeFactory.createEmptyNode(), context);
}
// Parse the first member and use it to classify the construct.
STNode member = parseBracketedListMember(isTypedBindingPattern);
SyntaxKind currentNodeType = getBracketedListNodeType(member, isTypedBindingPattern);
switch (currentNodeType) {
case ARRAY_TYPE_DESC:
STNode typedBindingPattern = parseAsArrayTypeDesc(typeDescOrExpr, openBracket, member, context);
return typedBindingPattern;
case LIST_BINDING_PATTERN:
STNode bindingPattern = parseAsListBindingPattern(openBracket, new ArrayList<>(), member, false);
STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
case INDEXED_EXPRESSION:
return parseAsMemberAccessExpr(typeDescOrExpr, openBracket, member);
case ARRAY_TYPE_DESC_OR_MEMBER_ACCESS:
// Still ambiguous after one member: decide via the tokens after the close bracket.
break;
case NONE:
default:
// Still ambiguous. A comma after the member settles it as a list-binding-pattern;
// otherwise fall out and decide after the close bracket.
STNode memberEnd = parseBracketedListMemberEnd();
if (memberEnd != null) {
List<STNode> memberList = new ArrayList<>();
memberList.add(getBindingPattern(member));
memberList.add(memberEnd);
bindingPattern = parseAsListBindingPattern(openBracket, memberList);
typeDesc = getTypeDescFromExpr(typeDescOrExpr);
return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
}
STNode closeBracket = parseCloseBracket();
endContext();
return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
isTypedBindingPattern, allowAssignment, context);
}
/**
 * Treat the bracketed construct as a member-access expression, finish parsing its key
 * expression, and continue parsing any expression rhs that follows.
 *
 * @param typeNameOrExpr Container expression preceding the open bracket
 * @param openBracket    Open bracket token
 * @param member         Partially parsed key expression
 * @return Parsed expression node
 */
private STNode parseAsMemberAccessExpr(STNode typeNameOrExpr, STNode openBracket, STNode member) {
    STNode keyExpr = parseExpressionRhs(DEFAULT_OP_PRECEDENCE, member, false, true);
    STNode closeBracket = parseCloseBracket();
    endContext();
    STNode keyExprList = STNodeFactory.createNodeList(keyExpr);
    STNode memberAccessExpr =
            STNodeFactory.createIndexedExpressionNode(typeNameOrExpr, openBracket, keyExprList, closeBracket);
    return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, memberAccessExpr, false, false);
}
/**
 * Check whether the given token kind terminates a bracketed list.
 *
 * @param nextTokenKind Kind of the next token
 * @return <code>true</code> for a close bracket or EOF; <code>false</code> otherwise
 */
private boolean isBracketedListEnd(SyntaxKind nextTokenKind) {
    return nextTokenKind == SyntaxKind.EOF_TOKEN || nextTokenKind == SyntaxKind.CLOSE_BRACKET_TOKEN;
}
/**
 * Parse a member of an ambiguous bracketed list. This member could be:
 * 1) Array length
 * 2) Key expression of a member-access-expr
 * 3) A member-binding pattern of a list-binding-pattern.
 *
 * @param isTypedBindingPattern Is this in a definite typed-binding pattern
 * @return Parsed member node
 */
private STNode parseBracketedListMember(boolean isTypedBindingPattern) {
STToken nextToken = peek();
switch (nextToken.kind) {
case DECIMAL_INTEGER_LITERAL_TOKEN:
case HEX_INTEGER_LITERAL_TOKEN:
case ASTERISK_TOKEN:
case STRING_LITERAL_TOKEN:
// Could be an array length or a constant key expression.
return parseBasicLiteral();
case CLOSE_BRACKET_TOKEN:
return STNodeFactory.createEmptyNode();
case OPEN_BRACE_TOKEN:
case ERROR_KEYWORD:
case ELLIPSIS_TOKEN:
case OPEN_BRACKET_TOKEN:
// Mapping/error/rest/list constructs — handled by the statement-start helper.
return parseStatementStartBracketedListMember();
case IDENTIFIER_TOKEN:
if (isTypedBindingPattern) {
return parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
}
// Ambiguous identifier: break out to parse it as a general expression.
break;
default:
if ((!isTypedBindingPattern && isValidExpressionStart(nextToken.kind, 1)) ||
isQualifiedIdentifierPredeclaredPrefix(nextToken.kind)) {
// Break out to parse as a general expression.
break;
}
ParserRuleContext recoverContext =
isTypedBindingPattern ? ParserRuleContext.LIST_BINDING_MEMBER_OR_ARRAY_LENGTH
: ParserRuleContext.BRACKETED_LIST_MEMBER;
recover(peek(), recoverContext);
return parseBracketedListMember(isTypedBindingPattern);
}
// Reached via the `break`s above: parse as an expression, but normalize `_` to a
// wildcard binding pattern.
STNode expr = parseExpression();
if (isWildcardBP(expr)) {
return getWildcardBindingPattern(expr);
}
return expr;
}
/**
 * Treat the current node as an array, and parse the remainder of the binding pattern.
 *
 * @param typeDesc Type-desc
 * @param openBracket Open bracket
 * @param member Member
 * @return Parsed node
 */
private STNode parseAsArrayTypeDesc(STNode typeDesc, STNode openBracket, STNode member, ParserRuleContext context) {
typeDesc = getTypeDescFromExpr(typeDesc);
// Reclassify the ambiguous BRACKETED_LIST context as a type-desc context, then parse the
// close bracket within a dedicated ARRAY_TYPE_DESCRIPTOR context.
switchContext(ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN);
startContext(ParserRuleContext.ARRAY_TYPE_DESCRIPTOR);
STNode closeBracket = parseCloseBracket();
// Pop both the ARRAY_TYPE_DESCRIPTOR and the switched type-desc context.
endContext();
endContext();
return parseTypedBindingPatternOrMemberAccessRhs(typeDesc, openBracket, member, closeBracket, true, true,
context);
}
/**
 * Parse the token that follows a bracketed-list member.
 *
 * @return Comma token if another member follows; <code>null</code> at the closing bracket
 */
private STNode parseBracketedListMemberEnd() {
    STToken nextToken = peek();
    if (nextToken.kind == SyntaxKind.COMMA_TOKEN) {
        return parseComma();
    }
    if (nextToken.kind == SyntaxKind.CLOSE_BRACKET_TOKEN) {
        return null;
    }
    recover(nextToken, ParserRuleContext.BRACKETED_LIST_MEMBER_END);
    return parseBracketedListMemberEnd();
}
/**
 * We reach here to break ambiguity of T[a]. This could be:
 * 1) Array Type Desc
 * 2) Member access on LHS
 * 3) Typed-binding-pattern
 *
 * @param typeDescOrExpr Type name or the expr that precede the open-bracket.
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Open bracket
 * @param isTypedBindingPattern Is this is a typed-binding-pattern.
 * @return Specific node that matches to T[a], after solving ambiguity.
 */
private STNode parseTypedBindingPatternOrMemberAccessRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
STNode closeBracket, boolean isTypedBindingPattern,
boolean allowAssignment, ParserRuleContext context) {
STToken nextToken = peek();
switch (nextToken.kind) {
case IDENTIFIER_TOKEN:
case OPEN_BRACE_TOKEN:
case ERROR_KEYWORD:
// A binding pattern follows, so `T[a]` was an array type desc.
STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
case OPEN_BRACKET_TOKEN:
// Another `[`: either a further array dimension (definite binding pattern) or a
// chained member access (still ambiguous).
if (isTypedBindingPattern) {
typeDesc = getTypeDescFromExpr(typeDescOrExpr);
arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
return parseTypedBindingPatternTypeRhs(arrayTypeDesc, context);
}
STNode keyExpr = getKeyExpr(member);
STNode expr =
STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
return parseTypedBindingPatternOrMemberAccess(expr, false, allowAssignment, context);
case QUESTION_MARK_TOKEN:
// `T[a]?` — optional (array) type desc; keep parsing the complex type.
typeDesc = getTypeDescFromExpr(typeDescOrExpr);
arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
typeDesc = parseComplexTypeDescriptor(arrayTypeDesc,
ParserRuleContext.TYPE_DESC_IN_TYPE_BINDING_PATTERN, true);
return parseTypedBindingPatternTypeRhs(typeDesc, context);
case PIPE_TOKEN:
case BITWISE_AND_TOKEN:
// `T[a] | ...` or `T[a] & ...` — union/intersection; still ambiguous.
return parseComplexTypeDescInTypedBPOrExprRhs(typeDescOrExpr, openBracket, member, closeBracket,
isTypedBindingPattern);
case IN_KEYWORD:
// `in` only makes this a typed-binding-pattern inside foreach/from/join clauses;
// otherwise falls through to recovery.
if (context != ParserRuleContext.FOREACH_STMT &&
context != ParserRuleContext.FROM_CLAUSE &&
context != ParserRuleContext.JOIN_CLAUSE) {
break;
}
return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
case EQUAL_TOKEN:
// `T[a] = ...` — assignment, unless we are in a foreach/from clause (falls through
// to recovery there, since `in` was expected).
if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
break;
}
if (isTypedBindingPattern || !allowAssignment || !isValidLVExpr(typeDescOrExpr)) {
return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
}
// Otherwise an lvalue member access expression.
keyExpr = getKeyExpr(member);
typeDescOrExpr = getExpression(typeDescOrExpr);
return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr, closeBracket);
case SEMICOLON_TOKEN:
// `T[a];` — typed-binding-pattern, except in foreach/from (falls through to recovery).
if (context == ParserRuleContext.FOREACH_STMT || context == ParserRuleContext.FROM_CLAUSE) {
break;
}
return createTypedBindingPattern(typeDescOrExpr, openBracket, member, closeBracket);
case CLOSE_BRACE_TOKEN:
case COMMA_TOKEN:
// Inside an ambiguous statement these terminators imply a member access; otherwise
// deliberately falls through to the default handling below.
if (context == ParserRuleContext.AMBIGUOUS_STMT) {
keyExpr = getKeyExpr(member);
return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
closeBracket);
}
// intentional fall-through
default:
// Any expression-rhs starter settles it as a member access expression.
if (!isTypedBindingPattern && isValidExprRhsStart(nextToken.kind, closeBracket.kind)) {
keyExpr = getKeyExpr(member);
typeDescOrExpr = getExpression(typeDescOrExpr);
return STNodeFactory.createIndexedExpressionNode(typeDescOrExpr, openBracket, keyExpr,
closeBracket);
}
break;
}
// Could not resolve: recover (with a context that matches how certain we are) and retry.
ParserRuleContext recoveryCtx = ParserRuleContext.BRACKETED_LIST_RHS;
if (isTypedBindingPattern) {
recoveryCtx = ParserRuleContext.TYPE_DESC_RHS_OR_BP_RHS;
}
recover(peek(), recoveryCtx);
return parseTypedBindingPatternOrMemberAccessRhs(typeDescOrExpr, openBracket, member, closeBracket,
isTypedBindingPattern, allowAssignment, context);
}
/**
 * Wrap a member-access key expression in a node list, synthesizing a missing
 * identifier (with a diagnostic) when no key expression was parsed.
 *
 * @param member Parsed key expression, or <code>null</code> if absent
 * @return Node list holding the key expression
 */
private STNode getKeyExpr(STNode member) {
    if (member != null) {
        return STNodeFactory.createNodeList(member);
    }
    STToken keyIdentifier = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
            DiagnosticErrorCode.ERROR_MISSING_KEY_EXPR_IN_MEMBER_ACCESS_EXPR);
    STNode missingVarRef = STNodeFactory.createSimpleNameReferenceNode(keyIdentifier);
    return STNodeFactory.createNodeList(missingVarRef);
}
// Builds a typed-binding-pattern from the resolved `T[member]` parts. A numeric/asterisk
// member means `T[n]` was an array type with no variable name — one is synthesized with a
// diagnostic. A qualified name member cannot be a list-binding-pattern member, so it is
// folded into the open bracket as invalid minutiae. Otherwise the member becomes the sole
// member of a list-binding-pattern.
private STNode createTypedBindingPattern(STNode typeDescOrExpr, STNode openBracket, STNode member,
STNode closeBracket) {
STNode bindingPatterns = STNodeFactory.createEmptyNodeList();
if (!isEmpty(member)) {
SyntaxKind memberKind = member.kind;
if (memberKind == SyntaxKind.NUMERIC_LITERAL || memberKind == SyntaxKind.ASTERISK_LITERAL) {
// `T[5]` / `T[*]` — array type desc; synthesize the missing variable name.
STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
STNode arrayTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, typeDesc);
STToken identifierToken = SyntaxErrors.createMissingTokenWithDiagnostics(SyntaxKind.IDENTIFIER_TOKEN,
DiagnosticErrorCode.ERROR_MISSING_VARIABLE_NAME);
STNode variableName = STNodeFactory.createCaptureBindingPatternNode(identifierToken);
return STNodeFactory.createTypedBindingPatternNode(arrayTypeDesc, variableName);
}
if (member.kind == SyntaxKind.QUALIFIED_NAME_REFERENCE) {
// Field access is not a valid list-binding-pattern member.
openBracket = SyntaxErrors.cloneWithTrailingInvalidNodeMinutiae(openBracket, member,
DiagnosticErrorCode.ERROR_FIELD_BP_INSIDE_LIST_BP);
} else {
STNode bindingPattern = getBindingPattern(member);
bindingPatterns = STNodeFactory.createNodeList(bindingPattern);
}
}
STNode bindingPattern = STNodeFactory.createListBindingPatternNode(openBracket, bindingPatterns, closeBracket);
STNode typeDesc = getTypeDescFromExpr(typeDescOrExpr);
return STNodeFactory.createTypedBindingPatternNode(typeDesc, bindingPattern);
}
/**
 * Parse a union or intersection type-desc/binary-expression that involves ambiguous
 * bracketed list in lhs.
 * <p>
 * e.g: <code>(T[a] & R..)</code> or <code>(T[a] | R.. )</code>
 * <p>
 * Complexity occurs in scenarios such as <code>T[a] |/& R[b]</code>. If the token after this
 * is another binding-pattern, then <code>(T[a] |/& R[b])</code> becomes the type-desc. However,
 * if the token follows this is an equal or semicolon, then <code>(T[a] |/& R)</code> becomes
 * the type-desc, and <code>[b]</code> becomes the binding pattern.
 *
 * @param typeDescOrExpr Type desc or the expression
 * @param openBracket Open bracket
 * @param member Member
 * @param closeBracket Close bracket
 * @return Parsed node
 */
private STNode parseComplexTypeDescInTypedBPOrExprRhs(STNode typeDescOrExpr, STNode openBracket, STNode member,
STNode closeBracket, boolean isTypedBindingPattern) {
STNode pipeOrAndToken = parseUnionOrIntersectionToken();
// Parse the rhs (still ambiguous) and let its resolved kind decide ours.
STNode typedBindingPatternOrExpr = parseTypedBindingPatternOrExpr(false);
if (typedBindingPatternOrExpr.kind == SyntaxKind.TYPED_BINDING_PATTERN) {
// Rhs resolved to a typed-binding-pattern, so the lhs is an array type desc and the
// combined type becomes the type of that binding pattern.
STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
STTypedBindingPatternNode rhsTypedBindingPattern = (STTypedBindingPatternNode) typedBindingPatternOrExpr;
STNode rhsTypeDesc = rhsTypedBindingPattern.typeDescriptor;
// Merge with precedence/associativity handling (& binds tighter than |).
STNode newTypeDesc = mergeTypes(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
return STNodeFactory.createTypedBindingPatternNode(newTypeDesc, rhsTypedBindingPattern.bindingPattern);
}
if (isTypedBindingPattern) {
// Lhs is definitely a binding pattern but the rhs is an expression: synthesize the
// missing variable name.
STNode lhsTypeDesc = getTypeDescFromExpr(typeDescOrExpr);
lhsTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, lhsTypeDesc);
return createCaptureBPWithMissingVarName(lhsTypeDesc, pipeOrAndToken, typedBindingPatternOrExpr);
}
// Otherwise the whole thing is a binary expression over a member access.
STNode keyExpr = getExpression(member);
STNode containerExpr = getExpression(typeDescOrExpr);
STNode lhsExpr =
STNodeFactory.createIndexedExpressionNode(containerExpr, openBracket, keyExpr, closeBracket);
return STNodeFactory.createBinaryExpressionNode(SyntaxKind.BINARY_EXPRESSION, lhsExpr, pipeOrAndToken,
typedBindingPatternOrExpr);
}
/**
 * Merge two types separated by <code>|</code> or <code>&</code> into a single type,
 * honoring precedence and associativity.
 *
 * @param lhsTypeDesc    Left-hand-side type
 * @param pipeOrAndToken Pipe or bitwise-and token
 * @param rhsTypeDesc    Right-hand-side type
 * @return Merged TypeDescriptorNode
 */
private STNode mergeTypes(STNode lhsTypeDesc, STNode pipeOrAndToken, STNode rhsTypeDesc) {
    return pipeOrAndToken.kind == SyntaxKind.PIPE_TOKEN
            ? mergeTypesWithUnion(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc)
            : mergeTypesWithIntersection(lhsTypeDesc, pipeOrAndToken, rhsTypeDesc);
}
/**
 * Merge two types separated by <code>|</code> into a single union type, honoring
 * precedence and associativity (the lhs joins the leftmost position of an rhs union).
 *
 * @param lhsTypeDesc Left-hand-side type
 * @param pipeToken   Pipe token
 * @param rhsTypeDesc Right-hand-side type
 * @return Merged TypeDescriptorNode
 */
private STNode mergeTypesWithUnion(STNode lhsTypeDesc, STNode pipeToken, STNode rhsTypeDesc) {
    if (rhsTypeDesc.kind != SyntaxKind.UNION_TYPE_DESC) {
        return createUnionTypeDesc(lhsTypeDesc, pipeToken, rhsTypeDesc);
    }
    STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
    return replaceLeftMostUnionWithAUnion(lhsTypeDesc, pipeToken, rhsUnionTypeDesc);
}
/**
 * Merges two types separated by <code>&</code> into one type, while taking precedence
 * and associativity into account.
 *
 * @param lhsTypeDesc lhs type
 * @param bitwiseAndToken bitwise-and token
 * @param rhsTypeDesc rhs type
 * @return a TypeDescriptorNode
 */
private STNode mergeTypesWithIntersection(STNode lhsTypeDesc, STNode bitwiseAndToken, STNode rhsTypeDesc) {
    // Intersection (&) binds tighter than union (|), so when the lhs is a union only
    // its right-most component participates in the new intersection.
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        STUnionTypeDescriptorNode lhsUnionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
            // (A | B) & (C & D)  ->  A | (B & C & D)
            rhsTypeDesc = replaceLeftMostIntersectionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STIntersectionTypeDescriptorNode) rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        } else if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
            // (A | B) & (C | D)  ->  A | (B & C) | D
            rhsTypeDesc = replaceLeftMostUnionWithAIntersection(lhsUnionTypeDesc.rightTypeDesc,
                    bitwiseAndToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
            return replaceLeftMostUnionWithAUnion(lhsUnionTypeDesc.leftTypeDesc,
                    lhsUnionTypeDesc.pipeToken, (STUnionTypeDescriptorNode) rhsTypeDesc);
        } else {
            // (A | B) & C  ->  A | (B & C)
            rhsTypeDesc = createIntersectionTypeDesc(lhsUnionTypeDesc.rightTypeDesc, bitwiseAndToken, rhsTypeDesc);
            return createUnionTypeDesc(lhsUnionTypeDesc.leftTypeDesc, lhsUnionTypeDesc.pipeToken, rhsTypeDesc);
        }
    }
    if (rhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // A & (B | C)  ->  (A & B) | C
        STUnionTypeDescriptorNode rhsUnionTypeDesc = (STUnionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostUnionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsUnionTypeDesc);
    } else if (rhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // A & (B & C): splice A in as the left-most operand of the rhs intersection.
        STIntersectionTypeDescriptorNode rhsIntSecTypeDesc = (STIntersectionTypeDescriptorNode) rhsTypeDesc;
        return replaceLeftMostIntersectionWithAIntersection(lhsTypeDesc, bitwiseAndToken, rhsIntSecTypeDesc);
    } else {
        // Neither side is a compound type: build a plain intersection.
        return createIntersectionTypeDesc(lhsTypeDesc, bitwiseAndToken, rhsTypeDesc);
    }
}
// Splices 'typeDesc' (joined by 'pipeToken') in as the left-most operand of the given
// union type, recursing down the union's left spine to preserve left-associativity.
private STNode replaceLeftMostUnionWithAUnion(STNode typeDesc, STNode pipeToken,
                                              STUnionTypeDescriptorNode unionTypeDesc) {
    STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // The left operand is itself a union: keep descending to the left-most leaf.
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostUnionWithAUnion(typeDesc, pipeToken, (STUnionTypeDescriptorNode) leftTypeDesc));
    }
    // Reached the left-most leaf: join it with 'typeDesc' and rebuild the node.
    leftTypeDesc = createUnionTypeDesc(typeDesc, pipeToken, leftTypeDesc);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
}
// Splices 'typeDesc' (joined by 'bitwiseAndToken') onto the left-most leaf of the given
// union type: descends the union's left spine, then joins the leaf with '&' (delegating
// to the intersection variant if that leaf is itself an intersection).
private STNode replaceLeftMostUnionWithAIntersection(STNode typeDesc, STNode bitwiseAndToken,
                                                     STUnionTypeDescriptorNode unionTypeDesc) {
    STNode leftTypeDesc = unionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // Still a union on the left: keep descending.
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostUnionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STUnionTypeDescriptorNode) leftTypeDesc));
    }
    if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Left-most component is an intersection: splice into its left spine instead.
        return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc,
                replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STIntersectionTypeDescriptorNode) leftTypeDesc));
    }
    // Plain leaf: join it with 'typeDesc' via '&' and rebuild the union node.
    leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
    return unionTypeDesc.replace(unionTypeDesc.leftTypeDesc, leftTypeDesc);
}
// Splices 'typeDesc' (joined by 'bitwiseAndToken') in as the left-most operand of the
// given intersection type, recursing down its left spine to preserve left-associativity.
private STNode replaceLeftMostIntersectionWithAIntersection(STNode typeDesc,
                                                            STNode bitwiseAndToken,
                                                            STIntersectionTypeDescriptorNode intersectionTypeDesc) {
    STNode leftTypeDesc = intersectionTypeDesc.leftTypeDesc;
    if (leftTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // Still an intersection on the left: keep descending to the left-most leaf.
        return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc,
                replaceLeftMostIntersectionWithAIntersection(typeDesc, bitwiseAndToken,
                        (STIntersectionTypeDescriptorNode) leftTypeDesc));
    }
    // Reached the left-most leaf: join it with 'typeDesc' and rebuild the node.
    leftTypeDesc = createIntersectionTypeDesc(typeDesc, bitwiseAndToken, leftTypeDesc);
    return intersectionTypeDesc.replace(intersectionTypeDesc.leftTypeDesc, leftTypeDesc);
}
// Wraps the right-most simple component of 'lhsTypeDesc' in an array type descriptor
// built from the given brackets and member (length). Union/intersection wrappers are
// rebuilt around the new array type so that '[]' binds tighter than '|' and '&'.
private STNode getArrayTypeDesc(STNode openBracket, STNode member, STNode closeBracket, STNode lhsTypeDesc) {
    if (lhsTypeDesc.kind == SyntaxKind.UNION_TYPE_DESC) {
        // (A | B)[]  ->  A | B[]
        STUnionTypeDescriptorNode unionTypeDesc = (STUnionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc = getArrayTypeDesc(openBracket, member, closeBracket, unionTypeDesc.rightTypeDesc);
        lhsTypeDesc = mergeTypesWithUnion(unionTypeDesc.leftTypeDesc, unionTypeDesc.pipeToken, middleTypeDesc);
    } else if (lhsTypeDesc.kind == SyntaxKind.INTERSECTION_TYPE_DESC) {
        // (A & B)[]  ->  A & B[]
        STIntersectionTypeDescriptorNode intersectionTypeDesc = (STIntersectionTypeDescriptorNode) lhsTypeDesc;
        STNode middleTypeDesc =
                getArrayTypeDesc(openBracket, member, closeBracket, intersectionTypeDesc.rightTypeDesc);
        lhsTypeDesc = mergeTypesWithIntersection(intersectionTypeDesc.leftTypeDesc,
                intersectionTypeDesc.bitwiseAndToken, middleTypeDesc);
    } else {
        // Simple type: wrap it directly.
        lhsTypeDesc = createArrayTypeDesc(lhsTypeDesc, openBracket, member, closeBracket);
    }
    return lhsTypeDesc;
}
/**
 * Consumes the next token as a union (<code>|</code>) or intersection (<code>&</code>)
 * type operator, running error recovery until one of the two is found.
 *
 * @return the pipe or bitwise-and token
 */
private STNode parseUnionOrIntersectionToken() {
    STToken token = peek();
    // Recover repeatedly until the lookahead is one of the two accepted operators.
    while (token.kind != SyntaxKind.PIPE_TOKEN && token.kind != SyntaxKind.BITWISE_AND_TOKEN) {
        recover(token, ParserRuleContext.UNION_OR_INTERSECTION_TOKEN);
        token = peek();
    }
    return consume();
}
/**
 * Infer the type of the ambiguous bracketed list, based on the type of the member.
 *
 * @param memberNode Member node
 * @param isTypedBindingPattern whether the construct is already known to be a typed
 *        binding pattern (which rules out pure-expression interpretations)
 * @return Inferred type of the bracketed list
 */
private SyntaxKind getBracketedListNodeType(STNode memberNode, boolean isTypedBindingPattern) {
    if (isEmpty(memberNode)) {
        // Empty brackets: cannot decide yet.
        return SyntaxKind.NONE;
    }
    if (isDefiniteTypeDesc(memberNode.kind)) {
        // A definite type descriptor member can only belong to a tuple type.
        return SyntaxKind.TUPLE_TYPE_DESC;
    }
    switch (memberNode.kind) {
        case ASTERISK_LITERAL:
            // '*' as a member occurs only in the array-length position.
            return SyntaxKind.ARRAY_TYPE_DESC;
        case CAPTURE_BINDING_PATTERN:
        case LIST_BINDING_PATTERN:
        case REST_BINDING_PATTERN:
        case MAPPING_BINDING_PATTERN:
        case WILDCARD_BINDING_PATTERN:
            // Any binding-pattern member makes the whole list a list binding pattern.
            return SyntaxKind.LIST_BINDING_PATTERN;
        case QUALIFIED_NAME_REFERENCE: // a qualified name here is treated as a type reference
        case REST_TYPE:
            return SyntaxKind.TUPLE_TYPE_DESC;
        case NUMERIC_LITERAL: // could be an array length or a member-access index
            if (isTypedBindingPattern) {
                return SyntaxKind.ARRAY_TYPE_DESC;
            }
            return SyntaxKind.ARRAY_TYPE_DESC_OR_MEMBER_ACCESS;
        case SIMPLE_NAME_REFERENCE: // a bare name could be a type ref or a var ref
        case BRACKETED_LIST: // nested list is itself still ambiguous
        case MAPPING_BP_OR_MAPPING_CONSTRUCTOR:
            return SyntaxKind.NONE;
        case ERROR_CONSTRUCTOR:
            if (isPossibleErrorBindingPattern((STErrorConstructorExpressionNode) memberNode)) {
                // Could still be an error binding pattern: stay ambiguous.
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
        default:
            if (isTypedBindingPattern) {
                return SyntaxKind.NONE;
            }
            return SyntaxKind.INDEXED_EXPRESSION;
    }
}
/*
* This section tries to break the ambiguity in parsing a statement that starts with a open-bracket.
* The ambiguity lies in between:
* 1) Assignment that starts with list binding pattern
* 2) Var-decl statement that starts with tuple type
* 3) Statement that starts with list constructor, such as sync-send, etc.
*/
/**
 * Parses any statement that starts with an open bracket.
 *
 * @param annots Annotations attached to the statement
 * @param possibleMappingField whether this construct may occur as a mapping field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean possibleMappingField) {
    startContext(ParserRuleContext.ASSIGNMENT_OR_VAR_DECL_STMT);
    boolean isRoot = true; // the statement-level list is the root of the ambiguous construct
    return parseStatementStartsWithOpenBracket(annots, isRoot, possibleMappingField);
}
// Parses a nested bracketed list occurring as a member: no annotations apply and the
// list is neither the root construct nor a possible mapping field.
private STNode parseMemberBracketedList() {
    return parseStatementStartsWithOpenBracket(STNodeFactory.createEmptyNodeList(), false, false);
}
/**
 * The bracketed list at the start of a statement can be one of the following.
 * 1) List binding pattern
 * 2) Tuple type
 * 3) List constructor
 *
 * @param annots Annotations attached to the statement
 * @param isRoot Is this the root of the list
 * @param possibleMappingField Whether this construct may occur as a mapping field
 * @return Parsed node
 */
private STNode parseStatementStartsWithOpenBracket(STNode annots, boolean isRoot, boolean possibleMappingField) {
    startContext(ParserRuleContext.STMT_START_BRACKETED_LIST);
    STNode openBracket = parseOpenBracket();
    List<STNode> memberList = new ArrayList<>();
    // Parse members one by one; as soon as a member disambiguates the construct,
    // hand over to the dedicated parse method for that construct.
    while (!isBracketedListEnd(peek().kind)) {
        STNode member = parseStatementStartBracketedListMember();
        SyntaxKind currentNodeType = getStmtStartBracketedListType(member);
        switch (currentNodeType) {
            case TUPLE_TYPE_DESC:
                // Member is a definite tuple-member type: continue as a tuple type desc.
                member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case MEMBER_TYPE_DESC:
            case REST_TYPE:
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case LIST_BINDING_PATTERN:
                return parseAsListBindingPattern(openBracket, memberList, member, isRoot);
            case LIST_CONSTRUCTOR:
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case LIST_BP_OR_LIST_CONSTRUCTOR:
                return parseAsListBindingPatternOrListConstructor(openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
                return parseAsTupleTypeDescOrListConstructor(annots, openBracket, memberList, member, isRoot);
            case NONE:
            default:
                // Still ambiguous: collect the member and continue.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
    }
    // Reached the end without disambiguating: decide based on what follows the list.
    STNode closeBracket = parseCloseBracket();
    STNode bracketedList = parseStatementStartBracketedListRhs(annots, openBracket, memberList, closeBracket,
            isRoot, possibleMappingField);
    return bracketedList;
}
/**
 * Parses a member of a list-binding-pattern, tuple-type-desc, or
 * list-constructor-expr, when the parent is ambiguous.
 *
 * @return Parsed node
 */
private STNode parseStatementStartBracketedListMember() {
    // Start with an empty qualifier list; the overload collects any qualifiers itself.
    return parseStatementStartBracketedListMember(new ArrayList<>());
}
// Parses a single member of a statement-start bracketed list, after collecting any
// leading type-descriptor qualifiers into 'qualifiers'. The returned node may be a
// binding pattern, a type descriptor, an expression, or still ambiguous.
private STNode parseStatementStartBracketedListMember(List<STNode> qualifiers) {
    parseTypeDescQualifiers(qualifiers);
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed list: parse recursively, remaining ambiguous.
            reportInvalidQualifierList(qualifiers);
            return parseMemberBracketedList();
        case IDENTIFIER_TOKEN:
            reportInvalidQualifierList(qualifiers);
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (isWildcardBP(identifier)) {
                // '_' is a wildcard binding pattern.
                STNode varName = ((STSimpleNameReferenceNode) identifier).name;
                return getWildcardBindingPattern(varName);
            }
            nextToken = peek();
            if (nextToken.kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // identifier followed by '...' is a rest descriptor.
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            if (nextToken.kind != SyntaxKind.OPEN_BRACKET_TOKEN && isValidTypeContinuationToken(nextToken)) {
                // The identifier continues as a type descriptor.
                return parseComplexTypeDescriptor(identifier, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, true);
        case OPEN_BRACE_TOKEN:
            reportInvalidQualifierList(qualifiers);
            // '{' could start a mapping binding pattern or a mapping constructor.
            return parseMappingBindingPatterOrMappingConstructor();
        case ERROR_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                return parseErrorBindingPatternOrErrorConstructor();
            }
            // Plain 'error' is parsed as a type descriptor.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case ELLIPSIS_TOKEN:
            reportInvalidQualifierList(qualifiers);
            return parseRestBindingOrSpreadMember();
        case XML_KEYWORD:
        case STRING_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // A backtick follows: template expression rather than a type.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            reportInvalidQualifierList(qualifiers);
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                // 'table<'/'stream<' starts a parameterized type descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr(qualifiers);
        case FUNCTION_KEYWORD:
            return parseAnonFuncExprOrFuncTypeDesc(qualifiers);
        case AT_TOKEN:
            return parseTupleMember();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                reportInvalidQualifierList(qualifiers);
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(qualifiers, ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            // Unexpected token: recover and retry.
            recover(peek(), ParserRuleContext.STMT_START_BRACKETED_LIST_MEMBER);
            return parseStatementStartBracketedListMember(qualifiers);
    }
}
// Parses a '...'-prefixed member. A bare name after the ellipsis is a rest binding
// pattern; any other expression makes it a spread member.
private STNode parseRestBindingOrSpreadMember() {
    STNode ellipsis = parseEllipsis();
    STNode expr = parseExpression();
    return expr.kind == SyntaxKind.SIMPLE_NAME_REFERENCE
            ? STNodeFactory.createRestBindingPatternNode(ellipsis, expr)
            : STNodeFactory.createSpreadMemberNode(ellipsis, expr);
}
// Continues parsing the bracketed list as an (ambiguous) tuple-type-desc or list
// constructor, after the given member has been collected.
private STNode parseAsTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                     STNode member, boolean isRoot) {
    memberList.add(member);
    STNode memberEnd = parseBracketedListMemberEnd();
    if (memberEnd == null) {
        // No separator: the list ends here; decide based on what follows.
        STNode closeBracket = parseCloseBracket();
        return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
    }
    // More members follow: keep collecting.
    memberList.add(memberEnd);
    return parseTupleTypeDescOrListConstructor(annots, openBracket, memberList, isRoot);
}
/**
 * Parse tuple type desc or list constructor, starting at the open bracket, while the
 * construct is still ambiguous between the two.
 *
 * @param annots annotations preceding the construct
 * @return Parsed node
 */
private STNode parseTupleTypeDescOrListConstructor(STNode annots) {
    startContext(ParserRuleContext.BRACKETED_LIST);
    return parseTupleTypeDescOrListConstructor(annots, parseOpenBracket(), new ArrayList<>(), false);
}
// Parses the members of a construct still ambiguous between a tuple type descriptor
// and a list constructor; delegates to the dedicated parse method as soon as a member
// disambiguates it.
private STNode parseTupleTypeDescOrListConstructor(STNode annots, STNode openBracket, List<STNode> memberList,
                                                   boolean isRoot) {
    STToken nextToken = peek();
    while (!isBracketedListEnd(nextToken.kind)) {
        STNode member = parseTupleTypeDescOrListConstructorMember(annots);
        SyntaxKind currentNodeType = getParsingNodeTypeOfTupleTypeOrListCons(member);
        switch (currentNodeType) {
            case LIST_CONSTRUCTOR:
                // Member can only be an expression: the construct is a list constructor.
                return parseAsListConstructor(openBracket, memberList, member, isRoot);
            case REST_TYPE:
            case MEMBER_TYPE_DESC:
                // Member can only be a type: the construct is a tuple type descriptor.
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC:
                member = parseComplexTypeDescriptor(member, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
                member = createMemberOrRestNode(STNodeFactory.createEmptyNodeList(), member);
                return parseAsTupleTypeDesc(annots, openBracket, memberList, member, isRoot);
            case TUPLE_TYPE_DESC_OR_LIST_CONST:
            default:
                // Still ambiguous: collect the member and keep going.
                memberList.add(member);
                break;
        }
        STNode memberEnd = parseBracketedListMemberEnd();
        if (memberEnd == null) {
            break;
        }
        memberList.add(memberEnd);
        nextToken = peek();
    }
    // End of list without disambiguating: decide based on what follows the bracket.
    STNode closeBracket = parseCloseBracket();
    return parseTupleTypeDescOrListConstructorRhs(openBracket, memberList, closeBracket, isRoot);
}
// Parses one member while the parent is still ambiguous between a tuple type
// descriptor and a list constructor. The returned node may itself still be ambiguous.
private STNode parseTupleTypeDescOrListConstructorMember(STNode annots) {
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case OPEN_BRACKET_TOKEN:
            // Nested bracketed list: parse recursively, remaining ambiguous.
            return parseTupleTypeDescOrListConstructor(annots);
        case IDENTIFIER_TOKEN:
            STNode identifier = parseQualifiedIdentifier(ParserRuleContext.VARIABLE_REF);
            if (peek().kind == SyntaxKind.ELLIPSIS_TOKEN) {
                // identifier followed by '...' is a rest descriptor (tuple-type member).
                STNode ellipsis = parseEllipsis();
                return STNodeFactory.createRestDescriptorNode(identifier, ellipsis);
            }
            return parseExpressionRhs(DEFAULT_OP_PRECEDENCE, identifier, false, false);
        case OPEN_BRACE_TOKEN:
            return parseMappingConstructorExpr();
        case ERROR_KEYWORD:
            STToken nextNextToken = getNextNextToken();
            if (nextNextToken.kind == SyntaxKind.OPEN_PAREN_TOKEN ||
                    nextNextToken.kind == SyntaxKind.IDENTIFIER_TOKEN) {
                // 'error(' or 'error Name': an error constructor expression.
                return parseErrorConstructorExpr(false);
            }
            // Plain 'error' is parsed as a type descriptor.
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case XML_KEYWORD:
        case STRING_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.BACKTICK_TOKEN) {
                // A backtick follows: template expression rather than a type.
                return parseExpression(false);
            }
            return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
        case TABLE_KEYWORD:
        case STREAM_KEYWORD:
            if (getNextNextToken().kind == SyntaxKind.LT_TOKEN) {
                // 'table<'/'stream<' starts a parameterized type descriptor.
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            return parseExpression(false);
        case OPEN_PAREN_TOKEN:
            return parseTypeDescOrExpr();
        case AT_TOKEN:
            return parseTupleMember();
        default:
            if (isValidExpressionStart(nextToken.kind, 1)) {
                return parseExpression(false);
            }
            if (isTypeStartingToken(nextToken.kind)) {
                return parseTypeDescriptor(ParserRuleContext.TYPE_DESC_IN_TUPLE);
            }
            // Unexpected token: recover and retry.
            recover(peek(), ParserRuleContext.TUPLE_TYPE_DESC_OR_LIST_CONST_MEMBER);
            return parseTupleTypeDescOrListConstructorMember(annots);
    }
}
// Members of a tuple-type-desc vs list-constructor construct are classified exactly
// like members of a statement-start bracketed list, so reuse that classification.
private SyntaxKind getParsingNodeTypeOfTupleTypeOrListCons(STNode memberNode) {
    return getStmtStartBracketedListType(memberNode);
}
// Decides, after the closing bracket, whether the collected bracketed list is a tuple
// type descriptor or a list constructor, based on the token that follows it.
private STNode parseTupleTypeDescOrListConstructorRhs(STNode openBracket, List<STNode> members, STNode closeBracket,
                                                      boolean isRoot) {
    STNode tupleTypeOrListConst;
    switch (peek().kind) {
        case COMMA_TOKEN:
        case CLOSE_BRACE_TOKEN:
        case CLOSE_BRACKET_TOKEN:
        case PIPE_TOKEN:
        case BITWISE_AND_TOKEN:
            if (!isRoot) {
                // Nested and still undecidable: return an ambiguous node and let the
                // parent resolve it.
                endContext();
                return new STAmbiguousCollectionNode(SyntaxKind.TUPLE_TYPE_DESC_OR_LIST_CONST, openBracket, members,
                        closeBracket);
            }
            // Deliberate fall-through: at the root these tokens are handled below.
        default:
            if (isValidExprRhsStart(peek().kind, closeBracket.kind) ||
                    (isRoot && peek().kind == SyntaxKind.EQUAL_TOKEN)) {
                // Followed by an expression continuation (or '=' at the root): the
                // list is a list constructor expression.
                members = getExpressionList(members, false);
                STNode memberExpressions = STNodeFactory.createNodeList(members);
                tupleTypeOrListConst = STNodeFactory.createListConstructorExpressionNode(openBracket,
                        memberExpressions, closeBracket);
                break;
            }
            // Otherwise treat it as a tuple type descriptor and continue parsing any
            // further type-descriptor syntax that may follow.
            STNode memberTypeDescs = STNodeFactory.createNodeList(getTupleMemberList(members));
            STNode tupleTypeDesc =
                    STNodeFactory.createTupleTypeDescriptorNode(openBracket, memberTypeDescs, closeBracket);
            tupleTypeOrListConst =
                    parseComplexTypeDescriptor(tupleTypeDesc, ParserRuleContext.TYPE_DESC_IN_TUPLE, false);
    }
    endContext();
    if (!isRoot) {
        return tupleTypeOrListConst;
    }
    STNode annots = STNodeFactory.createEmptyNodeList();
    return parseStmtStartsWithTupleTypeOrExprRhs(annots, tupleTypeOrListConst, isRoot);
}
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
// Parses a single class member, object-constructor member, or object member
// descriptor: optional metadata (doc string and/or annotations) followed by the
// member itself. Returns null at the end of the enclosing body.
private STNode parseObjectMember(ParserRuleContext context) {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the enclosing body: no more members.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FINAL_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
        case RESOURCE_KEYWORD:
            // Member starts directly with a qualifier or keyword: no metadata present.
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            // Documentation string and/or annotations precede the member.
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            // Unexpected token: pick the recovery context matching the caller.
            ParserRuleContext recoveryCtx;
            if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
                recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
            } else {
                recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
            }
            Solution solution = recover(peek(), recoveryCtx);
            if (solution.action == Action.KEEP) {
                // Recovery kept the token: treat it as a metadata-less member.
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            return parseObjectMember(context);
    }
    return parseObjectMemberWithoutMeta(metadata, context);
}
* </p>
* <code>
* class-member := object-field | method-defn | object-type-inclusion
* <br/>
* object-member := object-field | method-defn
* <br/>
* object-member-descriptor := object-field-descriptor | method-decl | object-type-inclusion
* </code>
*
* @param context Parsing context of the object member
* @return Parsed node
*/
// Parses a single class member, object-constructor member, or object member
// descriptor: optional metadata (doc string and/or annotations) followed by the
// member itself. Returns null at the end of the enclosing body.
private STNode parseObjectMember(ParserRuleContext context) {
    STNode metadata;
    STToken nextToken = peek();
    switch (nextToken.kind) {
        case EOF_TOKEN:
        case CLOSE_BRACE_TOKEN:
            // End of the enclosing body: no more members.
            return null;
        case ASTERISK_TOKEN:
        case PUBLIC_KEYWORD:
        case PRIVATE_KEYWORD:
        case FINAL_KEYWORD:
        case REMOTE_KEYWORD:
        case FUNCTION_KEYWORD:
        case TRANSACTIONAL_KEYWORD:
        case ISOLATED_KEYWORD:
        case RESOURCE_KEYWORD:
            // Member starts directly with a qualifier or keyword: no metadata present.
            metadata = STNodeFactory.createEmptyNode();
            break;
        case DOCUMENTATION_STRING:
        case AT_TOKEN:
            // Documentation string and/or annotations precede the member.
            metadata = parseMetaData();
            break;
        default:
            if (isTypeStartingToken(nextToken.kind)) {
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            // Unexpected token: pick the recovery context matching the caller.
            ParserRuleContext recoveryCtx;
            if (context == ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER) {
                recoveryCtx = ParserRuleContext.OBJECT_CONSTRUCTOR_MEMBER_START;
            } else {
                recoveryCtx = ParserRuleContext.CLASS_MEMBER_OR_OBJECT_MEMBER_START;
            }
            Solution solution = recover(peek(), recoveryCtx);
            if (solution.action == Action.KEEP) {
                // Recovery kept the token: treat it as a metadata-less member.
                metadata = STNodeFactory.createEmptyNode();
                break;
            }
            return parseObjectMember(context);
    }
    return parseObjectMemberWithoutMeta(metadata, context);
}
The setup and execution is identical to that used in testIntHash(), except the data type and field name. Consider refactor into a helper function. As it stands I think it is harder to read as I was looking for differences between the two tests. | public void testLongHash() throws ParseException {
var expression = Expression.fromString("input myText | hash | attribute 'myLong'");
SimpleTestAdapter adapter = new SimpleTestAdapter();
adapter.createField(new Field("myText", DataType.STRING));
var intField = new Field("myLong", DataType.LONG);
adapter.createField(intField);
adapter.setValue("myText", new StringFieldValue("input text"));
expression.setStatementOutput(new DocumentType("myDocument"), intField);
VerificationContext verificationContext = new VerificationContext(adapter);
assertEquals(DataType.LONG, expression.verify(verificationContext));
ExecutionContext context = new ExecutionContext(adapter);
context.setValue(new StringFieldValue("input text"));
expression.execute(context);
assertNotNull(context);
assertTrue(adapter.values.containsKey("myLong"));
assertEquals(7678158186624760752L, adapter.values.get("myLong").getWrappedValue());
} | expression.execute(context); | public void testLongHash() throws ParseException {
var expression = Expression.fromString("input myText | hash | attribute 'myLong'");
SimpleTestAdapter adapter = new SimpleTestAdapter();
adapter.createField(new Field("myText", DataType.STRING));
var intField = new Field("myLong", DataType.LONG);
adapter.createField(intField);
adapter.setValue("myText", new StringFieldValue("input text"));
expression.setStatementOutput(new DocumentType("myDocument"), intField);
VerificationContext verificationContext = new VerificationContext(adapter);
assertEquals(DataType.LONG, expression.verify(verificationContext));
ExecutionContext context = new ExecutionContext(adapter);
context.setValue(new StringFieldValue("input text"));
expression.execute(context);
assertTrue(adapter.values.containsKey("myLong"));
assertEquals(7678158186624760752L, adapter.values.get("myLong").getWrappedValue());
} | class ScriptTestCase {
private final DocumentType type;
public ScriptTestCase() {
type = new DocumentType("mytype");
type.addField("in-1", DataType.STRING);
type.addField("in-2", DataType.STRING);
type.addField("out-1", DataType.STRING);
type.addField("out-2", DataType.STRING);
type.addField("mybool", DataType.BOOL);
}
@Test
public void requireThatScriptExecutesStatements() {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("6"));
input.setFieldValue("in-2", new StringFieldValue("9"));
Expression exp = new ScriptExpression(
new StatementExpression(new InputExpression("in-1"), new AttributeExpression("out-1")),
new StatementExpression(new InputExpression("in-2"), new AttributeExpression("out-2")));
Document output = Expression.execute(exp, input);
assertNotNull(output);
assertEquals(new StringFieldValue("6"), output.getFieldValue("out-1"));
assertEquals(new StringFieldValue("9"), output.getFieldValue("out-2"));
}
@Test
public void requireThatEachStatementHasEmptyInput() {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue(input.getField("in-1"), new StringFieldValue("69"));
Expression exp = new ScriptExpression(
new StatementExpression(new InputExpression("in-1"), new AttributeExpression("out-1")),
new StatementExpression(new AttributeExpression("out-2")));
try {
exp.verify(input);
fail();
} catch (VerificationException e) {
assertTrue(e.getExpressionType().equals(ScriptExpression.class));
assertEquals("Expected any input, got null.", e.getMessage());
}
}
@Test
public void requireThatFactoryMethodWorks() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("FOO"));
Document output = Expression.execute(Expression.fromString("input 'in-1' | { index 'out-1'; lowercase | index 'out-2' }"), input);
assertNotNull(output);
assertEquals(new StringFieldValue("FOO"), output.getFieldValue("out-1"));
assertEquals(new StringFieldValue("foo"), output.getFieldValue("out-2"));
}
@Test
public void requireThatIfExpressionPassesOriginalInputAlong() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
Document output = Expression.execute(Expression.fromString("'foo' | if (1 < 2) { 'bar' | index 'out-1' } else { 'baz' | index 'out-1' } | index 'out-1'"), input);
assertNotNull(output);
assertEquals(new StringFieldValue("foo"), output.getFieldValue("out-1"));
}
@Test
public void testLiteralBoolean() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("foo"));
var expression = Expression.fromString("if (input 'in-1' == \"foo\") { true | summary 'mybool' | attribute 'mybool' }");
Document output = Expression.execute(expression, input);
assertNotNull(output);
assertEquals(new BoolFieldValue(true), output.getFieldValue("mybool"));
}
@Test
public void testIntHash() throws ParseException {
var expression = Expression.fromString("input myText | hash | attribute 'myInt'");
SimpleTestAdapter adapter = new SimpleTestAdapter();
adapter.createField(new Field("myText", DataType.STRING));
var intField = new Field("myInt", DataType.INT);
adapter.createField(intField);
adapter.setValue("myText", new StringFieldValue("input text"));
expression.setStatementOutput(new DocumentType("myDocument"), intField);
VerificationContext verificationContext = new VerificationContext(adapter);
assertEquals(DataType.INT, expression.verify(verificationContext));
ExecutionContext context = new ExecutionContext(adapter);
context.setValue(new StringFieldValue("input text"));
expression.execute(context);
assertNotNull(context);
assertTrue(adapter.values.containsKey("myInt"));
assertEquals(-1425622096, adapter.values.get("myInt").getWrappedValue());
}
@Test
@Test
public void testEmbed() throws ParseException {
TensorType tensorType = TensorType.fromSpec("tensor(d[4])");
var expression = Expression.fromString("input myText | embed | attribute 'myTensor'",
new SimpleLinguistics(),
new MockEmbedder("myDocument.myTensor"));
SimpleTestAdapter adapter = new SimpleTestAdapter();
adapter.createField(new Field("myText", DataType.STRING));
var tensorField = new Field("myTensor", new TensorDataType(tensorType));
adapter.createField(tensorField);
adapter.setValue("myText", new StringFieldValue("input text"));
expression.setStatementOutput(new DocumentType("myDocument"), tensorField);
VerificationContext verificationContext = new VerificationContext(adapter);
assertEquals(TensorDataType.class, expression.verify(verificationContext).getClass());
ExecutionContext context = new ExecutionContext(adapter);
context.setValue(new StringFieldValue("input text"));
expression.execute(context);
assertNotNull(context);
assertTrue(adapter.values.containsKey("myTensor"));
assertEquals(Tensor.from(tensorType, "[7,3,0,0]"),
((TensorFieldValue)adapter.values.get("myTensor")).getTensor().get());
}
private static class MockEmbedder implements Embedder {
private final String expectedDestination;
public MockEmbedder(String expectedDestination) {
this.expectedDestination = expectedDestination;
}
@Override
public List<Integer> embed(String text, Embedder.Context context) {
return null;
}
@Override
public Tensor embed(String text, Embedder.Context context, TensorType tensorType) {
assertEquals(expectedDestination, context.getDestination());
return Tensor.from(tensorType, "[7,3,0,0]");
}
}
} | class ScriptTestCase {
private final DocumentType type;
public ScriptTestCase() {
type = new DocumentType("mytype");
type.addField("in-1", DataType.STRING);
type.addField("in-2", DataType.STRING);
type.addField("out-1", DataType.STRING);
type.addField("out-2", DataType.STRING);
type.addField("mybool", DataType.BOOL);
}
@Test
public void requireThatScriptExecutesStatements() {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("6"));
input.setFieldValue("in-2", new StringFieldValue("9"));
Expression exp = new ScriptExpression(
new StatementExpression(new InputExpression("in-1"), new AttributeExpression("out-1")),
new StatementExpression(new InputExpression("in-2"), new AttributeExpression("out-2")));
Document output = Expression.execute(exp, input);
assertNotNull(output);
assertEquals(new StringFieldValue("6"), output.getFieldValue("out-1"));
assertEquals(new StringFieldValue("9"), output.getFieldValue("out-2"));
}
@Test
public void requireThatEachStatementHasEmptyInput() {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue(input.getField("in-1"), new StringFieldValue("69"));
Expression exp = new ScriptExpression(
new StatementExpression(new InputExpression("in-1"), new AttributeExpression("out-1")),
new StatementExpression(new AttributeExpression("out-2")));
try {
exp.verify(input);
fail();
} catch (VerificationException e) {
assertTrue(e.getExpressionType().equals(ScriptExpression.class));
assertEquals("Expected any input, got null.", e.getMessage());
}
}
@Test
public void requireThatFactoryMethodWorks() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("FOO"));
Document output = Expression.execute(Expression.fromString("input 'in-1' | { index 'out-1'; lowercase | index 'out-2' }"), input);
assertNotNull(output);
assertEquals(new StringFieldValue("FOO"), output.getFieldValue("out-1"));
assertEquals(new StringFieldValue("foo"), output.getFieldValue("out-2"));
}
@Test
public void requireThatIfExpressionPassesOriginalInputAlong() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
Document output = Expression.execute(Expression.fromString("'foo' | if (1 < 2) { 'bar' | index 'out-1' } else { 'baz' | index 'out-1' } | index 'out-1'"), input);
assertNotNull(output);
assertEquals(new StringFieldValue("foo"), output.getFieldValue("out-1"));
}
@Test
public void testLiteralBoolean() throws ParseException {
Document input = new Document(type, "id:scheme:mytype::");
input.setFieldValue("in-1", new StringFieldValue("foo"));
var expression = Expression.fromString("if (input 'in-1' == \"foo\") { true | summary 'mybool' | attribute 'mybool' }");
Document output = Expression.execute(expression, input);
assertNotNull(output);
assertEquals(new BoolFieldValue(true), output.getFieldValue("mybool"));
}
@Test
public void testIntHash() throws ParseException {
    // "hash" of a string input must verify as INT and store the expected hash value.
    var hashStatement = Expression.fromString("input myText | hash | attribute 'myInt'");
    SimpleTestAdapter fields = new SimpleTestAdapter();
    fields.createField(new Field("myText", DataType.STRING));
    var targetField = new Field("myInt", DataType.INT);
    fields.createField(targetField);
    fields.setValue("myText", new StringFieldValue("input text"));
    hashStatement.setStatementOutput(new DocumentType("myDocument"), targetField);
    // Verification phase: the statement must type-check to INT.
    assertEquals(DataType.INT, hashStatement.verify(new VerificationContext(fields)));
    // Execution phase: run the statement and check the stored hash of "input text".
    ExecutionContext execution = new ExecutionContext(fields);
    execution.setValue(new StringFieldValue("input text"));
    hashStatement.execute(execution);
    assertTrue(fields.values.containsKey("myInt"));
    assertEquals(-1425622096, fields.values.get("myInt").getWrappedValue());
}
// Fix: the @Test annotation was duplicated on consecutive lines; @Test is not
// @Repeatable, so repeating it is a compile-time error. Keep a single @Test.
@Test
public void testEmbed() throws ParseException {
    // Verifies that "embed" invokes the configured Embedder with the destination
    // "<document type>.<field>" and that the produced tensor is stored in the field.
    TensorType tensorType = TensorType.fromSpec("tensor(d[4])");
    var expression = Expression.fromString("input myText | embed | attribute 'myTensor'",
                                           new SimpleLinguistics(),
                                           new MockEmbedder("myDocument.myTensor"));
    SimpleTestAdapter adapter = new SimpleTestAdapter();
    adapter.createField(new Field("myText", DataType.STRING));
    var tensorField = new Field("myTensor", new TensorDataType(tensorType));
    adapter.createField(tensorField);
    adapter.setValue("myText", new StringFieldValue("input text"));
    expression.setStatementOutput(new DocumentType("myDocument"), tensorField);
    // Verification phase: the statement must resolve to a tensor data type.
    VerificationContext verificationContext = new VerificationContext(adapter);
    assertEquals(TensorDataType.class, expression.verify(verificationContext).getClass());
    // Execution phase: MockEmbedder returns [7,3,0,0] for any input.
    ExecutionContext context = new ExecutionContext(adapter);
    context.setValue(new StringFieldValue("input text"));
    expression.execute(context);
    assertTrue(adapter.values.containsKey("myTensor"));
    assertEquals(Tensor.from(tensorType, "[7,3,0,0]"),
                 ((TensorFieldValue)adapter.values.get("myTensor")).getTensor().get());
}
// Test double for the Embedder interface: asserts that the indexing runtime
// resolves the embed destination to the expected "<document type>.<field>"
// string, and returns a fixed tensor so callers can assert on the stored value.
private static class MockEmbedder implements Embedder {

    // Destination the runtime is expected to pass, e.g. "myDocument.myTensor".
    private final String expectedDestination;

    public MockEmbedder(String expectedDestination) {
        this.expectedDestination = expectedDestination;
    }

    // Token-id embedding overload is not exercised by these tests; intentionally a stub.
    @Override
    public List<Integer> embed(String text, Embedder.Context context) {
        return null;
    }

    // Verifies the destination supplied by the runtime, then returns a constant
    // tensor regardless of the input text.
    @Override
    public Tensor embed(String text, Embedder.Context context, TensorType tensorType) {
        assertEquals(expectedDestination, context.getDestination());
        return Tensor.from(tensorType, "[7,3,0,0]");
    }
}
} |
Yes, good point — fixed, and made sure to actually test both forms. | private static ApplicationId fromIdString(String idString, String splitCharacter) {
String[] parts = idString.split(splitCharacter);
String errorMessage = "Application ids must be on the form tenant" +
splitCharacter + "application" + splitCharacter + "instance, but was " + idString;
if (parts.length < 3)
throw new IllegalArgumentException(errorMessage);
if (parts.length > 3)
log.log(SEVERE, errorMessage);
return from(parts[0], parts[1], parts[2]);
} | splitCharacter + "application" + splitCharacter + "instance, but was " + idString; | private static ApplicationId fromIdString(String idString, String splitCharacter) {
String[] parts = idString.split(Pattern.quote(splitCharacter));
String errorMessage = "Application ids must be on the form tenant" +
splitCharacter + "application" + splitCharacter + "instance, but was " + idString;
if (parts.length < 3)
throw new IllegalArgumentException(errorMessage);
if (parts.length > 3)
log.log(SEVERE, errorMessage);
return from(parts[0], parts[1], parts[2]);
} | class ApplicationId implements Comparable<ApplicationId> {
private static final Logger log = Logger.getLogger(ApplicationId.class.getName());
static final Pattern namePattern = Pattern.compile("[a-zA-Z0-9_-]{1,256}");
private static final ApplicationId global = new ApplicationId(TenantName.from("hosted-vespa"),
ApplicationName.from("routing"),
InstanceName.from("default")) { };
private static final Comparator<ApplicationId> comparator = Comparator.comparing(ApplicationId::tenant)
.thenComparing(ApplicationId::application)
.thenComparing(ApplicationId::instance)
.thenComparing(global::equals, Boolean::compare);
private final TenantName tenant;
private final ApplicationName application;
private final InstanceName instance;
private final String serializedForm;
private ApplicationId(TenantName tenant, ApplicationName applicationName, InstanceName instanceName) {
this.tenant = tenant;
this.application = applicationName;
this.instance = instanceName;
this.serializedForm = toSerializedForm();
}
public static ApplicationId from(ApplicationIdConfig config) {
return from(TenantName.from(config.tenant()),
ApplicationName.from(config.application()),
InstanceName.from(config.instance()));
}
public static ApplicationId from(TenantName tenant, ApplicationName application, InstanceName instance) {
return new ApplicationId(tenant, application, instance);
}
public static ApplicationId from(String tenant, String application, String instance) {
return new ApplicationId(TenantName.from(tenant), ApplicationName.from(application), InstanceName.from(instance));
}
public static ApplicationId fromSerializedForm(String idString) {
return fromIdString(idString, ":");
}
public static ApplicationId fromFullString(String idString) {
return fromIdString(idString, "\\.");
}
@Override
public int hashCode() { return Objects.hash(tenant, application, instance); }
@Override
public boolean equals(Object other) {
    // Identity fast-path, then strict class match: subclasses (e.g. the special
    // "global" anonymous instance) are never equal to a plain ApplicationId.
    if (other == this) return true;
    if (other == null || other.getClass() != getClass()) return false;
    ApplicationId that = (ApplicationId) other;
    return tenant.equals(that.tenant)
           && application.equals(that.application)
           && instance.equals(that.instance);
}
/** Returns a serialized form of the content of this: tenant:application:instance */
public String serializedForm() { return serializedForm; }
/** Returns "dotted" string (tenant.application.instance) with instance name omitted if it is "default" */
public String toShortString() {
    StringBuilder b = new StringBuilder(tenant().value()).append('.').append(application().value());
    if ( ! instance().isDefault())
        b.append('.').append(instance().value());
    return b.toString();
}
/** Returns "dotted" string (tenant.application.instance) with instance name always included */
public String toFullString() {
return tenant().value() + "." + application().value() + "." + instance().value();
}
private String toSerializedForm() {
return tenant.value() + ":" + application.value() + ":" + instance.value();
}
@Override
public String toString() { return toShortString(); }
public TenantName tenant() { return tenant; }
public ApplicationName application() { return application; }
public InstanceName instance() { return instance; }
@Override
public int compareTo(ApplicationId other) {
return comparator.compare(this, other);
}
/** Returns an application id where all fields are "default" */
public static ApplicationId defaultId() {
return new ApplicationId(TenantName.defaultName(), ApplicationName.defaultName(), InstanceName.defaultName());
}
/** Returns a very special application id, which is not equal to any other id. */
public static ApplicationId global() {
return global;
}
/**
 * Step-wise construction of an ApplicationId. Tenant and instance default to
 * their "default" names; the application name is mandatory.
 */
public static class Builder {

    private TenantName tenant = TenantName.defaultName();
    private ApplicationName application = null; // mandatory — build() rejects a missing value
    private InstanceName instance = InstanceName.defaultName();

    public Builder tenant(TenantName ten) { this.tenant = ten; return this; }
    public Builder tenant(String ten) { return tenant(TenantName.from(ten)); }

    public Builder applicationName(ApplicationName nam) { this.application = nam; return this; }
    public Builder applicationName(String nam) { return applicationName(ApplicationName.from(nam)); }

    public Builder instanceName(InstanceName ins) { this.instance = ins; return this; }
    public Builder instanceName(String ins) { return instanceName(InstanceName.from(ins)); }

    /** Creates the id; throws IllegalArgumentException if no application name was set. */
    public ApplicationId build() {
        if (application == null)
            throw new IllegalArgumentException("must set application name in builder");
        return ApplicationId.from(tenant, application, instance);
    }
}
} | class ApplicationId implements Comparable<ApplicationId> {
private static final Logger log = Logger.getLogger(ApplicationId.class.getName());
static final Pattern namePattern = Pattern.compile("[a-zA-Z0-9_-]{1,256}");
private static final ApplicationId global = new ApplicationId(TenantName.from("hosted-vespa"),
ApplicationName.from("routing"),
InstanceName.from("default")) { };
private static final Comparator<ApplicationId> comparator = Comparator.comparing(ApplicationId::tenant)
.thenComparing(ApplicationId::application)
.thenComparing(ApplicationId::instance)
.thenComparing(global::equals, Boolean::compare);
private final TenantName tenant;
private final ApplicationName application;
private final InstanceName instance;
private final String serializedForm;
private ApplicationId(TenantName tenant, ApplicationName applicationName, InstanceName instanceName) {
this.tenant = tenant;
this.application = applicationName;
this.instance = instanceName;
this.serializedForm = toSerializedForm();
}
public static ApplicationId from(ApplicationIdConfig config) {
return from(TenantName.from(config.tenant()),
ApplicationName.from(config.application()),
InstanceName.from(config.instance()));
}
public static ApplicationId from(TenantName tenant, ApplicationName application, InstanceName instance) {
return new ApplicationId(tenant, application, instance);
}
public static ApplicationId from(String tenant, String application, String instance) {
return new ApplicationId(TenantName.from(tenant), ApplicationName.from(application), InstanceName.from(instance));
}
public static ApplicationId fromSerializedForm(String idString) { return fromIdString(idString, ":"); }
public static ApplicationId fromFullString(String idString) { return fromIdString(idString, "."); }
@Override
public int hashCode() { return Objects.hash(tenant, application, instance); }
@Override
public boolean equals(Object other) {
if (this == other) return true;
if (other == null || getClass() != other.getClass()) return false;
ApplicationId rhs = (ApplicationId) other;
return tenant.equals(rhs.tenant) &&
application.equals(rhs.application) &&
instance.equals(rhs.instance);
}
/** Returns a serialized form of the content of this: tenant:application:instance */
public String serializedForm() { return serializedForm; }
/** Returns "dotted" string (tenant.application.instance) with instance name omitted if it is "default" */
public String toShortString() {
return tenant().value() + "." + application().value() +
( instance().isDefault() ? "" : "." + instance().value() );
}
/** Returns "dotted" string (tenant.application.instance) with instance name always included */
public String toFullString() {
return tenant().value() + "." + application().value() + "." + instance().value();
}
private String toSerializedForm() {
return tenant.value() + ":" + application.value() + ":" + instance.value();
}
@Override
public String toString() { return toShortString(); }
public TenantName tenant() { return tenant; }
public ApplicationName application() { return application; }
public InstanceName instance() { return instance; }
@Override
public int compareTo(ApplicationId other) {
return comparator.compare(this, other);
}
/** Returns an application id where all fields are "default" */
public static ApplicationId defaultId() {
return new ApplicationId(TenantName.defaultName(), ApplicationName.defaultName(), InstanceName.defaultName());
}
/** Returns a very special application id, which is not equal to any other id. */
public static ApplicationId global() {
return global;
}
public static class Builder {
private TenantName tenant;
private ApplicationName application;
private InstanceName instance;
public Builder() {
this.tenant = TenantName.defaultName();
this.application = null;
this.instance = InstanceName.defaultName();
}
public Builder tenant(TenantName ten) { this.tenant = ten; return this; }
public Builder tenant(String ten) { return tenant(TenantName.from(ten)); }
public Builder applicationName(ApplicationName nam) { this.application = nam; return this; }
public Builder applicationName(String nam) { return applicationName(ApplicationName.from(nam)); }
public Builder instanceName(InstanceName ins) { this.instance = ins; return this; }
public Builder instanceName(String ins) { return instanceName(InstanceName.from(ins)); }
public ApplicationId build() {
if (application == null) {
throw new IllegalArgumentException("must set application name in builder");
}
return ApplicationId.from(tenant, application, instance);
}
}
} |
Same as [test_split_with_element_allowed_splits](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/runners/worker/bundle_processor_test.py#L81). | public static Iterable<Object[]> data() {
return ImmutableList.<Object[]>builder()
.add(
new Object[] {
channelSplitResult(4L),
16L,
ImmutableList.of("A"),
0.25,
ImmutableList.of(2L, 3L, 4L, 5L)
})
.add(
new Object[] {
channelSplitResult(5L),
16L,
ImmutableList.of("A"),
0.25,
ImmutableList.of(2L, 3L, 5L)
})
.add(
new Object[] {
channelSplitResult(3L),
16L,
ImmutableList.of("A"),
0.25,
ImmutableList.of(2L, 3L, 6L)
})
.add(
new Object[] {
channelSplitResult(5L),
16L,
ImmutableList.of("A"),
0.25,
ImmutableList.of(5L, 6L, 7L)
})
.add(
new Object[] {
channelSplitResult(3L),
16L,
ImmutableList.of("A"),
0.25,
ImmutableList.of(1L, 2L, 3L)
})
.add(
new Object[] {
ProcessBundleSplitResponse.getDefaultInstance(),
16L,
ImmutableList.of("A", "B", "C", "D", "F", "G"),
0.25,
ImmutableList.of(1L, 2L, 3L)
})
.build();
} | new Object[] { | public static Iterable<Object[]> data() {
return ImmutableList.<Object[]>builder()
.add(new Object[] {channelSplitResult(1L), 0L, 0, 0, 16L})
.add(new Object[] {channelSplitResult(4L), 0L, 0, 0.24, 16L})
.add(new Object[] {channelSplitResult(4L), 0L, 0, 0.25, 16L})
.add(new Object[] {channelSplitResult(4L), 0L, 0, 0.26, 16L})
.add(new Object[] {channelSplitResult(8L), 0L, 0, 0.5, 16L})
.add(new Object[] {channelSplitResult(9L), 2, 0, 0.5, 16L})
.add(new Object[] {channelSplitResult(11L), 6L, 0, 0.5, 16L})
.add(new Object[] {channelSplitResult(1L), 0L, 0.5, 0.25, 4L})
.add(new Object[] {channelSplitResult(2L), 0L, 0.9, 0.25, 4L})
.add(new Object[] {channelSplitResult(2L), 1L, 0, 0.25, 4L})
.add(new Object[] {channelSplitResult(2L), 1L, 0.1, 0.25, 4L})
.build();
} | class ChannelSplitTest {
@Parameterized.Parameters
@Parameterized.Parameter(0)
public ProcessBundleSplitResponse expectedResponse;
@Parameterized.Parameter(1)
public long inputElements;
@Parameterized.Parameter(2)
public List<String> processedElements;
@Parameterized.Parameter(3)
public double fractionOfRemainder;
@Parameterized.Parameter(4)
public double elementProgress;
// Parameterized check: splitting the read runner's data channel at the requested
// fraction must produce exactly the expected ProcessBundleSplitResponse.
@Test
public void testChannelSplit() throws Exception {
    // Mock the downstream receiver and data client; only getProgress() matters here —
    // it reports how far into the current element the receiver is.
    SplittingReceiver splittingReceiver = mock(SplittingReceiver.class);
    BeamFnDataClient mockBeamFnDataClient = mock(BeamFnDataClient.class);
    when(splittingReceiver.getProgress()).thenReturn(elementProgress);
    BeamFnDataReadRunner<String> readRunner =
        createReadRunner(splittingReceiver, PTRANSFORM_ID, mockBeamFnDataClient);
    // Empty allowed-splits list means any split point may be chosen.
    assertEquals(
        expectedResponse,
        executeSplit(
            readRunner,
            PTRANSFORM_ID,
            inputElements,
            processedElements,
            fractionOfRemainder,
            Collections.EMPTY_LIST));
}
} | class ChannelSplitTest {
@Parameterized.Parameters
@Parameterized.Parameter(0)
public ProcessBundleSplitResponse expectedResponse;
@Parameterized.Parameter(1)
public long index;
@Parameterized.Parameter(2)
public double elementProgress;
@Parameterized.Parameter(3)
public double fractionOfRemainder;
@Parameterized.Parameter(4)
public long bufferSize;
@Test
public void testChannelSplit() throws Exception {
SplittingReceiver splittingReceiver = mock(SplittingReceiver.class);
BeamFnDataClient mockBeamFnDataClient = mock(BeamFnDataClient.class);
when(splittingReceiver.getProgress()).thenReturn(elementProgress);
BeamFnDataReadRunner<String> readRunner =
createReadRunner(splittingReceiver, PTRANSFORM_ID, mockBeamFnDataClient);
readRunner.registerInputLocation();
assertEquals(
expectedResponse,
executeSplit(
readRunner,
PTRANSFORM_ID,
index,
fractionOfRemainder,
bufferSize,
Collections.EMPTY_LIST));
}
} |
After confirming with Andrey, we keep the previous behavior of still ignoring `IOException` during close, because we only make a best effort to delete files when releasing resources. If a file was already deleted by other factors, there is no need to cause an unnecessary failover. If we want to refactor this behavior in the future, we can do it separately rather than blocking this PR. I removed the `try catch` from `IOManager#close` and added some javadoc to that method. | public void afterTest() throws Exception {
this.ioManager.close();
if (!this.ioManager.isProperlyShutDown()) {
Assert.fail("I/O Manager was not properly shut down.");
}
if (this.memoryManager != null && testSuccess) {
Assert.assertTrue("Memory leak: not all segments have been returned to the memory manager.",
this.memoryManager.verifyEmpty());
this.memoryManager.shutdown();
this.memoryManager = null;
}
} | if (!this.ioManager.isProperlyShutDown()) { | public void afterTest() throws Exception {
this.ioManager.close();
if (this.memoryManager != null && testSuccess) {
Assert.assertTrue("Memory leak: not all segments have been returned to the memory manager.",
this.memoryManager.verifyEmpty());
this.memoryManager.shutdown();
this.memoryManager = null;
}
} | class ExternalSortLargeRecordsITCase extends TestLogger {
private static final int MEMORY_SIZE = 1024 * 1024 * 78;
private final AbstractInvokable parentTask = new DummyInvokable();
private IOManager ioManager;
private MemoryManager memoryManager;
private boolean testSuccess;
// Allocates the per-test fixture: a memory manager sized MEMORY_SIZE (78 MiB)
// with page-size factor 1, and an asynchronous I/O manager. The matching
// afterTest() releases both and verifies that no memory segments leaked.
@Before
public void beforeTest() {
    this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
    this.ioManager = new IOManagerAsync();
}
@After
@Test
public void testSortWithLongRecordsOnly() {
try {
final int NUM_RECORDS = 10;
final TypeInformation<?>[] types = new TypeInformation<?>[] {
BasicTypeInfo.LONG_TYPE_INFO,
new ValueTypeInfo<SomeMaybeLongValue>(SomeMaybeLongValue.class)
};
final TupleTypeInfo<Tuple2<Long, SomeMaybeLongValue>> typeInfo =
new TupleTypeInfo<Tuple2<Long,SomeMaybeLongValue>>(types);
final TypeSerializer<Tuple2<Long, SomeMaybeLongValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
final TypeComparator<Tuple2<Long, SomeMaybeLongValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> source =
new MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>>() {
private final Random rnd = new Random(457821643089756298L);
private int num = 0;
@Override
public Tuple2<Long, SomeMaybeLongValue> next(Tuple2<Long, SomeMaybeLongValue> reuse) {
return next();
}
@Override
public Tuple2<Long, SomeMaybeLongValue> next() {
if (num++ < NUM_RECORDS) {
long val = rnd.nextLong();
return new Tuple2<Long, SomeMaybeLongValue>(val, new SomeMaybeLongValue((int) val));
}
else {
return null;
}
}
};
@SuppressWarnings("unchecked")
Sorter<Tuple2<Long, SomeMaybeLongValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SomeMaybeLongValue>>(
this.memoryManager, this.ioManager,
source, this.parentTask,
new RuntimeSerializerFactory<Tuple2<Long, SomeMaybeLongValue>>(serializer, (Class<Tuple2<Long, SomeMaybeLongValue>>) (Class<?>) Tuple2.class),
comparator, 1.0, 1, 128, 0.7f, true /* use large record handler */ , false);
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> iterator = sorter.getIterator();
Tuple2<Long, SomeMaybeLongValue> val = serializer.createInstance();
long prevKey = Long.MAX_VALUE;
for (int i = 0; i < NUM_RECORDS; i++) {
val = iterator.next(val);
assertTrue(val.f0 <= prevKey);
assertTrue(val.f0.intValue() == val.f1.val());
}
assertNull(iterator.next(val));
sorter.close();
testSuccess = true;
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testSortWithLongAndShortRecordsMixed() {
try {
final int NUM_RECORDS = 1000000;
final int LARGE_REC_INTERVAL = 100000;
final TypeInformation<?>[] types = new TypeInformation<?>[] {
BasicTypeInfo.LONG_TYPE_INFO,
new ValueTypeInfo<SomeMaybeLongValue>(SomeMaybeLongValue.class)
};
final TupleTypeInfo<Tuple2<Long, SomeMaybeLongValue>> typeInfo =
new TupleTypeInfo<Tuple2<Long,SomeMaybeLongValue>>(types);
final TypeSerializer<Tuple2<Long, SomeMaybeLongValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
final TypeComparator<Tuple2<Long, SomeMaybeLongValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> source =
new MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>>() {
private final Random rnd = new Random(145610843608763871L);
private int num = -1;
@Override
public Tuple2<Long, SomeMaybeLongValue> next(Tuple2<Long, SomeMaybeLongValue> reuse) {
return next();
}
@Override
public Tuple2<Long, SomeMaybeLongValue> next() {
if (++num < NUM_RECORDS) {
long val = rnd.nextLong();
return new Tuple2<Long, SomeMaybeLongValue>(val, new SomeMaybeLongValue((int) val, num % LARGE_REC_INTERVAL == 0));
}
else {
return null;
}
}
};
@SuppressWarnings("unchecked")
Sorter<Tuple2<Long, SomeMaybeLongValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SomeMaybeLongValue>>(
this.memoryManager, this.ioManager,
source, this.parentTask,
new RuntimeSerializerFactory<Tuple2<Long, SomeMaybeLongValue>>(serializer, (Class<Tuple2<Long, SomeMaybeLongValue>>) (Class<?>) Tuple2.class),
comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, true);
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> iterator = sorter.getIterator();
Tuple2<Long, SomeMaybeLongValue> val = serializer.createInstance();
long prevKey = Long.MAX_VALUE;
for (int i = 0; i < NUM_RECORDS; i++) {
val = iterator.next(val);
assertTrue("Sort order violated", val.f0 <= prevKey);
assertEquals("Serialization of test data type incorrect", val.f0.intValue(), val.f1.val());
}
assertNull(iterator.next(val));
sorter.close();
testSuccess = true;
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testSortWithShortMediumAndLargeRecords() {
try {
final int NUM_RECORDS = 50000;
final int LARGE_REC_INTERVAL = 10000;
final int MEDIUM_REC_INTERVAL = 500;
final TypeInformation<?>[] types = new TypeInformation<?>[] {
BasicTypeInfo.LONG_TYPE_INFO,
new ValueTypeInfo<SmallOrMediumOrLargeValue>(SmallOrMediumOrLargeValue.class)
};
final TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>> typeInfo =
new TupleTypeInfo<Tuple2<Long,SmallOrMediumOrLargeValue>>(types);
final TypeSerializer<Tuple2<Long, SmallOrMediumOrLargeValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
final TypeComparator<Tuple2<Long, SmallOrMediumOrLargeValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());
MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> source =
new MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>>() {
private final Random rnd = new Random(1456108743687167086L);
private int num = -1;
@Override
public Tuple2<Long, SmallOrMediumOrLargeValue> next(Tuple2<Long, SmallOrMediumOrLargeValue> reuse) {
return next();
}
@Override
public Tuple2<Long, SmallOrMediumOrLargeValue> next() {
if (++num < NUM_RECORDS) {
int size;
if (num % LARGE_REC_INTERVAL == 0) {
size = SmallOrMediumOrLargeValue.LARGE_SIZE;
}
else if (num % MEDIUM_REC_INTERVAL == 0) {
size = SmallOrMediumOrLargeValue.MEDIUM_SIZE;
}
else {
size = SmallOrMediumOrLargeValue.SMALL_SIZE;
}
long val = rnd.nextLong();
return new Tuple2<Long, SmallOrMediumOrLargeValue>(val, new SmallOrMediumOrLargeValue((int) val, size));
}
else {
return null;
}
}
};
@SuppressWarnings("unchecked")
Sorter<Tuple2<Long, SmallOrMediumOrLargeValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SmallOrMediumOrLargeValue>>(
this.memoryManager, this.ioManager,
source, this.parentTask,
new RuntimeSerializerFactory<Tuple2<Long, SmallOrMediumOrLargeValue>>(serializer, (Class<Tuple2<Long, SmallOrMediumOrLargeValue>>) (Class<?>) Tuple2.class),
comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, false);
MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> iterator = sorter.getIterator();
Tuple2<Long, SmallOrMediumOrLargeValue> val = serializer.createInstance();
long prevKey = Long.MAX_VALUE;
for (int i = 0; i < NUM_RECORDS; i++) {
val = iterator.next(val);
assertTrue(val.f0 <= prevKey);
assertTrue(val.f0.intValue() == val.f1.val());
}
assertNull(iterator.next(val));
sorter.close();
testSuccess = true;
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
// Sorts 70 records whose payload (MEDIUM_SIZE) is large relative to the sort
// memory, exercising the large-record handler end to end.
// Fix: prevKey was never updated inside the verification loop, so the
// descending-order assertion (val.f0 <= prevKey) compared every key against the
// initial Long.MAX_VALUE and was vacuously true. It now tracks the last key.
@Test
public void testSortWithMediumRecordsOnly() {
    try {
        final int NUM_RECORDS = 70;
        final TypeInformation<?>[] types = new TypeInformation<?>[] {
                BasicTypeInfo.LONG_TYPE_INFO,
                new ValueTypeInfo<SomeMaybeLongValue>(SmallOrMediumOrLargeValue.class) };
        // -- type setup: Tuple2<Long key, medium-sized value payload> -----------------
        final TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>> typeInfo =
                new TupleTypeInfo<Tuple2<Long,SmallOrMediumOrLargeValue>>(types);
        final TypeSerializer<Tuple2<Long, SmallOrMediumOrLargeValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple2<Long, SmallOrMediumOrLargeValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());
        // Deterministic source producing NUM_RECORDS random-keyed medium records;
        // the value embeds the (truncated) key so deserialization can be checked.
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> source =
                new MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>>() {
            private final Random rnd = new Random(62360187263087678L);
            private int num = -1;
            @Override
            public Tuple2<Long, SmallOrMediumOrLargeValue> next(Tuple2<Long, SmallOrMediumOrLargeValue> reuse) {
                return next();
            }
            @Override
            public Tuple2<Long, SmallOrMediumOrLargeValue> next() {
                if (++num < NUM_RECORDS) {
                    long val = rnd.nextLong();
                    return new Tuple2<Long, SmallOrMediumOrLargeValue>(val, new SmallOrMediumOrLargeValue((int) val, SmallOrMediumOrLargeValue.MEDIUM_SIZE));
                }
                else {
                    return null;
                }
            }
        };
        @SuppressWarnings("unchecked")
        Sorter<Tuple2<Long, SmallOrMediumOrLargeValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SmallOrMediumOrLargeValue>>(
                this.memoryManager, this.ioManager,
                source, this.parentTask,
                new RuntimeSerializerFactory<Tuple2<Long, SmallOrMediumOrLargeValue>>(serializer, (Class<Tuple2<Long, SmallOrMediumOrLargeValue>>) (Class<?>) Tuple2.class),
                comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, true);
        // Drain the sorted output: keys must be non-increasing (comparator order is
        // descending) and each payload must still match its key after (de)serialization.
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> iterator = sorter.getIterator();
        Tuple2<Long, SmallOrMediumOrLargeValue> val = serializer.createInstance();
        long prevKey = Long.MAX_VALUE;
        for (int i = 0; i < NUM_RECORDS; i++) {
            val = iterator.next(val);
            assertTrue(val.f0 <= prevKey);
            assertTrue(val.f0.intValue() == val.f1.val());
            prevKey = val.f0; // track last key so the order check is not vacuous
        }
        assertNull(iterator.next(val));
        sorter.close();
        testSuccess = true; // signals afterTest() to verify no memory segments leaked
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Test value type whose serialized form is either tiny (int + flag) or padded
 * with a ~100 MB pattern payload, used to force the sorter's large-record path.
 * read() re-verifies the payload bytes, catching corruption introduced by the
 * sorter's spill/merge machinery.
 */
public static final class SomeMaybeLongValue implements org.apache.flink.types.Value {

    private static final long serialVersionUID = 1L;

    // Shared 100 MB payload, filled once with the repeating byte pattern 0,1,2,...
    private static final byte[] BUFFER = new byte[100 * 1024 * 1024];

    static {
        for (int i = 0; i < BUFFER.length; i++) {
            BUFFER[i] = (byte) i;
        }
    }

    private int val;       // the record's value; mirrors the tuple key (truncated to int)
    private boolean isLong; // true when the serialized form carries the 100 MB payload

    public SomeMaybeLongValue() {
        this.isLong = true;
    }

    public SomeMaybeLongValue(int val) {
        this.val = val;
        this.isLong = true;
    }

    public SomeMaybeLongValue(int val, boolean isLong) {
        this.val = val;
        this.isLong = isLong;
    }

    public int val() {
        return val;
    }

    public boolean isLong() {
        return isLong;
    }

    // Deserializes the record; for "long" records, asserts every payload byte
    // still matches the static pattern written by write().
    @Override
    public void read(DataInputView in) throws IOException {
        val = in.readInt();
        isLong = in.readBoolean();
        if (isLong) {
            for (int i = 0; i < BUFFER.length; i++) {
                byte b = in.readByte();
                assertEquals(BUFFER[i], b);
            }
        }
    }

    @Override
    public void write(DataOutputView out) throws IOException {
        out.writeInt(val);
        out.writeBoolean(isLong);
        if (isLong) {
            out.write(BUFFER);
        }
    }

    // Equality and hashing are based on val only; the payload flag is ignored.
    @Override
    public int hashCode() {
        return val;
    }

    @Override
    public boolean equals(Object obj) {
        return (obj instanceof SomeMaybeLongValue) && ((SomeMaybeLongValue) obj).val == this.val;
    }

    @Override
    public String toString() {
        return isLong ? "Large Value" : "Small Value";
    }
}
public static final class SmallOrMediumOrLargeValue implements org.apache.flink.types.Value {
private static final long serialVersionUID = 1L;
public static final int SMALL_SIZE = 0;
public static final int MEDIUM_SIZE = 12 * 1024 * 1024;
public static final int LARGE_SIZE = 100 * 1024 * 1024;
private int val;
private int size;
public SmallOrMediumOrLargeValue() {
this.size = SMALL_SIZE;
}
public SmallOrMediumOrLargeValue(int val) {
this.val = val;
this.size = SMALL_SIZE;
}
public SmallOrMediumOrLargeValue(int val, int size) {
this.val = val;
this.size = size;
}
public int val() {
return val;
}
public int getSize() {
return size;
}
@Override
public void read(DataInputView in) throws IOException {
val = in.readInt();
size = in.readInt();
for (int i = 0; i < size; i++) {
byte b = in.readByte();
assertEquals((byte) i, b);
}
}
@Override
public void write(DataOutputView out) throws IOException {
out.writeInt(val);
out.writeInt(size);
for (int i = 0; i < size; i++) {
out.write((byte) (i));
}
}
@Override
public int hashCode() {
return val;
}
@Override
public boolean equals(Object obj) {
if (obj instanceof SmallOrMediumOrLargeValue) {
SmallOrMediumOrLargeValue other = (SmallOrMediumOrLargeValue) obj;
return other.val == this.val && other.size == this.size;
} else {
return false;
}
}
@Override
public String toString() {
return String.format("Value %d (%d bytes)", val, size);
}
}
} | class ExternalSortLargeRecordsITCase extends TestLogger {
private static final int MEMORY_SIZE = 1024 * 1024 * 78;
private final AbstractInvokable parentTask = new DummyInvokable();
private IOManager ioManager;
private MemoryManager memoryManager;
private boolean testSuccess;
@Before
public void beforeTest() {
this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
this.ioManager = new IOManagerAsync();
}
@After
@Test
public void testSortWithLongRecordsOnly() {
try {
final int NUM_RECORDS = 10;
final TypeInformation<?>[] types = new TypeInformation<?>[] {
BasicTypeInfo.LONG_TYPE_INFO,
new ValueTypeInfo<SomeMaybeLongValue>(SomeMaybeLongValue.class)
};
final TupleTypeInfo<Tuple2<Long, SomeMaybeLongValue>> typeInfo =
new TupleTypeInfo<Tuple2<Long,SomeMaybeLongValue>>(types);
final TypeSerializer<Tuple2<Long, SomeMaybeLongValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
final TypeComparator<Tuple2<Long, SomeMaybeLongValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> source =
new MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>>() {
private final Random rnd = new Random(457821643089756298L);
private int num = 0;
@Override
public Tuple2<Long, SomeMaybeLongValue> next(Tuple2<Long, SomeMaybeLongValue> reuse) {
return next();
}
@Override
public Tuple2<Long, SomeMaybeLongValue> next() {
if (num++ < NUM_RECORDS) {
long val = rnd.nextLong();
return new Tuple2<Long, SomeMaybeLongValue>(val, new SomeMaybeLongValue((int) val));
}
else {
return null;
}
}
};
@SuppressWarnings("unchecked")
Sorter<Tuple2<Long, SomeMaybeLongValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SomeMaybeLongValue>>(
this.memoryManager, this.ioManager,
source, this.parentTask,
new RuntimeSerializerFactory<Tuple2<Long, SomeMaybeLongValue>>(serializer, (Class<Tuple2<Long, SomeMaybeLongValue>>) (Class<?>) Tuple2.class),
comparator, 1.0, 1, 128, 0.7f, true /* use large record handler */ , false);
MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> iterator = sorter.getIterator();
Tuple2<Long, SomeMaybeLongValue> val = serializer.createInstance();
long prevKey = Long.MAX_VALUE;
for (int i = 0; i < NUM_RECORDS; i++) {
val = iterator.next(val);
assertTrue(val.f0 <= prevKey);
assertTrue(val.f0.intValue() == val.f1.val());
}
assertNull(iterator.next(val));
sorter.close();
testSuccess = true;
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testSortWithLongAndShortRecordsMixed() {
    try {
        final int NUM_RECORDS = 1000000;
        final int LARGE_REC_INTERVAL = 100000;

        final TypeInformation<?>[] types = new TypeInformation<?>[] {
                BasicTypeInfo.LONG_TYPE_INFO,
                new ValueTypeInfo<SomeMaybeLongValue>(SomeMaybeLongValue.class)
        };

        final TupleTypeInfo<Tuple2<Long, SomeMaybeLongValue>> typeInfo =
                new TupleTypeInfo<Tuple2<Long,SomeMaybeLongValue>>(types);

        final TypeSerializer<Tuple2<Long, SomeMaybeLongValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple2<Long, SomeMaybeLongValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());

        // Deterministic source: every LARGE_REC_INTERVAL-th record carries the
        // large (100 MB) payload, all others are small.
        MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> source =
                new MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>>() {

                    private final Random rnd = new Random(145610843608763871L);
                    private int num = -1;

                    @Override
                    public Tuple2<Long, SomeMaybeLongValue> next(Tuple2<Long, SomeMaybeLongValue> reuse) {
                        return next();
                    }

                    @Override
                    public Tuple2<Long, SomeMaybeLongValue> next() {
                        if (++num < NUM_RECORDS) {
                            long val = rnd.nextLong();
                            return new Tuple2<Long, SomeMaybeLongValue>(val, new SomeMaybeLongValue((int) val, num % LARGE_REC_INTERVAL == 0));
                        }
                        else {
                            return null;
                        }
                    }
                };

        @SuppressWarnings("unchecked")
        Sorter<Tuple2<Long, SomeMaybeLongValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SomeMaybeLongValue>>(
                this.memoryManager, this.ioManager,
                source, this.parentTask,
                new RuntimeSerializerFactory<Tuple2<Long, SomeMaybeLongValue>>(serializer, (Class<Tuple2<Long, SomeMaybeLongValue>>) (Class<?>) Tuple2.class),
                comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, true);

        // verify the sorted (descending) result
        MutableObjectIterator<Tuple2<Long, SomeMaybeLongValue>> iterator = sorter.getIterator();

        Tuple2<Long, SomeMaybeLongValue> val = serializer.createInstance();
        long prevKey = Long.MAX_VALUE;

        for (int i = 0; i < NUM_RECORDS; i++) {
            val = iterator.next(val);

            assertTrue("Sort order violated", val.f0 <= prevKey);
            assertEquals("Serialization of test data type incorrect", val.f0.intValue(), val.f1.val());
            // BUGFIX: track the previous key; without this update the sort-order
            // assertion above compares every key against Long.MAX_VALUE and is
            // vacuously true.
            prevKey = val.f0;
        }

        assertNull(iterator.next(val));

        sorter.close();
        testSuccess = true;
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
@Test
public void testSortWithShortMediumAndLargeRecords() {
    try {
        final int NUM_RECORDS = 50000;
        final int LARGE_REC_INTERVAL = 10000;
        final int MEDIUM_REC_INTERVAL = 500;

        final TypeInformation<?>[] types = new TypeInformation<?>[] {
                BasicTypeInfo.LONG_TYPE_INFO,
                new ValueTypeInfo<SmallOrMediumOrLargeValue>(SmallOrMediumOrLargeValue.class)
        };

        final TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>> typeInfo =
                new TupleTypeInfo<Tuple2<Long,SmallOrMediumOrLargeValue>>(types);

        final TypeSerializer<Tuple2<Long, SmallOrMediumOrLargeValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple2<Long, SmallOrMediumOrLargeValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());

        // Deterministic source mixing three payload sizes: every
        // LARGE_REC_INTERVAL-th record is large, every MEDIUM_REC_INTERVAL-th
        // is medium, everything else is small.
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> source =
                new MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>>() {

                    private final Random rnd = new Random(1456108743687167086L);
                    private int num = -1;

                    @Override
                    public Tuple2<Long, SmallOrMediumOrLargeValue> next(Tuple2<Long, SmallOrMediumOrLargeValue> reuse) {
                        return next();
                    }

                    @Override
                    public Tuple2<Long, SmallOrMediumOrLargeValue> next() {
                        if (++num < NUM_RECORDS) {
                            int size;
                            if (num % LARGE_REC_INTERVAL == 0) {
                                size = SmallOrMediumOrLargeValue.LARGE_SIZE;
                            }
                            else if (num % MEDIUM_REC_INTERVAL == 0) {
                                size = SmallOrMediumOrLargeValue.MEDIUM_SIZE;
                            }
                            else {
                                size = SmallOrMediumOrLargeValue.SMALL_SIZE;
                            }

                            long val = rnd.nextLong();
                            return new Tuple2<Long, SmallOrMediumOrLargeValue>(val, new SmallOrMediumOrLargeValue((int) val, size));
                        }
                        else {
                            return null;
                        }
                    }
                };

        @SuppressWarnings("unchecked")
        Sorter<Tuple2<Long, SmallOrMediumOrLargeValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SmallOrMediumOrLargeValue>>(
                this.memoryManager, this.ioManager,
                source, this.parentTask,
                new RuntimeSerializerFactory<Tuple2<Long, SmallOrMediumOrLargeValue>>(serializer, (Class<Tuple2<Long, SmallOrMediumOrLargeValue>>) (Class<?>) Tuple2.class),
                comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, false);

        // verify the sorted (descending) result
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> iterator = sorter.getIterator();

        Tuple2<Long, SmallOrMediumOrLargeValue> val = serializer.createInstance();
        long prevKey = Long.MAX_VALUE;

        for (int i = 0; i < NUM_RECORDS; i++) {
            val = iterator.next(val);

            assertTrue(val.f0 <= prevKey);
            assertTrue(val.f0.intValue() == val.f1.val());
            // BUGFIX: remember the key; otherwise the descending-order check
            // above always compares against Long.MAX_VALUE and never fails.
            prevKey = val.f0;
        }

        assertNull(iterator.next(val));

        sorter.close();
        testSuccess = true;
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
@Test
public void testSortWithMediumRecordsOnly() {
    try {
        final int NUM_RECORDS = 70;

        final TypeInformation<?>[] types = new TypeInformation<?>[] {
                BasicTypeInfo.LONG_TYPE_INFO,
                new ValueTypeInfo<SmallOrMediumOrLargeValue>(SmallOrMediumOrLargeValue.class)
        };

        final TupleTypeInfo<Tuple2<Long, SmallOrMediumOrLargeValue>> typeInfo =
                new TupleTypeInfo<Tuple2<Long,SmallOrMediumOrLargeValue>>(types);

        final TypeSerializer<Tuple2<Long, SmallOrMediumOrLargeValue>> serializer = typeInfo.createSerializer(new ExecutionConfig());
        final TypeComparator<Tuple2<Long, SmallOrMediumOrLargeValue>> comparator = typeInfo.createComparator(new int[] {0}, new boolean[]{false}, 0, new ExecutionConfig());

        // Deterministic source producing medium-sized (12 MB payload) records only.
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> source =
                new MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>>() {

                    private final Random rnd = new Random(62360187263087678L);
                    private int num = -1;

                    @Override
                    public Tuple2<Long, SmallOrMediumOrLargeValue> next(Tuple2<Long, SmallOrMediumOrLargeValue> reuse) {
                        return next();
                    }

                    @Override
                    public Tuple2<Long, SmallOrMediumOrLargeValue> next() {
                        if (++num < NUM_RECORDS) {
                            long val = rnd.nextLong();
                            return new Tuple2<Long, SmallOrMediumOrLargeValue>(val, new SmallOrMediumOrLargeValue((int) val, SmallOrMediumOrLargeValue.MEDIUM_SIZE));
                        }
                        else {
                            return null;
                        }
                    }
                };

        @SuppressWarnings("unchecked")
        Sorter<Tuple2<Long, SmallOrMediumOrLargeValue>> sorter = new UnilateralSortMerger<Tuple2<Long, SmallOrMediumOrLargeValue>>(
                this.memoryManager, this.ioManager,
                source, this.parentTask,
                new RuntimeSerializerFactory<Tuple2<Long, SmallOrMediumOrLargeValue>>(serializer, (Class<Tuple2<Long, SmallOrMediumOrLargeValue>>) (Class<?>) Tuple2.class),
                comparator, 1.0, 1, 128, 0.7f, true /*use large record handler*/, true);

        // verify the sorted (descending) result
        MutableObjectIterator<Tuple2<Long, SmallOrMediumOrLargeValue>> iterator = sorter.getIterator();

        Tuple2<Long, SmallOrMediumOrLargeValue> val = serializer.createInstance();
        long prevKey = Long.MAX_VALUE;

        for (int i = 0; i < NUM_RECORDS; i++) {
            val = iterator.next(val);

            assertTrue(val.f0 <= prevKey);
            assertTrue(val.f0.intValue() == val.f1.val());
            // BUGFIX: track the previous key so the sort-order assertion above
            // actually verifies ordering instead of being vacuously true.
            prevKey = val.f0;
        }

        assertNull(iterator.next(val));

        sorter.close();
        testSuccess = true;
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Test {@code Value} carrying an int and, when flagged as "long", an additional
 * shared 100 MB payload in its serialized form. Equality and hash code depend
 * solely on the int value.
 */
public static final class SomeMaybeLongValue implements org.apache.flink.types.Value {

    private static final long serialVersionUID = 1L;

    /** Shared payload, pre-filled with the pattern {@code (byte) index}. */
    private static final byte[] BUFFER = new byte[100 * 1024 * 1024];

    static {
        for (int pos = 0; pos < BUFFER.length; pos++) {
            BUFFER[pos] = (byte) pos;
        }
    }

    private int val;
    private boolean isLong;

    /** Creates a value flagged as long, with the int defaulting to zero. */
    public SomeMaybeLongValue() {
        this.isLong = true;
    }

    /** Creates a value flagged as long. */
    public SomeMaybeLongValue(int val) {
        this(val, true);
    }

    /** Creates a value with an explicit long/short flag. */
    public SomeMaybeLongValue(int val, boolean isLong) {
        this.val = val;
        this.isLong = isLong;
    }

    public int val() {
        return val;
    }

    public boolean isLong() {
        return isLong;
    }

    @Override
    public void read(DataInputView in) throws IOException {
        val = in.readInt();
        isLong = in.readBoolean();

        if (isLong) {
            // validate the payload byte by byte against the expected pattern
            for (int pos = 0; pos < BUFFER.length; pos++) {
                assertEquals(BUFFER[pos], in.readByte());
            }
        }
    }

    @Override
    public void write(DataOutputView out) throws IOException {
        out.writeInt(val);
        out.writeBoolean(isLong);

        if (isLong) {
            out.write(BUFFER);
        }
    }

    @Override
    public int hashCode() {
        return val;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SomeMaybeLongValue)) {
            return false;
        }
        return ((SomeMaybeLongValue) obj).val == this.val;
    }

    @Override
    public String toString() {
        if (isLong) {
            return "Large Value";
        } else {
            return "Small Value";
        }
    }
}
/**
 * Test {@code Value} with a selectable serialized payload size: small (0 bytes),
 * medium (12 MB), or large (100 MB). The payload is produced on the fly with the
 * pattern {@code (byte) index} and verified on read. Equality covers both the int
 * value and the size.
 */
public static final class SmallOrMediumOrLargeValue implements org.apache.flink.types.Value {

    private static final long serialVersionUID = 1L;

    public static final int SMALL_SIZE = 0;
    public static final int MEDIUM_SIZE = 12 * 1024 * 1024;
    public static final int LARGE_SIZE = 100 * 1024 * 1024;

    private int val;
    private int size;

    /** Creates a small value with the int defaulting to zero. */
    public SmallOrMediumOrLargeValue() {
        this.size = SMALL_SIZE;
    }

    /** Creates a small value. */
    public SmallOrMediumOrLargeValue(int val) {
        this(val, SMALL_SIZE);
    }

    /** Creates a value with an explicit payload size. */
    public SmallOrMediumOrLargeValue(int val, int size) {
        this.val = val;
        this.size = size;
    }

    public int val() {
        return val;
    }

    public int getSize() {
        return size;
    }

    @Override
    public void read(DataInputView in) throws IOException {
        val = in.readInt();
        size = in.readInt();

        // verify the payload byte by byte against the generator pattern
        for (int pos = 0; pos < size; pos++) {
            assertEquals((byte) pos, in.readByte());
        }
    }

    @Override
    public void write(DataOutputView out) throws IOException {
        out.writeInt(val);
        out.writeInt(size);

        // generate the payload on the fly rather than materializing it
        for (int pos = 0; pos < size; pos++) {
            out.write((byte) pos);
        }
    }

    @Override
    public int hashCode() {
        return val;
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof SmallOrMediumOrLargeValue)) {
            return false;
        }
        SmallOrMediumOrLargeValue that = (SmallOrMediumOrLargeValue) obj;
        return that.val == this.val && that.size == this.size;
    }

    @Override
    public String toString() {
        return String.format("Value %d (%d bytes)", val, size);
    }
}
} |
Is this index calculation correct? What if we are adding the first reporter. Wouldn't the index then be `-1`? | public MetricRegistryImpl(MetricRegistryConfiguration config, Collection<ReporterSetup> reporterConfigurations) {
// Constructor body of MetricRegistryImpl(MetricRegistryConfiguration, Collection<ReporterSetup>):
// wires up scope formats, the delimiter, the reporter list, and the single-threaded
// scheduler, then instantiates each configured reporter.
this.maximumFramesize = config.getQueryServiceMessageSizeLimit();
this.scopeFormats = config.getScopeFormats();
this.globalDelimiter = config.getDelimiter();
this.terminationFuture = new CompletableFuture<>();
this.isShutdown = false;

this.reporters = new ArrayList<>(4);
this.executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory("Flink-MetricRegistry"));
this.queryService = null;
this.metricQueryServiceRpcService = null;

if (reporterConfigurations.isEmpty()) {
    LOG.info("No metrics reporter configured, no metrics will be exposed/reported.");
} else {
    for (ReporterSetup reporterSetup : reporterConfigurations) {
        final String namedReporter = reporterSetup.getName();

        try {
            // Parse the optional reporting interval ("<number> <TimeUnit>"), falling
            // back to 10 SECONDS on any parse failure.
            Optional<String> configuredPeriod = reporterSetup.getIntervalSettings();
            TimeUnit timeunit = TimeUnit.SECONDS;
            long period = 10;

            if (configuredPeriod.isPresent()) {
                try {
                    String[] interval = configuredPeriod.get().split(" ");
                    period = Long.parseLong(interval[0]);
                    timeunit = TimeUnit.valueOf(interval[1]);
                }
                catch (Exception e) {
                    LOG.error("Cannot parse report interval from config: " + configuredPeriod +
                            " - please use values like '10 SECONDS' or '500 MILLISECONDS'. " +
                            "Using default reporting interval.");
                }
            }

            final MetricReporter reporterInstance = reporterSetup.getReporter();
            final String className = reporterInstance.getClass().getName();

            if (reporterInstance instanceof Scheduled) {
                LOG.info("Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.", period, timeunit.name(), namedReporter, className);

                executor.scheduleWithFixedDelay(
                        new MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);
            } else {
                LOG.info("Reporting metrics for reporter {} of type {}.", namedReporter, className);
            }

            // Per-reporter delimiter; must be a single character, otherwise fall back
            // to the global delimiter.
            String delimiterForReporter = reporterSetup.getDelimiter().orElse(String.valueOf(globalDelimiter));
            if (delimiterForReporter.length() != 1) {
                LOG.warn("Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.", delimiterForReporter, namedReporter, globalDelimiter);
                delimiterForReporter = String.valueOf(globalDelimiter);
            }

            reporters.add(new ReporterAndSettings(
                reporterInstance,
                new ReporterScopedSettings(
                    // BUGFIX: the 0-based index of the reporter being added equals the
                    // current list size, because add() has not executed yet. Using
                    // "reporters.size() - 1" here assigned index -1 to the first reporter.
                    reporters.size(),
                    delimiterForReporter.charAt(0))));
        }
        catch (Throwable t) {
            LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", namedReporter, t);
        }
    }
}
}
// Constructor body of MetricRegistryImpl(MetricRegistryConfiguration, Collection<ReporterSetup>).
// Initializes registry-wide settings, then instantiates and (if Scheduled) schedules
// each configured reporter. Note: this version correctly passes reporters.size()
// (the 0-based index of the reporter about to be added) to ReporterScopedSettings.
this.maximumFramesize = config.getQueryServiceMessageSizeLimit();
this.scopeFormats = config.getScopeFormats();
this.globalDelimiter = config.getDelimiter();
this.terminationFuture = new CompletableFuture<>();
this.isShutdown = false;
this.reporters = new ArrayList<>(4);
// single-threaded scheduler used for periodic reporting and view updates
this.executor = Executors.newSingleThreadScheduledExecutor(new ExecutorThreadFactory("Flink-MetricRegistry"));
this.queryService = null;
this.metricQueryServiceRpcService = null;
if (reporterConfigurations.isEmpty()) {
    LOG.info("No metrics reporter configured, no metrics will be exposed/reported.");
} else {
    for (ReporterSetup reporterSetup : reporterConfigurations) {
        final String namedReporter = reporterSetup.getName();
        try {
            // optional reporting interval in the form "<number> <TimeUnit>";
            // defaults to 10 SECONDS and falls back to the default on parse errors
            Optional<String> configuredPeriod = reporterSetup.getIntervalSettings();
            TimeUnit timeunit = TimeUnit.SECONDS;
            long period = 10;
            if (configuredPeriod.isPresent()) {
                try {
                    String[] interval = configuredPeriod.get().split(" ");
                    period = Long.parseLong(interval[0]);
                    timeunit = TimeUnit.valueOf(interval[1]);
                }
                catch (Exception e) {
                    LOG.error("Cannot parse report interval from config: " + configuredPeriod +
                            " - please use values like '10 SECONDS' or '500 MILLISECONDS'. " +
                            "Using default reporting interval.");
                }
            }
            final MetricReporter reporterInstance = reporterSetup.getReporter();
            final String className = reporterInstance.getClass().getName();
            if (reporterInstance instanceof Scheduled) {
                LOG.info("Periodically reporting metrics in intervals of {} {} for reporter {} of type {}.", period, timeunit.name(), namedReporter, className);
                executor.scheduleWithFixedDelay(
                        new MetricRegistryImpl.ReporterTask((Scheduled) reporterInstance), period, period, timeunit);
            } else {
                LOG.info("Reporting metrics for reporter {} of type {}.", namedReporter, className);
            }
            // per-reporter delimiter must be a single character; otherwise use the global one
            String delimiterForReporter = reporterSetup.getDelimiter().orElse(String.valueOf(globalDelimiter));
            if (delimiterForReporter.length() != 1) {
                LOG.warn("Failed to parse delimiter '{}' for reporter '{}', using global delimiter '{}'.", delimiterForReporter, namedReporter, globalDelimiter);
                delimiterForReporter = String.valueOf(globalDelimiter);
            }
            reporters.add(new ReporterAndSettings(
                    reporterInstance,
                    new ReporterScopedSettings(
                            // index of the reporter being added == current size (add() not yet executed)
                            reporters.size(),
                            delimiterForReporter.charAt(0))));
        }
        catch (Throwable t) {
            // a broken reporter must not prevent the registry (or other reporters) from starting
            LOG.error("Could not instantiate metrics reporter {}. Metrics might not be exposed/reported.", namedReporter, t);
        }
    }
}
}
private static final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);
private final Object lock = new Object();
private final List<ReporterAndSettings> reporters;
private final ScheduledExecutorService executor;
private final ScopeFormats scopeFormats;
private final char globalDelimiter;
private final CompletableFuture<Void> terminationFuture;
private final long maximumFramesize;
@Nullable
private MetricQueryService queryService;
@Nullable
private RpcService metricQueryServiceRpcService;
private ViewUpdater viewUpdater;
private boolean isShutdown;
public MetricRegistryImpl(MetricRegistryConfiguration config) {
this(config, Collections.emptyList());
}
/**
* Creates a new MetricRegistry and starts the configured reporter.
*/
/**
* Initializes the MetricQueryService.
*
* @param rpcService RpcService to create the MetricQueryService on
* @param resourceID resource ID used to disambiguate the actor name
*/
public void startQueryService(RpcService rpcService, ResourceID resourceID) {
synchronized (lock) {
Preconditions.checkState(!isShutdown(), "The metric registry has already been shut down.");
try {
metricQueryServiceRpcService = rpcService;
queryService = MetricQueryService.createMetricQueryService(rpcService, resourceID, maximumFramesize);
queryService.start();
} catch (Exception e) {
LOG.warn("Could not start MetricDumpActor. No metrics will be submitted to the WebInterface.", e);
}
}
}
/**
* Returns the rpc service that the {@link MetricQueryService} runs in.
*
* @return rpc service of hte MetricQueryService
*/
@Nullable
public RpcService getMetricQueryServiceRpcService() {
return metricQueryServiceRpcService;
}
/**
* Returns the address under which the {@link MetricQueryService} is reachable.
*
* @return address of the metric query service
*/
@Override
@Nullable
public String getMetricQueryServiceGatewayRpcAddress() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();
} else {
return null;
}
}
@VisibleForTesting
@Nullable
MetricQueryServiceGateway getMetricQueryServiceGateway() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class);
} else {
return null;
}
}
@Override
public char getDelimiter() {
return this.globalDelimiter;
}
/**
 * Returns the delimiter configured for the reporter at the given index, or the
 * global delimiter if no reporter exists at that index.
 *
 * @param reporterIndex index of the reporter whose delimiter is requested
 * @return the reporter-specific delimiter, or the global delimiter as fallback
 */
@VisibleForTesting
char getDelimiter(int reporterIndex) {
    // Explicit bounds check instead of catching IndexOutOfBoundsException:
    // exceptions should not be used for ordinary control flow.
    if (reporterIndex < 0 || reporterIndex >= reporters.size()) {
        LOG.warn("Delimiter for reporter index {} not found, returning global delimiter.", reporterIndex);
        return this.globalDelimiter;
    }
    return reporters.get(reporterIndex).getSettings().getDelimiter();
}
@Override
public int getNumberReporters() {
return reporters.size();
}
@VisibleForTesting
public List<MetricReporter> getReporters() {
return reporters.stream().map(ReporterAndSettings::getReporter).collect(Collectors.toList());
}
/**
* Returns whether this registry has been shutdown.
*
* @return true, if this registry was shutdown, otherwise false
*/
public boolean isShutdown() {
synchronized (lock) {
return isShutdown;
}
}
/**
* Shuts down this registry and the associated {@link MetricReporter}.
*
* <p>NOTE: This operation is asynchronous and returns a future which is completed
* once the shutdown operation has been completed.
*
* @return Future which is completed once the {@link MetricRegistryImpl}
* is shut down.
*/
public CompletableFuture<Void> shutdown() {
synchronized (lock) {
if (isShutdown) {
return terminationFuture;
} else {
isShutdown = true;
final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3);
final Time gracePeriod = Time.seconds(1L);
if (metricQueryServiceRpcService != null) {
final CompletableFuture<Void> metricQueryServiceRpcServiceTerminationFuture = metricQueryServiceRpcService.stopService();
terminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);
}
Throwable throwable = null;
for (ReporterAndSettings reporterAndSettings : reporters) {
try {
reporterAndSettings.getReporter().close();
} catch (Throwable t) {
throwable = ExceptionUtils.firstOrSuppressed(t, throwable);
}
}
reporters.clear();
if (throwable != null) {
terminationFutures.add(
FutureUtils.completedExceptionally(
new FlinkException("Could not shut down the metric reporters properly.", throwable)));
}
final CompletableFuture<Void> executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(
gracePeriod.toMilliseconds(),
TimeUnit.MILLISECONDS,
executor);
terminationFutures.add(executorShutdownFuture);
FutureUtils
.completeAll(terminationFutures)
.whenComplete(
(Void ignored, Throwable error) -> {
if (error != null) {
terminationFuture.completeExceptionally(error);
} else {
terminationFuture.complete(null);
}
});
return terminationFuture;
}
}
}
@Override
public ScopeFormats getScopeFormats() {
return scopeFormats;
}
/**
 * Registers a metric with all configured reporters, the metric query service, and
 * (for View metrics) the view updater. Failures in any individual consumer are
 * logged and do not affect the others.
 */
@Override
public void register(Metric metric, String metricName, AbstractMetricGroup group) {
    synchronized (lock) {
        if (isShutdown()) {
            LOG.warn("Cannot register metric, because the MetricRegistry has already been shut down.");
        } else {
            // 1) notify every reporter, wrapping the group so reporter-specific
            //    settings (index, delimiter) are applied
            if (reporters != null) {
                for (int i = 0; i < reporters.size(); i++) {
                    ReporterAndSettings reporterAndSettings = reporters.get(i);
                    try {
                        if (reporterAndSettings != null) {
                            FrontMetricGroup front = new FrontMetricGroup<AbstractMetricGroup<?>>(reporterAndSettings.getSettings(), group);
                            reporterAndSettings.getReporter().notifyOfAddedMetric(metric, metricName, front);
                        }
                    } catch (Exception e) {
                        LOG.warn("Error while registering metric: {}.", metricName, e);
                    }
                }
            }
            // 2) expose the metric via the query service, if one was started
            try {
                if (queryService != null) {
                    queryService.addMetric(metricName, metric, group);
                }
            } catch (Exception e) {
                LOG.warn("Error while registering metric: {}.", metricName, e);
            }
            // 3) views need periodic updates; lazily create the updater on first use
            try {
                if (metric instanceof View) {
                    if (viewUpdater == null) {
                        viewUpdater = new ViewUpdater(executor);
                    }
                    viewUpdater.notifyOfAddedView((View) metric);
                }
            } catch (Exception e) {
                LOG.warn("Error while registering metric: {}.", metricName, e);
            }
        }
    }
}
/**
 * Un-registers a metric from all configured reporters, the metric query service,
 * and (for View metrics) the view updater. Failures in any individual consumer
 * are logged and do not affect the others.
 */
@Override
public void unregister(Metric metric, String metricName, AbstractMetricGroup group) {
    synchronized (lock) {
        if (isShutdown()) {
            LOG.warn("Cannot unregister metric, because the MetricRegistry has already been shut down.");
        } else {
            // 1) notify every reporter; CONSISTENCY FIX: use the accessors
            //    (getSettings()/getReporter()) like register() does, instead of
            //    reaching into the fields directly
            if (reporters != null) {
                for (int i = 0; i < reporters.size(); i++) {
                    try {
                        ReporterAndSettings reporterAndSettings = reporters.get(i);
                        if (reporterAndSettings != null) {
                            FrontMetricGroup front = new FrontMetricGroup<AbstractMetricGroup<?>>(reporterAndSettings.getSettings(), group);
                            reporterAndSettings.getReporter().notifyOfRemovedMetric(metric, metricName, front);
                        }
                    } catch (Exception e) {
                        LOG.warn("Error while unregistering metric: {}.", metricName, e);
                    }
                }
            }
            // 2) remove the metric from the query service, if one was started
            try {
                if (queryService != null) {
                    queryService.removeMetric(metric);
                }
            } catch (Exception e) {
                LOG.warn("Error while unregistering metric: {}.", metricName, e);
            }
            // 3) stop periodic updates for views
            try {
                if (metric instanceof View) {
                    if (viewUpdater != null) {
                        viewUpdater.notifyOfRemovedView((View) metric);
                    }
                }
            } catch (Exception e) {
                LOG.warn("Error while unregistering metric: {}", metricName, e);
            }
        }
    }
}
@VisibleForTesting
@Nullable
MetricQueryService getQueryService() {
return queryService;
}
/**
* This task is explicitly a static class, so that it does not hold any references to the enclosing
* MetricsRegistry instance.
*
* <p>This is a subtle difference, but very important: With this static class, the enclosing class instance
* may become garbage-collectible, whereas with an anonymous inner class, the timer thread
* (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.
* Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,
* which acts as a fail-safe to stop the timer thread and prevents resource leaks.
*/
private static final class ReporterTask extends TimerTask {
private final Scheduled reporter;
private ReporterTask(Scheduled reporter) {
this.reporter = reporter;
}
@Override
public void run() {
try {
reporter.report();
} catch (Throwable t) {
LOG.warn("Error while reporting metrics", t);
}
}
}
/**
 * Pairs a reporter instance with the per-reporter settings (reporter index and
 * scope delimiter) it was configured with.
 */
private static class ReporterAndSettings {

    private final MetricReporter reporter;
    private final ReporterScopedSettings settings;

    private ReporterAndSettings(MetricReporter reporter, ReporterScopedSettings settings) {
        // fail fast on construction instead of with a late NPE when the
        // reporter/settings are first used
        this.reporter = Preconditions.checkNotNull(reporter);
        this.settings = Preconditions.checkNotNull(settings);
    }

    public MetricReporter getReporter() {
        return reporter;
    }

    public ReporterScopedSettings getSettings() {
        return settings;
    }
}
} | class MetricRegistryImpl implements MetricRegistry {
private static final Logger LOG = LoggerFactory.getLogger(MetricRegistryImpl.class);
private final Object lock = new Object();
private final List<ReporterAndSettings> reporters;
private final ScheduledExecutorService executor;
private final ScopeFormats scopeFormats;
private final char globalDelimiter;
private final CompletableFuture<Void> terminationFuture;
private final long maximumFramesize;
@Nullable
private MetricQueryService queryService;
@Nullable
private RpcService metricQueryServiceRpcService;
private ViewUpdater viewUpdater;
private boolean isShutdown;
public MetricRegistryImpl(MetricRegistryConfiguration config) {
this(config, Collections.emptyList());
}
/**
* Creates a new MetricRegistry and starts the configured reporter.
*/
/**
* Initializes the MetricQueryService.
*
* @param rpcService RpcService to create the MetricQueryService on
* @param resourceID resource ID used to disambiguate the actor name
*/
public void startQueryService(RpcService rpcService, ResourceID resourceID) {
synchronized (lock) {
Preconditions.checkState(!isShutdown(), "The metric registry has already been shut down.");
try {
metricQueryServiceRpcService = rpcService;
queryService = MetricQueryService.createMetricQueryService(rpcService, resourceID, maximumFramesize);
queryService.start();
} catch (Exception e) {
LOG.warn("Could not start MetricDumpActor. No metrics will be submitted to the WebInterface.", e);
}
}
}
/**
* Returns the rpc service that the {@link MetricQueryService} runs in.
*
* @return rpc service of hte MetricQueryService
*/
@Nullable
public RpcService getMetricQueryServiceRpcService() {
return metricQueryServiceRpcService;
}
/**
* Returns the address under which the {@link MetricQueryService} is reachable.
*
* @return address of the metric query service
*/
@Override
@Nullable
public String getMetricQueryServiceGatewayRpcAddress() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class).getAddress();
} else {
return null;
}
}
@VisibleForTesting
@Nullable
MetricQueryServiceGateway getMetricQueryServiceGateway() {
if (queryService != null) {
return queryService.getSelfGateway(MetricQueryServiceGateway.class);
} else {
return null;
}
}
@Override
public char getDelimiter() {
return this.globalDelimiter;
}
@VisibleForTesting
char getDelimiter(int reporterIndex) {
try {
return reporters.get(reporterIndex).getSettings().getDelimiter();
} catch (IndexOutOfBoundsException e) {
LOG.warn("Delimiter for reporter index {} not found, returning global delimiter.", reporterIndex);
return this.globalDelimiter;
}
}
@Override
public int getNumberReporters() {
return reporters.size();
}
@VisibleForTesting
public List<MetricReporter> getReporters() {
return reporters.stream().map(ReporterAndSettings::getReporter).collect(Collectors.toList());
}
/**
* Returns whether this registry has been shutdown.
*
* @return true, if this registry was shutdown, otherwise false
*/
public boolean isShutdown() {
synchronized (lock) {
return isShutdown;
}
}
/**
* Shuts down this registry and the associated {@link MetricReporter}.
*
* <p>NOTE: This operation is asynchronous and returns a future which is completed
* once the shutdown operation has been completed.
*
* @return Future which is completed once the {@link MetricRegistryImpl}
* is shut down.
*/
public CompletableFuture<Void> shutdown() {
synchronized (lock) {
if (isShutdown) {
return terminationFuture;
} else {
isShutdown = true;
final Collection<CompletableFuture<Void>> terminationFutures = new ArrayList<>(3);
final Time gracePeriod = Time.seconds(1L);
if (metricQueryServiceRpcService != null) {
final CompletableFuture<Void> metricQueryServiceRpcServiceTerminationFuture = metricQueryServiceRpcService.stopService();
terminationFutures.add(metricQueryServiceRpcServiceTerminationFuture);
}
Throwable throwable = null;
for (ReporterAndSettings reporterAndSettings : reporters) {
try {
reporterAndSettings.getReporter().close();
} catch (Throwable t) {
throwable = ExceptionUtils.firstOrSuppressed(t, throwable);
}
}
reporters.clear();
if (throwable != null) {
terminationFutures.add(
FutureUtils.completedExceptionally(
new FlinkException("Could not shut down the metric reporters properly.", throwable)));
}
final CompletableFuture<Void> executorShutdownFuture = ExecutorUtils.nonBlockingShutdown(
gracePeriod.toMilliseconds(),
TimeUnit.MILLISECONDS,
executor);
terminationFutures.add(executorShutdownFuture);
FutureUtils
.completeAll(terminationFutures)
.whenComplete(
(Void ignored, Throwable error) -> {
if (error != null) {
terminationFuture.completeExceptionally(error);
} else {
terminationFuture.complete(null);
}
});
return terminationFuture;
}
}
}
@Override
public ScopeFormats getScopeFormats() {
return scopeFormats;
}
@Override
public void register(Metric metric, String metricName, AbstractMetricGroup group) {
synchronized (lock) {
if (isShutdown()) {
LOG.warn("Cannot register metric, because the MetricRegistry has already been shut down.");
} else {
if (reporters != null) {
for (int i = 0; i < reporters.size(); i++) {
ReporterAndSettings reporterAndSettings = reporters.get(i);
try {
if (reporterAndSettings != null) {
FrontMetricGroup front = new FrontMetricGroup<AbstractMetricGroup<?>>(reporterAndSettings.getSettings(), group);
reporterAndSettings.getReporter().notifyOfAddedMetric(metric, metricName, front);
}
} catch (Exception e) {
LOG.warn("Error while registering metric: {}.", metricName, e);
}
}
}
try {
if (queryService != null) {
queryService.addMetric(metricName, metric, group);
}
} catch (Exception e) {
LOG.warn("Error while registering metric: {}.", metricName, e);
}
try {
if (metric instanceof View) {
if (viewUpdater == null) {
viewUpdater = new ViewUpdater(executor);
}
viewUpdater.notifyOfAddedView((View) metric);
}
} catch (Exception e) {
LOG.warn("Error while registering metric: {}.", metricName, e);
}
}
}
}
@Override
public void unregister(Metric metric, String metricName, AbstractMetricGroup group) {
synchronized (lock) {
if (isShutdown()) {
LOG.warn("Cannot unregister metric, because the MetricRegistry has already been shut down.");
} else {
if (reporters != null) {
for (int i = 0; i < reporters.size(); i++) {
try {
ReporterAndSettings reporterAndSettings = reporters.get(i);
if (reporterAndSettings != null) {
FrontMetricGroup front = new FrontMetricGroup<AbstractMetricGroup<?>>(reporterAndSettings.getSettings(), group);
reporterAndSettings.getReporter().notifyOfRemovedMetric(metric, metricName, front);
}
} catch (Exception e) {
LOG.warn("Error while unregistering metric: {}.", metricName, e);
}
}
}
try {
if (queryService != null) {
queryService.removeMetric(metric);
}
} catch (Exception e) {
LOG.warn("Error while unregistering metric: {}.", metricName, e);
}
try {
if (metric instanceof View) {
if (viewUpdater != null) {
viewUpdater.notifyOfRemovedView((View) metric);
}
}
} catch (Exception e) {
LOG.warn("Error while unregistering metric: {}", metricName, e);
}
}
}
}
@VisibleForTesting
@Nullable
MetricQueryService getQueryService() {
return queryService;
}
/**
* This task is explicitly a static class, so that it does not hold any references to the enclosing
* MetricsRegistry instance.
*
* <p>This is a subtle difference, but very important: With this static class, the enclosing class instance
* may become garbage-collectible, whereas with an anonymous inner class, the timer thread
* (which is a GC root) will hold a reference via the timer task and its enclosing instance pointer.
* Making the MetricsRegistry garbage collectible makes the java.util.Timer garbage collectible,
* which acts as a fail-safe to stop the timer thread and prevents resource leaks.
*/
private static final class ReporterTask extends TimerTask {
private final Scheduled reporter;
private ReporterTask(Scheduled reporter) {
this.reporter = reporter;
}
@Override
public void run() {
try {
reporter.report();
} catch (Throwable t) {
LOG.warn("Error while reporting metrics", t);
}
}
}
/** Immutable pairing of a metric reporter with the settings scoped to that reporter. */
private static class ReporterAndSettings {

private final MetricReporter metricReporter;
private final ReporterScopedSettings scopedSettings;

private ReporterAndSettings(MetricReporter metricReporter, ReporterScopedSettings scopedSettings) {
// Fail fast on construction rather than on first use.
this.metricReporter = Preconditions.checkNotNull(metricReporter);
this.scopedSettings = Preconditions.checkNotNull(scopedSettings);
}

public MetricReporter getReporter() {
return metricReporter;
}

public ReporterScopedSettings getSettings() {
return scopedSettings;
}
}
} |
yes, that's a common Javadoc that signals the Java version info. | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(analyzeFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> labeledCustomFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(
toFluxByteBuffer(new ByteArrayInputStream(fileContent)), analyzeFile.length(), "{labeled_model_Id}")
.setFormContentType(FormContentType.APPLICATION_PDF).setIncludeTextContent(true)
.setPollInterval(Duration.ofSeconds(5)));
PollerFlux<OperationResult, List<RecognizedForm>> unlabeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{unlabeled_model_Id}", FormContentType.APPLICATION_PDF);
Mono<List<RecognizedForm>> labeledDataResult = labeledCustomFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
Mono<List<RecognizedForm>> unlabeledDataResult = unlabeledCustomFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
System.out.println("--------Recognizing forms with labeled custom model--------");
labeledDataResult.subscribe(formsWithLabeledModel -> formsWithLabeledModel.forEach(labeledForm ->
labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueText().getBoundingBox() != null) {
formField.getValueText().getBoundingBox().getPoints().stream().map(point -> String.format("[%.2f,"
+ " %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueText().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Merchant".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
unlabeledDataResult.subscribe(recognizedForms -> recognizedForms.forEach(unLabeledForm ->
unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueText().getBoundingBox() != null) {
formField.getValueText().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
final StringBuilder boundingBoxLabelStr = new StringBuilder();
if (formField.getLabelText() != null && formField.getLabelText().getBoundingBox() != null) {
formField.getLabelText().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelText().getText(), boundingBoxLabelStr, formField.getConfidence());
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence "
+ "score of %.2f.%n",
label, formField.getFieldValue(), formField.getValueText().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelText().getText()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | public static void main(String[] args) throws IOException {
FormRecognizerAsyncClient client = new FormRecognizerClientBuilder()
.credential(new AzureKeyCredential("{key}"))
.endpoint("https:
.buildAsyncClient();
File analyzeFile = new File("../formrecognizer/azure-ai-formrecognizer/src/samples/java/sample-forms/"
+ "forms/Form_1.jpg");
byte[] fileContent = Files.readAllBytes(analyzeFile.toPath());
PollerFlux<OperationResult, List<RecognizedForm>> labeledCustomFormPoller =
client.beginRecognizeCustomForms(new RecognizeCustomFormsOptions(
toFluxByteBuffer(new ByteArrayInputStream(fileContent)), analyzeFile.length(), "{labeled_model_Id}")
.setFormContentType(FormContentType.APPLICATION_PDF).setIncludeTextContent(true)
.setPollInterval(Duration.ofSeconds(5)));
PollerFlux<OperationResult, List<RecognizedForm>> unlabeledCustomFormPoller =
client.beginRecognizeCustomForms(toFluxByteBuffer(new ByteArrayInputStream(fileContent)),
analyzeFile.length(), "{unlabeled_model_Id}", FormContentType.APPLICATION_PDF);
Mono<List<RecognizedForm>> labeledDataResult = labeledCustomFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
Mono<List<RecognizedForm>> unlabeledDataResult = unlabeledCustomFormPoller
.last()
.flatMap(trainingOperationResponse -> {
if (trainingOperationResponse.getStatus().isComplete()) {
return trainingOperationResponse.getFinalResult();
} else {
return Mono.error(new RuntimeException("Polling completed unsuccessfully with status:"
+ trainingOperationResponse.getStatus()));
}
});
System.out.println("--------Recognizing forms with labeled custom model--------");
labeledDataResult.subscribe(formsWithLabeledModel -> formsWithLabeledModel.forEach(labeledForm ->
labeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueText().getBoundingBox() != null) {
formField.getValueText().getBoundingBox().getPoints().stream().map(point -> String.format("[%.2f,"
+ " %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getFieldValue(), formField.getValueText().getText(), boundingBoxStr,
formField.getConfidence());
System.out.println("Value for a specific labeled field using the training-time label:");
labeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Merchant".equals(formFieldEntry.getKey()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Merchant name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
System.out.println("-----------------------------------------------------------");
System.out.println("-------Recognizing forms with unlabeled custom model-------");
unlabeledDataResult.subscribe(recognizedForms -> recognizedForms.forEach(unLabeledForm ->
unLabeledForm.getFields().forEach((label, formField) -> {
final StringBuilder boundingBoxStr = new StringBuilder();
if (formField.getValueText().getBoundingBox() != null) {
formField.getValueText().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
final StringBuilder boundingBoxLabelStr = new StringBuilder();
if (formField.getLabelText() != null && formField.getLabelText().getBoundingBox() != null) {
formField.getLabelText().getBoundingBox().getPoints().stream().map(point ->
String.format("[%.2f, %.2f]", point.getX(), point.getY())).forEach(boundingBoxStr::append);
}
System.out.printf("Field %s has label %s within bounding box %s with a confidence score "
+ "of %.2f.%n",
label, formField.getLabelText().getText(), boundingBoxLabelStr, formField.getConfidence());
System.out.printf("Field %s has value %s based on %s within bounding box %s with a confidence "
+ "score of %.2f.%n",
label, formField.getFieldValue(), formField.getValueText().getText(), boundingBoxStr,
formField.getConfidence());
unLabeledForm.getFields().entrySet()
.stream()
.filter(formFieldEntry -> "Vendor Name:".equals(formFieldEntry.getValue().getLabelText().getText()))
.findAny()
.ifPresent(formFieldEntry ->
System.out.printf("The Vendor name is: %s%n", formFieldEntry.getValue()));
})));
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
e.printStackTrace();
}
} | class AdvancedDiffLabeledUnlabeledDataAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | class AdvancedDiffLabeledUnlabeledDataAsync {
/**
* Main method to invoke this demo.
*
* @param args Unused arguments to the program.
*
* @throws IOException Exception thrown when there is an error in reading all the bytes from the File.
*/
} | |
could set isNull always true in create ArrayType | public Type clickhouseTypeToDoris(JdbcFieldSchema fieldSchema) {
String ckType = fieldSchema.getDataTypeName();
boolean isNull = false;
if (ckType.startsWith("LowCardinality")) {
ckType = ckType.substring(15, ckType.length() - 1);
if (ckType.startsWith("Nullable")) {
isNull = true;
ckType = ckType.substring(9, ckType.length() - 1);
}
} else if (ckType.startsWith("Nullable")) {
isNull = true;
ckType = ckType.substring(9, ckType.length() - 1);
}
if (ckType.startsWith("Decimal")) {
String[] accuracy = ckType.substring(8, ckType.length() - 1).split(", ");
int precision = Integer.parseInt(accuracy[0]);
int scale = Integer.parseInt(accuracy[1]);
return createDecimalOrStringType(precision, scale);
} else if ("String".contains(ckType) || ckType.startsWith("Enum")
|| ckType.startsWith("IPv") || "UUID".contains(ckType)
|| ckType.startsWith("FixedString")) {
return ScalarType.createStringType();
} else if (ckType.startsWith("DateTime")) {
return ScalarType.createDatetimeV2Type(0);
} else if (ckType.startsWith("Array")) {
String cktype = ckType.substring(6, ckType.length() - 1);
fieldSchema.setDataTypeName(cktype);
Type type = clickhouseTypeToDoris(fieldSchema);
return ArrayType.create(type, isNull);
}
switch (ckType) {
case "Bool":
return Type.BOOLEAN;
case "Int8":
return Type.TINYINT;
case "Int16":
case "UInt8":
return Type.SMALLINT;
case "Int32":
case "UInt16":
return Type.INT;
case "Int64":
case "UInt32":
return Type.BIGINT;
case "Int128":
case "UInt64":
return Type.LARGEINT;
case "Int256":
case "UInt128":
case "UInt256":
return ScalarType.createStringType();
case "Float32":
return Type.FLOAT;
case "Float64":
return Type.DOUBLE;
case "Date":
case "Date32":
return ScalarType.createDateV2Type();
default:
return Type.UNSUPPORTED;
}
} | return ArrayType.create(type, isNull); | public Type clickhouseTypeToDoris(JdbcFieldSchema fieldSchema) {
String ckType = fieldSchema.getDataTypeName();
if (ckType.startsWith("LowCardinality")) {
ckType = ckType.substring(15, ckType.length() - 1);
if (ckType.startsWith("Nullable")) {
ckType = ckType.substring(9, ckType.length() - 1);
}
} else if (ckType.startsWith("Nullable")) {
ckType = ckType.substring(9, ckType.length() - 1);
}
if (ckType.startsWith("Decimal")) {
String[] accuracy = ckType.substring(8, ckType.length() - 1).split(", ");
int precision = Integer.parseInt(accuracy[0]);
int scale = Integer.parseInt(accuracy[1]);
return createDecimalOrStringType(precision, scale);
} else if ("String".contains(ckType) || ckType.startsWith("Enum")
|| ckType.startsWith("IPv") || "UUID".contains(ckType)
|| ckType.startsWith("FixedString")) {
return ScalarType.createStringType();
} else if (ckType.startsWith("DateTime")) {
return ScalarType.createDatetimeV2Type(0);
} else if (ckType.startsWith("Array")) {
String cktype = ckType.substring(6, ckType.length() - 1);
fieldSchema.setDataTypeName(cktype);
Type type = clickhouseTypeToDoris(fieldSchema);
return ArrayType.create(type, true);
}
switch (ckType) {
case "Bool":
return Type.BOOLEAN;
case "Int8":
return Type.TINYINT;
case "Int16":
case "UInt8":
return Type.SMALLINT;
case "Int32":
case "UInt16":
return Type.INT;
case "Int64":
case "UInt32":
return Type.BIGINT;
case "Int128":
case "UInt64":
return Type.LARGEINT;
case "Int256":
case "UInt128":
case "UInt256":
return ScalarType.createStringType();
case "Float32":
return Type.FLOAT;
case "Float64":
return Type.DOUBLE;
case "Date":
case "Date32":
return ScalarType.createDateV2Type();
default:
return Type.UNSUPPORTED;
}
} | class JdbcFieldSchema {
private String columnName;
private int dataType;
private String dataTypeName;
private int columnSize;
private int decimalDigits;
private int numPrecRadix;
private String remarks;
private int charOctetLength;
private boolean isAllowNull;
} | class JdbcFieldSchema {
private String columnName;
private int dataType;
private String dataTypeName;
private int columnSize;
private int decimalDigits;
private int numPrecRadix;
private String remarks;
private int charOctetLength;
private boolean isAllowNull;
} |
Do we want to install both if we already know one of them is installed? | public void testInstall() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 1, "");
commandSupplier.expectCommand(
"yum install --assumeyes --enablerepo=repo-name package-1 package-2",
0,
"");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
commandSupplier.verifyInvocations();
} | "yum install --assumeyes --enablerepo=repo-name package-1 package-2", | public void testInstall() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 1, "");
commandSupplier.expectCommand(
"yum install --assumeyes --enablerepo=repo-name package-1 package-2",
0,
"");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
commandSupplier.verifyInvocations();
} | class YumTest {
@Test
public void testAlreadyInstalled() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 0, "");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
commandSupplier.verifyInvocations();
}
@Test
@Test(expected = CommandException.class)
public void testFailedInstall() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 1, "");
commandSupplier.expectCommand(
"yum install --assumeyes --enablerepo=repo-name package-1 package-2",
1,
"error");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
fail();
}
} | class YumTest {
@Test
public void testAlreadyInstalled() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 0, "");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
commandSupplier.verifyInvocations();
}
@Test
@Test(expected = CommandException.class)
public void testFailedInstall() {
TaskContext taskContext = mock(TaskContext.class);
TestCommandSupplier commandSupplier = new TestCommandSupplier(taskContext);
commandSupplier.expectCommand("yum list installed package-1", 0, "");
commandSupplier.expectCommand("yum list installed package-2", 1, "");
commandSupplier.expectCommand(
"yum install --assumeyes --enablerepo=repo-name package-1 package-2",
1,
"error");
Yum yum = new Yum(taskContext, commandSupplier);
yum.install("package-1", "package-2")
.enableRepo("repo-name")
.converge();
fail();
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.