INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Extract the (op_name, operator_params) tuple from a directive object.
def _get_filter_op_name_and_values(directive):
    """Extract the (op_name, operator_params) tuple from a directive object.

    Args:
        directive: a GraphQL @filter directive AST object.

    Returns:
        tuple (op_name, operator_params):
            - op_name: string, the name of the filtering operator
            - operator_params: list of the values supplied to the operator
    """
    args = get_uniquely_named_objects_by_name(directive.arguments)
    if 'op_name' not in args:
        # Fixed: the two message fragments previously concatenated without a space.
        raise AssertionError(u'op_name not found in filter directive arguments! '
                             u'Validation should have caught this: {}'.format(directive))
    # New: mirror the op_name check so a missing "value" argument raises the intended
    # assertion instead of an opaque KeyError below.
    if 'value' not in args:
        raise AssertionError(u'value not found in filter directive arguments! '
                             u'Validation should have caught this: {}'.format(directive))

    # HACK(predrag): Workaround for graphql-core validation issue
    #                https://github.com/graphql-python/graphql-core/issues/97
    if not isinstance(args['value'].value, ListValue):
        raise GraphQLValidationError(u'Filter directive value was not a list: {}'.format(directive))

    op_name = args['op_name'].value.value
    operator_params = [x.value for x in args['value'].value.values]

    return (op_name, operator_params)
Return True if we have a filter directive whose operator applies to the outer scope.
def is_filter_with_outer_scope_vertex_field_operator(directive):
    """Return True if we have a filter directive whose operator applies to the outer scope."""
    # Only @filter directives can carry such operators in the first place.
    if directive.name.value != 'filter':
        return False

    operator_name, _ = _get_filter_op_name_and_values(directive)
    return operator_name in OUTER_SCOPE_VERTEX_FIELD_OPERATORS
Return a Filter basic block that corresponds to the filter operation in the directive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: a Filter basic block that performs the requested filtering operation
def process_filter_directive(filter_operation_info, location, context):
    """Return a Filter basic block that corresponds to the filter operation in the directive.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!

    Returns:
        a Filter basic block that performs the requested filtering operation
    """
    op_name, operator_params = _get_filter_op_name_and_values(filter_operation_info.directive)

    non_comparison_filters = {
        u'name_or_alias': _process_name_or_alias_filter_directive,
        u'between': _process_between_filter_directive,
        u'in_collection': _process_in_collection_filter_directive,
        u'has_substring': _process_has_substring_filter_directive,
        u'contains': _process_contains_filter_directive,
        u'intersects': _process_intersects_filter_directive,
        u'has_edge_degree': _process_has_edge_degree_filter_directive,
    }

    # Sanity check: every declared operator must have a handler, either above or
    # via the shared comparison-operator handler.
    all_recognized_filters = frozenset(non_comparison_filters.keys()) | COMPARISON_OPERATORS
    if all_recognized_filters != ALL_OPERATORS:
        unrecognized_filters = ALL_OPERATORS - all_recognized_filters
        raise AssertionError(u'Some filtering operators are defined but do not have an associated '
                             u'processing function. This is a bug: {}'.format(unrecognized_filters))

    if op_name in COMPARISON_OPERATORS:
        handler = partial(_process_comparison_filter_directive, operator=op_name)
    else:
        handler = non_comparison_filters.get(op_name, None)
    if handler is None:
        raise GraphQLCompilationError(u'Unknown op_name for filter directive: {}'.format(op_name))

    # Operators that do not affect the inner scope require a field name to which they apply.
    # There is no field name on InlineFragment ASTs, which is why only operators that affect
    # the inner scope make semantic sense when applied to InlineFragments.
    # Here, we ensure that we either have a field name to which the filter applies,
    # or that the operator affects the inner scope.
    if (filter_operation_info.field_name is None and
            op_name not in INNER_SCOPE_VERTEX_FIELD_OPERATORS):
        raise GraphQLCompilationError(u'The filter with op_name "{}" must be applied on a field. '
                                      u'It may not be applied on a type coercion.'.format(op_name))

    if op_name == 'name_or_alias':
        fields = ('name', 'alias')
    else:
        fields = (filter_operation_info.field_name,)

    context['metadata'].record_filter_info(
        location,
        FilterInfo(fields=fields, op_name=op_name, args=tuple(operator_params))
    )

    return handler(filter_operation_info, location, context, operator_params)
Return the GraphQL type name of a node.
def get_schema_type_name(node, context):
    """Return the GraphQL type name of a node."""
    path = node.query_path
    mapping = context.query_path_to_location_info
    if path not in mapping:
        raise AssertionError(
            u'Unable to find type name for query path {} with context {}.'.format(
                path, context))
    # The location info records the GraphQL type at this query path.
    return mapping[path].type.name
Return the Selectable Union[Table, CTE] associated with the node.
def get_node_selectable(node, context):
    """Return the Selectable Union[Table, CTE] associated with the node."""
    path = node.query_path
    mapping = context.query_path_to_selectable
    if path not in mapping:
        raise AssertionError(
            u'Unable to find selectable for query path {} with context {}.'.format(
                path, context))
    return mapping[path]
Return the SqlNode associated with the query path.
def get_node_at_path(query_path, context):
    """Return the SqlNode associated with the query path."""
    mapping = context.query_path_to_node
    if query_path not in mapping:
        raise AssertionError(
            u'Unable to find SqlNode for query path {} with context {}.'.format(
                query_path, context))
    return mapping[query_path]
Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: Optional[column], the SQLAlchemy column if found, None otherwise.
def try_get_column(column_name, node, context):
    """Attempt to get a column by name from the selectable.

    Args:
        column_name: str, name of the column to retrieve.
        node: SqlNode, the node the column is being retrieved for.
        context: CompilationContext, compilation specific metadata.

    Returns:
        Optional[column], the SQLAlchemy column if found, None otherwise.
    """
    target = get_node_selectable(node, context)
    # Every SQLAlchemy selectable exposes its columns via the ".c" collection;
    # its absence indicates a malformed selectable.
    if not hasattr(target, 'c'):
        raise AssertionError(
            u'Selectable "{}" does not have a column collection. Context is {}.'.format(
                target, context))
    return target.c.get(column_name, None)
Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
def get_column(column_name, node, context):
    """Get a column by name from the selectable.

    Args:
        column_name: str, name of the column to retrieve.
        node: SqlNode, the node the column is being retrieved for.
        context: CompilationContext, compilation specific metadata.

    Returns:
        column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
    """
    column = try_get_column(column_name, node, context)
    if column is not None:
        return column

    # Build a descriptive error with the columns that actually exist.
    selectable = get_node_selectable(node, context)
    raise AssertionError(
        u'Column "{}" not found in selectable "{}". Columns present are {}. '
        u'Context is {}.'.format(column_name, selectable.original,
                                 [col.name for col in selectable.c], context))
Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCompilationError in case we find them more than once on the AST node. Args: ast: GraphQL AST node, obtained from the graphql library Returns: dict of string to directive object
def get_unique_directives(ast):
    """Return a dict of directive name to directive object for the given AST node.

    Any directives that are allowed to exist more than once on any AST node are ignored.
    For any directives that can only exist up to once, we verify that they are not duplicated
    raising GraphQLCompilationError in case we find them more than once on the AST node.

    Args:
        ast: GraphQL AST node, obtained from the graphql library

    Returns:
        dict of string to directive object
    """
    directives = ast.directives
    if not directives:
        return {}

    result = {}
    for directive_obj in directives:
        name = directive_obj.name.value
        if name in ALLOWED_DUPLICATED_DIRECTIVES:
            # Directives that may be duplicated are intentionally not returned.
            continue
        if name in result:
            raise GraphQLCompilationError(u'Directive was unexpectedly applied twice in the same '
                                          u'location: {} {}'.format(name, directives))
        result[name] = directive_obj
    return result
Get all filter directives that apply to the current field. This helper abstracts away the fact that some vertex field filtering operators apply on the inner scope (the scope of the inner vertex field on which they are applied), whereas some apply on the outer scope (the scope that contains the inner vertex field). See filters.py for more information. Args: ast: a GraphQL AST object for which to load local filters, from the graphql library current_schema_type: GraphQLType, the schema type at the current AST location inner_vertex_fields: a list of inner AST objects representing vertex fields that are within the current field. If currently processing a property field (i.e. there are no inner vertex fields), this argument may be set to None. Returns: list of FilterOperationInfo objects. If the field_ast field is of type InlineFragment, the field_name field is set to None.
def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields):
    """Get all filter directives that apply to the current field.

    This helper abstracts away the fact that some vertex field filtering operators apply
    on the inner scope (the scope of the inner vertex field on which they are applied),
    whereas some apply on the outer scope (the scope that contains the inner vertex field).
    See filters.py for more information.

    Args:
        ast: a GraphQL AST object for which to load local filters, from the graphql library
        current_schema_type: GraphQLType, the schema type at the current AST location
        inner_vertex_fields: a list of inner AST objects representing vertex fields
                             that are within the current field. If currently processing
                             a property field (i.e. there are no inner vertex fields),
                             this argument may be set to None.

    Returns:
        list of FilterOperationInfo objects.
        If the field_ast field is of type InlineFragment, the field_name field is set to None.
    """
    result = []

    if ast.directives:  # directives may be None when the node carries none
        for directive in ast.directives:
            # Of all filters that appear *on the field itself*, only the ones that apply
            # to the outer scope are not considered "local" and are not to be returned.
            if directive.name.value != 'filter':
                continue
            own_field_name = get_ast_field_name_or_none(ast)
            if not is_filter_with_outer_scope_vertex_field_operator(directive):
                # A local filter, applying to the current scope -- return it.
                result.append(FilterOperationInfo(
                    directive=directive, field_name=own_field_name,
                    field_type=current_schema_type, field_ast=ast))
            elif not is_vertex_field_type(current_schema_type):
                # Outer-scope operators only make sense on vertex fields;
                # finding one on a property field is a compilation error.
                raise GraphQLCompilationError(
                    u'Found disallowed filter on a property field: {} {} '
                    u'{}'.format(directive, current_schema_type, own_field_name))
            elif isinstance(ast, InlineFragment):
                raise GraphQLCompilationError(
                    u'Found disallowed filter on a type coercion: {} '
                    u'{}'.format(directive, current_schema_type))
            # Otherwise the filter is valid but non-local: it is applied at this AST node
            # but affects the outer scope vertex field, so we skip over it here.

    if inner_vertex_fields:  # allow the argument to be None
        for inner_ast in inner_vertex_fields:
            for directive in inner_ast.directives:
                # Of all filters that appear on an inner vertex field, only the ones that apply
                # to the outer scope are "local" to the outer field and therefore returned.
                if not is_filter_with_outer_scope_vertex_field_operator(directive):
                    continue
                # The inner AST must not be an InlineFragment, so it must have a field name.
                inner_field_name = get_ast_field_name(inner_ast)
                inner_field_type = get_vertex_field_type(current_schema_type, inner_field_name)
                result.append(FilterOperationInfo(
                    directive=directive, field_name=inner_field_name,
                    field_type=inner_field_type, field_ast=inner_ast))

    return result
Validate the directives that appear at a property field.
def validate_property_directives(directives):
    """Validate the directives that appear at a property field."""
    # Reject any directive that is only legal on vertex fields.
    for name in six.iterkeys(directives):
        if name in VERTEX_ONLY_DIRECTIVES:
            raise GraphQLCompilationError(
                u'Found vertex-only directive {} set on property.'.format(name))
Validate the directives that appear at a vertex field.
def validate_vertex_directives(directives):
    """Validate the directives that appear at a vertex field."""
    # Reject any directive that is only legal on property fields.
    for name in six.iterkeys(directives):
        if name in PROPERTY_ONLY_DIRECTIVES:
            raise GraphQLCompilationError(
                u'Found property-only directive {} set on vertex.'.format(name))
Validate the directives that appear at the root vertex field.
def validate_root_vertex_directives(root_ast):
    """Validate the directives that appear at the root vertex field.

    Args:
        root_ast: GraphQL AST node for the root vertex field.

    Raises:
        GraphQLCompilationError: if a prohibited directive or operator appears at the root.
    """
    directives_present_at_root = set()
    for directive_obj in root_ast.directives:
        directive_name = directive_obj.name.value

        # Outer-scope vertex field operators have no outer scope at the root vertex.
        # Fixed: the two message fragments previously concatenated without a space
        # ("notallowed").
        if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
            raise GraphQLCompilationError(u'Found a filter directive with an operator that is not '
                                          u'allowed on the root vertex: {}'.format(directive_obj))

        directives_present_at_root.add(directive_name)

    disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT
    if disallowed_directives:
        raise GraphQLCompilationError(u'Found prohibited directives on root vertex: '
                                      u'{}'.format(disallowed_directives))
Ensure that the specified vertex field directives are not mutually disallowed.
def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives):
    """Ensure that the specified vertex field directives are not mutually disallowed."""
    present = {
        name: directives.get(name, None)
        for name in ('fold', 'optional', 'output_source', 'recurse')
    }

    # Pairs of directives that may not appear together on the same vertex field.
    mutually_exclusive_pairs = (
        ('fold', 'optional'),
        ('fold', 'output_source'),
        ('fold', 'recurse'),
        ('optional', 'output_source'),
        ('optional', 'recurse'),
    )

    for first, second in mutually_exclusive_pairs:
        if present[first] and present[second]:
            raise GraphQLCompilationError(u'@{} and @{} may not appear at the same '
                                          u'vertex field! Parent location: {}, '
                                          u'vertex field name: {}'
                                          .format(first, second,
                                                  parent_location, vertex_field_name))
Ensure that the specified vertex field directives are allowed in the current context.
def validate_vertex_field_directive_in_context(parent_location, vertex_field_name, directives,
                                               context):
    """Ensure that the specified vertex field directives are allowed in the current context."""
    fold_directive = directives.get('fold', None)
    optional_directive = directives.get('optional', None)
    recurse_directive = directives.get('recurse', None)
    output_source_directive = directives.get('output_source', None)

    fold_context = 'fold' in context
    optional_context = 'optional' in context
    output_source_context = 'output_source' in context

    # None of these directives may appear inside a @fold traversal.
    if fold_context:
        banned_in_fold = (
            (fold_directive, u'@fold'),
            (optional_directive, u'@optional'),
            (output_source_directive, u'@output_source'),
            (recurse_directive, u'@recurse'),
        )
        for directive, directive_display_name in banned_in_fold:
            if directive:
                raise GraphQLCompilationError(u'{} is not allowed within a @fold traversal! '
                                              u'Parent location: {}, vertex field name: {}'
                                              .format(directive_display_name,
                                                      parent_location, vertex_field_name))

    if output_source_context and not fold_directive:
        raise GraphQLCompilationError(u'Found non-fold vertex field after the vertex marked '
                                      u'output source! Parent location: {}, vertex field name: {}'
                                      .format(parent_location, vertex_field_name))

    if optional_context and fold_directive:
        raise GraphQLCompilationError(u'@fold is not allowed within a @optional traversal! '
                                      u'Parent location: {}, vertex field name: {}'
                                      .format(parent_location, vertex_field_name))

    if optional_context and output_source_directive:
        raise GraphQLCompilationError(u'@output_source is not allowed within a @optional '
                                      u'traversal! Parent location: {}, vertex field name: {}'
                                      .format(parent_location, vertex_field_name))
Sanitize and represent a string argument in MATCH.
def _safe_match_string(value):
    """Sanitize and represent a string argument in MATCH."""
    if isinstance(value, six.string_types):
        text = value
    elif isinstance(value, bytes):  # should only happen in py3
        text = value.decode('utf-8')
    else:
        raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: '
                                          u'{}'.format(value))

    # Using JSON encoding means that all unicode literals and special chars
    # (e.g. newlines and backslashes) are replaced by appropriate escape sequences.
    # JSON has the same escaping rules as MATCH / SQL, so no further escaping is necessary.
    return json.dumps(text)
Represent date and datetime objects as MATCH strings.
def _safe_match_date_and_datetime(graphql_type, expected_python_types, value):
    """Represent date and datetime objects as MATCH strings."""
    # Python datetime.datetime is a subclass of datetime.date, but here the two are not
    # interchangeable. Exact type equality (rather than isinstance) is therefore required.
    value_type = type(value)
    if value_type not in tuple(expected_python_types):
        raise GraphQLInvalidArgumentError(u'Expected value to be exactly one of '
                                          u'python types {}, but was {}: '
                                          u'{}'.format(expected_python_types, value_type, value))

    # The serialize() method of GraphQLDate and GraphQLDateTime produces the correct
    # ISO-8601 format that MATCH expects. We then simply represent it as a regular string.
    try:
        serialized_value = graphql_type.serialize(value)
    except ValueError as e:
        raise GraphQLInvalidArgumentError(e)

    return _safe_match_string(serialized_value)
Represent the list of "inner_type" objects in MATCH form.
def _safe_match_list(inner_type, argument_value):
    """Represent the list of "inner_type" objects in MATCH form."""
    stripped_type = strip_non_null_from_type(inner_type)
    if isinstance(stripped_type, GraphQLList):
        raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, '
                                          u'but inner type was {}: '
                                          u'{}'.format(inner_type, argument_value))

    if not isinstance(argument_value, list):
        raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
                                          u'{}'.format(argument_value))

    # Render each element recursively, then join into a MATCH list literal.
    rendered_elements = (
        _safe_match_argument(stripped_type, element)
        for element in argument_value
    )
    return u'[' + u','.join(rendered_elements) + u']'
Return a MATCH (SQL) string representing the given argument value.
def _safe_match_argument(expected_type, argument_value):
    """Return a MATCH (SQL) string representing the given argument value."""
    if GraphQLString.is_same_type(expected_type):
        return _safe_match_string(argument_value)
    elif GraphQLID.is_same_type(expected_type):
        # IDs can be strings or numbers, but the GraphQL library coerces them to strings.
        # We will follow suit and treat them as strings.
        if isinstance(argument_value, six.string_types):
            id_string = argument_value
        elif isinstance(argument_value, bytes):  # should only happen in py3
            id_string = argument_value.decode('utf-8')
        else:
            id_string = six.text_type(argument_value)
        return _safe_match_string(id_string)
    elif GraphQLFloat.is_same_type(expected_type):
        return represent_float_as_str(argument_value)
    elif GraphQLInt.is_same_type(expected_type):
        # Special case: in Python, isinstance(True, int) returns True.
        # Safeguard against this with an explicit check against bool type.
        if isinstance(argument_value, bool):
            raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: '
                                              u'{}'.format(argument_value))
        return type_check_and_str(int, argument_value)
    elif GraphQLBoolean.is_same_type(expected_type):
        return type_check_and_str(bool, argument_value)
    elif GraphQLDecimal.is_same_type(expected_type):
        return _safe_match_decimal(argument_value)
    elif GraphQLDate.is_same_type(expected_type):
        return _safe_match_date_and_datetime(expected_type, (datetime.date,), argument_value)
    elif GraphQLDateTime.is_same_type(expected_type):
        return _safe_match_date_and_datetime(expected_type,
                                             (datetime.datetime, arrow.Arrow),
                                             argument_value)
    elif isinstance(expected_type, GraphQLList):
        return _safe_match_list(expected_type.of_type, argument_value)
    else:
        raise AssertionError(u'Could not safely represent the requested GraphQL type: '
                             u'{} {}'.format(expected_type, argument_value))
Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a MATCH query with inserted argument data
def insert_arguments_into_match_query(compilation_result, arguments):
    """Insert the arguments into the compiled MATCH query to form a complete query.

    Args:
        compilation_result: a CompilationResult object derived from the GraphQL compiler
        arguments: dict, mapping argument name to its value, for every parameter the query expects.

    Returns:
        string, a MATCH query with inserted argument data
    """
    if compilation_result.language != MATCH_LANGUAGE:
        raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))

    base_query = compilation_result.query
    argument_types = compilation_result.input_metadata

    # The arguments are assumed to have already been validated against the query.
    sanitized_arguments = {}
    for name, value in six.iteritems(arguments):
        sanitized_arguments[name] = _safe_match_argument(argument_types[name], value)

    return base_query.format(**sanitized_arguments)
Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name.
def get_table(self, schema_type):
    """Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name."""
    # Tables are registered under the lowercased schema type name.
    key = schema_type.lower()
    if self.has_table(key):
        return self.table_name_to_table[key]
    raise exceptions.GraphQLCompilationError(
        'No Table found in SQLAlchemy metadata for table name "{}"'.format(key)
    )
Construct a MatchStep from a tuple of its constituent blocks.
def _per_location_tuple_to_step(ir_tuple):
    """Construct a MatchStep from a tuple of its constituent blocks.

    Args:
        ir_tuple: tuple of IR blocks for a single MATCH step. The first element must be
                  a root block type; the rest may be CoerceType, Filter, and MarkLocation blocks.

    Returns:
        MatchStep namedtuple with each block assigned to its MATCH clause.
    """
    root_block = ir_tuple[0]
    if not isinstance(root_block, root_block_types):
        raise AssertionError(u'Unexpected root block type for MatchStep: '
                             u'{} {}'.format(root_block, ir_tuple))

    coerce_type_block = None
    where_block = None
    as_block = None

    for block in ir_tuple[1:]:
        if isinstance(block, CoerceType):
            if coerce_type_block is not None:
                raise AssertionError(u'Unexpectedly found two blocks eligible for "class" clause: '
                                     u'{} {} {}'.format(block, coerce_type_block, ir_tuple))
            coerce_type_block = block
        elif isinstance(block, MarkLocation):
            if as_block is not None:
                raise AssertionError(u'Unexpectedly found two blocks eligible for "as" clause: '
                                     u'{} {} {}'.format(block, as_block, ir_tuple))
            as_block = block
        elif isinstance(block, Filter):
            if where_block is not None:
                # Fixed: this message previously formatted as_block instead of the
                # previously-seen Filter (where_block), reporting the wrong block.
                raise AssertionError(u'Unexpectedly found two blocks eligible for "where" clause: '
                                     u'{} {} {}'.format(block, where_block, ir_tuple))
            # Filter always comes before MarkLocation in a given MatchStep.
            if as_block is not None:
                raise AssertionError(u'Unexpectedly found MarkLocation before Filter in '
                                     u'MatchStep: {} {} {}'.format(block, where_block, ir_tuple))
            where_block = block
        else:
            raise AssertionError(u'Unexpected block encountered: {} {}'.format(block, ir_tuple))

    step = MatchStep(root_block=root_block,
                     coerce_type_block=coerce_type_block,
                     where_block=where_block,
                     as_block=as_block)

    # MatchSteps with Backtrack as the root block should only contain MarkLocation,
    # and not do filtering or type coercion.
    if isinstance(root_block, Backtrack):
        if where_block is not None or coerce_type_block is not None:
            raise AssertionError(u'Unexpected blocks in Backtrack-based MatchStep: {}'.format(step))

    return step
Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that correspond to a single MATCH step.
def _split_ir_into_match_steps(pruned_ir_blocks):
    """Split a list of IR blocks into per-location MATCH steps.

    Args:
        pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step.

    Returns:
        list of MatchStep namedtuples, each of which contains all basic blocks that correspond
        to a single MATCH step.
    """
    step_tuples = []
    current = None

    for block in pruned_ir_blocks:
        if isinstance(block, OutputSource):
            # OutputSource blocks do not require any MATCH code, and only serve to help
            # optimizations and debugging. Simply omit them at this stage.
            continue
        if isinstance(block, root_block_types):
            # A root block starts a brand new MATCH step.
            if current is not None:
                step_tuples.append(current)
            current = (block,)
        elif isinstance(block, (CoerceType, Filter, MarkLocation)):
            current += (block,)
        else:
            raise AssertionError(u'Unexpected block type when converting to MATCH query: '
                                 u'{} {}'.format(block, pruned_ir_blocks))

    if current is None:
        raise AssertionError(u'current_tuple was unexpectedly None: {}'.format(pruned_ir_blocks))
    step_tuples.append(current)

    return [_per_location_tuple_to_step(step_tuple) for step_tuple in step_tuples]
Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal.
def _split_match_steps_into_match_traversals(match_steps):
    """Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal."""
    traversals = []
    current_traversal = None

    for step in match_steps:
        if isinstance(step.root_block, QueryRoot):
            # Each QueryRoot begins a brand new traversal.
            if current_traversal is not None:
                traversals.append(current_traversal)
            current_traversal = [step]
        else:
            current_traversal.append(step)

    if current_traversal is None:
        raise AssertionError(u'current_list was unexpectedly None: {}'.format(match_steps))
    traversals.append(current_traversal)

    return traversals
Extract all global operation blocks (all blocks following GlobalOperationsStart). Args: ir_blocks_except_output_and_folds: list of IR blocks (excluding ConstructResult and all fold blocks), to extract global operations from Returns: tuple (global_operation_blocks, remaining_ir_blocks): - global_operation_blocks: list of IR blocks following a GlobalOperationsStart block if it exists, and an empty list otherwise - remaining_ir_blocks: list of IR blocks excluding GlobalOperationsStart and all global operation blocks
def _extract_global_operations(ir_blocks_except_output_and_folds):
    """Extract all global operation blocks (all blocks following GlobalOperationsStart).

    Args:
        ir_blocks_except_output_and_folds: list of IR blocks (excluding ConstructResult and all
                                           fold blocks), to extract global operations from

    Returns:
        tuple (global_operation_blocks, remaining_ir_blocks):
            - global_operation_blocks: list of IR blocks following a GlobalOperationsStart block
                                       if it exists, and an empty list otherwise
            - remaining_ir_blocks: list of IR blocks excluding GlobalOperationsStart and all
                                   global operation blocks
    """
    global_operation_blocks = []
    remaining_ir_blocks = []
    seen_global_operations_start = False

    for block in ir_blocks_except_output_and_folds:
        if isinstance(block, (ConstructResult, Fold, Unfold)):
            raise AssertionError(u'Received unexpected block of type {}. No ConstructResult or '
                                 u'Fold/Unfold blocks should be present: {}'
                                 .format(type(block).__name__, ir_blocks_except_output_and_folds))
        if isinstance(block, GlobalOperationsStart):
            # The marker itself is dropped; everything after it is a global operation.
            seen_global_operations_start = True
        elif seen_global_operations_start:
            global_operation_blocks.append(block)
        else:
            remaining_ir_blocks.append(block)

    return global_operation_blocks, remaining_ir_blocks
Convert the list of IR blocks into a MatchQuery object, for easier manipulation.
def convert_to_match_query(ir_blocks):
    """Convert the list of IR blocks into a MatchQuery object, for easier manipulation."""
    output_block = ir_blocks[-1]
    if not isinstance(output_block, ConstructResult):
        raise AssertionError(u'Expected last IR block to be ConstructResult, found: '
                             u'{} {}'.format(output_block, ir_blocks))

    ir_except_output = ir_blocks[:-1]
    folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output)

    # A single global Filter block (if present) becomes the query-level WHERE block.
    global_operation_blocks, pruned_ir_blocks = _extract_global_operations(
        ir_except_output_and_folds)
    if len(global_operation_blocks) > 1:
        raise AssertionError(u'Received IR blocks with multiple global operation blocks. Only one '
                             u'is allowed: {} {}'.format(global_operation_blocks, ir_blocks))

    where_block = None
    if global_operation_blocks:
        if not isinstance(global_operation_blocks[0], Filter):
            raise AssertionError(u'Received non-Filter global operation block. {}'
                                 .format(global_operation_blocks[0]))
        where_block = global_operation_blocks[0]

    match_steps = _split_ir_into_match_steps(pruned_ir_blocks)
    match_traversals = _split_match_steps_into_match_traversals(match_steps)

    return MatchQuery(
        match_traversals=match_traversals,
        folds=folds,
        output_block=output_block,
        where_block=where_block,
    )
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectable, a executable SQL query with parameters bound.
def insert_arguments_into_sql_query(compilation_result, arguments):
    """Insert the arguments into the compiled SQL query to form a complete query.

    Args:
        compilation_result: CompilationResult, compilation result from the GraphQL compiler.
        arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects.

    Returns:
        SQLAlchemy Selectable, an executable SQL query with parameters bound.
    """
    if compilation_result.language != SQL_LANGUAGE:
        raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
    # Bind every provided argument onto the compiled query.
    return compilation_result.query.params(**arguments)
Create an "INSTANCEOF" Filter block from a CoerceType block.
def convert_coerce_type_to_instanceof_filter(coerce_type_block):
    """Create an "INSTANCEOF" Filter block from a CoerceType block.

    Args:
        coerce_type_block: CoerceType block with exactly one target class

    Returns:
        Filter block whose predicate is an INSTANCEOF check against that target class
    """
    target_class_name = get_only_element_from_collection(coerce_type_block.target_class)

    # INSTANCEOF requires the target class to be passed in as a string,
    # so the target class is wrapped in a string Literal.
    instanceof_predicate = BinaryComposition(
        u'INSTANCEOF',
        LocalField('@this'),
        Literal(target_class_name))
    return Filter(instanceof_predicate)
Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.
def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block):
    """Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.

    Args:
        coerce_type_block: CoerceType block to convert
        where_block: existing Filter block to merge with, or a falsy value if none exists

    Returns:
        Filter block containing the INSTANCEOF predicate, conjoined with the existing
        Filter's predicate when one was provided
    """
    instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block)

    if not where_block:
        return instanceof_filter

    # A Filter block already exists -- conjoin its predicate with the new INSTANCEOF predicate.
    return Filter(
        BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate))
Convert a list of expressions to an Expression that is the conjunction of all of them.
def expression_list_to_conjunction(expression_list):
    """Convert a list of expressions to an Expression that is the conjunction of all of them.

    Args:
        expression_list: list of Expression objects; may be empty

    Returns:
        Expression equivalent to AND-ing together all input expressions;
        TrueLiteral when the list is empty
    """
    if not isinstance(expression_list, list):
        raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list))
    if len(expression_list) == 0:
        return TrueLiteral

    for expression in expression_list:
        if not isinstance(expression, Expression):
            raise AssertionError(u'Non-Expression object {} found in expression_list'
                                 .format(expression))

    if len(expression_list) == 1:
        return expression_list[0]

    # Fold right-to-left, producing the same nesting the recursive formulation would:
    # for [a, b, c] the result is (&& (&& c b) a).
    conjunction = expression_list[-1]
    for expression in reversed(expression_list[:-1]):
        conjunction = BinaryComposition(u'&&', conjunction, expression)
    return conjunction
Return an Expression that is True iff the specified edge (edge_expression) does not exist.
def filter_edge_field_non_existence(edge_expression):
    """Return an Expression that is True iff the specified edge (edge_expression) does not exist.

    When an edge does not exist at a given vertex, OrientDB represents that in one of two ways:
    the edge's field does not exist (is null) on the vertex document, or the edge's field does
    exist but is an empty list. The returned predicate covers both possibilities.
    """
    if not isinstance(edge_expression, (LocalField, GlobalContextField)):
        raise AssertionError(u'Received invalid edge_expression {} of type {}.'
                             u'Expected LocalField or GlobalContextField.'
                             .format(edge_expression, type(edge_expression).__name__))

    if isinstance(edge_expression, LocalField):
        if not is_vertex_field_name(edge_expression.field_name):
            raise AssertionError(u'Received LocalField edge_expression {} with non-edge field_name '
                                 u'{}.'.format(edge_expression, edge_expression.field_name))

    # Case 1: the edge field is entirely absent (null) from the document.
    field_is_null = BinaryComposition(u'=', edge_expression, NullLiteral)
    # Case 2: the edge field exists but contains no edges.
    field_is_empty = BinaryComposition(
        u'=', UnaryTransformation(u'size', edge_expression), ZeroLiteral)

    return BinaryComposition(u'||', field_is_null, field_is_empty)
Return an Expression that is False for rows that don't follow the @optional specification. OrientDB does not filter correctly within optionals. Namely, a result where the optional edge DOES EXIST will be returned regardless of whether the inner filter is satisfied. To mitigate this, we add a final filter to reject such results. A valid result must satisfy either of the following: - The location within the optional exists (the filter will have been applied in this case) - The optional edge field does not exist at the root location of the optional traverse So, if the inner location within the optional was never visited, it must be the case that the corresponding edge field does not exist at all. Example: A MATCH traversal which starts at location `Animal___1`, and follows the optional edge `out_Animal_ParentOf` to the location `Animal__out_Animal_ParentOf___1` results in the following filtering Expression: ( ( (Animal___1.out_Animal_ParentOf IS null) OR (Animal___1.out_Animal_ParentOf.size() = 0) ) OR (Animal__out_Animal_ParentOf___1 IS NOT null) ) Here, the `optional_edge_location` is `Animal___1.out_Animal_ParentOf`. Args: query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). optional_edge_location: Location object representing the optional edge field inner_location_name: string representing location within the corresponding optional traverse Returns: Expression that evaluates to False for rows that do not follow the @optional specification
def _filter_orientdb_simple_optional_edge(
        query_metadata_table, optional_edge_location, inner_location_name):
    """Return an Expression that is False for rows that don't follow the @optional specification.

    OrientDB does not filter correctly within optionals: a result where the optional edge
    DOES EXIST will be returned regardless of whether the inner filter is satisfied.
    To mitigate this, a final filter is added to reject such results. A valid result must
    satisfy either of the following:
        - the location within the optional exists (the filter was applied in that case), or
        - the optional edge field does not exist at the root location of the optional traverse.
    So, if the inner location within the optional was never visited, it must be the case that
    the corresponding edge field does not exist at all.

    Example:
        For a traversal starting at `Animal___1` that follows the optional edge
        `out_Animal_ParentOf` to `Animal__out_Animal_ParentOf___1`, the produced filter is:
            (
                (
                    (Animal___1.out_Animal_ParentOf IS null)
                    OR
                    (Animal___1.out_Animal_ParentOf.size() = 0)
                )
                OR
                (Animal__out_Animal_ParentOf___1 IS NOT null)
            )

    Args:
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        optional_edge_location: Location object representing the optional edge field
        inner_location_name: string representing location within the corresponding
                             optional traverse

    Returns:
        Expression that evaluates to False for rows that do not follow the @optional
        specification
    """
    inner_location_was_visited = BinaryComposition(
        u'!=', LocalField(inner_location_name), NullLiteral)

    # The optional_edge_location here actually refers to the edge field itself.
    # This is definitely non-standard, but required to get the proper semantics.
    # To get its type, we construct the location of the vertex field on the other
    # side of the edge.
    far_vertex_location = (
        optional_edge_location.at_vertex().navigate_to_subpath(optional_edge_location.field)
    )
    far_vertex_type = query_metadata_table.get_location_info(far_vertex_location).type

    edge_field = GlobalContextField(optional_edge_location, far_vertex_type)
    edge_does_not_exist = filter_edge_field_non_existence(edge_field)

    return BinaryComposition(u'||', edge_does_not_exist, inner_location_was_visited)
Return an Expression that is True if and only if each simple optional filter is True. Construct filters for each simple optional, that are True if and only if `edge_field` does not exist in the `simple_optional_root_location` OR the `inner_location` is not defined. Return an Expression that evaluates to True if and only if *all* of the aforementioned filters evaluate to True (conjunction). Args: query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). simple_optional_root_info: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object corresponding to the unique MarkLocation present within a simple @optional (one that does not expand vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope Returns: a new Expression object
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
    """Return an Expression that is True if and only if each simple optional filter is True.

    Construct filters for each simple optional, that are True if and only if `edge_field` does
    not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
    Return an Expression that evaluates to True if and only if *all* of the aforementioned
    filters evaluate to True (conjunction).

    Args:
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
                                   containing keys
                                   - 'inner_location_name': Location object corresponding to the
                                     unique MarkLocation present within a simple @optional (one
                                     that does not expand vertex fields) scope
                                   - 'edge_field': string representing the optional edge being
                                     traversed
                                   where simple_optional_root_to_inner_location is the location
                                   preceding the @optional scope

    Returns:
        a new Expression object
    """
    where_filters_by_inner_name = {}
    for root_location, root_info in six.iteritems(simple_optional_root_info):
        inner_location_name = root_info['inner_location_name']
        optional_edge_location = root_location.navigate_to_field(root_info['edge_field'])
        where_filters_by_inner_name[inner_location_name] = (
            _filter_orientdb_simple_optional_edge(
                query_metadata_table, optional_edge_location, inner_location_name))

    # Order the filters by inner_location_name so the generated predicate is deterministic.
    ordered_filters = [
        where_filters_by_inner_name[name]
        for name in sorted(where_filters_by_inner_name.keys())
    ]
    return expression_list_to_conjunction(ordered_filters)
Return a tree of complex optional root locations. Args: complex_optional_roots: list of @optional locations (location immediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: OptionalTraversalTree object representing the tree of complex optional roots
def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots):
    """Return a tree of complex optional root locations.

    Args:
        complex_optional_roots: list of @optional locations (location immediately preceding
                                an @optional Traverse) that expand vertex fields
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides

    Returns:
        OptionalTraversalTree object representing the tree of complex optional roots
    """
    tree = OptionalTraversalTree(complex_optional_roots)
    # Each stack of optional roots describes one root-to-node path to insert into the tree.
    for optional_roots_stack in six.itervalues(location_to_optional_roots):
        tree.insert(list(optional_roots_stack))
    return tree
Validate that the Between Expression is correctly representable.
def validate(self):
    """Validate that the Between Expression is correctly representable.

    Raises:
        TypeError: if the field is not a LocalField, or either bound is not an Expression
    """
    # Each entry: (value to check, required type, error message template).
    validation_spec = (
        (self.field, LocalField, u'Expected LocalField field, got: {} {}'),
        (self.lower_bound, Expression, u'Expected Expression lower_bound, got: {} {}'),
        (self.upper_bound, Expression, u'Expected Expression upper_bound, got: {} {}'),
    )
    for value, required_type, error_template in validation_spec:
        if not isinstance(value, required_type):
            raise TypeError(error_template.format(type(value).__name__, value))
Create an updated version (if needed) of BetweenClause via the visitor pattern.
def visit_and_update(self, visitor_fn):
    """Create an updated version (if needed) of BetweenClause via the visitor pattern.

    Args:
        visitor_fn: function that takes an expression and returns a (possibly new) expression

    Returns:
        the result of applying visitor_fn to this clause, rebuilt only if a bound changed
    """
    updated_lower = self.lower_bound.visit_and_update(visitor_fn)
    updated_upper = self.upper_bound.visit_and_update(visitor_fn)

    bounds_unchanged = (updated_lower is self.lower_bound and
                        updated_upper is self.upper_bound)
    if bounds_unchanged:
        return visitor_fn(self)
    # At least one bound was rewritten, so a fresh BetweenClause must be constructed.
    return visitor_fn(BetweenClause(self.field, updated_lower, updated_upper))
Return a unicode object with the MATCH representation of this BetweenClause.
def to_match(self):
    """Return a unicode object with the MATCH representation of this BetweenClause."""
    # Delegate to each sub-expression's own MATCH rendering, then interpolate.
    return u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'.format(
        field_name=self.field.to_match(),
        lower_bound=self.lower_bound.to_match(),
        upper_bound=self.upper_bound.to_match())
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last of which must be present in complex_optional_roots
def insert(self, optional_root_locations_path):
    """Insert a path of optional Locations into the tree.

    Each OptionalTraversalTree object contains child Location objects as keys mapping
    to other OptionalTraversalTree objects.

    Args:
        optional_root_locations_path: list of optional root Locations all except the last
                                      of which must be present in complex_optional_roots
    """
    current_parent = self._root_location
    simple_optional_seen = False

    for candidate_root in optional_root_locations_path:
        if simple_optional_seen:
            # Only the final location in the path may be a simple optional root.
            raise AssertionError(u'Encountered simple optional root location {} in path, but'
                                 u'further locations are present. This should not happen: {}'
                                 .format(candidate_root, optional_root_locations_path))

        if candidate_root not in self._location_to_children:
            # Simple optionals are ignored.
            # There should be no complex optionals after a simple optional.
            simple_optional_seen = True
        else:
            self._location_to_children[current_parent].add(candidate_root)
            current_parent = candidate_root
Return a list of all rooted subtrees (each as a list of Location objects).
def get_all_rooted_subtrees_as_lists(self, start_location=None):
    """Return a list of all rooted subtrees (each as a list of Location objects).

    Args:
        start_location: optional Location at which to root the enumeration;
                        defaults to the tree's root location

    Returns:
        list of lists of Locations, one inner list per distinct rooted subtree
    """
    if start_location is not None and start_location not in self._location_to_children:
        raise AssertionError(u'Received invalid start_location {} that was not present '
                             u'in the tree. Present root locations of complex @optional '
                             u'queries (ones that expand vertex fields within) are: {}'
                             .format(start_location, self._location_to_children.keys()))

    if start_location is None:
        start_location = self._root_location

    child_locations = sorted(self._location_to_children[start_location])
    if not child_locations:
        # A node with no children contributes exactly one subtree: the empty one.
        return [[]]

    # Recursively enumerate all rooted subtrees of each child of the current node.
    subtrees_per_child = {
        child: list(self.get_all_rooted_subtrees_as_lists(child))
        for child in child_locations
    }

    # All subsets of the direct child Location objects, smallest subsets first.
    child_subsets = [
        list(subset)
        for subset_size in range(0, len(child_locations) + 1)
        for subset in itertools.combinations(child_locations, subset_size)
    ]

    # For every possible subset of children, and every combination of the chosen
    # children's subtrees, produce one subtree Location list.
    all_subtrees = []
    for chosen_children in child_subsets:
        per_child_options = [subtrees_per_child[child] for child in chosen_children]
        for subtree_combination in itertools.product(*per_child_options):
            flattened_descendants = list(itertools.chain(*subtree_combination))
            all_subtrees.append(chosen_children + flattened_descendants)
    return all_subtrees
Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query
def emit_code_from_ir(sql_query_tree, compiler_metadata):
    """Return a SQLAlchemy Query from a passed SqlQueryTree.

    Args:
        sql_query_tree: SqlQueryTree, tree representation of the query to emit.
        compiler_metadata: SqlMetadata, SQLAlchemy specific metadata.

    Returns:
        SQLAlchemy Query
    """
    # Assemble the per-query compilation state from the tree's lookup tables.
    context = CompilationContext(
        query_path_to_selectable={},
        query_path_to_location_info=sql_query_tree.query_path_to_location_info,
        query_path_to_output_fields=sql_query_tree.query_path_to_output_fields,
        query_path_to_filters=sql_query_tree.query_path_to_filters,
        query_path_to_node=sql_query_tree.query_path_to_node,
        compiler_metadata=compiler_metadata,
    )
    return _query_tree_to_query(sql_query_tree.root, context)
Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly aliased SQLAlchemy table.
def _create_table_and_update_context(node, context):
    """Create an aliased table for a SqlNode. Updates the relevant Selectable global context.

    Args:
        node: SqlNode, the current node.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Table, the newly aliased SQLAlchemy table.
    """
    schema_type_name = sql_context_helpers.get_schema_type_name(node, context)
    aliased_table = context.compiler_metadata.get_table(schema_type_name).alias()
    # Record the alias so later passes can resolve this node's columns.
    context.query_path_to_selectable[node.query_path] = aliased_table
    return aliased_table
Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query.
def _create_query(node, context):
    """Create a query from a SqlNode.

    Args:
        node: SqlNode, the current node.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Selectable, selectable of the generated query.
    """
    visited_nodes = [node]
    selectable = sql_context_helpers.get_node_selectable(node, context)
    output_columns = _get_output_columns(visited_nodes, context)
    filters = _get_filters(visited_nodes, context)
    # SELECT <outputs> FROM <selectable> WHERE <all filters AND-ed together>.
    return select(output_columns).select_from(selectable).where(and_(*filters))
Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query.
def _get_output_columns(nodes, context):
    """Get the output columns for a list of SqlNodes.

    Args:
        nodes: List[SqlNode], the nodes to get output columns from.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        List[Column], list of SqlAlchemy Columns to output for this query.
    """
    # Each output becomes a column labeled with its requested output name.
    return [
        sql_context_helpers.get_column(sql_output.field_name, node, context)
                           .label(sql_output.output_name)
        for node in nodes
        for sql_output in sql_context_helpers.get_outputs(node, context)
    ]
Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions.
def _get_filters(nodes, context):
    """Get filters to apply to a list of SqlNodes.

    Args:
        nodes: List[SqlNode], the SqlNodes to get filters for.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        List[Expression], list of SQLAlchemy expressions.
    """
    # Translate every Filter block attached to every node into a SQLAlchemy expression.
    return [
        _transform_filter_to_sql(filter_block, node, context)
        for node in nodes
        for filter_block in sql_context_helpers.get_filters(node, context)
    ]
Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression equivalent to the Filter.predicate expression.
def _transform_filter_to_sql(filter_block, node, context):
    """Transform a Filter block to its corresponding SQLAlchemy expression.

    Args:
        filter_block: Filter, the Filter block to transform.
        node: SqlNode, the node Filter block applies to.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Expression, SQLAlchemy expression equivalent to the Filter.predicate expression.
    """
    # The Filter block is just a wrapper around its predicate expression.
    return _expression_to_sql(filter_block.predicate, node, context)
Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy Expression equivalent to the passed compiler expression.
def _expression_to_sql(expression, node, context):
    """Recursively transform a Filter block predicate to its SQLAlchemy expression representation.

    Args:
        expression: expression, the compiler expression to transform.
        node: SqlNode, the SqlNode the expression applies to.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Expression, SQLAlchemy Expression equivalent to the passed compiler expression.
    """
    # Dispatch table from compiler expression type to its transformer function.
    _expression_transformers = {
        expressions.LocalField: _transform_local_field_to_expression,
        expressions.Variable: _transform_variable_to_expression,
        expressions.Literal: _transform_literal_to_expression,
        expressions.BinaryComposition: _transform_binary_composition_to_expression,
    }
    transformer = _expression_transformers.get(type(expression), None)
    if transformer is None:
        raise NotImplementedError(
            u'Unsupported compiler expression "{}" of type "{}" cannot be converted to SQL '
            u'expression.'.format(expression, type(expression)))
    return transformer(expression, node, context)
Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
def _transform_binary_composition_to_expression(expression, node, context):
    """Transform a BinaryComposition compiler expression into a SQLAlchemy expression.

    Recursively calls _expression_to_sql to convert its left and right sub-expressions.

    Args:
        expression: expression, BinaryComposition compiler expression.
        node: SqlNode, the SqlNode the expression applies to.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Expression, SQLAlchemy expression.
    """
    if expression.operator not in constants.SUPPORTED_OPERATORS:
        raise NotImplementedError(
            u'Filter operation "{}" is not supported by the SQL backend.'.format(
                expression.operator))
    sql_operator = constants.SUPPORTED_OPERATORS[expression.operator]

    left = _expression_to_sql(expression.left, node, context)
    right = _expression_to_sql(expression.right, node, context)

    cardinality = sql_operator.cardinality
    if cardinality == constants.CARDINALITY_UNARY:
        # Column-method operator: invoked as a method on the column with one argument.
        left, right = _get_column_and_bindparam(left, right, sql_operator)
        return getattr(left, sql_operator.name)(right)
    if cardinality == constants.CARDINALITY_BINARY:
        # Free-standing SQLAlchemy function taking both operands.
        return getattr(sql_expressions, sql_operator.name)(left, right)
    if cardinality == constants.CARDINALITY_LIST_VALUED:
        left, right = _get_column_and_bindparam(left, right, sql_operator)
        # Ensure that SQLAlchemy treats the right bind parameter as list valued.
        right.expanding = True
        return getattr(left, sql_operator.name)(right)

    raise AssertionError(u'Unreachable, operator cardinality {} for compiler expression {} is '
                         u'unknown'.format(sql_operator.cardinality, expression))
Return left and right expressions in (Column, BindParameter) order.
def _get_column_and_bindparam(left, right, operator):
    """Return left and right expressions in (Column, BindParameter) order.

    Swaps the operands if needed so that the Column is on the left, then validates both.
    """
    if not isinstance(left, Column):
        # The operands arrived in the opposite order; swap them.
        left, right = right, left

    if not isinstance(left, Column):
        raise AssertionError(
            u'SQLAlchemy operator {} expects Column as left side the of expression, got {} '
            u'of type {} instead.'.format(operator, left, type(left)))
    if not isinstance(right, BindParameter):
        raise AssertionError(
            u'SQLAlchemy operator {} expects BindParameter as the right side of the expression, '
            u'got {} of type {} instead.'.format(operator, right, type(right)))
    return left, right
Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
def _transform_variable_to_expression(expression, node, context): """Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression. """ variable_name = expression.variable_name if not variable_name.startswith(u'$'): raise AssertionError(u'Unexpectedly received variable name {} that is not ' u'prefixed with "$"'.format(variable_name)) return bindparam(variable_name[1:])
Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
def _transform_local_field_to_expression(expression, node, context):
    """Transform a LocalField compiler expression into its SQLAlchemy expression representation.

    Args:
        expression: expression, LocalField compiler expression.
        node: SqlNode, the SqlNode the expression applies to.
        context: CompilationContext, global compilation state and metadata.

    Returns:
        Expression, SQLAlchemy expression.
    """
    # A LocalField maps directly onto a column of the node's table.
    return sql_context_helpers.get_column(expression.field_name, node, context)
Merge consecutive Filter(x), Filter(y) blocks into Filter(x && y) block.
def merge_consecutive_filter_clauses(ir_blocks):
    """Merge consecutive Filter(x), Filter(y) blocks into Filter(x && y) block."""
    if not ir_blocks:
        return ir_blocks

    merged_blocks = [ir_blocks[0]]
    for current_block in ir_blocks[1:]:
        previous_block = merged_blocks[-1]
        if isinstance(previous_block, Filter) and isinstance(current_block, Filter):
            # Two adjacent Filters collapse into one whose predicate is their conjunction.
            combined_predicate = BinaryComposition(
                u'&&', previous_block.predicate, current_block.predicate)
            merged_blocks[-1] = Filter(combined_predicate)
        else:
            merged_blocks.append(current_block)
    return merged_blocks
Lower ContextFieldExistence expressions into lower-level expressions.
def lower_context_field_existence(ir_blocks, query_metadata_table):
    """Lower ContextFieldExistence expressions into lower-level expressions.

    Args:
        ir_blocks: list of IR blocks to rewrite
        query_metadata_table: QueryMetadataTable used to look up each location's type

    Returns:
        new list of IR blocks with every ContextFieldExistence replaced by a
        null-inequality comparison
    """
    def _lower_existence(expression, field_class):
        """Rewrite a ContextFieldExistence into a null-check using the given field class."""
        location_type = query_metadata_table.get_location_info(expression.location).type
        return BinaryComposition(
            u'!=', field_class(expression.location, location_type), NullLiteral)

    def regular_visitor_fn(expression):
        """Rewrite ContextFieldExistence for blocks other than ConstructResult."""
        if isinstance(expression, ContextFieldExistence):
            # Outside ConstructResult, a regular ContextField performs the location check.
            return _lower_existence(expression, ContextField)
        return expression

    def construct_result_visitor_fn(expression):
        """Rewrite ContextFieldExistence for ConstructResult blocks."""
        if isinstance(expression, ContextFieldExistence):
            # Inside ConstructResult, the special OutputContextVertex expression is required.
            return _lower_existence(expression, OutputContextVertex)
        return expression

    return [
        block.visit_and_update_expressions(
            construct_result_visitor_fn if isinstance(block, ConstructResult)
            else regular_visitor_fn)
        for block in ir_blocks
    ]
Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is rewritten into: BinaryComposition('=', something, NullLiteral) Args: ir_blocks: list of basic block objects Returns: a new list of basic block objects, with the optimization applied
def optimize_boolean_expression_comparisons(ir_blocks):
    """Optimize comparisons of a boolean binary comparison expression against a boolean literal.

    Rewriting example:
        BinaryComposition(
            '=',
            BinaryComposition('!=', something, NullLiteral)
            False)

    The above is rewritten into:
        BinaryComposition('=', something, NullLiteral)

    Args:
        ir_blocks: list of basic block objects

    Returns:
        a new list of basic block objects, with the optimization applied
    """
    # Maps each inner comparison operator to the operator that computes its negation.
    operator_inverses = {
        u'=': u'!=',
        u'!=': u'=',
    }

    def visitor_fn(expression):
        """Expression visitor function that performs the above rewriting."""
        if not isinstance(expression, BinaryComposition):
            return expression

        left_is_binary_composition = isinstance(expression.left, BinaryComposition)
        right_is_binary_composition = isinstance(expression.right, BinaryComposition)

        if not left_is_binary_composition and not right_is_binary_composition:
            # Nothing to rewrite, return the expression as-is.
            return expression

        identity_literal = None  # The boolean literal for which we just use the inner expression.
        inverse_literal = None  # The boolean literal for which we negate the inner expression.
        # Comparison against the identity literal leaves the inner expression's truth value
        # unchanged; comparison against the inverse literal negates it.
        if expression.operator == u'=':
            identity_literal = TrueLiteral
            inverse_literal = FalseLiteral
        elif expression.operator == u'!=':
            identity_literal = FalseLiteral
            inverse_literal = TrueLiteral
        else:
            # Not a boolean-literal comparison operator this pass understands.
            return expression

        expression_to_rewrite = None
        if expression.left == identity_literal and right_is_binary_composition:
            return expression.right
        elif expression.right == identity_literal and left_is_binary_composition:
            return expression.left
        elif expression.left == inverse_literal and right_is_binary_composition:
            expression_to_rewrite = expression.right
        elif expression.right == inverse_literal and left_is_binary_composition:
            expression_to_rewrite = expression.left

        if expression_to_rewrite is None:
            # We couldn't find anything to rewrite, return the expression as-is.
            return expression
        elif expression_to_rewrite.operator not in operator_inverses:
            # We can't rewrite the inner expression since we don't know its inverse operator.
            return expression
        else:
            # Negate the inner comparison by swapping its operator for the inverse one.
            return BinaryComposition(
                operator_inverses[expression_to_rewrite.operator],
                expression_to_rewrite.left,
                expression_to_rewrite.right)

    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block.visit_and_update_expressions(visitor_fn)
        new_ir_blocks.append(new_block)

    return new_ir_blocks
Extract all @fold data from the IR blocks, and cut the folded IR blocks out of the IR. Args: ir_blocks: list of IR blocks to extract fold data from Returns: tuple (folds, remaining_ir_blocks): - folds: dict of FoldScopeLocation -> list of IR blocks corresponding to that @fold scope. The list does not contain Fold or Unfold blocks. - remaining_ir_blocks: list of IR blocks that were not part of a Fold-Unfold section.
def extract_folds_from_ir_blocks(ir_blocks):
    """Extract all @fold data from the IR blocks, and cut the folded IR blocks out of the IR.

    Args:
        ir_blocks: list of IR blocks to extract fold data from

    Returns:
        tuple (folds, remaining_ir_blocks):
        - folds: dict of FoldScopeLocation -> list of IR blocks corresponding to that @fold scope.
                 The list does not contain Fold or Unfold blocks.
        - remaining_ir_blocks: list of IR blocks that were not part of a Fold-Unfold section.
    """
    folds = {}
    unfolded_blocks = []       # blocks that lie outside every Fold-Unfold section
    fold_section_blocks = []   # blocks collected inside the currently-open fold section
    open_fold_location = None  # FoldScopeLocation of the open fold, or None when not in a fold

    for block in ir_blocks:
        if isinstance(block, Fold):
            # Nested folds are not allowed: seeing a Fold while one is open is a compiler bug.
            if open_fold_location is not None:
                raise AssertionError(u'in_fold_location was not None at a Fold block: {} {} '
                                     u'{}'.format(fold_section_blocks, unfolded_blocks, ir_blocks))
            open_fold_location = block.fold_scope_location
        elif isinstance(block, Unfold):
            # An Unfold block must close a previously-opened Fold.
            if open_fold_location is None:
                raise AssertionError(u'in_fold_location was None at an Unfold block: {} {} '
                                     u'{}'.format(fold_section_blocks, unfolded_blocks, ir_blocks))
            folds[open_fold_location] = fold_section_blocks
            fold_section_blocks = []
            open_fold_location = None
        elif open_fold_location is not None:
            fold_section_blocks.append(block)
        else:
            unfolded_blocks.append(block)

    return folds, unfolded_blocks
Construct a mapping from locations within @optional to their corresponding optional Traverse. Args: ir_blocks: list of IR blocks to extract optional data from Returns: tuple (complex_optional_roots, location_to_optional_roots): complex_optional_roots: list of @optional locations (location immediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides
def extract_optional_location_root_info(ir_blocks):
    """Construct a mapping from locations within @optional to their corresponding optional Traverse.

    Args:
        ir_blocks: list of IR blocks to extract optional data from

    Returns:
        tuple (complex_optional_roots, location_to_optional_roots):
        complex_optional_roots: list of @optional locations (location immediately preceding
                                an @optional Traverse) that expand vertex fields
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides
    """
    complex_optional_roots = []
    location_to_optional_roots = dict()

    # These are both stacks that perform depth-first search on the tree of @optional edges.
    # At any given location they contain
    # - in_optional_root_locations: all the optional root locations
    # - encountered_traverse_within_optional: whether the optional is complex or not
    # in order that they appear on the path from the root to that location.
    in_optional_root_locations = []
    encountered_traverse_within_optional = []

    # Blocks within folded scopes should not be taken into account in this function.
    _, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)

    # Tracks the most recent MarkLocation seen; it is the root location of any
    # optional Traverse that immediately follows it.
    preceding_location = None
    for current_block in non_folded_ir_blocks:
        if len(in_optional_root_locations) > 0 and isinstance(current_block, (Traverse, Recurse)):
            # Any Traverse/Recurse inside an open @optional scope makes that scope "complex".
            encountered_traverse_within_optional[-1] = True

        if isinstance(current_block, Traverse) and current_block.optional:
            if preceding_location is None:
                raise AssertionError(u'No MarkLocation found before an optional Traverse: {} {}'
                                     .format(current_block, non_folded_ir_blocks))
            # Open a new @optional scope rooted at the preceding marked location.
            in_optional_root_locations.append(preceding_location)
            encountered_traverse_within_optional.append(False)
        elif isinstance(current_block, EndOptional):
            if len(in_optional_root_locations) == 0:
                raise AssertionError(u'in_optional_root_locations was empty at an EndOptional '
                                     u'block: {}'.format(ir_blocks))
            if encountered_traverse_within_optional[-1]:
                complex_optional_roots.append(in_optional_root_locations[-1])
            # Close the innermost @optional scope.
            in_optional_root_locations.pop()
            encountered_traverse_within_optional.pop()
        elif isinstance(current_block, MarkLocation):
            preceding_location = current_block.location
            if len(in_optional_root_locations) != 0:
                # in_optional_root_locations will not be empty if and only if we are within an
                # @optional scope. In this case, we add the current location to the dictionary
                # mapping it to the sequence of optionals locations leading up to it.
                optional_root_locations_stack = tuple(in_optional_root_locations)
                location_to_optional_roots[current_block.location] = optional_root_locations_stack
        else:
            # No locations need to be marked, and no optional scopes begin or end here.
            pass

    return complex_optional_roots, location_to_optional_roots
Construct a map from simple optional locations to their inner location and traversed edge. Args: ir_blocks: list of IR blocks to extract optional data from complex_optional_roots: list of @optional locations (location immediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object corresponding to the unique MarkLocation present within a simple optional (one that does not expand vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope
def extract_simple_optional_location_info(
        ir_blocks, complex_optional_roots, location_to_optional_roots):
    """Construct a map from simple optional locations to their inner location and traversed edge.

    Args:
        ir_blocks: list of IR blocks to extract optional data from
        complex_optional_roots: list of @optional locations (location immediately preceding
                                an @optional traverse) that expand vertex fields
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides

    Returns:
        dict mapping from simple_optional_root_location -> dict containing keys
        - 'inner_location_name': Location object corresponding to the unique MarkLocation present
                                 within a simple optional (one that does not expand vertex fields)
                                 scope
        - 'edge_field': string representing the optional edge being traversed
        where simple_optional_root_to_inner_location is the location preceding the @optional scope
    """
    # Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots)
    # We filter out the ones that are also present in complex_optional_roots.
    # Note: each stack's innermost (last) root is used, so nested locations map to
    # their nearest enclosing optional root.
    location_to_preceding_optional_root_iteritems = six.iteritems({
        location: optional_root_locations_stack[-1]
        for location, optional_root_locations_stack in six.iteritems(location_to_optional_roots)
    })
    simple_optional_root_to_inner_location = {
        optional_root_location: inner_location
        for inner_location, optional_root_location in location_to_preceding_optional_root_iteritems
        if optional_root_location not in complex_optional_roots
    }
    simple_optional_root_locations = set(simple_optional_root_to_inner_location.keys())

    # Blocks within folded scopes should not be taken into account in this function.
    _, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)

    simple_optional_root_info = {}
    # Tracks the most recent MarkLocation seen; it is the root location of any
    # optional Traverse that immediately follows it.
    preceding_location = None
    for current_block in non_folded_ir_blocks:
        if isinstance(current_block, MarkLocation):
            preceding_location = current_block.location
        elif isinstance(current_block, Traverse) and current_block.optional:
            if preceding_location in simple_optional_root_locations:
                # The current optional Traverse is "simple"
                # i.e. it does not contain any Traverses within.
                inner_location = simple_optional_root_to_inner_location[preceding_location]
                inner_location_name, _ = inner_location.get_location_name()
                simple_optional_info_dict = {
                    'inner_location_name': inner_location_name,
                    'edge_field': current_block.get_field_name(),
                }
                simple_optional_root_info[preceding_location] = simple_optional_info_dict

    return simple_optional_root_info
Return a list of IR blocks as a copy of the original, with EndOptional blocks removed.
def remove_end_optionals(ir_blocks):
    """Return a copy of the IR block list with every EndOptional block filtered out."""
    return [block for block in ir_blocks if not isinstance(block, EndOptional)]
Validate that the OutputContextVertex is correctly representable.
def validate(self):
    """Ensure this OutputContextVertex is representable: it must point at a vertex, not a field."""
    super(OutputContextVertex, self).validate()

    location = self.location
    if location.field is not None:
        raise ValueError(u'Expected location at a vertex, but got: {}'.format(location))
Return a unicode object with the MATCH representation of this expression.
def to_match(self):
    """Emit the MATCH (OrientDB) representation of this expression, as a unicode string."""
    self.validate()

    location_name, location_field = self.location.get_location_name()
    validate_safe_string(location_name)

    # This expression must refer to a vertex; a field component here indicates a compiler bug.
    if location_field is not None:
        raise AssertionError(u'Vertex location has non-None field_name: '
                             u'{} {}'.format(location_field, self.location))

    return location_name
Rewrite BinaryConditional expressions in the true/false values of TernaryConditionals.
def rewrite_binary_composition_inside_ternary_conditional(ir_blocks):
    """Rewrite BinaryConditional expressions in the true/false values of TernaryConditionals."""
    def visitor_fn(expression):
        """Rewrite TernaryConditional expressions whose branch values are BinaryCompositions."""
        # MATCH queries do not allow BinaryComposition inside a TernaryConditional's true/false
        # value blocks, since OrientDB cannot produce boolean values for comparisons there.
        # Each offending branch X is wrapped into TernaryConditional(X, true, false), and the
        # whole ternary is then compared against "true" so the overall result stays boolean:
        #     TernaryConditional(pred, X, Y)
        # becomes
        #     BinaryComposition(u'=', TernaryConditional(pred, wrap(X), wrap(Y)), true)
        # where wrap() is applied only to branches that are BinaryCompositions.
        if not isinstance(expression, TernaryConditional):
            return expression

        true_value = expression.if_true
        false_value = expression.if_false

        rewrite_true_branch = isinstance(true_value, BinaryComposition)
        rewrite_false_branch = isinstance(false_value, BinaryComposition)
        if not (rewrite_true_branch or rewrite_false_branch):
            # Neither branch needs rewriting.
            return expression

        if rewrite_true_branch:
            true_value = TernaryConditional(true_value, TrueLiteral, FalseLiteral)
        if rewrite_false_branch:
            false_value = TernaryConditional(false_value, TrueLiteral, FalseLiteral)

        wrapped_ternary = TernaryConditional(expression.predicate, true_value, false_value)
        return BinaryComposition(u'=', wrapped_ternary, TrueLiteral)

    return [
        block.visit_and_update_expressions(visitor_fn)
        for block in ir_blocks
    ]
Lower Filter blocks that use the "has_substring" operation into MATCH-representable form.
def lower_has_substring_binary_compositions(ir_blocks):
    """Lower Filter blocks that use the "has_substring" operation into MATCH-representable form."""
    def visitor_fn(expression):
        """Rewrite "has_substring" BinaryComposition expressions into LIKE comparisons."""
        # MATCH has no "has_substring" operator; its equivalent is the LIKE operator with a
        # "%" wildcard prepended and appended to the substring being matched:
        #     X has_substring Y   =>   X LIKE ("%" + (Y + "%"))
        is_has_substring = (
            isinstance(expression, BinaryComposition) and
            expression.operator == u'has_substring'
        )
        if not is_has_substring:
            return expression

        suffixed_substring = BinaryComposition(u'+', expression.right, Literal('%'))
        like_pattern = BinaryComposition(u'+', Literal('%'), suffixed_substring)
        return BinaryComposition(u'LIKE', expression.left, like_pattern)

    return [
        block.visit_and_update_expressions(visitor_fn)
        for block in ir_blocks
    ]
Truncate one-step traversals that overlap a previous traversal location.
def truncate_repeated_single_step_traversals(match_query):
    """Truncate one-step traversals that overlap a previous traversal location."""
    # Such traversals frequently appear as side-effects of lowering Backtrack blocks,
    # and needlessly complicate the executed queries.
    kept_traversals = []
    seen_locations = set()

    for traversal in match_query.match_traversals:
        if len(traversal) == 1:
            # Single-step traversal detected. If its location was visited already, drop it.
            sole_step = traversal[0]
            if sole_step.as_block is None:
                raise AssertionError(u'Unexpectedly found a single-step traversal with no as_block:'
                                     u' {} {}'.format(traversal, match_query))
            if sole_step.as_block.location in seen_locations:
                continue

        # Keep the traversal, recording every location it marks.
        seen_locations.update(
            step.as_block.location
            for step in traversal
            if step.as_block is not None
        )
        kept_traversals.append(traversal)

    return match_query._replace(match_traversals=kept_traversals)
Lower Backtrack blocks into (QueryRoot, MarkLocation) pairs of blocks.
def lower_backtrack_blocks(match_query, location_types): """Lower Backtrack blocks into (QueryRoot, MarkLocation) pairs of blocks.""" # The lowering works as follows: # 1. Upon seeing a Backtrack block, end the current traversal (if non-empty). # 2. Start new traversal from the type and location to which the Backtrack pointed. # 3. If the Backtrack block had an associated MarkLocation, mark that location # as equivalent to the location where the Backtrack pointed. new_match_traversals = [] location_translations = dict() for current_match_traversal in match_query.match_traversals: new_traversal = [] for step in current_match_traversal: if not isinstance(step.root_block, Backtrack): new_traversal.append(step) else: # 1. Upon seeing a Backtrack block, end the current traversal (if non-empty). if new_traversal: new_match_traversals.append(new_traversal) new_traversal = [] backtrack_location = step.root_block.location backtrack_location_type = location_types[backtrack_location] # 2. Start new traversal from the type and location to which the Backtrack pointed. new_root_block = QueryRoot({backtrack_location_type.name}) new_as_block = MarkLocation(backtrack_location) # 3. If the Backtrack block had an associated MarkLocation, mark that location # as equivalent to the location where the Backtrack pointed. if step.as_block is not None: location_translations[step.as_block.location] = backtrack_location if step.coerce_type_block is not None: raise AssertionError(u'Encountered type coercion in a MatchStep with ' u'a Backtrack root block, this is unexpected: {} {}' .format(step, match_query)) new_step = step._replace(root_block=new_root_block, as_block=new_as_block) new_traversal.append(new_step) new_match_traversals.append(new_traversal) _flatten_location_translations(location_translations) new_match_query = match_query._replace(match_traversals=new_match_traversals) return _translate_equivalent_locations(new_match_query, location_translations)
If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation.
def _flatten_location_translations(location_translations): """If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation. """ sources_to_process = set(six.iterkeys(location_translations)) def _update_translation(source): """Return the proper (fully-flattened) translation for the given location.""" destination = location_translations[source] if destination not in location_translations: # "destination" cannot be translated, no further flattening required. return destination else: # "destination" can itself be translated -- do so, # and then flatten "source" to the final translation as well. sources_to_process.discard(destination) final_destination = _update_translation(destination) location_translations[source] = final_destination return final_destination while sources_to_process: _update_translation(sources_to_process.pop())
Translate Location objects into their equivalent locations, based on the given dict.
def _translate_equivalent_locations(match_query, location_translations):
    """Translate Location objects into their equivalent locations, based on the given dict.

    Args:
        match_query: MatchQuery object whose Locations should be translated
        location_translations: dict of Location -> Location; every occurrence of a key
                               location is rewritten into the corresponding value location

    Returns:
        MatchQuery object with translations applied to the traversal steps, the folds,
        the output block, and the global where block.
    """
    new_match_traversals = []

    def visitor_fn(expression):
        """Expression visitor function used to rewrite expressions with updated Location data."""
        if isinstance(expression, (ContextField, GlobalContextField)):
            # Translate the vertex part of the location, then re-attach the field part (if any).
            old_location = expression.location.at_vertex()
            new_location = location_translations.get(old_location, old_location)
            if expression.location.field is not None:
                new_location = new_location.navigate_to_field(expression.location.field)

            # The Expression could be one of many types, including:
            # - ContextField
            # - GlobalContextField
            # We determine its exact class to make sure we return an object of the same class
            # as the expression being replaced.
            expression_cls = type(expression)
            return expression_cls(new_location, expression.field_type)
        elif isinstance(expression, ContextFieldExistence):
            old_location = expression.location
            new_location = location_translations.get(old_location, old_location)
            return ContextFieldExistence(new_location)
        elif isinstance(expression, FoldedContextField):
            # Update the Location within FoldedContextField
            old_location = expression.fold_scope_location.base_location
            new_location = location_translations.get(old_location, old_location)

            fold_path = expression.fold_scope_location.fold_path
            fold_field = expression.fold_scope_location.field
            new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)
            field_type = expression.field_type

            return FoldedContextField(new_fold_scope_location, field_type)
        else:
            # Expression types without Location data are returned unchanged.
            return expression

    # Rewrite the Locations in the steps of each MATCH traversal.
    for current_match_traversal in match_query.match_traversals:
        new_traversal = []
        for step in current_match_traversal:
            new_step = step

            # If the root_block is a Backtrack, translate its Location if necessary.
            if isinstance(new_step.root_block, Backtrack):
                old_location = new_step.root_block.location
                if old_location in location_translations:
                    new_location = location_translations[old_location]
                    new_step = new_step._replace(root_block=Backtrack(new_location))

            # If the as_block exists, translate its Location if necessary.
            if new_step.as_block is not None:
                old_location = new_step.as_block.location
                if old_location in location_translations:
                    new_location = location_translations[old_location]
                    new_step = new_step._replace(as_block=MarkLocation(new_location))

            # If the where_block exists, update any Location objects in its predicate.
            if new_step.where_block is not None:
                new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn)
                new_step = new_step._replace(where_block=new_where_block)

            new_traversal.append(new_step)
        new_match_traversals.append(new_traversal)

    new_folds = {}
    # Update the Location within each FoldScopeLocation
    for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds):
        fold_path = fold_scope_location.fold_path
        fold_field = fold_scope_location.field
        old_location = fold_scope_location.base_location
        new_location = location_translations.get(old_location, old_location)
        new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)

        new_folds[new_fold_scope_location] = fold_ir_blocks

    # Rewrite the Locations in the ConstructResult output block.
    new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn)

    # Rewrite the Locations in the global where block.
    new_where_block = None
    if match_query.where_block is not None:
        new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn)

    return match_query._replace(match_traversals=new_match_traversals,
                                folds=new_folds,
                                output_block=new_output_block,
                                where_block=new_where_block)
Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Intended for folded IR blocks.
def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks):
    """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Intended for folded IR blocks."""
    return [
        convert_coerce_type_to_instanceof_filter(block) if isinstance(block, CoerceType) else block
        for block in folded_ir_blocks
    ]
Return a list of IR blocks with all Backtrack blocks removed.
def remove_backtrack_blocks_from_fold(folded_ir_blocks):
    """Return a copy of the folded IR block list with every Backtrack block filtered out."""
    return [block for block in folded_ir_blocks if not isinstance(block, Backtrack)]
For each sub-query, remove one-step traversals that overlap a previous traversal location.
def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query):
    """For each sub-query, remove one-step traversals that overlap a previous traversal location."""
    pruned_match_queries = [
        truncate_repeated_single_step_traversals(match_query)
        for match_query in compound_match_query.match_queries
    ]
    return compound_match_query._replace(match_queries=pruned_match_queries)
Return a prefix of the given traverse, excluding any blocks after an omitted optional. Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal removing all MatchStep objects that are within any omitted location. Args: match_traversal: list of MatchStep objects to be pruned omitted_locations: subset of complex_optional_roots to be omitted complex_optional_roots: list of all @optional locations (location immediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: list of MatchStep objects as a copy of the given match traversal with all steps within any omitted location removed.
def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations, complex_optional_roots, location_to_optional_roots): """Return a prefix of the given traverse, excluding any blocks after an omitted optional. Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal removing all MatchStep objects that are within any omitted location. Args: match_traversal: list of MatchStep objects to be pruned omitted_locations: subset of complex_optional_roots to be omitted complex_optional_roots: list of all @optional locations (location immmediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: list of MatchStep objects as a copy of the given match traversal with all steps within any omitted location removed. 
""" new_match_traversal = [] for step in match_traversal: new_step = step if isinstance(step.root_block, Traverse) and step.root_block.optional: current_location = step.as_block.location optional_root_locations_stack = location_to_optional_roots.get(current_location, None) optional_root_location = optional_root_locations_stack[-1] if optional_root_location is None: raise AssertionError(u'Found optional Traverse location {} that was not present ' u'in location_to_optional_roots dict: {}' .format(current_location, location_to_optional_roots)) elif optional_root_location in omitted_locations: # Add filter to indicate that the omitted edge(s) shoud not exist field_name = step.root_block.get_field_name() new_predicate = filter_edge_field_non_existence(LocalField(field_name)) old_filter = new_match_traversal[-1].where_block if old_filter is not None: new_predicate = BinaryComposition(u'&&', old_filter.predicate, new_predicate) new_match_step = new_match_traversal[-1]._replace( where_block=Filter(new_predicate)) new_match_traversal[-1] = new_match_step # Discard all steps following the omitted @optional traverse new_step = None elif optional_root_location in complex_optional_roots: # Any non-omitted @optional traverse (that expands vertex fields) # becomes a normal mandatory traverse (discard the optional flag). new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name) new_step = step._replace(root_block=new_root_block) else: # The current optional traverse is a "simple optional" (one that does not # expand vertex fields). No further action is required since MATCH supports it. pass # If new_step was set to None, # we have encountered a Traverse that is within an omitted location. # We discard the remainder of the match traversal (everything following is also omitted). if new_step is None: break else: new_match_traversal.append(new_step) return new_match_traversal
Return 2^n distinct MatchQuery objects in a CompoundMatchQuery. Given a MatchQuery containing `n` optional traverses that expand vertex fields, construct `2^n` different MatchQuery objects: one for each possible subset of optional edges that can be followed. For each edge `e` in a subset of optional edges chosen to be omitted, discard all traversals following `e`, and add filters specifying that `e` *does not exist*. Args: match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields complex_optional_roots: list of @optional locations (location preceding an @optional traverse) that expand vertex fields within location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: CompoundMatchQuery object containing 2^n MatchQuery objects, one for each possible subset of the n optional edges being followed
def convert_optional_traversals_to_compound_match_query(
        match_query, complex_optional_roots, location_to_optional_roots):
    """Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.

    Given a MatchQuery containing `n` optional traverses that expand vertex fields,
    construct `2^n` different MatchQuery objects:
    one for each possible subset of optional edges that can be followed.
    For each edge `e` in a subset of optional edges chosen to be omitted,
    discard all traversals following `e`, and add filters specifying that `e` *does not exist*.

    Args:
        match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
        complex_optional_roots: list of @optional locations (location preceding an @optional
                                traverse) that expand vertex fields within
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides

    Returns:
        CompoundMatchQuery object containing 2^n MatchQuery objects,
        one for each possible subset of the n optional edges being followed
    """
    # Build a tree of the @optional root locations, then enumerate all rooted subtrees.
    # Each rooted subtree corresponds to one valid subset of optional edges to *keep*
    # (a nested optional can only be kept if its enclosing optional is kept).
    tree = construct_optional_traversal_tree(
        complex_optional_roots, location_to_optional_roots)
    rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()

    # The complement of each "kept" subset is the set of optional roots to omit.
    omitted_location_subsets = [
        set(complex_optional_roots) - set(subset)
        for subset in rooted_optional_root_location_subsets
    ]
    sorted_omitted_location_subsets = sorted(omitted_location_subsets)

    compound_match_traversals = []
    for omitted_locations in reversed(sorted_omitted_location_subsets):
        new_match_traversals = []
        for match_traversal in match_query.match_traversals:
            # The location of the first step identifies which (if any) @optional scope
            # this entire traversal lives in.
            location = match_traversal[0].as_block.location
            optional_root_locations_stack = location_to_optional_roots.get(location, None)
            if optional_root_locations_stack is not None:
                # Innermost enclosing @optional root is at the top of the stack.
                optional_root_location = optional_root_locations_stack[-1]
            else:
                optional_root_location = None

            if optional_root_location is None or optional_root_location not in omitted_locations:
                # Traversal is kept; prune any steps that descend into omitted scopes.
                new_match_traversal = _prune_traverse_using_omitted_locations(
                    match_traversal, set(omitted_locations),
                    complex_optional_roots, location_to_optional_roots)
                new_match_traversals.append(new_match_traversal)
            else:
                # The root_block is within an omitted scope.
                # Discard the entire match traversal (do not append to new_match_traversals)
                pass
        compound_match_traversals.append(new_match_traversals)

    # Wrap each pruned traversal list in a MatchQuery sharing the original folds/output/where.
    match_queries = [
        MatchQuery(
            match_traversals=match_traversals,
            folds=match_query.folds,
            output_block=match_query.output_block,
            where_block=match_query.where_block,
        )
        for match_traversals in compound_match_traversals
    ]
    return CompoundMatchQuery(match_queries=match_queries)
Return the set of locations and non-optional locations present in the given match traversals. When enumerating the possibilities for optional traversals, the resulting match traversals may have sections of the query omitted. These locations will not be included in the returned `present_locations`. All of the above locations that are not optional traverse locations will be included in present_non_optional_locations. Args: match_traversals: one possible list of match traversals generated from a query containing @optional traversal(s) Returns: tuple (present_locations, present_non_optional_locations): - present_locations: set of all locations present in the given match traversals - present_non_optional_locations: set of all locations present in the match traversals that are not reached through optional traverses. Guaranteed to be a subset of present_locations.
def _get_present_locations(match_traversals): """Return the set of locations and non-optional locations present in the given match traversals. When enumerating the possibilities for optional traversals, the resulting match traversals may have sections of the query omitted. These locations will not be included in the returned `present_locations`. All of the above locations that are not optional traverse locations will be included in present_non_optional_locations. Args: match_traversals: one possible list of match traversals generated from a query containing @optional traversal(s) Returns: tuple (present_locations, present_non_optional_locations): - present_locations: set of all locations present in the given match traversals - present_non_optional_locations: set of all locations present in the match traversals that are not reached through optional traverses. Guaranteed to be a subset of present_locations. """ present_locations = set() present_non_optional_locations = set() for match_traversal in match_traversals: for step in match_traversal: if step.as_block is not None: location_name, _ = step.as_block.location.get_location_name() present_locations.add(location_name) if isinstance(step.root_block, Traverse) and not step.root_block.optional: present_non_optional_locations.add(location_name) if not present_non_optional_locations.issubset(present_locations): raise AssertionError(u'present_non_optional_locations {} was not a subset of ' u'present_locations {}. THis hould never happen.' .format(present_non_optional_locations, present_locations)) return present_locations, present_non_optional_locations
Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery. Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks. For each of these, remove the outputs (that have been implicitly pruned away) from each corresponding ConstructResult block. Args: compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects (see convert_optional_traversals_to_compound_match_query) Returns: CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects
def prune_non_existent_outputs(compound_match_query):
    """Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.

    Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some
    Traverse blocks. For each of these, remove the outputs (that have been implicitly
    pruned away) from each corresponding ConstructResult block.

    Args:
        compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
                              (see convert_optional_traversals_to_compound_match_query)

    Returns:
        CompoundMatchQuery with pruned ConstructResult blocks for each of the
        2^n MatchQuery objects
    """
    if len(compound_match_query.match_queries) == 1:
        # Only one MatchQuery means nothing was pruned; all outputs are still valid.
        return compound_match_query
    elif len(compound_match_query.match_queries) == 0:
        raise AssertionError(u'Received CompoundMatchQuery with '
                             u'an empty list of MatchQuery objects.')
    else:
        match_queries = []
        for match_query in compound_match_query.match_queries:
            match_traversals = match_query.match_traversals
            output_block = match_query.output_block
            present_locations_tuple = _get_present_locations(match_traversals)
            present_locations, present_non_optional_locations = present_locations_tuple

            new_output_fields = {}
            for output_name, expression in six.iteritems(output_block.fields):
                if isinstance(expression, OutputContextField):
                    # An OutputContextField indicates the output is not within an @optional
                    # scope; its location must be present, and it is never pruned.
                    location_name, _ = expression.location.get_location_name()
                    if location_name not in present_locations:
                        raise AssertionError(u'Non-optional output location {} was not found in '
                                             u'present_locations: {}'
                                             .format(expression.location, present_locations))
                    new_output_fields[output_name] = expression
                elif isinstance(expression, FoldedContextField):
                    # A FoldedContextField also indicates the output is not within an @optional
                    # scope; its base location must be present, and it is never pruned.
                    base_location = expression.fold_scope_location.base_location
                    location_name, _ = base_location.get_location_name()
                    if location_name not in present_locations:
                        # Bug fix: the message previously said "was found in", which inverted
                        # the condition actually being asserted ("not in present_locations").
                        raise AssertionError(u'Folded output location {} was not found in '
                                             u'present_locations: {}'
                                             .format(base_location, present_locations))
                    new_output_fields[output_name] = expression
                elif isinstance(expression, TernaryConditional):
                    # A TernaryConditional indicates this output is within some @optional scope.
                    # It may be pruned (location absent), simplified to its if_true branch
                    # (location present and mandatory), or kept as-is (location still optional).
                    location_name, _ = expression.if_true.location.get_location_name()
                    if location_name in present_locations:
                        if location_name in present_non_optional_locations:
                            new_output_fields[output_name] = expression.if_true
                        else:
                            new_output_fields[output_name] = expression
                else:
                    raise AssertionError(u'Invalid expression of type {} in output block: '
                                         u'{}'.format(type(expression).__name__, output_block))

            match_queries.append(
                MatchQuery(
                    match_traversals=match_traversals,
                    folds=match_query.folds,
                    output_block=ConstructResult(new_output_fields),
                    where_block=match_query.where_block,
                )
            )
        return CompoundMatchQuery(match_queries=match_queries)
Return a dict mapping location -> list of filters applied at that location. Args: match_query: MatchQuery object from which to extract location -> filters dict Returns: dict mapping each location in match_query to a list of Filter objects applied at that location
def _construct_location_to_filter_list(match_query): """Return a dict mapping location -> list of filters applied at that location. Args: match_query: MatchQuery object from which to extract location -> filters dict Returns: dict mapping each location in match_query to a list of Filter objects applied at that location """ # For each location, all filters for that location should be applied at the first instance. # This function collects a list of all filters corresponding to each location # present in the given MatchQuery. location_to_filters = {} for match_traversal in match_query.match_traversals: for match_step in match_traversal: current_filter = match_step.where_block if current_filter is not None: current_location = match_step.as_block.location location_to_filters.setdefault(current_location, []).append( current_filter) return location_to_filters
Convert a list of filters to an Expression that is the conjunction of all of them.
def _filter_list_to_conjunction_expression(filter_list): """Convert a list of filters to an Expression that is the conjunction of all of them.""" if not isinstance(filter_list, list): raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list)) if any((not isinstance(filter_block, Filter) for filter_block in filter_list)): raise AssertionError(u'Expected list of Filter objects. Received: {}'.format(filter_list)) expression_list = [filter_block.predicate for filter_block in filter_list] return expression_list_to_conjunction(expression_list)
Apply all filters for a specific location into its first occurrence in a given traversal. For each location in the given match traversal, construct a conjunction of all filters applied to that location, and apply the resulting Filter to the first instance of the location. Args: match_traversal: list of MatchStep objects to be lowered location_to_filters: dict mapping each location in the MatchQuery which contains the given match traversal to a list of filters applied at that location already_filtered_locations: set of locations that have already had their filters applied Returns: new list of MatchStep objects with all filters for any given location composed into a single filter which is applied to the first instance of that location
def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters, already_filtered_locations): """Apply all filters for a specific location into its first occurrence in a given traversal. For each location in the given match traversal, construct a conjunction of all filters applied to that location, and apply the resulting Filter to the first instance of the location. Args: match_traversal: list of MatchStep objects to be lowered location_to_filters: dict mapping each location in the MatchQuery which contains the given match traversal to a list of filters applied at that location already_filtered_locations: set of locations that have already had their filters applied Returns: new list of MatchStep objects with all filters for any given location composed into a single filter which is applied to the first instance of that location """ new_match_traversal = [] newly_filtered_locations = set() for match_step in match_traversal: # Apply all filters for a location to the first occurence of that location current_location = match_step.as_block.location if current_location in newly_filtered_locations: raise AssertionError(u'The same location {} was encountered twice in a single ' u'match traversal: {}. This should never happen.' .format(current_location, match_traversal)) if all((current_location in location_to_filters, current_location not in already_filtered_locations)): where_block = Filter( _filter_list_to_conjunction_expression( location_to_filters[current_location] ) ) # No further filters needed for this location. If the same location is found in # another call to this function, no filters will be added. newly_filtered_locations.add(current_location) else: where_block = None new_match_step = MatchStep( root_block=match_step.root_block, coerce_type_block=match_step.coerce_type_block, where_block=where_block, as_block=match_step.as_block ) new_match_traversal.append(new_match_step) return new_match_traversal, newly_filtered_locations
Collect all filters for a particular location to the first instance of the location. Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may result in filters being applied to locations after their first occurrence. OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery, we collect all the filters for each location in a list. For each location, we make a conjunction of the filter list (`_filter_list_to_conjunction_expression`) and apply the new filter to only the first instance of that location. All other instances will have no filters (None). Args: compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects Returns: CompoundMatchQuery with all filters for each location applied to the first instance of that location.
def collect_filters_to_first_location_occurrence(compound_match_query): """Collect all filters for a particular location to the first instance of the location. Adding edge field non-exsistence filters in `_prune_traverse_using_omitted_locations` may result in filters being applied to locations after their first occurence. OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery, we collect all the filters for each location in a list. For each location, we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply the new filter to only the first instance of that location. All other instances will have no filters (None). Args: compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects Returns: CompoundMatchQuery with all filters for each location applied to the first instance of that location. """ new_match_queries = [] # Each MatchQuery has a different set of locations, and associated Filters. # Hence, each of them is processed independently. for match_query in compound_match_query.match_queries: # Construct mapping from location -> list of filter predicates applied at that location location_to_filters = _construct_location_to_filter_list(match_query) already_filtered_locations = set() new_match_traversals = [] for match_traversal in match_query.match_traversals: result = _apply_filters_to_first_location_occurrence( match_traversal, location_to_filters, already_filtered_locations) new_match_traversal, newly_filtered_locations = result new_match_traversals.append(new_match_traversal) already_filtered_locations.update(newly_filtered_locations) new_match_queries.append( MatchQuery( match_traversals=new_match_traversals, folds=match_query.folds, output_block=match_query.output_block, where_block=match_query.where_block, ) ) return CompoundMatchQuery(match_queries=new_match_queries)
Lower BinaryCompositions involving non-existent ContextFields to True. Args: present_locations: set of all locations in the current MatchQuery that have not been pruned expression: BinaryComposition with at least one ContextField operand Returns: TrueLiteral iff either ContextField operand is not in `present_locations`, and the original expression otherwise
def _update_context_field_binary_composition(present_locations, expression): """Lower BinaryCompositions involving non-existent ContextFields to True. Args: present_locations: set of all locations in the current MatchQuery that have not been pruned expression: BinaryComposition with at least one ContextField operand Returns: TrueLiteral iff either ContextField operand is not in `present_locations`, and the original expression otherwise """ if not any((isinstance(expression.left, ContextField), isinstance(expression.right, ContextField))): raise AssertionError(u'Received a BinaryComposition {} without any ContextField ' u'operands. This should never happen.'.format(expression)) if isinstance(expression.left, ContextField): context_field = expression.left location_name, _ = context_field.location.get_location_name() if location_name not in present_locations: return TrueLiteral if isinstance(expression.right, ContextField): context_field = expression.right location_name, _ = context_field.location.get_location_name() if location_name not in present_locations: return TrueLiteral return expression
Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conjunction and one of its operands is a TrueLiteral, and the original expression otherwise
def _simplify_non_context_field_binary_composition(expression): """Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conjunction and one of it's operands is a TrueLiteral, and the original expression otherwise """ if any((isinstance(expression.left, ContextField), isinstance(expression.right, ContextField))): raise AssertionError(u'Received a BinaryComposition {} with a ContextField ' u'operand. This should never happen.'.format(expression)) if expression.operator == u'||': if expression.left == TrueLiteral or expression.right == TrueLiteral: return TrueLiteral else: return expression elif expression.operator == u'&&': if expression.left == TrueLiteral: return expression.right if expression.right == TrueLiteral: return expression.left else: return expression else: return expression
Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.
def _update_context_field_expression(present_locations, expression): """Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.""" no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable) if isinstance(expression, BinaryComposition): if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField): return _update_context_field_binary_composition(present_locations, expression) else: return _simplify_non_context_field_binary_composition(expression) elif isinstance(expression, TernaryConditional): return _simplify_ternary_conditional(expression) elif isinstance(expression, BetweenClause): lower_bound = expression.lower_bound upper_bound = expression.upper_bound if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField): raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. ' u'This should never happen: {}'.format(expression)) return expression elif isinstance(expression, (OutputContextField, FoldedContextField)): raise AssertionError(u'Found unexpected expression of type {}. This should never happen: ' u'{}'.format(type(expression).__name__, expression)) elif isinstance(expression, no_op_blocks): return expression raise AssertionError(u'Found unhandled expression of type {}. This should never happen: ' u'{}'.format(type(expression).__name__, expression))
Return new match traversals, lowering filters involving non-existent ContextFields. Expressions involving non-existent ContextFields are evaluated to TrueLiteral. BinaryCompositions, where one of the operands is lowered to a TrueLiteral, are lowered appropriately based on the present operator (u'||' and u'&&' are affected). TernaryConditionals, where the predicate is lowered to a TrueLiteral, are replaced by their if_true predicate. The `visitor_fn` implements these behaviors (see `_update_context_field_expression`). Args: match_traversals: list of match traversal entities to be lowered visitor_fn: visit_and_update function for lowering expressions in given match traversal Returns: new list of match_traversals, with all filter expressions lowered
def _lower_non_existent_context_field_filters(match_traversals, visitor_fn): """Return new match traversals, lowering filters involving non-existent ContextFields. Expressions involving non-existent ContextFields are evaluated to TrueLiteral. BinaryCompositions, where one of the operands is lowered to a TrueLiteral, are lowered appropriately based on the present operator (u'||' and u'&&' are affected). TernaryConditionals, where the predicate is lowered to a TrueLiteral, are replaced by their if_true predicate. The `visitor_fn` implements these behaviors (see `_update_context_field_expression`). Args: match_traversals: list of match traversal enitities to be lowered visitor_fn: visit_and_update function for lowering expressions in given match traversal Returns: new list of match_traversals, with all filter expressions lowered """ new_match_traversals = [] for match_traversal in match_traversals: new_match_traversal = [] for step in match_traversal: if step.where_block is not None: new_filter = step.where_block.visit_and_update_expressions(visitor_fn) if new_filter.predicate == TrueLiteral: new_filter = None new_step = step._replace(where_block=new_filter) else: new_step = step new_match_traversal.append(new_step) new_match_traversals.append(new_match_traversal) return new_match_traversals
Lower Expressions involving non-existent ContextFields.
def lower_context_field_expressions(compound_match_query): """Lower Expressons involving non-existent ContextFields.""" if len(compound_match_query.match_queries) == 0: raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.' .format(compound_match_query)) elif len(compound_match_query.match_queries) == 1: # All ContextFields exist if there is only one MatchQuery # becuase none of the traverses were omitted, and all locations exist (are defined). return compound_match_query else: new_match_queries = [] for match_query in compound_match_query.match_queries: match_traversals = match_query.match_traversals present_locations, _ = _get_present_locations(match_traversals) current_visitor_fn = partial(_update_context_field_expression, present_locations) new_match_traversals = _lower_non_existent_context_field_filters( match_traversals, current_visitor_fn) new_match_queries.append( MatchQuery( match_traversals=new_match_traversals, folds=match_query.folds, output_block=match_query.output_block, where_block=match_query.where_block, ) ) return CompoundMatchQuery(match_queries=new_match_queries)
Validate that the non-abstract edge properties dict has defined in/out link properties.
def _validate_non_abstract_edge_has_defined_endpoint_types(class_name, properties): """Validate that the non-abstract edge properties dict has defined in/out link properties.""" edge_source = properties.get(EDGE_SOURCE_PROPERTY_NAME, None) edge_destination = properties.get(EDGE_DESTINATION_PROPERTY_NAME, None) has_defined_endpoint_types = all(( edge_source is not None and edge_source.type_id == PROPERTY_TYPE_LINK_ID, edge_destination is not None and edge_destination.type_id == PROPERTY_TYPE_LINK_ID, )) if not has_defined_endpoint_types: raise IllegalSchemaStateError(u'Found a non-abstract edge class with undefined or illegal ' u'in/out properties: {} {}'.format(class_name, properties))
Validate that non-edges do not have the in/out properties defined.
def _validate_non_edges_do_not_have_edge_like_properties(class_name, properties): """Validate that non-edges do not have the in/out properties defined.""" has_source = EDGE_SOURCE_PROPERTY_NAME in properties has_destination = EDGE_DESTINATION_PROPERTY_NAME in properties if has_source or has_destination: raise IllegalSchemaStateError(u'Found a non-edge class that defines edge-like "in" or ' u'"out" properties: {} {}'.format(class_name, properties)) for property_name, property_descriptor in six.iteritems(properties): if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID: raise IllegalSchemaStateError(u'Non-edge class "{}" has a property of type Link, this ' u'is not allowed: {}'.format(class_name, property_name))
Validate that edges do not have properties of Link type that aren't the edge endpoints.
def _validate_edges_do_not_have_extra_links(class_name, properties): """Validate that edges do not have properties of Link type that aren't the edge endpoints.""" for property_name, property_descriptor in six.iteritems(properties): if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}: continue if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID: raise IllegalSchemaStateError(u'Edge class "{}" has a property of type Link that is ' u'not an edge endpoint, this is not allowed: ' u'{}'.format(class_name, property_name))
Validate that properties do not have names that may cause problems in the GraphQL schema.
def _validate_property_names(class_name, properties): """Validate that properties do not have names that may cause problems in the GraphQL schema.""" for property_name in properties: if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES): raise IllegalSchemaStateError(u'Class "{}" has a property with an illegal name: ' u'{}'.format(class_name, property_name))
Validate that if the property is of collection type, it has a specified default value.
def _validate_collections_have_default_values(class_name, property_name, property_descriptor): """Validate that if the property is of collection type, it has a specified default value.""" # We don't want properties of collection type having "null" values, since that may cause # unexpected errors during GraphQL query execution and other operations. if property_descriptor.type_id in COLLECTION_PROPERTY_TYPES: if property_descriptor.default is None: raise IllegalSchemaStateError(u'Class "{}" has a property "{}" of collection type with ' u'no default value.'.format(class_name, property_name))
Extract a list of all superclass names from a class definition dict.
def get_superclasses_from_class_definition(class_definition): """Extract a list of all superclass names from a class definition dict.""" # New-style superclasses definition, supporting multiple-inheritance. superclasses = class_definition.get('superClasses', None) if superclasses: return list(superclasses) # Old-style superclass definition, single inheritance only. superclass = class_definition.get('superClass', None) if superclass: return [superclass] # No superclasses are present. return []
Make the SchemaElement's connections immutable.
def freeze(self): """Make the SchemaElement's connections immutable.""" self.in_connections = frozenset(self.in_connections) self.out_connections = frozenset(self.out_connections)
Return a dict with default values for all properties declared on this class.
def get_default_property_values(self, classname): """Return a dict with default values for all properties declared on this class.""" schema_element = self.get_element_by_class_name(classname) result = { property_name: property_descriptor.default for property_name, property_descriptor in six.iteritems(schema_element.properties) } if schema_element.is_edge: # Remove the source/destination properties for edges, if they exist. result.pop(EDGE_SOURCE_PROPERTY_NAME, None) result.pop(EDGE_DESTINATION_PROPERTY_NAME, None) return result
Return the SchemaElement for the specified class name, asserting that it exists.
def get_element_by_class_name_or_raise(self, class_name): """Return the SchemaElement for the specified class name, asserting that it exists.""" if class_name not in self._elements: raise InvalidClassError(u'Class does not exist: {}'.format(class_name)) return self._elements[class_name]
Return the property values for the class, with default values applied where needed.
def _get_property_values_with_defaults(self, classname, property_values): """Return the property values for the class, with default values applied where needed.""" # To uphold OrientDB semantics, make a new dict with all property values set # to their default values, which are None if no default was set. # Then, overwrite its data with the supplied property values. final_values = self.get_default_property_values(classname) final_values.update(property_values) return final_values
Return the schema element with the given name, asserting that it's of vertex type.
def get_vertex_schema_element_or_raise(self, vertex_classname): """Return the schema element with the given name, asserting that it's of vertex type.""" schema_element = self.get_element_by_class_name_or_raise(vertex_classname) if not schema_element.is_vertex: raise InvalidClassError(u'Non-vertex class provided: {}'.format(vertex_classname)) return schema_element
Return the schema element with the given name, asserting that it's of edge type.
def get_edge_schema_element_or_raise(self, edge_classname): """Return the schema element with the given name, asserting that it's of edge type.""" schema_element = self.get_element_by_class_name_or_raise(edge_classname) if not schema_element.is_edge: raise InvalidClassError(u'Non-edge class provided: {}'.format(edge_classname)) return schema_element
Validate that a vertex classname corresponds to a non-abstract vertex class.
def validate_is_non_abstract_vertex_type(self, vertex_classname): """Validate that a vertex classname corresponds to a non-abstract vertex class.""" element = self.get_vertex_schema_element_or_raise(vertex_classname) if element.abstract: raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract' .format(vertex_classname))
Validate that an edge classname corresponds to a non-abstract edge class.
def validate_is_non_abstract_edge_type(self, edge_classname): """Validate that a edge classname corresponds to a non-abstract edge class.""" element = self.get_edge_schema_element_or_raise(edge_classname) if element.abstract: raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract' .format(edge_classname))
Validate that the specified property names are indeed defined on the given class.
def validate_properties_exist(self, classname, property_names): """Validate that the specified property names are indeed defined on the given class.""" schema_element = self.get_element_by_class_name(classname) requested_properties = set(property_names) available_properties = set(schema_element.properties.keys()) non_existent_properties = requested_properties - available_properties if non_existent_properties: raise InvalidPropertyError( u'Class "{}" does not have definitions for properties "{}": ' u'{}'.format(classname, non_existent_properties, property_names))
def _set_up_inheritance_and_subclass_sets(self, schema_data):
    """Load all inheritance data from the OrientDB schema. Used as part of __init__."""
    # Phase 1: for every class, record the set of class names it is or inherits from.
    # The input must be in topological order: a KeyError on the lookup below means
    # a superclass appeared in the input after one of its subclasses.
    for class_definition in schema_data:
        class_name = class_definition['name']
        direct_superclasses = get_superclasses_from_class_definition(
            class_definition)

        ancestors = {class_name}
        ancestors.update(direct_superclasses)
        for direct_superclass in direct_superclasses:
            ancestors.update(self._inheritance_sets[direct_superclass])

        # Freeze the inheritance set so it can never be modified again.
        self._inheritance_sets[class_name] = frozenset(ancestors)

    # Phase 2: invert the inheritance relation to get, for every class,
    # the set of class names that are or inherit from it.
    for subclass, superclasses in six.iteritems(self._inheritance_sets):
        for superclass in superclasses:
            self._subclass_sets.setdefault(superclass, set()).add(subclass)

    # Freeze every subclass set. Snapshot the keys into a list first, since
    # mutating values while iterating a dict directly is bad practice.
    for frozen_class_name in list(six.iterkeys(self._subclass_sets)):
        self._subclass_sets[frozen_class_name] = frozenset(
            self._subclass_sets[frozen_class_name])
def _split_classes_by_kind(self, class_name_to_definition):
    """Assign each class to the vertex, edge or non-graph type sets based on its kind."""
    for class_name in class_name_to_definition:
        ancestry = self._inheritance_sets[class_name]
        descends_from_vertex = ORIENTDB_BASE_VERTEX_CLASS_NAME in ancestry
        descends_from_edge = ORIENTDB_BASE_EDGE_CLASS_NAME in ancestry

        if descends_from_vertex and descends_from_edge:
            raise AssertionError(u'Class {} appears to be both a vertex and an edge class: '
                                 u'{}'.format(class_name, ancestry))

        if descends_from_vertex:
            self._vertex_class_names.add(class_name)
        elif descends_from_edge:
            self._edge_class_names.add(class_name)
        else:
            self._non_graph_class_names.add(class_name)

    # Freeze the classname sets so they cannot be modified again.
    self._vertex_class_names = frozenset(self._vertex_class_names)
    self._edge_class_names = frozenset(self._edge_class_names)
    self._non_graph_class_names = frozenset(self._non_graph_class_names)
def _set_up_schema_elements_of_kind(self, class_name_to_definition, kind, class_names):
    """Load all schema classes of the given kind. Used as part of __init__.

    Args:
        class_name_to_definition: dict mapping class name -> OrientDB class definition dict.
        kind: one of the SchemaElement.ELEMENT_KIND_* values, applied to every class here.
        class_names: iterable of class names (all of the given kind) to load into
                     self._elements.
    """
    # Edge classes may legitimately redefine their "in"/"out" link properties;
    # all other property redefinitions are errors.
    allowed_duplicated_edge_property_names = frozenset({
        EDGE_DESTINATION_PROPERTY_NAME, EDGE_SOURCE_PROPERTY_NAME
    })
    orientdb_base_classes = frozenset({
        ORIENTDB_BASE_VERTEX_CLASS_NAME,
        ORIENTDB_BASE_EDGE_CLASS_NAME,
    })

    for class_name in class_names:
        class_definition = class_name_to_definition[class_name]
        class_fields = class_definition.get('customFields')
        if class_fields is None:
            # OrientDB likes to make empty collections be None instead.
            # We convert this field back to an empty dict, for our general sanity.
            class_fields = dict()

        abstract = class_definition['abstract']
        if class_name in orientdb_base_classes:
            # Special-case the V and E base classes:
            # OrientDB won't let us make them abstract, but we don't want to create
            # any vertices or edges with those types either.
            # Pretend they are marked abstract in OrientDB's schema.
            abstract = True

        property_name_to_descriptor = {}
        # Properties are gathered from the class itself and everything it inherits from.
        all_property_lists = (
            class_name_to_definition[inherited_class_name]['properties']
            for inherited_class_name in self._inheritance_sets[class_name]
        )
        # Collect every in/out link descriptor seen across the hierarchy; resolved below.
        links = {EDGE_DESTINATION_PROPERTY_NAME: [], EDGE_SOURCE_PROPERTY_NAME: []}
        for property_definition in chain.from_iterable(all_property_lists):
            property_name = property_definition['name']

            # The only properties we allow to be redefined are the in/out properties
            # of edge classes. All other properties may only be defined once
            # in the entire inheritance hierarchy of any schema class, of any kind.
            duplication_allowed = all((
                property_name in allowed_duplicated_edge_property_names,
                kind == SchemaElement.ELEMENT_KIND_EDGE
            ))

            if not duplication_allowed and property_name in property_name_to_descriptor:
                raise AssertionError(u'The property "{}" on class "{}" is defined '
                                     u'more than once, this is not allowed!'
                                     .format(property_name, class_name))

            property_descriptor = self._create_descriptor_from_property_definition(
                class_name, property_definition, class_name_to_definition)

            if property_name in allowed_duplicated_edge_property_names:
                # Defer in/out properties: the correct one is chosen after all are seen.
                links[property_name].append(property_descriptor)
            else:
                property_name_to_descriptor[property_name] = property_descriptor

        for property_name in allowed_duplicated_edge_property_names:
            # All distinct endpoint classes named by the collected in/out descriptors.
            elements = {
                property_descriptor.qualifier
                for property_descriptor in links[property_name]
            }

            # If there are multiple in/out properties, we choose to include the one that
            # is a subclass of all the elements present in the in/out properties.
            for property_descriptor in links[property_name]:
                subclass_set = self._subclass_sets[property_descriptor.qualifier]
                # Intersection of size 1 means this qualifier's only subclass among the
                # candidates is itself, i.e. it is the most-derived endpoint class.
                if len(elements.intersection(subclass_set)) == 1:
                    current_descriptor = property_name_to_descriptor.get(property_name, None)
                    if current_descriptor and current_descriptor != property_descriptor:
                        # Two different descriptors both claim to be most-derived: ambiguous.
                        raise AssertionError(u'There already exists property "{}" in addition '
                                             u'to property "{}" which is a subclass of all '
                                             u'in/out properties for class "{}".'
                                             .format(current_descriptor,
                                                     property_descriptor, class_name))
                    property_name_to_descriptor[property_name] = property_descriptor

            # Non-abstract edge classes must resolve to a single endpoint descriptor.
            if (property_name not in property_name_to_descriptor and
                    not abstract and kind == SchemaElement.ELEMENT_KIND_EDGE):
                raise AssertionError(u'For property "{}" of non-abstract edge class "{}", '
                                     u'no such subclass-of-all-elements exists.'
                                     .format(property_name, class_name))

        self._elements[class_name] = SchemaElement(class_name, kind, abstract,
                                                   property_name_to_descriptor, class_fields)
def _create_descriptor_from_property_definition(self, class_name, property_definition,
                                                class_name_to_definition):
    """Return a PropertyDescriptor corresponding to the given OrientDB property definition.

    Args:
        class_name: string, name of the class on which this property is defined.
        property_definition: dict, the OrientDB property definition
                             (keys seen here: 'name', 'type', 'linkedClass',
                             'linkedType', 'defaultValue').
        class_name_to_definition: dict mapping class name -> OrientDB class definition.

    Returns:
        PropertyDescriptor with the property's type id, qualifier
        (linked class/type, if any), and parsed default value.
    """
    name = property_definition['name']
    type_id = property_definition['type']
    linked_class = property_definition.get('linkedClass', None)
    linked_type = property_definition.get('linkedType', None)
    qualifier = None

    validate_supported_property_type_id(name, type_id)

    if type_id == PROPERTY_TYPE_LINK_ID:
        # Link-typed properties are only allowed as the in/out endpoints of edges.
        if class_name not in self._edge_class_names:
            raise AssertionError(u'Found a property of type Link on a non-edge class: '
                                 u'{} {}'.format(name, class_name))

        if name not in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
            raise AssertionError(u'Found a property of type Link with an unexpected name: '
                                 u'{} {}'.format(name, class_name))

        if linked_class is None:
            raise AssertionError(u'Property "{}" is declared with type Link but has no '
                                 u'linked class: {}'.format(name, property_definition))

        if linked_class not in self._vertex_class_names:
            # The linked class is not itself a vertex; it is still acceptable if it is
            # an abstract class all of whose proper subclasses are vertex classes.
            is_linked_class_abstract = class_name_to_definition[linked_class]['abstract']
            all_subclasses_are_vertices = True
            # NOTE(review): `self.vertex_class_names` (no underscore) is presumably a
            # property exposing self._vertex_class_names, defined elsewhere in the
            # class — confirm it exists.
            for subclass in self._subclass_sets[linked_class]:
                if subclass != linked_class and subclass not in self.vertex_class_names:
                    all_subclasses_are_vertices = False
                    break
            if not (is_linked_class_abstract and all_subclasses_are_vertices):
                raise AssertionError(u'Property "{}" is declared as a Link to class {}, but '
                                     u'that class is neither a vertex nor is it an '
                                     u'abstract class whose subclasses are all vertices!'
                                     .format(name, linked_class))

        qualifier = linked_class
    elif type_id in COLLECTION_PROPERTY_TYPES:
        # Embedded collections carry exactly one of linkedType / linkedClass
        # as their element-type qualifier.
        if linked_class is not None and linked_type is not None:
            raise AssertionError(u'Property "{}" unexpectedly has both a linked class and '
                                 u'a linked type: {}'.format(name, property_definition))
        elif linked_type is not None and linked_class is None:
            # No linked class, must be a linked native OrientDB type.
            validate_supported_property_type_id(name + ' inner type', linked_type)
            qualifier = linked_type
        elif linked_class is not None and linked_type is None:
            # No linked type, must be a linked non-graph user-defined type.
            if linked_class not in self._non_graph_class_names:
                raise AssertionError(u'Property "{}" is declared as the inner type of '
                                     u'an embedded collection, but is not a non-graph class: '
                                     u'{}'.format(name, linked_class))
            qualifier = linked_class
        else:
            raise AssertionError(u'Property "{}" is an embedded collection but has '
                                 u'neither a linked class nor a linked type: '
                                 u'{}'.format(name, property_definition))

    default_value = None
    default_value_string = property_definition.get('defaultValue', None)
    if default_value_string is not None:
        default_value = parse_default_property_value(name, type_id, default_value_string)

    descriptor = PropertyDescriptor(type_id=type_id, qualifier=qualifier,
                                    default=default_value)
    # Sanity-check the descriptor before returning it.
    _validate_collections_have_default_values(class_name, name, descriptor)

    return descriptor