INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Extract the (op_name, operator_params) tuple from a directive object.
def _get_filter_op_name_and_values(directive): """Extract the (op_name, operator_params) tuple from a directive object.""" args = get_uniquely_named_objects_by_name(directive.arguments) if 'op_name' not in args: raise AssertionError(u'op_name not found in filter directive arguments!' ...
Return True if we have a filter directive whose operator applies to the outer scope.
def is_filter_with_outer_scope_vertex_field_operator(directive): """Return True if we have a filter directive whose operator applies to the outer scope.""" if directive.name.value != 'filter': return False op_name, _ = _get_filter_op_name_and_values(directive) return op_name in OUTER_SCOPE_VERT...
Return a Filter basic block that corresponds to the filter operation in the directive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter...
def process_filter_directive(filter_operation_info, location, context): """Return a Filter basic block that corresponds to the filter operation in the directive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field ...
Return the GraphQL type name of a node.
def get_schema_type_name(node, context): """Return the GraphQL type name of a node.""" query_path = node.query_path if query_path not in context.query_path_to_location_info: raise AssertionError( u'Unable to find type name for query path {} with context {}.'.format( query...
Return the Selectable Union[Table, CTE] associated with the node.
def get_node_selectable(node, context): """Return the Selectable Union[Table, CTE] associated with the node.""" query_path = node.query_path if query_path not in context.query_path_to_selectable: raise AssertionError( u'Unable to find selectable for query path {} with context {}.'.format...
Return the SqlNode associated with the query path.
def get_node_at_path(query_path, context): """Return the SqlNode associated with the query path.""" if query_path not in context.query_path_to_node: raise AssertionError( u'Unable to find SqlNode for query path {} with context {}.'.format( query_path, context)) node = con...
Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: Optional[column], the SQLAlchemy column i...
def try_get_column(column_name, node, context): """Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Ret...
Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an As...
def get_column(column_name, node, context): """Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: c...
Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCompilationError in case we find them mor...
def get_unique_directives(ast): """Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCo...
Get all filter directives that apply to the current field. This helper abstracts away the fact that some vertex field filtering operators apply on the inner scope (the scope of the inner vertex field on which they are applied), whereas some apply on the outer scope (the scope that contains the inner vertex...
def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields): """Get all filter directives that apply to the current field. This helper abstracts away the fact that some vertex field filtering operators apply on the inner scope (the scope of the inner vertex field on which they are applie...
Validate the directives that appear at a property field.
def validate_property_directives(directives): """Validate the directives that appear at a property field.""" for directive_name in six.iterkeys(directives): if directive_name in VERTEX_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found vertex-only directive {} set on pro...
Validate the directives that appear at a vertex field.
def validate_vertex_directives(directives): """Validate the directives that appear at a vertex field.""" for directive_name in six.iterkeys(directives): if directive_name in PROPERTY_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found property-only directive {} set on ver...
Validate the directives that appear at the root vertex field.
def validate_root_vertex_directives(root_ast): """Validate the directives that appear at the root vertex field.""" directives_present_at_root = set() for directive_obj in root_ast.directives: directive_name = directive_obj.name.value if is_filter_with_outer_scope_vertex_field_operator(direc...
Ensure that the specified vertex field directives are not mutually disallowed.
def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives): """Ensure that the specified vertex field directives are not mutually disallowed.""" fold_directive = directives.get('fold', None) optional_directive = directives.get('optional', None) output_source_directi...
Ensure that the specified vertex field directives are allowed in the current context.
def validate_vertex_field_directive_in_context(parent_location, vertex_field_name, directives, context): """Ensure that the specified vertex field directives are allowed in the current context.""" fold_directive = directives.get('fold', None) optional_directive...
Sanitize and represent a string argument in MATCH.
def _safe_match_string(value): """Sanitize and represent a string argument in MATCH.""" if not isinstance(value, six.string_types): if isinstance(value, bytes): # should only happen in py3 value = value.decode('utf-8') else: raise GraphQLInvalidArgumentError(u'Attempting...
Represent date and datetime objects as MATCH strings.
def _safe_match_date_and_datetime(graphql_type, expected_python_types, value): """Represent date and datetime objects as MATCH strings.""" # Python datetime.datetime is a subclass of datetime.date, # but in this case, the two are not interchangeable. # Rather than using isinstance, we will therefore che...
Represent the list of "inner_type" objects in MATCH form.
def _safe_match_list(inner_type, argument_value): """Represent the list of "inner_type" objects in MATCH form.""" stripped_type = strip_non_null_from_type(inner_type) if isinstance(stripped_type, GraphQLList): raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, ' ...
Return a MATCH (SQL) string representing the given argument value.
def _safe_match_argument(expected_type, argument_value): """Return a MATCH (SQL) string representing the given argument value.""" if GraphQLString.is_same_type(expected_type): return _safe_match_string(argument_value) elif GraphQLID.is_same_type(expected_type): # IDs can be strings or number...
Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a MATCH query...
def insert_arguments_into_match_query(compilation_result, arguments): """Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for e...
Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name.
def get_table(self, schema_type): """Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name.""" table_name = schema_type.lower() if not self.has_table(table_name): raise exceptions.GraphQLCompilationError( 'No Table found in SQLAlchemy metadata for...
Construct a MatchStep from a tuple of its constituent blocks.
def _per_location_tuple_to_step(ir_tuple): """Construct a MatchStep from a tuple of its constituent blocks.""" root_block = ir_tuple[0] if not isinstance(root_block, root_block_types): raise AssertionError(u'Unexpected root block type for MatchStep: ' u'{} {}'.format(roo...
Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that correspond to a single MATCH step.
def _split_ir_into_match_steps(pruned_ir_blocks): """Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that ...
Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal.
def _split_match_steps_into_match_traversals(match_steps): """Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal.""" output = [] current_list = None for step in match_steps: if isinstance(step.root_block, QueryRoot): if current_list is not None: ...
Extract all global operation blocks (all blocks following GlobalOperationsStart). Args: ir_blocks_except_output_and_folds: list of IR blocks (excluding ConstructResult and all fold blocks), to extract global operations from Returns: tuple (global_oper...
def _extract_global_operations(ir_blocks_except_output_and_folds): """Extract all global operation blocks (all blocks following GlobalOperationsStart). Args: ir_blocks_except_output_and_folds: list of IR blocks (excluding ConstructResult and all fold blocks), ...
Convert the list of IR blocks into a MatchQuery object, for easier manipulation.
def convert_to_match_query(ir_blocks): """Convert the list of IR blocks into a MatchQuery object, for easier manipulation.""" output_block = ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected last IR block to be ConstructResult, found: ' ...
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectabl...
def insert_arguments_into_sql_query(compilation_result, arguments): """Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for ev...
Create an "INSTANCEOF" Filter block from a CoerceType block.
def convert_coerce_type_to_instanceof_filter(coerce_type_block): """Create an "INSTANCEOF" Filter block from a CoerceType block.""" coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class) # INSTANCEOF requires the target class to be passed in as a string, # so we make the ...
Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.
def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block): """Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.""" instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block) if where_block: # There was already a Filter bl...
Convert a list of expressions to an Expression that is the conjunction of all of them.
def expression_list_to_conjunction(expression_list): """Convert a list of expressions to an Expression that is the conjunction of all of them.""" if not isinstance(expression_list, list): raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list)) if len(expression_list) == 0: ...
Return an Expression that is True iff the specified edge (edge_expression) does not exist.
def filter_edge_field_non_existence(edge_expression): """Return an Expression that is True iff the specified edge (edge_expression) does not exist.""" # When an edge does not exist at a given vertex, OrientDB represents that in one of two ways: # - the edge's field does not exist (is null) on the vertex d...
Return an Expression that is False for rows that don't follow the @optional specification. OrientDB does not filter correctly within optionals. Namely, a result where the optional edge DOES EXIST will be returned regardless of whether the inner filter is satisfied. To mitigate this, we add a final filter to...
def _filter_orientdb_simple_optional_edge( query_metadata_table, optional_edge_location, inner_location_name): """Return an Expression that is False for rows that don't follow the @optional specification. OrientDB does not filter correctly within optionals. Namely, a result where the optional edge ...
Return an Expression that is True if and only if each simple optional filter is True. Construct filters for each simple optional, that are True if and only if `edge_field` does not exist in the `simple_optional_root_location` OR the `inner_location` is not defined. Return an Expression that evaluates to Tr...
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info): """Return an Expression that is True if and only if each simple optional filter is True. Construct filters for each simple optional, that are True if and only if `edge_field` does not exist in the `simple_optional_root_l...
Return a tree of complex optional root locations. Args: complex_optional_roots: list of @optional locations (location immediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots wher...
def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots): """Return a tree of complex optional root locations. Args: complex_optional_roots: list of @optional locations (location immmediately preceding an @optional Traverse) that expand v...
Validate that the Between Expression is correctly representable.
def validate(self): """Validate that the Between Expression is correctly representable.""" if not isinstance(self.field, LocalField): raise TypeError(u'Expected LocalField field, got: {} {}'.format( type(self.field).__name__, self.field)) if not isinstance(self.lower...
Create an updated version (if needed) of BetweenClause via the visitor pattern.
def visit_and_update(self, visitor_fn): """Create an updated version (if needed) of BetweenClause via the visitor pattern.""" new_lower_bound = self.lower_bound.visit_and_update(visitor_fn) new_upper_bound = self.upper_bound.visit_and_update(visitor_fn) if new_lower_bound is not self.lo...
Return a unicode object with the MATCH representation of this BetweenClause.
def to_match(self): """Return a unicode object with the MATCH representation of this BetweenClause.""" template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})' return template.format( field_name=self.field.to_match(), lower_bound=self.lower_bound.to_match(), ...
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last ...
def insert(self, optional_root_locations_path): """Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of option...
Return a list of all rooted subtrees (each as a list of Location objects).
def get_all_rooted_subtrees_as_lists(self, start_location=None): """Return a list of all rooted subtrees (each as a list of Location objects).""" if start_location is not None and start_location not in self._location_to_children: raise AssertionError(u'Received invalid start_location {} that...
Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query
def emit_code_from_ir(sql_query_tree, compiler_metadata): """Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query ...
Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly aliased SQLAlchemy table.
def _create_table_and_update_context(node, context): """Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly a...
Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query.
def _create_query(node, context): """Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query. """ visited_nodes = [node] output_colum...
Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query.
def _get_output_columns(nodes, context): """Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to outp...
Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions.
def _get_filters(nodes, context): """Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions. """ f...
Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy e...
def _transform_filter_to_sql(filter_block, node, context): """Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation ...
Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Retu...
def _expression_to_sql(expression, node, context): """Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: Compilatio...
Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compiler expression. node: SqlNode, the SqlNode the expression applies to. ...
def _transform_binary_composition_to_expression(expression, node, context): """Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compile...
Return left and right expressions in (Column, BindParameter) order.
def _get_column_and_bindparam(left, right, operator): """Return left and right expressions in (Column, BindParameter) order.""" if not isinstance(left, Column): left, right = right, left if not isinstance(left, Column): raise AssertionError( u'SQLAlchemy operator {} expects Colum...
Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: ...
def _transform_variable_to_expression(expression, node, context): """Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: Compila...
Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: ...
def _transform_local_field_to_expression(expression, node, context): """Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: ...
Merge consecutive Filter(x), Filter(y) blocks into Filter(x && y) block.
def merge_consecutive_filter_clauses(ir_blocks): """Merge consecutive Filter(x), Filter(y) blocks into Filter(x && y) block.""" if not ir_blocks: return ir_blocks new_ir_blocks = [ir_blocks[0]] for block in ir_blocks[1:]: last_block = new_ir_blocks[-1] if isinstance(last_block,...
Lower ContextFieldExistence expressions into lower-level expressions.
def lower_context_field_existence(ir_blocks, query_metadata_table): """Lower ContextFieldExistence expressions into lower-level expressions.""" def regular_visitor_fn(expression): """Expression visitor function that rewrites ContextFieldExistence expressions.""" if not isinstance(expression, Con...
Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is rewritten into: BinaryComposition('=', something, NullLit...
def optimize_boolean_expression_comparisons(ir_blocks): """Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is re...
Extract all @fold data from the IR blocks, and cut the folded IR blocks out of the IR. Args: ir_blocks: list of IR blocks to extract fold data from Returns: tuple (folds, remaining_ir_blocks): - folds: dict of FoldScopeLocation -> list of IR blocks corresponding to that @fold scope. ...
def extract_folds_from_ir_blocks(ir_blocks): """Extract all @fold data from the IR blocks, and cut the folded IR blocks out of the IR. Args: ir_blocks: list of IR blocks to extract fold data from Returns: tuple (folds, remaining_ir_blocks): - folds: dict of FoldScopeLocation -> lis...
Construct a mapping from locations within @optional to their corresponding optional Traverse. Args: ir_blocks: list of IR blocks to extract optional data from Returns: tuple (complex_optional_roots, location_to_optional_roots): complex_optional_roots: list of @optional locations (locati...
def extract_optional_location_root_info(ir_blocks): """Construct a mapping from locations within @optional to their correspoding optional Traverse. Args: ir_blocks: list of IR blocks to extract optional data from Returns: tuple (complex_optional_roots, location_to_optional_roots): ...
Construct a map from simple optional locations to their inner location and traversed edge. Args: ir_blocks: list of IR blocks to extract optional data from complex_optional_roots: list of @optional locations (location immediately preceding an @optional traverse) tha...
def extract_simple_optional_location_info( ir_blocks, complex_optional_roots, location_to_optional_roots): """Construct a map from simple optional locations to their inner location and traversed edge. Args: ir_blocks: list of IR blocks to extract optional data from complex_optional_root...
Return a list of IR blocks as a copy of the original, with EndOptional blocks removed.
def remove_end_optionals(ir_blocks):
    """Return a list of IR blocks as a copy of the original, with EndOptional blocks removed."""
    # Preserve relative order of all surviving blocks; only EndOptional markers are dropped.
    return [
        ir_block
        for ir_block in ir_blocks
        if not isinstance(ir_block, EndOptional)
    ]
Validate that the OutputContextVertex is correctly representable.
def validate(self):
    """Validate that the OutputContextVertex is correctly representable."""
    # Run the parent class's validation first, then enforce the vertex-only constraint.
    super(OutputContextVertex, self).validate()
    location_field = self.location.field
    if location_field is not None:
        raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location))
Return a unicode object with the MATCH representation of this expression.
def to_match(self): """Return a unicode object with the MATCH representation of this expression.""" self.validate() mark_name, field_name = self.location.get_location_name() validate_safe_string(mark_name) if field_name is not None: raise AssertionError(u'Vertex loc...
Rewrite BinaryConditional expressions in the true/false values of TernaryConditionals.
def rewrite_binary_composition_inside_ternary_conditional(ir_blocks): """Rewrite BinaryConditional expressions in the true/false values of TernaryConditionals.""" def visitor_fn(expression): """Expression visitor function.""" # MATCH queries do not allow BinaryComposition inside a TernaryConditi...
Lower Filter blocks that use the "has_substring" operation into MATCH-representable form.
def lower_has_substring_binary_compositions(ir_blocks): """Lower Filter blocks that use the "has_substring" operation into MATCH-representable form.""" def visitor_fn(expression): """Rewrite BinaryComposition expressions with "has_substring" into representable form.""" # The implementation of "h...
Truncate one-step traversals that overlap a previous traversal location.
def truncate_repeated_single_step_traversals(match_query): """Truncate one-step traversals that overlap a previous traversal location.""" # Such traversals frequently happen as side-effects of the lowering process # of Backtrack blocks, and needlessly complicate the executed queries. new_match_traversal...
Lower Backtrack blocks into (QueryRoot, MarkLocation) pairs of blocks.
def lower_backtrack_blocks(match_query, location_types): """Lower Backtrack blocks into (QueryRoot, MarkLocation) pairs of blocks.""" # The lowering works as follows: # 1. Upon seeing a Backtrack block, end the current traversal (if non-empty). # 2. Start new traversal from the type and location to ...
If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation.
def _flatten_location_translations(location_translations): """If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency ...
Translate Location objects into their equivalent locations, based on the given dict.
def _translate_equivalent_locations(match_query, location_translations): """Translate Location objects into their equivalent locations, based on the given dict.""" new_match_traversals = [] def visitor_fn(expression): """Expression visitor function used to rewrite expressions with updated Location ...
Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Intended for folded IR blocks.
def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks): """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks.""" new_folded_ir_blocks = [] for block in folded_ir_blocks: if isinstance(block, CoerceType): new_block = convert_coerce_type_to_...
Return a list of IR blocks with all Backtrack blocks removed.
def remove_backtrack_blocks_from_fold(folded_ir_blocks):
    """Return a list of IR blocks with all Backtrack blocks removed."""
    # Keep every block that is not a Backtrack, in its original order.
    return [
        folded_block
        for folded_block in folded_ir_blocks
        if not isinstance(folded_block, Backtrack)
    ]
For each sub-query, remove one-step traversals that overlap a previous traversal location.
def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query): """For each sub-query, remove one-step traversals that overlap a previous traversal location.""" lowered_match_queries = [] for match_query in compound_match_query.match_queries: new_match_query = truncate_repeated_si...
Return a prefix of the given traverse, excluding any blocks after an omitted optional. Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal removing all MatchStep objects that are within any omitted location. Args: match_traversal: list of MatchStep objects to...
def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations, complex_optional_roots, location_to_optional_roots): """Return a prefix of the given traverse, excluding any blocks after an omitted optional. Given a subset (omitted_locations) of comple...
Return 2^n distinct MatchQuery objects in a CompoundMatchQuery. Given a MatchQuery containing `n` optional traverses that expand vertex fields, construct `2^n` different MatchQuery objects: one for each possible subset of optional edges that can be followed. For each edge `e` in a subset of optional ed...
def convert_optional_traversals_to_compound_match_query( match_query, complex_optional_roots, location_to_optional_roots): """Return 2^n distinct MatchQuery objects in a CompoundMatchQuery. Given a MatchQuery containing `n` optional traverses that expand vertex fields, construct `2^n` different Mat...
Return the set of locations and non-optional locations present in the given match traversals. When enumerating the possibilities for optional traversals, the resulting match traversals may have sections of the query omitted. These locations will not be included in the returned `present_locations`. All ...
def _get_present_locations(match_traversals): """Return the set of locations and non-optional locations present in the given match traversals. When enumerating the possibilities for optional traversals, the resulting match traversals may have sections of the query omitted. These locations will not be i...
Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery. Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks. For each of these, remove the outputs (that have been implicitly pruned away) from each corresponding ConstructResult block. ...
def prune_non_existent_outputs(compound_match_query): """Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery. Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks, For each of these, remove the outputs (that have been implicitly pruned...
Return a dict mapping location -> list of filters applied at that location. Args: match_query: MatchQuery object from which to extract location -> filters dict Returns: dict mapping each location in match_query to a list of Filter objects applied at that location
def _construct_location_to_filter_list(match_query): """Return a dict mapping location -> list of filters applied at that location. Args: match_query: MatchQuery object from which to extract location -> filters dict Returns: dict mapping each location in match_query to a list of ...
Convert a list of filters to an Expression that is the conjunction of all of them.
def _filter_list_to_conjunction_expression(filter_list): """Convert a list of filters to an Expression that is the conjunction of all of them.""" if not isinstance(filter_list, list): raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list)) if any((not isinstance(filter_block, Fil...
Apply all filters for a specific location into its first occurrence in a given traversal. For each location in the given match traversal, construct a conjunction of all filters applied to that location, and apply the resulting Filter to the first instance of the location. Args: match_traversal...
def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters, already_filtered_locations): """Apply all filters for a specific location into its first occurrence in a given traversal. For each location in the given match traversal, con...
Collect all filters for a particular location to the first instance of the location. Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may result in filters being applied to locations after their first occurrence. OrientDB does not resolve this behavior correctly. Therefo...
def collect_filters_to_first_location_occurrence(compound_match_query): """Collect all filters for a particular location to the first instance of the location. Adding edge field non-exsistence filters in `_prune_traverse_using_omitted_locations` may result in filters being applied to locations after their ...
Lower BinaryCompositions involving non-existent ContextFields to True. Args: present_locations: set of all locations in the current MatchQuery that have not been pruned expression: BinaryComposition with at least one ContextField operand Returns: TrueLiteral iff either ContextField ope...
def _update_context_field_binary_composition(present_locations, expression): """Lower BinaryCompositions involving non-existent ContextFields to True. Args: present_locations: set of all locations in the current MatchQuery that have not been pruned expression: BinaryComposition with at least on...
Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conjunction and one of it's operands is a TrueLiteral, and t...
def _simplify_non_context_field_binary_composition(expression): """Return a simplified BinaryComposition if either operand is a TrueLiteral. Args: expression: BinaryComposition without any ContextField operand(s) Returns: simplified expression if the given expression is a disjunction/conju...
Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.
def _update_context_field_expression(present_locations, expression): """Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result.""" no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable) if isinstance(expression, BinaryComposition): if isi...
Return new match traversals, lowering filters involving non-existent ContextFields. Expressions involving non-existent ContextFields are evaluated to TrueLiteral. BinaryCompositions, where one of the operands is lowered to a TrueLiteral, are lowered appropriately based on the present operator (u'||' and u'...
def _lower_non_existent_context_field_filters(match_traversals, visitor_fn): """Return new match traversals, lowering filters involving non-existent ContextFields. Expressions involving non-existent ContextFields are evaluated to TrueLiteral. BinaryCompositions, where one of the operands is lowered to a Tr...
Lower Expressions involving non-existent ContextFields.
def lower_context_field_expressions(compound_match_query): """Lower Expressons involving non-existent ContextFields.""" if len(compound_match_query.match_queries) == 0: raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.' .format(compound_match_...
Validate that the non-abstract edge properties dict has defined in/out link properties.
def _validate_non_abstract_edge_has_defined_endpoint_types(class_name, properties): """Validate that the non-abstract edge properties dict has defined in/out link properties.""" edge_source = properties.get(EDGE_SOURCE_PROPERTY_NAME, None) edge_destination = properties.get(EDGE_DESTINATION_PROPERTY_NAME, No...
Validate that non-edges do not have the in/out properties defined.
def _validate_non_edges_do_not_have_edge_like_properties(class_name, properties): """Validate that non-edges do not have the in/out properties defined.""" has_source = EDGE_SOURCE_PROPERTY_NAME in properties has_destination = EDGE_DESTINATION_PROPERTY_NAME in properties if has_source or has_destination...
Validate that edges do not have properties of Link type that aren't the edge endpoints.
def _validate_edges_do_not_have_extra_links(class_name, properties): """Validate that edges do not have properties of Link type that aren't the edge endpoints.""" for property_name, property_descriptor in six.iteritems(properties): if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERT...
Validate that properties do not have names that may cause problems in the GraphQL schema.
def _validate_property_names(class_name, properties): """Validate that properties do not have names that may cause problems in the GraphQL schema.""" for property_name in properties: if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES): raise IllegalSchemaStateErr...
Validate that if the property is of collection type, it has a specified default value.
def _validate_collections_have_default_values(class_name, property_name, property_descriptor): """Validate that if the property is of collection type, it has a specified default value.""" # We don't want properties of collection type having "null" values, since that may cause # unexpected errors during Grap...
Extract a list of all superclass names from a class definition dict.
def get_superclasses_from_class_definition(class_definition): """Extract a list of all superclass names from a class definition dict.""" # New-style superclasses definition, supporting multiple-inheritance. superclasses = class_definition.get('superClasses', None) if superclasses: return list(s...
Make the SchemaElement's connections immutable.
def freeze(self):
    """Make the SchemaElement's connections immutable.

    After this call, both in_connections and out_connections are frozensets
    and can no longer be mutated in place.
    """
    self.in_connections, self.out_connections = (
        frozenset(self.in_connections),
        frozenset(self.out_connections),
    )
Return a dict with default values for all properties declared on this class.
def get_default_property_values(self, classname): """Return a dict with default values for all properties declared on this class.""" schema_element = self.get_element_by_class_name(classname) result = { property_name: property_descriptor.default for property_name, proper...
Return the SchemaElement for the specified class name, asserting that it exists.
def get_element_by_class_name_or_raise(self, class_name): """Return the SchemaElement for the specified class name, asserting that it exists.""" if class_name not in self._elements: raise InvalidClassError(u'Class does not exist: {}'.format(class_name)) return self._elements[class_n...
Return the property values for the class, with default values applied where needed.
def _get_property_values_with_defaults(self, classname, property_values): """Return the property values for the class, with default values applied where needed.""" # To uphold OrientDB semantics, make a new dict with all property values set # to their default values, which are None if no default...
Return the schema element with the given name, asserting that it's of vertex type.
def get_vertex_schema_element_or_raise(self, vertex_classname): """Return the schema element with the given name, asserting that it's of vertex type.""" schema_element = self.get_element_by_class_name_or_raise(vertex_classname) if not schema_element.is_vertex: raise InvalidClassErro...
Return the schema element with the given name, asserting that it's of edge type.
def get_edge_schema_element_or_raise(self, edge_classname): """Return the schema element with the given name, asserting that it's of edge type.""" schema_element = self.get_element_by_class_name_or_raise(edge_classname) if not schema_element.is_edge: raise InvalidClassError(u'Non-ed...
Validate that a vertex classname corresponds to a non-abstract vertex class.
def validate_is_non_abstract_vertex_type(self, vertex_classname): """Validate that a vertex classname corresponds to a non-abstract vertex class.""" element = self.get_vertex_schema_element_or_raise(vertex_classname) if element.abstract: raise InvalidClassError(u'Expected a non-abst...
Validate that an edge classname corresponds to a non-abstract edge class.
def validate_is_non_abstract_edge_type(self, edge_classname): """Validate that a edge classname corresponds to a non-abstract edge class.""" element = self.get_edge_schema_element_or_raise(edge_classname) if element.abstract: raise InvalidClassError(u'Expected a non-abstract vertex ...
Validate that the specified property names are indeed defined on the given class.
def validate_properties_exist(self, classname, property_names): """Validate that the specified property names are indeed defined on the given class.""" schema_element = self.get_element_by_class_name(classname) requested_properties = set(property_names) available_properties = set(schema...
Load all inheritance data from the OrientDB schema. Used as part of __init__.
def _set_up_inheritance_and_subclass_sets(self, schema_data): """Load all inheritance data from the OrientDB schema. Used as part of __init__.""" # For each class name, construct its inheritance set: # itself + the set of class names from which it inherits. for class_definition in schema...
Assign each class to the vertex, edge or non-graph type sets based on its kind.
def _split_classes_by_kind(self, class_name_to_definition): """Assign each class to the vertex, edge or non-graph type sets based on its kind.""" for class_name in class_name_to_definition: inheritance_set = self._inheritance_sets[class_name] is_vertex = ORIENTDB_BASE_VERTEX_CLA...
Load all schema classes of the given kind. Used as part of __init__.
def _set_up_schema_elements_of_kind(self, class_name_to_definition, kind, class_names): """Load all schema classes of the given kind. Used as part of __init__.""" allowed_duplicated_edge_property_names = frozenset({ EDGE_DESTINATION_PROPERTY_NAME, EDGE_SOURCE_PROPERTY_NAME }) ...
Return a PropertyDescriptor corresponding to the given OrientDB property definition.
def _create_descriptor_from_property_definition(self, class_name, property_definition, class_name_to_definition): """Return a PropertyDescriptor corresponding to the given OrientDB property definition.""" name = property_definition['name'] type...