INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Return a MATCH query string from a list of MatchQuery namedtuples.
def emit_code_from_multiple_match_queries(match_queries):
    """Return a MATCH query string from a list of MatchQuery namedtuples."""
    union_variable_name = '$result'
    optional_variable_base_name = '$optional__'

    # Each sub-query is bound to a numbered LET variable, and the final
    # result is the UNIONALL of all of those variables.
    variable_names = []
    query_parts = [u'SELECT EXPAND(', union_variable_name, u')', u' LET ']

    for index, match_query in enumerate(match_queries):
        variable_name = optional_variable_base_name + str(index)
        variable_names.append(variable_name)
        query_parts.append(variable_name + u' = (')
        query_parts.append(emit_code_from_single_match_query(match_query))
        query_parts.append(u'),')

    query_parts.extend([
        union_variable_name,
        u' = UNIONALL(',
        u', '.join(variable_names),
        u')',
    ])
    return u' '.join(query_parts)
Return a MATCH query string from a CompoundMatchQuery.
def emit_code_from_ir(compound_match_query, compiler_metadata):
    """Return a MATCH query string from a CompoundMatchQuery."""
    # A single MatchQuery is emitted directly. When there are multiple match
    # queries, each one is emitted individually and they are combined into a
    # query of the following shape:
    #
    # SELECT EXPAND($result)
    # LET
    #     $optional__0 = ( <query_string_0> ),
    #     $optional__1 = ( <query_string_1> ),
    #     . . .
    #     $result = UNIONALL($optional__0, $optional__1, . . . )
    match_queries = compound_match_query.match_queries

    if not match_queries:
        raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQueries: '
                             u'{}'.format(match_queries))

    if len(match_queries) == 1:
        return emit_code_from_single_match_query(match_queries[0])
    return emit_code_from_multiple_match_queries(match_queries)
Serialize a Date object to its proper ISO-8601 representation.
def _serialize_date(value): """Serialize a Date object to its proper ISO-8601 representation.""" if not isinstance(value, date): raise ValueError(u'The received object was not a date: ' u'{} {}'.format(type(value), value)) return value.isoformat()
Serialize a DateTime object to its proper ISO-8601 representation.
def _serialize_datetime(value):
    """Serialize a DateTime object to its proper ISO-8601 representation."""
    # Both stdlib datetime objects and arrow.Arrow objects are accepted,
    # since both expose a compatible isoformat() method.
    if isinstance(value, (datetime, arrow.Arrow)):
        return value.isoformat()
    raise ValueError(u'The received object was not a datetime: '
                     u'{} {}'.format(type(value), value))
Deserialize a DateTime object from its proper ISO-8601 representation.
def _parse_datetime_value(value):
    """Deserialize a DateTime object from its proper ISO-8601 representation."""
    # Arrow doesn't support the "Z" literal that denotes UTC time,
    # so rewrite it as an explicit "+00:00" offset before parsing.
    if value.endswith('Z'):
        value = value[:-1] + '+00:00'
    return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime
Add compiler-specific meta-fields into all interfaces and types of the specified schema. It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject the meta-fields during the initial process of building the schema, as that approach is more robust. This function does its best to not mutate unexpected definitions, but may break unexpectedly as the GraphQL standard is extended and the underlying GraphQL library is updated. Use this function at your own risk. Don't say you haven't been warned. Properties added include: - "_x_count", which allows filtering folds based on the number of elements they capture. Args: graphql_schema: GraphQLSchema object describing the schema that is going to be used with the compiler. N.B.: MUTATED IN-PLACE in this method.
def insert_meta_fields_into_existing_schema(graphql_schema):
    """Add compiler-specific meta-fields into all interfaces and types of the specified schema.

    It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject
    the meta-fields during the initial process of building the schema, as that approach
    is more robust. This function does its best to not mutate unexpected definitions, but
    may break unexpectedly as the GraphQL standard is extended and the underlying
    GraphQL library is updated.

    Use this function at your own risk. Don't say you haven't been warned.

    Properties added include:
        - "_x_count", which allows filtering folds based on the number of elements they capture.

    Args:
        graphql_schema: GraphQLSchema object describing the schema that is going to be used with
                        the compiler. N.B.: MUTATED IN-PLACE in this method.
    """
    root_type_name = graphql_schema.get_query_type().name

    for type_name, type_obj in six.iteritems(graphql_schema.get_type_map()):
        if type_name.startswith('__') or type_name == root_type_name:
            # Ignore the types that are built into GraphQL itself, as well as the root query type.
            continue

        if not isinstance(type_obj, (GraphQLObjectType, GraphQLInterfaceType)):
            # Ignore definitions that are not interfaces or types.
            continue

        for meta_field_name, meta_field in six.iteritems(EXTENDED_META_FIELD_DEFINITIONS):
            if meta_field_name in type_obj.fields:
                raise AssertionError(u'Unexpectedly encountered an existing field named {} while '
                                     u'attempting to add a meta-field of the same name. Make sure '
                                     u'you are not attempting to add meta-fields twice.'
                                     .format(meta_field_name))

            # Mutate the type's fields dict in place, as promised by the docstring.
            type_obj.fields[meta_field_name] = meta_field
Ensure that the current context allows for visiting a vertex field.
def validate_context_for_visiting_vertex_field(parent_location, vertex_field_name, context):
    """Ensure that the current context allows for visiting a vertex field."""
    if not is_in_fold_innermost_scope(context):
        return

    raise GraphQLCompilationError(
        u'Traversing inside a @fold block after filtering on {} or outputting fields '
        u'is not supported! Parent location: {}, vertex field name: {}'
        .format(COUNT_META_FIELD_NAME, parent_location, vertex_field_name))
Take a GraphQL query, pretty print it, and return it.
def pretty_print_graphql(query, use_four_spaces=True):
    """Take a GraphQL query, pretty print it, and return it."""
    # The custom visitor fixes directive argument order,
    # producing the canonical representation of the query.
    output = visit(parse(query), CustomPrintingVisitor())

    if not use_four_spaces:
        return output

    # Four-space indentation makes the output easier to edit in Python source files.
    return fix_indentation_depth(output)
Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses.
def fix_indentation_depth(query):
    """Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses.

    Args:
        query: string, a GraphQL query string indented with 2 spaces per nesting level

    Returns:
        string, the same query re-indented with 4 spaces per nesting level

    Raises:
        AssertionError: if any line's leading-space count is not a multiple of two
    """
    final_lines = []
    for line in query.split('\n'):
        # Count the line's leading spaces to determine its indentation depth.
        consecutive_spaces = len(line) - len(line.lstrip(' '))

        if consecutive_spaces % 2 != 0:
            raise AssertionError(u'Indentation was not a multiple of two: '
                                 u'{}'.format(consecutive_spaces))

        # Double the indentation: each 2-space level becomes a 4-space level.
        # Bug fix: the previous code reassembled each line with the same number of
        # spaces it started with (' ' * n + line[n:]), making the function a no-op
        # despite its documented purpose of widening the indentation.
        final_lines.append(('  ' * consecutive_spaces) + line[consecutive_spaces:])

    return '\n'.join(final_lines)
Call when exiting a directive node in the ast.
def leave_Directive(self, node, *args):
    """Call when exiting a directive node in the ast."""
    # Map each argument's name to its full "name: value" source string.
    name_to_arg_value = {
        # Taking [0] is ok here because the GraphQL parser checks for the
        # existence of ':' in directive arguments.
        arg.split(':', 1)[0]: arg
        for arg in node.arguments
    }

    # Default: keep the arguments in the order they appeared in the query.
    ordered_args = node.arguments
    directive = DIRECTIVES_BY_NAME.get(node.name)
    if directive:
        sorted_args = []
        encountered_argument_names = set()

        # Iterate through all defined arguments in the directive schema.
        for defined_arg_name in six.iterkeys(directive.args):
            if defined_arg_name in name_to_arg_value:
                # The argument was present in the query, print it in the correct order.
                encountered_argument_names.add(defined_arg_name)
                sorted_args.append(name_to_arg_value[defined_arg_name])

        # Get all the arguments that weren't defined in the directive schema.
        # They will be printed after all the arguments that were in the schema.
        unsorted_args = [
            value
            for name, value in six.iteritems(name_to_arg_value)
            if name not in encountered_argument_names
        ]

        ordered_args = sorted_args + unsorted_args

    # Render as "@name(arg, arg, ...)"; wrap() omits the parens when there are no args.
    # NOTE(review): wrap/join are assumed to be the graphql printer helpers -- confirm.
    return '@' + node.name + wrap('(', join(ordered_args, ', '), ')')
Lower the IR into an IR form that can be represented in MATCH queries. Args: ir_blocks: list of IR blocks to lower into MATCH-compatible form query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: MatchQuery object containing the IR blocks organized in a MATCH-like structure
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    """Lower the IR into an IR form that can be represented in MATCH queries.

    Args:
        ir_blocks: list of IR blocks to lower into MATCH-compatible form
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                ***** Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated. *****

    Returns:
        MatchQuery object containing the IR blocks organized in a MATCH-like structure
    """
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)

    # Construct the mapping of each location to its corresponding GraphQL type.
    location_types = {
        location: location_info.type
        for location, location_info in query_metadata_table.registered_locations
    }

    # Compute the set of all locations that have associated type coercions.
    coerced_locations = {
        location
        for location, location_info in query_metadata_table.registered_locations
        if location_info.coerced_from_type is not None
    }

    # Extract information for both simple and complex @optional traverses
    location_to_optional_results = extract_optional_location_root_info(ir_blocks)
    complex_optional_roots, location_to_optional_roots = location_to_optional_results
    simple_optional_root_info = extract_simple_optional_location_info(
        ir_blocks, complex_optional_roots, location_to_optional_roots)
    ir_blocks = remove_end_optionals(ir_blocks)

    # Append global operation block(s) to filter out incorrect results
    # from simple optional match traverses (using a WHERE statement)
    if len(simple_optional_root_info) > 0:
        where_filter_predicate = construct_where_filter_predicate(
            query_metadata_table, simple_optional_root_info)
        # insert(-1, ...) places these immediately before the final IR block.
        ir_blocks.insert(-1, GlobalOperationsStart())
        ir_blocks.insert(-1, Filter(where_filter_predicate))

    # These lowering / optimization passes work on IR blocks.
    ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
    ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
    ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks)
    ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
    ir_blocks = lower_has_substring_binary_compositions(ir_blocks)
    ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table)

    # Here, we lower from raw IR blocks into a MatchQuery object.
    # From this point on, the lowering / optimization passes work on the MatchQuery representation.
    match_query = convert_to_match_query(ir_blocks)

    match_query = lower_comparisons_to_between(match_query)
    match_query = lower_backtrack_blocks(match_query, location_types)
    match_query = truncate_repeated_single_step_traversals(match_query)
    match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query)

    # Optimize and lower the IR blocks inside @fold scopes.
    new_folds = {
        key: merge_consecutive_filter_clauses(
            remove_backtrack_blocks_from_fold(
                lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks)
            )
        )
        for key, folded_ir_blocks in six.iteritems(match_query.folds)
    }
    match_query = match_query._replace(folds=new_folds)

    compound_match_query = convert_optional_traversals_to_compound_match_query(
        match_query, complex_optional_roots, location_to_optional_roots)
    compound_match_query = prune_non_existent_outputs(compound_match_query)
    compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query)
    compound_match_query = lower_context_field_expressions(compound_match_query)

    compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries(
        compound_match_query)
    compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points(
        compound_match_query, location_types, coerced_locations)

    return compound_match_query
Sort class metadata dicts so that a superclass always appears before its subclasses.
def toposort_classes(classes):
    """Sort class metadatas so that a superclass is always before the subclass"""
    def visit(name, by_name, emitted, in_progress):
        """Return a topologically sorted list of one class's dependencies, plus the class itself.

        Args:
            name: string, name of the class to process
            by_name: dict, class_name -> descriptor
            emitted: set of strings, names of classes already present in the output
            in_progress: set of strings, names of classes on the current recursion path

        Returns:
            list of dicts, list of classes sorted in topological order
        """
        # Skip classes that an earlier visit already emitted.
        if name in emitted:
            return []
        # A name reappearing on the active recursion path means a dependency cycle.
        if name in in_progress:
            raise AssertionError(
                'Encountered self-reference in dependency chain of {}'.format(name))

        descriptor = by_name[name]

        # Dependencies are the superclasses, plus any classes
        # reachable through linked properties.
        dependency_names = _list_superclasses(descriptor)
        for prop in descriptor.get('properties', []):
            if 'linkedClass' in prop:
                dependency_names.append(prop['linkedClass'])

        # Recursively emit all dependencies before this class.
        ordered = []
        in_progress.add(name)
        for dependency_name in dependency_names:
            ordered.extend(visit(dependency_name, by_name, emitted, in_progress))
        in_progress.remove(name)

        ordered.append(by_name[name])
        emitted.add(name)
        return ordered

    class_map = {descriptor['name']: descriptor for descriptor in classes}

    toposorted = []
    already_emitted = set()
    for class_name in class_map.keys():
        toposorted.extend(visit(class_name, class_map, already_emitted, set()))
    return toposorted
Return a list of the superclasses of the given class
def _list_superclasses(class_def): """Return a list of the superclasses of the given class""" superclasses = class_def.get('superClasses', []) if superclasses: # Make sure to duplicate the list return list(superclasses) sup = class_def.get('superClass', None) if sup: return [sup] else: return []
Return a LocationStackEntry namedtuple with the specified parameters.
def _construct_location_stack_entry(location, num_traverses): """Return a LocationStackEntry namedtuple with the specified parameters.""" if not isinstance(num_traverses, int) or num_traverses < 0: raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "num_traverses" {}. This is not allowed.' .format(num_traverses)) if not isinstance(location, Location): raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "location" {}. This is not allowed.' .format(location)) return LocationStackEntry(location=location, num_traverses=num_traverses)
Return a list of vertex fields, and a list of property fields, for the given AST node. Also verifies that all property fields for the AST node appear before all vertex fields, raising GraphQLCompilationError if that is not the case. Args: ast: GraphQL AST node, obtained from the graphql library Returns: tuple of two lists - the first list contains ASTs for vertex fields - the second list contains ASTs for property fields
def _get_fields(ast): """Return a list of vertex fields, and a list of property fields, for the given AST node. Also verifies that all property fields for the AST node appear before all vertex fields, raising GraphQLCompilationError if that is not the case. Args: ast: GraphQL AST node, obtained from the graphql library Returns: tuple of two lists - the first list contains ASTs for vertex fields - the second list contains ASTs for property fields """ if not ast.selection_set: # There are no child fields. return [], [] property_fields = [] vertex_fields = [] seen_field_names = set() switched_to_vertices = False # Ensures that all property fields are before all vertex fields. for field_ast in ast.selection_set.selections: if not isinstance(field_ast, Field): # We are getting Fields only, ignore everything else. continue name = get_ast_field_name(field_ast) if name in seen_field_names: # If we ever allow repeated field names, # then we have to change the Location naming scheme to reflect the repetitions # and disambiguate between Recurse and Traverse visits to a Location. raise GraphQLCompilationError(u'Encountered repeated field name: {}'.format(name)) seen_field_names.add(name) # Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex. if is_vertex_field_name(name): switched_to_vertices = True vertex_fields.append(field_ast) else: if switched_to_vertices: raise GraphQLCompilationError(u'Encountered property field {} ' u'after vertex fields!'.format(name)) property_fields.append(field_ast) return vertex_fields, property_fields
Return the inline fragment at the current AST node, or None if no fragment exists.
def _get_inline_fragment(ast): """Return the inline fragment at the current AST node, or None if no fragment exists.""" if not ast.selection_set: # There is nothing selected here, so no fragment. return None fragments = [ ast_node for ast_node in ast.selection_set.selections if isinstance(ast_node, InlineFragment) ] if not fragments: return None if len(fragments) > 1: raise GraphQLCompilationError(u'Cannot compile GraphQL with more than one fragment in ' u'a given selection set.') return fragments[0]
Process the output_source directive, modifying the context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! local_unique_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* Returns: an OutputSource block, if one should be emitted, or None otherwise
def _process_output_source_directive(schema, current_schema_type, ast, location, context, local_unique_directives): """Process the output_source directive, modifying the context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! local_unique_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* Returns: an OutputSource block, if one should be emitted, or None otherwise """ # The 'ast' variable is only for function signature uniformity, and is currently not used. output_source_directive = local_unique_directives.get('output_source', None) if output_source_directive: if has_encountered_output_source(context): raise GraphQLCompilationError(u'Cannot have more than one output source!') if is_in_optional_scope(context): raise GraphQLCompilationError(u'Cannot have the output source in an optional block!') set_output_source_data(context, location) return blocks.OutputSource() else: return None
Process property directives at this AST node, updating the query context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. Only for function signature uniformity at the moment -- it is currently not used. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! unique_local_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only*
def _compile_property_ast(schema, current_schema_type, ast, location,
                          context, unique_local_directives):
    """Process property directives at this AST node, updating the query context as appropriate.

    Args:
        schema: GraphQL schema object, obtained from the graphql library
        current_schema_type: GraphQLType, the schema type at the current location
        ast: GraphQL AST node, obtained from the graphql library. Only for function signature
             uniformity at the moment -- it is currently not used.
        location: Location object representing the current location in the query
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        unique_local_directives: dict, directive name string -> directive object, containing
                                 unique directives present on the current AST node *only*
    """
    validate_property_directives(unique_local_directives)

    if location.field == COUNT_META_FIELD_NAME:
        # Verify that uses of this field are within a @fold scope.
        if not is_in_fold_scope(context):
            raise GraphQLCompilationError(u'Cannot use the "{}" meta field when not within a @fold '
                                          u'vertex field, as counting elements only makes sense '
                                          u'in a fold. Location: {}'
                                          .format(COUNT_META_FIELD_NAME, location))

    # step P-2: process property-only directives
    tag_directive = unique_local_directives.get('tag', None)
    if tag_directive:
        if is_in_fold_scope(context):
            raise GraphQLCompilationError(u'Tagging values within a @fold vertex field is '
                                          u'not allowed! Location: {}'.format(location))

        if location.field == COUNT_META_FIELD_NAME:
            raise AssertionError(u'Tags are prohibited within @fold, but unexpectedly found use of '
                                 u'a tag on the {} meta field that is only allowed within a @fold!'
                                 u'Location: {}'
                                 .format(COUNT_META_FIELD_NAME, location))

        # Schema validation has ensured that the fields below exist.
        tag_name = tag_directive.arguments[0].value.value
        if tag_name in context['tags']:
            raise GraphQLCompilationError(u'Cannot reuse tag name: {}'.format(tag_name))
        validate_safe_string(tag_name)

        # Record the tag in the context, so later filters can reference it.
        context['tags'][tag_name] = {
            'location': location,
            'optional': is_in_optional_scope(context),
            'type': strip_non_null_from_type(current_schema_type),
        }
        context['metadata'].record_tag_info(tag_name, TagInfo(location=location))

    output_directive = unique_local_directives.get('output', None)
    if output_directive:
        # Schema validation has ensured that the fields below exist.
        output_name = output_directive.arguments[0].value.value
        if output_name in context['outputs']:
            raise GraphQLCompilationError(u'Cannot reuse output name: '
                                          u'{}, {}'.format(output_name, context))
        validate_safe_string(output_name)
        validate_output_name(output_name)

        graphql_type = strip_non_null_from_type(current_schema_type)
        if is_in_fold_scope(context):
            # Fold outputs are only allowed at the last level of traversal.
            set_fold_innermost_scope(context)
            # Outputs inside a fold produce lists of values, except the count meta field.
            if location.field != COUNT_META_FIELD_NAME:
                graphql_type = GraphQLList(graphql_type)

        # Record the output in the context, so the final ConstructResult can emit it.
        context['outputs'][output_name] = {
            'location': location,
            'optional': is_in_optional_scope(context),
            'type': graphql_type,
            'fold': context.get('fold', None),
        }
Validate and return the depth parameter of the recurse directive.
def _get_recurse_directive_depth(field_name, field_directives):
    """Validate and return the depth parameter of the recurse directive."""
    recurse_directive = field_directives['recurse']

    # @optional and @recurse are mutually exclusive on a single vertex field.
    if field_directives.get('optional', None):
        raise GraphQLCompilationError(u'Found both @optional and @recurse on '
                                      u'the same vertex field: {}'.format(field_name))

    arguments_by_name = get_uniquely_named_objects_by_name(recurse_directive.arguments)
    recurse_depth = int(arguments_by_name['depth'].value.value)
    if recurse_depth < 1:
        raise GraphQLCompilationError(u'Found recurse directive with disallowed depth: '
                                      u'{}'.format(recurse_depth))

    return recurse_depth
Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function!
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context):
    """Perform type checks on the enclosing type and the recursed type for a recurse directive.

    Args:
        current_schema_type: GraphQLType, the schema type at the current location
        field_schema_type: GraphQLType, the schema type at the inner scope
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
    """
    # Get the set of all allowed types in the current scope: the recursed type
    # itself plus any types the equivalence hints declare interchangeable with it.
    allowed_current_types = {field_schema_type}

    equivalent_union = context['type_equivalence_hints'].get(field_schema_type)
    if equivalent_union and isinstance(equivalent_union, GraphQLUnionType):
        allowed_current_types.update(equivalent_union.types)

    inverse_union = context['type_equivalence_hints_inverse'].get(field_schema_type)
    if inverse_union and isinstance(inverse_union, GraphQLUnionType):
        allowed_current_types.update(inverse_union.types)

    # The current scope must be of the same type as the field scope, or an acceptable subtype.
    current_scope_is_allowed = current_schema_type in allowed_current_types

    is_implemented_interface = (
        isinstance(field_schema_type, GraphQLInterfaceType) and
        isinstance(current_schema_type, GraphQLObjectType) and
        field_schema_type in current_schema_type.interfaces
    )

    if not (current_scope_is_allowed or is_implemented_interface):
        raise GraphQLCompilationError(u'Edges expanded with a @recurse directive must either '
                                      u'be of the same type as their enclosing scope, a supertype '
                                      u'of the enclosing scope, or be of an interface type that is '
                                      u'implemented by the type of their enclosing scope. '
                                      u'Enclosing scope type: {}, edge type: '
                                      u'{}'.format(current_schema_type, field_schema_type))
Return a list of basic blocks corresponding to the vertex AST node. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! unique_local_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* fields: tuple of lists (property_fields, vertex_fields), with lists of field objects present on the current vertex AST node Returns: list of basic blocks, the compiled output of the vertex AST node
def _compile_vertex_ast(schema, current_schema_type, ast,
                        location, context, unique_local_directives, fields):
    """Return a list of basic blocks corresponding to the vertex AST node.

    Args:
        schema: GraphQL schema object, obtained from the graphql library
        current_schema_type: GraphQLType, the schema type at the current location
        ast: GraphQL AST node, obtained from the graphql library
        location: Location object representing the current location in the query
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        unique_local_directives: dict, directive name string -> directive object, containing
                                 unique directives present on the current AST node *only*
        fields: tuple of lists (property_fields, vertex_fields), with lists of field objects
                present on the current vertex AST node

    Returns:
        list of basic blocks, the compiled output of the vertex AST node
    """
    basic_blocks = []
    query_metadata_table = context['metadata']
    current_location_info = query_metadata_table.get_location_info(location)

    vertex_fields, property_fields = fields

    validate_vertex_directives(unique_local_directives)

    # step V-2: step into property fields
    for field_ast in property_fields:
        field_name = get_ast_field_name(field_ast)
        property_schema_type = get_field_type_from_schema(current_schema_type, field_name)

        inner_location = location.navigate_to_field(field_name)
        inner_basic_blocks = _compile_ast_node_to_ir(schema, property_schema_type, field_ast,
                                                     inner_location, context)
        basic_blocks.extend(inner_basic_blocks)

    # step V-3: mark the graph position, and process output_source directive
    basic_blocks.append(_mark_location(location))
    output_source = _process_output_source_directive(schema, current_schema_type, ast,
                                                     location, context, unique_local_directives)
    if output_source:
        basic_blocks.append(output_source)

    # step V-4: step into vertex fields
    for field_ast in vertex_fields:
        field_name = get_ast_field_name(field_ast)
        validate_context_for_visiting_vertex_field(location, field_name, context)

        field_schema_type = get_vertex_field_type(current_schema_type, field_name)
        # If the hints declare a non-union base type equivalent to this edge's type,
        # compile the traversal against that base type instead.
        hinted_base = context['type_equivalence_hints_inverse'].get(field_schema_type, None)
        if hinted_base:
            field_schema_type = hinted_base

        inner_unique_directives = get_unique_directives(field_ast)
        validate_vertex_field_directive_interactions(location, field_name,
                                                     inner_unique_directives)
        validate_vertex_field_directive_in_context(
            location, field_name, inner_unique_directives, context)

        recurse_directive = inner_unique_directives.get('recurse', None)
        optional_directive = inner_unique_directives.get('optional', None)
        fold_directive = inner_unique_directives.get('fold', None)
        in_topmost_optional_block = False

        edge_traversal_is_optional = optional_directive is not None
        edge_traversal_is_folded = fold_directive is not None
        edge_traversal_is_recursive = recurse_directive is not None

        # This is true for any vertex expanded within an @optional scope.
        within_optional_scope = is_in_optional_scope(context)

        if edge_traversal_is_optional:
            # Invariant: There must always be a marked location corresponding to the query
            # position immediately before any optional Traverse.
            #
            # This invariant is verified in the IR sanity checks module (ir_sanity_checks.py),
            # in the function named _sanity_check_mark_location_preceding_optional_traverse().
            #
            # This marked location is the one that the @optional directive's corresponding
            # optional Backtrack will jump back to. If such a marked location isn't present,
            # the backtrack could rewind to an old marked location and might ignore
            # entire stretches of applied filtering.
            #
            # Assumption: The only way there might not be a marked location here is
            # if the current location already traversed into child locations, not including
            # folds. Folds are not included, because they do not change the known location.
            non_fold_child_locations = {
                child_location
                for child_location in query_metadata_table.get_child_locations(location)
                if not isinstance(child_location, FoldScopeLocation)
            }
            if non_fold_child_locations:
                # Re-mark the current location so the optional Backtrack has a fresh
                # target to return to; this rebinds "location" for the rest of the loop body.
                location = query_metadata_table.revisit_location(location)
                basic_blocks.append(_mark_location(location))

        if fold_directive:
            inner_location = location.navigate_to_fold(field_name)
        else:
            inner_location = location.navigate_to_subpath(field_name)

        # Register the child location's metadata before compiling into it;
        # optional/recursive depths grow by 1 when the corresponding directive is present
        # (booleans are used directly as 0/1 increments).
        inner_location_info = LocationInfo(
            parent_location=location,
            type=strip_non_null_from_type(field_schema_type),
            coerced_from_type=None,
            optional_scopes_depth=(
                current_location_info.optional_scopes_depth + edge_traversal_is_optional),
            recursive_scopes_depth=(
                current_location_info.recursive_scopes_depth + edge_traversal_is_recursive),
            is_within_fold=(current_location_info.is_within_fold or edge_traversal_is_folded),
        )
        query_metadata_table.register_location(inner_location, inner_location_info)

        if edge_traversal_is_optional:
            # Remember where the topmost optional context started.
            topmost_optional = get_optional_scope_or_none(context)
            if topmost_optional is None:
                set_optional_scope_data(context, inner_location)
                in_topmost_optional_block = True

        edge_direction, edge_name = get_edge_direction_and_name(field_name)

        # Emit exactly one of Fold / Recurse / Traverse for this vertex field.
        if fold_directive:
            fold_block = blocks.Fold(inner_location)
            basic_blocks.append(fold_block)
            set_fold_scope_data(context, inner_location)
        elif recurse_directive:
            _validate_recurse_directive_types(current_schema_type, field_schema_type, context)
            recurse_depth = _get_recurse_directive_depth(field_name, inner_unique_directives)
            basic_blocks.append(blocks.Recurse(edge_direction, edge_name, recurse_depth,
                                               within_optional_scope=within_optional_scope))
            query_metadata_table.record_recurse_info(location,
                                                     RecurseInfo(edge_direction=edge_direction,
                                                                 edge_name=edge_name,
                                                                 depth=recurse_depth))
        else:
            basic_blocks.append(blocks.Traverse(edge_direction, edge_name,
                                                optional=edge_traversal_is_optional,
                                                within_optional_scope=within_optional_scope))

        inner_basic_blocks = _compile_ast_node_to_ir(schema, field_schema_type, field_ast,
                                                     inner_location, context)
        basic_blocks.extend(inner_basic_blocks)

        if edge_traversal_is_folded:
            # Validate the fold had an effect, then unwind all fold-related context state
            # that was set while compiling inside the fold scope.
            has_count_filter = has_fold_count_filter(context)
            _validate_fold_has_outputs_or_count_filter(
                get_context_fold_info(context), has_count_filter, context['outputs'])
            basic_blocks.append(blocks.Unfold())
            unmark_context_fold_scope(context)
            if has_count_filter:
                unmark_fold_count_filter(context)
            if is_in_fold_innermost_scope(context):
                unmark_fold_innermost_scope(context)

        if in_topmost_optional_block:
            unmark_optional_scope(context)

        # If we are currently evaluating a @fold vertex,
        # we didn't Traverse into it, so we don't need to backtrack out either.
        # We also don't backtrack if we've reached an @output_source.
        backtracking_required = (
            (not fold_directive) and (not has_encountered_output_source(context)))
        if backtracking_required:
            if edge_traversal_is_optional:
                basic_blocks.append(blocks.EndOptional())
                basic_blocks.append(blocks.Backtrack(location, optional=True))

                # Exiting optional block!
                # Revisit the location so that there is a marked location right after the optional,
                # so that future Backtrack blocks return after the optional set of blocks, and
                # don't accidentally return to a prior location instead.
                location = query_metadata_table.revisit_location(location)

                basic_blocks.append(_mark_location(location))
            else:
                basic_blocks.append(blocks.Backtrack(location))

    return basic_blocks
Ensure the @fold scope has at least one output, or filters on the size of the fold.
def _validate_fold_has_outputs_or_count_filter(fold_scope_location, fold_has_count_filter, outputs): """Ensure the @fold scope has at least one output, or filters on the size of the fold.""" # This function makes sure that the @fold scope has an effect. # Folds either output data, or filter the data enclosing the fold based on the size of the fold. if fold_has_count_filter: # This fold has a filter on the "_x_count" property, so it is legal and has an effect. return True # At least one output in the outputs list must point to the fold_scope_location, # or the scope corresponding to fold_scope_location had no @outputs and is illegal. for output in six.itervalues(outputs): if output['fold'] == fold_scope_location: return True raise GraphQLCompilationError(u'Found a @fold scope that has no effect on the query. ' u'Each @fold scope must either perform filtering, or contain at ' u'least one field marked for output. Fold location: {}' .format(fold_scope_location))
Return a list of basic blocks corresponding to the inline fragment at this AST node. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks, the compiled output of the vertex AST node
def _compile_fragment_ast(schema, current_schema_type, ast, location, context):
    """Return a list of basic blocks corresponding to the inline fragment at this AST node.

    Args:
        schema: GraphQL schema object, obtained from the graphql library
        current_schema_type: GraphQLType, the schema type at the current location
        ast: GraphQL AST node, obtained from the graphql library.
        location: Location object representing the current location in the query
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!

    Returns:
        list of basic blocks, the compiled output of the vertex AST node
    """
    query_metadata_table = context['metadata']

    # step F-2. Emit a type coercion block if appropriate,
    # then recurse into the fragment's selection.
    target_type_name = ast.type_condition.name.value
    target_type_obj = schema.get_type(target_type_name)

    basic_blocks = []

    # A coercion block is redundant in exactly two situations:
    #   - the fragment coerces to the type the current scope already has;
    #   - the scope is a union type, and the fragment coerces to the union's base type,
    #     as declared via the type_equivalence_hints compilation parameter.
    coercion_matches_scope_type = current_schema_type.is_same_type(target_type_obj)
    equivalent_union_type = context['type_equivalence_hints'].get(target_type_obj, None)
    coercion_matches_union_base = (
        isinstance(current_schema_type, GraphQLUnionType) and
        current_schema_type.is_same_type(equivalent_union_type)
    )

    if not (coercion_matches_scope_type or coercion_matches_union_base):
        # Coercion is required: record it in the metadata and emit the CoerceType block.
        query_metadata_table.record_coercion_at_location(location, target_type_obj)
        basic_blocks.append(blocks.CoerceType({target_type_name}))

    basic_blocks.extend(
        _compile_ast_node_to_ir(schema, target_type_obj, ast, location, context))

    return basic_blocks
Compile the given GraphQL AST node into a list of basic blocks. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: the current GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks corresponding to this GraphQL AST node
def _compile_ast_node_to_ir(schema, current_schema_type, ast, location, context):
    """Compile the given GraphQL AST node into a list of basic blocks.

    Args:
        schema: GraphQL schema object, obtained from the graphql library
        current_schema_type: GraphQLType, the schema type at the current location
        ast: the current GraphQL AST node, obtained from the graphql library
        location: Location object representing the current location in the query
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!

    Returns:
        list of basic blocks corresponding to this GraphQL AST node
    """
    basic_blocks = []

    # step 0: preprocessing
    local_unique_directives = get_unique_directives(ast)
    fields = _get_fields(ast)
    vertex_fields, property_fields = fields
    fragment = _get_inline_fragment(ast)
    filter_operations = get_local_filter_directives(
        ast, current_schema_type, vertex_fields)

    # We don't support type coercion while at the same time selecting fields.
    # Either there are no fields, or there is no fragment, otherwise we raise a compilation error.
    fragment_exists = fragment is not None
    fields_exist = vertex_fields or property_fields
    if fragment_exists and fields_exist:
        raise GraphQLCompilationError(u'Cannot compile GraphQL that has inline fragment and '
                                      u'selected fields in the same selection. Please move the '
                                      u'selected fields inside the inline fragment.')

    if location.field is not None:  # we're at a property field
        # sanity-check: cannot have an inline fragment at a property field
        if fragment_exists:
            raise AssertionError(u'Found inline fragment at a property field: '
                                 u'{} {}'.format(location, fragment))

        # sanity-check: locations at properties don't have their own property locations
        if len(property_fields) > 0:
            raise AssertionError(u'Found property fields on a property field: '
                                 u'{} {}'.format(location, property_fields))

    # step 1: apply local filter, if any
    for filter_operation_info in filter_operations:
        filter_block = process_filter_directive(filter_operation_info, location, context)
        if isinstance(location, FoldScopeLocation) and location.field == COUNT_META_FIELD_NAME:
            # Filtering on the fold count field is only allowed at the innermost scope of a fold.
            set_fold_innermost_scope(context)

            # This Filter is going in the global operations section of the query, so it cannot
            # use LocalField expressions since there is no "local" location to use.
            # Rewrite it so that all references of data at a location instead use ContextFields.
            expected_field = expressions.LocalField(COUNT_META_FIELD_NAME)
            replacement_field = expressions.FoldedContextField(location, GraphQLInt)
            visitor_fn = expressions.make_replacement_visitor(expected_field, replacement_field)
            filter_block = filter_block.visit_and_update_expressions(visitor_fn)

            # Any remaining ContextField references must also be rewritten to
            # GlobalContextField, since the filter will run in the global scope.
            visitor_fn = expressions.make_type_replacement_visitor(
                expressions.ContextField,
                lambda context_field: expressions.GlobalContextField(
                    context_field.location, context_field.field_type))
            filter_block = filter_block.visit_and_update_expressions(visitor_fn)

            # Record that this fold filters on its count, then defer the filter to the
            # global operations section instead of emitting it at the current location.
            set_fold_count_filter(context)
            context['global_filters'].append(filter_block)
        else:
            basic_blocks.append(filter_block)

    if location.field is not None:
        # The location is at a property, compile the property data following P-steps.
        _compile_property_ast(schema, current_schema_type, ast,
                              location, context, local_unique_directives)
    else:
        # The location is at a vertex.
        if fragment_exists:
            # Compile the fragment data following F-steps.
            # N.B.: Note that the "fragment" variable is the fragment's AST. Since we've asserted
            # that the fragment is the only part of the selection set at the current AST node,
            # we pass the "fragment" in the AST parameter of the _compile_fragment_ast()
            # function, rather than the current AST node as in the other compilation steps.
            basic_blocks.extend(
                _compile_fragment_ast(schema, current_schema_type, fragment, location, context))
        else:
            # Compile the vertex data following V-steps.
            basic_blocks.extend(
                _compile_vertex_ast(schema, current_schema_type, ast,
                                    location, context, local_unique_directives, fields))

    return basic_blocks
Ensure all tags are used in some filter.
def _validate_all_tags_are_used(metadata): """Ensure all tags are used in some filter.""" tag_names = set([tag_name for tag_name, _ in metadata.tags]) filter_arg_names = set() for location, _ in metadata.registered_locations: for filter_info in metadata.get_filter_infos(location): for filter_arg in filter_info.args: if is_tag_argument(filter_arg): filter_arg_names.add(get_directive_argument_name(filter_arg)) unused_tags = tag_names - filter_arg_names if unused_tags: raise GraphQLCompilationError(u'This GraphQL query contains @tag directives whose values ' u'are not used: {}. This is not allowed. Please either use ' u'them in a filter or remove them entirely.' .format(unused_tags))
Compile a full GraphQL abstract syntax tree (AST) to intermediate representation. Args: schema: GraphQL schema object, obtained from the graphql library ast: the root GraphQL AST node for the query, obtained from the graphql library, and already validated against the schema for type-correctness type_equivalence_hints: optional dict of GraphQL type to equivalent GraphQL union Returns: IrAndMetadata named tuple, containing fields: - ir_blocks: a list of IR basic block objects - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type - output_metadata: a dict of output name (string) -> OutputMetadata object - location_types: a dict of location objects -> GraphQL type objects at that location - coerced_locations: a set of location objects indicating where type coercions have happened
def _compile_root_ast_to_ir(schema, ast, type_equivalence_hints=None):
    """Compile a full GraphQL abstract syntax tree (AST) to intermediate representation.

    Args:
        schema: GraphQL schema object, obtained from the graphql library
        ast: the root GraphQL AST node for the query, obtained from the graphql library,
             and already validated against the schema for type-correctness
        type_equivalence_hints: optional dict of GraphQL type to equivalent GraphQL union

    Returns:
        IrAndMetadata named tuple, containing fields:
        - ir_blocks: a list of IR basic block objects
        - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type
        - output_metadata: a dict of output name (string) -> OutputMetadata object
        - location_types: a dict of location objects -> GraphQL type objects at that location
        - coerced_locations: a set of location objects indicating where type coercions have
          happened
    """
    if len(ast.selection_set.selections) != 1:
        raise GraphQLCompilationError(u'Cannot process AST with more than one root selection!')

    base_ast = ast.selection_set.selections[0]
    base_start_type = get_ast_field_name(base_ast)  # This is the type at which querying starts.

    # Validation passed, so the base_start_type must exist as a field of the root query.
    current_schema_type = get_field_type_from_schema(schema.get_query_type(), base_start_type)

    # Construct the start location of the query and its associated metadata.
    location = Location((base_start_type,))
    base_location_info = LocationInfo(
        parent_location=None,
        type=current_schema_type,
        coerced_from_type=None,
        optional_scopes_depth=0,
        recursive_scopes_depth=0,
        is_within_fold=False,
    )
    query_metadata_table = QueryMetadataTable(location, base_location_info)

    # Default argument value is empty dict
    if not type_equivalence_hints:
        type_equivalence_hints = dict()

    # Construct the starting context object, shared (and mutated) by all compilation steps.
    context = {
        # 'metadata' is the QueryMetadataTable describing all the metadata collected during query
        # processing, including location metadata (e.g. which locations are folded or optional).
        'metadata': query_metadata_table,

        # 'tags' is a dict containing
        #  - location: Location where the tag was defined
        #  - optional: boolean representing whether the tag was defined within an @optional scope
        #  - type: GraphQLType of the tagged value
        'tags': dict(),

        # 'global_filters' is a list that may contain Filter blocks that are generated during
        # query processing, but apply to the global query scope and should be appended to the
        # IR blocks only after the GlobalOperationsStart block has been emitted.
        'global_filters': [],

        # 'outputs' is a dict mapping each output name to another dict which contains
        #  - location: Location where to output from
        #  - optional: boolean representing whether the output was defined within an @optional
        #              scope
        #  - type: GraphQLType of the output
        #  - fold: FoldScopeLocation object if the current output was defined within a fold scope,
        #          and None otherwise
        'outputs': dict(),

        # 'inputs' is a dict mapping input parameter names to their respective expected GraphQL
        # types, as automatically inferred by inspecting the query structure
        'inputs': dict(),

        # 'type_equivalence_hints' is a dict mapping GraphQL types to equivalent GraphQL unions
        'type_equivalence_hints': type_equivalence_hints,

        # 'type_equivalence_hints_inverse' is the inverse of type_equivalence_hints,
        # which is always invertible.
        'type_equivalence_hints_inverse': invert_dict(type_equivalence_hints),
    }

    # Add the query root basic block to the output.
    basic_blocks = [
        blocks.QueryRoot({base_start_type})
    ]

    # Ensure the GraphQL query root doesn't immediately have a fragment (type coercion).
    # Instead of starting at one type and coercing to another,
    # users should simply start at the type to which they are coercing.
    immediate_fragment = _get_inline_fragment(base_ast)
    if immediate_fragment is not None:
        msg_args = {
            'coerce_to': immediate_fragment.type_condition.name.value,
            'type_from': base_start_type,
        }
        raise GraphQLCompilationError(u'Found inline fragment coercing to type {coerce_to}, '
                                      u'immediately inside query root asking for type '
                                      u'{type_from}. This is a contrived pattern -- you should '
                                      u'simply start your query at {coerce_to}.'.format(**msg_args))

    # Ensure the GraphQL query root doesn't have any vertex directives
    # that are disallowed on the root node.
    validate_root_vertex_directives(base_ast)

    # Compile and add the basic blocks for the query's base AST vertex.
    new_basic_blocks = _compile_ast_node_to_ir(
        schema, current_schema_type, base_ast, location, context)
    basic_blocks.extend(new_basic_blocks)

    _validate_all_tags_are_used(context['metadata'])

    # All operations after this point affect the global query scope, and are not related to
    # the "current" location in the query produced by the sequence of Traverse/Backtrack blocks.
    basic_blocks.append(blocks.GlobalOperationsStart())

    # Add any filters that apply to the global query scope.
    basic_blocks.extend(context['global_filters'])

    # Based on the outputs context data, add an output step and construct the output metadata.
    outputs_context = context['outputs']
    basic_blocks.append(_compile_output_step(outputs_context))
    output_metadata = {
        name: OutputMetadata(type=value['type'], optional=value['optional'])
        for name, value in six.iteritems(outputs_context)
    }

    return IrAndMetadata(
        ir_blocks=basic_blocks,
        input_metadata=context['inputs'],
        output_metadata=output_metadata,
        query_metadata_table=context['metadata'])
Construct the final ConstructResult basic block that defines the output format of the query. Args: outputs: dict, output name (string) -> output data dict, specifying the location from where to get the data, and whether the data is optional (and therefore may be missing); missing optional data is replaced with 'null' Returns: a ConstructResult basic block that constructs appropriate outputs for the query
def _compile_output_step(outputs):
    """Construct the final ConstructResult basic block that defines the output format of the query.

    Args:
        outputs: dict, output name (string) -> output data dict, specifying the location
                 from where to get the data, and whether the data is optional (and therefore
                 may be missing); missing optional data is replaced with 'null'

    Returns:
        a ConstructResult basic block that constructs appropriate outputs for the query
    """
    if not outputs:
        raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least '
                                      u'one field with the @output directive.')

    construct_result_fields = {}
    for output_name, output_context in six.iteritems(outputs):
        out_location = output_context['location']
        is_optional = output_context['optional']
        out_graphql_type = output_context['type']

        if isinstance(out_location, FoldScopeLocation):
            # Outputs within a @fold scope can never be marked @optional.
            if is_optional:
                raise AssertionError(u'Unreachable state reached, optional in fold: '
                                     u'{}'.format(output_context))

            if out_location.field == COUNT_META_FIELD_NAME:
                # The fold's "_x_count" meta-field outputs the size of the fold.
                output_expression = expressions.FoldCountContextField(out_location)
            else:
                output_expression = expressions.FoldedContextField(out_location, out_graphql_type)
        else:
            output_expression = expressions.OutputContextField(out_location, out_graphql_type)

            if is_optional:
                # The output's vertex may not exist; emit null instead of the value in that case.
                vertex_existence = expressions.ContextFieldExistence(out_location.at_vertex())
                output_expression = expressions.TernaryConditional(
                    vertex_existence, output_expression, expressions.NullLiteral)

        construct_result_fields[output_name] = output_expression

    return blocks.ConstructResult(construct_result_fields)
Validate the supplied graphql schema and ast. This method wraps around graphql-core's validation to enforce a stricter requirement of the schema -- all directives supported by the compiler must be declared by the schema, regardless of whether each directive is used in the query or not. Args: schema: GraphQL schema object, created using the GraphQL library ast: abstract syntax tree representation of a graphql query Returns: list containing schema and/or query validation errors
def _validate_schema_and_ast(schema, ast):
    """Validate the supplied graphql schema and ast.

    This method wraps around graphql-core's validation to enforce a stricter requirement of the
    schema -- all directives supported by the compiler must be declared by the schema, regardless
    of whether each directive is used in the query or not.

    Args:
        schema: GraphQL schema object, created using the GraphQL library
        ast: abstract syntax tree representation of a graphql query

    Returns:
        list containing schema and/or query validation errors
    """
    core_graphql_errors = validate(schema, ast)

    # The following directives appear in the core-graphql library, but are not supported by the
    # graphql compiler.
    unsupported_default_directives = frozenset([
        frozenset([
            'include',
            frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
            frozenset(['if'])
        ]),
        frozenset([
            'skip',
            frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
            frozenset(['if'])
        ]),
        frozenset([
            'deprecated',
            frozenset(['ENUM_VALUE', 'FIELD_DEFINITION']),
            frozenset(['reason'])
        ])
    ])

    # Directives expected by the graphql compiler.
    expected_directives = {
        frozenset([
            directive.name,
            frozenset(directive.locations),
            frozenset(six.viewkeys(directive.args))
        ])
        for directive in DIRECTIVES
    }

    # Directives provided in the parsed graphql schema.
    actual_directives = {
        frozenset([
            directive.name,
            frozenset(directive.locations),
            frozenset(six.viewkeys(directive.args))
        ])
        for directive in schema.get_directives()
    }

    # Directives missing from the actual directives provided.
    missing_directives = expected_directives - actual_directives
    if missing_directives:
        missing_message = (u'The following directives were missing from the '
                           u'provided schema: {}'.format(missing_directives))
        core_graphql_errors.append(missing_message)

    # Directives that are not specified by the core graphql library. Note that Graphql-core
    # automatically injects default directives into the schema, regardless of whether
    # the schema supports said directives. Hence, while the directives contained in
    # unsupported_default_directives are incompatible with the graphql-compiler, we allow them to
    # be present in the parsed schema string.
    extra_directives = actual_directives - expected_directives - unsupported_default_directives
    if extra_directives:
        # Fixed duplicated word in the original message ("are not not supported").
        extra_message = (u'The following directives were supplied in the given schema, but are '
                         u'not supported by the GraphQL compiler: {}'.format(extra_directives))
        core_graphql_errors.append(extra_message)

    return core_graphql_errors
Convert the given GraphQL string into compiler IR, using the given schema object. Args: schema: GraphQL schema object, created using the GraphQL library graphql_string: string containing the GraphQL to compile to compiler IR type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: IrAndMetadata named tuple, containing fields: - ir_blocks: a list of IR basic block objects - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type - output_metadata: a dict of output name (string) -> OutputMetadata object - query_metadata_table: a QueryMetadataTable object containing location metadata Raises flavors of GraphQLError in the following cases: - if the query is invalid GraphQL (GraphQLParsingError); - if the query doesn't match the schema (GraphQLValidationError); - if the query has more than one definition block (GraphQLValidationError); - if the query has more than one selection in the root object (GraphQLCompilationError); - if the query does not obey directive usage rules (GraphQLCompilationError); - if the query provides invalid / disallowed / wrong number of arguments for a directive (GraphQLCompilationError). 
In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError.
def graphql_to_ir(schema, graphql_string, type_equivalence_hints=None):
    """Convert the given GraphQL string into compiler IR, using the given schema object.

    Args:
        schema: GraphQL schema object, created using the GraphQL library
        graphql_string: string containing the GraphQL to compile to compiler IR
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                *****
                                Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated.
                                *****

    Returns:
        IrAndMetadata named tuple, containing fields:
        - ir_blocks: a list of IR basic block objects
        - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type
        - output_metadata: a dict of output name (string) -> OutputMetadata object
        - query_metadata_table: a QueryMetadataTable object containing location metadata

    Raises flavors of GraphQLError in the following cases:
        - if the query is invalid GraphQL (GraphQLParsingError);
        - if the query doesn't match the schema (GraphQLValidationError);
        - if the query has more than one definition block (GraphQLValidationError);
        - if the query has more than one selection in the root object (GraphQLCompilationError);
        - if the query does not obey directive usage rules (GraphQLCompilationError);
        - if the query provides invalid / disallowed / wrong number of arguments
          for a directive (GraphQLCompilationError).

    In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError.
    """
    graphql_string = _preprocess_graphql_string(graphql_string)

    # Parse the query text; parse failures surface as GraphQLParsingError.
    try:
        document_ast = parse(graphql_string)
    except GraphQLSyntaxError as error:
        raise GraphQLParsingError(error)

    # Validate against the schema before attempting compilation.
    validation_issues = _validate_schema_and_ast(schema, document_ast)
    if validation_issues:
        raise GraphQLValidationError(u'String does not validate: {}'.format(validation_issues))

    # Validation is expected to have rejected multi-definition documents already.
    if len(document_ast.definitions) != 1:
        raise AssertionError(u'Unsupported graphql string with multiple definitions, should have '
                             u'been caught in validation: '
                             u'\n{}\n{}'.format(graphql_string, document_ast))

    base_ast = document_ast.definitions[0]
    return _compile_root_ast_to_ir(schema, base_ast,
                                   type_equivalence_hints=type_equivalence_hints)
Return a human-readable representation of a gremlin command string.
def pretty_print_gremlin(gremlin):
    """Return a human-readable representation of a gremlin command string."""
    gremlin = remove_custom_formatting(gremlin)

    # Cut the command at every '.' that follows ')', '}' or the 'scatter' step.
    # The capturing group makes re.split() interleave each captured suffix with the
    # surrounding text, so glue every suffix back onto the piece it came from.
    split_pieces = re.split(r'([)}]|scatter)[ ]?\.', gremlin)
    parts = [
        split_pieces[index] + split_pieces[index + 1]
        for index in six.moves.xrange(0, len(split_pieces) - 1, 2)
    ]
    parts.append(split_pieces[-1])

    # Restore the leading '.' that the split consumed from each later piece.
    parts = parts[:1] + ['.' + piece for piece in parts[1:]]

    indentation_increment = 4
    indentation = 0
    output = []
    for step in parts:
        # Steps that open a scope indent; steps that close one dedent.
        if step.startswith(('.out', '.in', '.ifThenElse')):
            indentation += indentation_increment
        elif step.startswith(('.back', '.optional')):
            indentation -= indentation_increment
            if indentation < 0:
                raise AssertionError(u'Indentation became negative: {}'.format(indentation))
        output.append((' ' * indentation) + step)

    return '\n'.join(output).strip()
Return a human-readable representation of a parameterized MATCH query string.
def pretty_print_match(match, parameterized=True):
    """Return a human-readable representation of a parameterized MATCH query string.

    Args:
        match: string, the MATCH query to reformat
        parameterized: bool, whether the query contains parameter placeholders and therefore
                       uses doubled braces ('{{' / '}}') rather than single ones

    Returns:
        string, the reformatted MATCH query
    """
    left_curly = '{{' if parameterized else '{'
    right_curly = '}}' if parameterized else '}'
    match = remove_custom_formatting(match)
    # The capturing group keeps the brace tokens in the split output, so the loop
    # below can track whether it is currently inside a braces pair.
    parts = re.split('({}|{})'.format(left_curly, right_curly), match)

    inside_braces = False
    indent_size = 4
    indent = ' ' * indent_size

    output = [parts[0]]
    for current_index, current_part in enumerate(parts[1:]):
        if current_part == left_curly:
            if inside_braces:
                raise AssertionError(u'Found open-braces pair while already inside braces: '
                                     u'{} {} {}'.format(current_index, parts, match))
            inside_braces = True
            output.append(current_part + '\n')
        elif current_part == right_curly:
            if not inside_braces:
                raise AssertionError(u'Found close-braces pair while not inside braces: '
                                     u'{} {} {}'.format(current_index, parts, match))
            inside_braces = False
            output.append(current_part)
        else:
            if not inside_braces:
                stripped_part = current_part.lstrip()
                if stripped_part.startswith('.'):
                    # Strip whitespace before traversal steps.
                    output.append(stripped_part)
                else:
                    # Do not strip whitespace before e.g. the RETURN keyword.
                    output.append(current_part)
            else:
                # Split out the keywords, initially getting rid of commas.
                separate_keywords = re.split(', ([a-z]+:)', current_part)

                # The first item in the separated list is the full first "keyword: value" pair.
                # For every subsequent item, the keyword and value are separated; join them
                # back together, outputting the comma, newline and indentation before them.
                output.append(indent + separate_keywords[0].lstrip())
                for i in six.moves.xrange(1, len(separate_keywords) - 1, 2):
                    output.append(',\n{indent}{keyword} {value}'.format(
                        keyword=separate_keywords[i].strip(),
                        value=separate_keywords[i + 1].strip(),
                        indent=indent))
                output.append('\n')

    return ''.join(output).strip()
Represent a float as a string without losing precision.
def represent_float_as_str(value):
    """Represent a float as a string without losing precision."""
    if isinstance(value, float):
        # Python 2's str() on a float can silently drop significant digits.
        # Converting through Decimal (exact for float inputs) sidesteps the problem:
        # https://github.com/mogui/pyorient/pull/226/files
        with decimal.localcontext() as ctx:
            ctx.prec = 20  # floats are max 80-bits wide = 20 significant digits
            return u'{:f}'.format(decimal.Decimal(value))

    raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: '
                                      u'{}'.format(value))
Type-check the value, and then just return str(value).
def type_check_and_str(python_type, value):
    """Type-check the value, and then just return str(value)."""
    if isinstance(value, python_type):
        return str(value)

    # The value does not have the expected type; reject it.
    raise GraphQLInvalidArgumentError(u'Attempting to represent a non-{type} as a {type}: '
                                      u'{value}'.format(type=python_type, value=value))
Attempt to coerce the value to a Decimal, or raise an error if unable to do so.
def coerce_to_decimal(value):
    """Attempt to coerce the value to a Decimal, or raise an error if unable to do so."""
    # Already a Decimal -- nothing to do.
    if isinstance(value, decimal.Decimal):
        return value

    try:
        return decimal.Decimal(value)
    except decimal.InvalidOperation as e:
        # Re-raise through the compiler's argument-error type.
        raise GraphQLInvalidArgumentError(e)
Return a visitor function that replaces every instance of one expression with another one.
def make_replacement_visitor(find_expression, replace_expression):
    """Return a visitor function that replaces every instance of one expression with another one."""
    def visitor_fn(expression):
        """Return the replacement if this expression matches the expression we're looking for."""
        return replace_expression if expression == find_expression else expression

    return visitor_fn
Return a visitor function that replaces expressions of a given type with new expressions.
def make_type_replacement_visitor(find_types, replacement_func):
    """Return a visitor function that replaces expressions of a given type with new expressions."""
    def visitor_fn(expression):
        """Return a replacement expression if the original expression is of the correct type."""
        if not isinstance(expression, find_types):
            return expression
        return replacement_func(expression)

    return visitor_fn
Ensure the named operator is valid and supported.
def _validate_operator_name(operator, supported_operators):
    """Ensure the named operator is valid and supported.

    Args:
        operator: unicode string, the name of the operator to check
        supported_operators: container of unicode strings, the allowed operator names

    Raises:
        TypeError: if the operator is not a unicode string
        GraphQLCompilationError: if the operator is not in the supported set
    """
    if not isinstance(operator, six.text_type):
        raise TypeError(u'Expected operator as unicode string, got: {} {}'.format(
            type(operator).__name__, operator))

    if operator not in supported_operators:
        raise GraphQLCompilationError(u'Unrecognized operator: {}'.format(operator))
Validate that the Literal is correctly representable.
def validate(self): """Validate that the Literal is correctly representable.""" # Literals representing boolean values or None are correctly representable and supported. if self.value is None or self.value is True or self.value is False: return # Literal safe strings are correctly representable and supported. if isinstance(self.value, six.string_types): validate_safe_string(self.value) return # Literal ints are correctly representable and supported. if isinstance(self.value, int): return # Literal empty lists, and non-empty lists of safe strings, are # correctly representable and supported. if isinstance(self.value, list): if len(self.value) > 0: for x in self.value: validate_safe_string(x) return raise GraphQLCompilationError(u'Cannot represent literal: {}'.format(self.value))
Return a unicode object with the Gremlin/MATCH representation of this Literal.
def _to_output_code(self): """Return a unicode object with the Gremlin/MATCH representation of this Literal.""" # All supported Literal objects serialize to identical strings both in Gremlin and MATCH. self.validate() if self.value is None: return u'null' elif self.value is True: return u'true' elif self.value is False: return u'false' elif isinstance(self.value, six.string_types): return safe_quoted_string(self.value) elif isinstance(self.value, int): return six.text_type(self.value) elif isinstance(self.value, list): if len(self.value) == 0: return '[]' elif all(isinstance(x, six.string_types) for x in self.value): list_contents = ', '.join(safe_quoted_string(x) for x in sorted(self.value)) return '[' + list_contents + ']' else: pass # Fall through to assertion error below. raise AssertionError(u'Unreachable state reached: {}'.format(self))
Validate that the Variable is correctly representable.
def validate(self): """Validate that the Variable is correctly representable.""" # Get the first letter, or empty string if it doesn't exist. if not self.variable_name.startswith(u'$'): raise GraphQLCompilationError(u'Expected variable name to start with $, but was: ' u'{}'.format(self.variable_name)) if self.variable_name in RESERVED_MATCH_KEYWORDS: raise GraphQLCompilationError(u'Cannot use reserved MATCH keyword {} as variable ' u'name!'.format(self.variable_name)) validate_safe_string(self.variable_name[1:]) if not is_graphql_type(self.inferred_type): raise ValueError(u'Invalid value of "inferred_type": {}'.format(self.inferred_type)) if isinstance(self.inferred_type, GraphQLNonNull): raise ValueError(u'GraphQL non-null types are not supported as "inferred_type": ' u'{}'.format(self.inferred_type)) if isinstance(self.inferred_type, GraphQLList): inner_type = strip_non_null_from_type(self.inferred_type.of_type) if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type): # This is a compilation error rather than a ValueError as # it can be caused by an invalid GraphQL query on an otherwise valid schema. # In other words, it's an error in writing the GraphQL query, rather than # a programming error within the library. raise GraphQLCompilationError( u'Lists of Date or DateTime cannot currently be represented as ' u'Variable objects: {}'.format(self.inferred_type))
Return a unicode object with the MATCH representation of this Variable.
def to_match(self):
    """Return a unicode object with the MATCH representation of this Variable."""
    self.validate()

    # We don't want the dollar sign as part of the variable name.
    variable_with_no_dollar_sign = self.variable_name[1:]

    # Curly braces mark the spot where the query parameter value will be interpolated.
    match_variable_name = '{%s}' % (six.text_type(variable_with_no_dollar_sign),)

    # We can't directly pass a Date or DateTime object, so we have to pass it as a string
    # and then parse it inline. For date format parameter meanings, see:
    # http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html
    # For the semantics of the date() OrientDB SQL function, see:
    # http://orientdb.com/docs/last/SQL-Functions.html#date
    if GraphQLDate.is_same_type(self.inferred_type):
        return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATE_FORMAT)
    elif GraphQLDateTime.is_same_type(self.inferred_type):
        return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATETIME_FORMAT)
    else:
        return match_variable_name
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this expression.""" # We can't directly pass a Date or a DateTime object, so we have to pass it as a string # and then parse it inline. For date format parameter meanings, see: # http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html if GraphQLDate.is_same_type(self.inferred_type): return u'Date.parse("{}", {})'.format(STANDARD_DATE_FORMAT, self.variable_name) elif GraphQLDateTime.is_same_type(self.inferred_type): return u'Date.parse("{}", {})'.format(STANDARD_DATETIME_FORMAT, self.variable_name) else: return six.text_type(self.variable_name)
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    self.validate()

    local_object_name = self.get_local_object_gremlin_name()

    if self.field_name == '@this':
        # '@this' refers to the local object itself, rather than to one of its properties.
        return local_object_name

    if '@' in self.field_name:
        # Names containing '@' cannot be written with dot notation,
        # so emit subscript access instead.
        return u'{}[\'{}\']'.format(local_object_name, self.field_name)
    else:
        return u'{}.{}'.format(local_object_name, self.field_name)
Validate that the GlobalContextField is correctly representable.
def validate(self):
    """Validate that the GlobalContextField is correctly representable.

    Raises:
        TypeError: if the location is not a Location object
        AssertionError: if the location does not point to a field
        ValueError: if the field_type is not a valid GraphQL type
    """
    if not isinstance(self.location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'
                        .format(type(self.location).__name__, self.location))

    if self.location.field is None:
        raise AssertionError(u'Received Location without a field: {}'
                             .format(self.location))

    if not is_graphql_type(self.field_type):
        raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))
Return a unicode object with the MATCH representation of this GlobalContextField.
def to_match(self):
    """Return a unicode object with the MATCH representation of this GlobalContextField."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    # Both components end up verbatim in the emitted query, so both must be safe to emit.
    validate_safe_string(mark_name)
    validate_safe_string(field_name)

    return u'%s.%s' % (mark_name, field_name)
Return a unicode object with the MATCH representation of this ContextField.
def to_match(self):
    """Return a unicode object with the MATCH representation of this ContextField."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    validate_safe_string(mark_name)

    if field_name is None:
        # No field: the expression refers to the matched vertex itself.
        return u'$matched.%s' % (mark_name,)
    else:
        validate_safe_string(field_name)
        return u'$matched.%s.%s' % (mark_name, field_name)
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()

    if field_name is not None:
        validate_safe_string(field_name)
        if '@' in field_name:
            # Names containing '@' cannot be written with dot notation; use subscript access.
            template = u'm.{mark_name}[\'{field_name}\']'
        else:
            template = u'm.{mark_name}.{field_name}'
    else:
        # No field: the expression refers to the marked vertex itself.
        template = u'm.{mark_name}'

    validate_safe_string(mark_name)

    return template.format(mark_name=mark_name, field_name=field_name)
Validate that the OutputContextField is correctly representable.
def validate(self):
    """Validate that the OutputContextField is correctly representable."""
    if not isinstance(self.location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'.format(
            type(self.location).__name__, self.location))

    if not self.location.field:
        raise ValueError(u'Expected Location object that points to a field, got: '
                         u'{}'.format(self.location))

    if not is_graphql_type(self.field_type):
        raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))

    # Unwrap the non-null layer (if any) to inspect the underlying type.
    stripped_field_type = strip_non_null_from_type(self.field_type)
    if isinstance(stripped_field_type, GraphQLList):
        inner_type = strip_non_null_from_type(stripped_field_type.of_type)
        if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
            # This is a compilation error rather than a ValueError as
            # it can be caused by an invalid GraphQL query on an otherwise valid schema.
            # In other words, it's an error in writing the GraphQL query, rather than
            # a programming error within the library.
            raise GraphQLCompilationError(
                u'Lists of Date or DateTime cannot currently be represented as '
                u'OutputContextField objects: {}'.format(self.field_type))
Return a unicode object with the MATCH representation of this expression.
def to_match(self):
    """Return a unicode object with the MATCH representation of this expression."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    validate_safe_string(mark_name)
    validate_safe_string(field_name)

    stripped_field_type = strip_non_null_from_type(self.field_type)
    # Date and datetime fields are emitted via format() with the standard format strings.
    if GraphQLDate.is_same_type(stripped_field_type):
        return u'%s.%s.format("%s")' % (mark_name, field_name, STANDARD_DATE_FORMAT)
    elif GraphQLDateTime.is_same_type(stripped_field_type):
        return u'%s.%s.format("%s")' % (mark_name, field_name, STANDARD_DATETIME_FORMAT)
    else:
        return u'%s.%s' % (mark_name, field_name)
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    validate_safe_string(mark_name)
    validate_safe_string(field_name)

    if '@' in field_name:
        # Names containing '@' cannot be written with dot notation; use subscript access.
        template = u'm.{mark_name}[\'{field_name}\']'
    else:
        template = u'm.{mark_name}.{field_name}'

    format_value = None
    stripped_field_type = strip_non_null_from_type(self.field_type)
    if GraphQLDate.is_same_type(stripped_field_type):
        template += '.format("{format}")'
        format_value = STANDARD_DATE_FORMAT
    elif GraphQLDateTime.is_same_type(stripped_field_type):
        template += '.format("{format}")'
        format_value = STANDARD_DATETIME_FORMAT

    # For non-temporal fields 'format' stays None and is simply unused by the template.
    return template.format(mark_name=mark_name, field_name=field_name,
                           format=format_value)
Validate that the FoldedContextField is correctly representable.
def validate(self):
    """Validate that the FoldedContextField is correctly representable."""
    if not isinstance(self.fold_scope_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(self.fold_scope_location), self.fold_scope_location))
    if self.fold_scope_location.field is None:
        raise ValueError(u'Expected FoldScopeLocation at a field, but got: {}'
                         .format(self.fold_scope_location))

    if self.fold_scope_location.field == COUNT_META_FIELD_NAME:
        # The _x_count meta-field is a scalar count, not a folded list of values.
        if not GraphQLInt.is_same_type(self.field_type):
            raise TypeError(u'Expected the _x_count meta-field to be of GraphQLInt type, but '
                            u'encountered type {} instead: {}'
                            .format(self.field_type, self.fold_scope_location))
    else:
        # Regular fields inside a fold always produce lists of values.
        if not isinstance(self.field_type, GraphQLList):
            raise ValueError(u'Invalid value of "field_type" for a field that is not '
                             u'a meta-field, expected a list type but got: {} {}'
                             .format(self.field_type, self.fold_scope_location))

        inner_type = strip_non_null_from_type(self.field_type.of_type)
        if isinstance(inner_type, GraphQLList):
            raise GraphQLCompilationError(
                u'Outputting list-valued fields in a @fold context is currently not supported: '
                u'{} {}'.format(self.fold_scope_location, self.field_type.of_type))
Return a unicode object with the MATCH representation of this expression.
def to_match(self):
    """Return a unicode object with the MATCH representation of this expression."""
    self.validate()

    mark_name, field_name = self.fold_scope_location.get_location_name()
    validate_safe_string(mark_name)

    template = u'$%(mark_name)s.%(field_name)s'
    template_data = {
        'mark_name': mark_name,
    }

    if field_name == COUNT_META_FIELD_NAME:
        # The _x_count meta-field compiles to the size of the folded list.
        template_data['field_name'] = 'size()'
    else:
        inner_type = strip_non_null_from_type(self.field_type.of_type)
        if GraphQLDate.is_same_type(inner_type):
            # Known OrientDB bug may cause trouble here, and incorrect data may be returned:
            # https://github.com/orientechnologies/orientdb/issues/7289
            template += '.format("' + STANDARD_DATE_FORMAT + '")'
        elif GraphQLDateTime.is_same_type(inner_type):
            # Known OrientDB bug may cause trouble here, and incorrect data may be returned:
            # https://github.com/orientechnologies/orientdb/issues/7289
            template += '.format("' + STANDARD_DATETIME_FORMAT + '")'

        template_data['field_name'] = field_name

    return template % template_data
Validate that the FoldCountContextField is correctly representable.
def validate(self):
    """Validate that the FoldCountContextField is correctly representable."""
    if not isinstance(self.fold_scope_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(self.fold_scope_location), self.fold_scope_location))
    # This expression type only ever represents the _x_count meta-field.
    if self.fold_scope_location.field != COUNT_META_FIELD_NAME:
        raise AssertionError(u'Unexpected field in the FoldScopeLocation of this '
                             u'FoldCountContextField object: {} {}'
                             .format(self.fold_scope_location, self))
Return a unicode object with the MATCH representation of this expression.
def to_match(self):
    """Return a unicode object with the MATCH representation of this expression."""
    self.validate()

    mark_name, _ = self.fold_scope_location.get_location_name()
    validate_safe_string(mark_name)

    # The fold's count is the size of the folded list bound to the $mark_name variable.
    return u'$%(mark_name)s.size()' % {'mark_name': mark_name}
Validate that the ContextFieldExistence is correctly representable.
def validate(self):
    """Validate that the ContextFieldExistence is correctly representable."""
    if not isinstance(self.location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'.format(
            type(self.location).__name__, self.location))

    # Existence checks apply to vertices, so the location must not point to a field.
    if self.location.field:
        raise ValueError(u'Expected location to point to a vertex, '
                         u'but found a field: {}'.format(self.location))
Validate that the UnaryTransformation is correctly representable.
def validate(self):
    """Validate that the UnaryTransformation is correctly representable.

    Raises:
        TypeError: if the inner expression is not an Expression
        GraphQLCompilationError: if the operator is not supported
    """
    _validate_operator_name(self.operator, UnaryTransformation.SUPPORTED_OPERATORS)

    if not isinstance(self.inner_expression, Expression):
        raise TypeError(u'Expected Expression inner_expression, got {} {}'.format(
            type(self.inner_expression).__name__, self.inner_expression))
Create an updated version (if needed) of UnaryTransformation via the visitor pattern.
def visit_and_update(self, visitor_fn):
    """Create an updated version (if needed) of UnaryTransformation via the visitor pattern."""
    new_inner = self.inner_expression.visit_and_update(visitor_fn)

    if new_inner is not self.inner_expression:
        # The inner expression changed; rebuild this node before visiting it.
        return visitor_fn(UnaryTransformation(self.operator, new_inner))
    else:
        # Nothing changed below; visit this node as-is.
        return visitor_fn(self)
Return a unicode object with the MATCH representation of this UnaryTransformation.
def to_match(self):
    """Return a unicode object with the MATCH representation of this UnaryTransformation."""
    self.validate()

    # Maps supported unary operators to their MATCH (OrientDB SQL) spellings.
    translation_table = {
        u'size': u'size()',
    }
    match_operator = translation_table.get(self.operator)
    if not match_operator:
        raise AssertionError(u'Unrecognized operator used: '
                             u'{} {}'.format(self.operator, self))

    template = u'%(inner)s.%(operator)s'
    args = {
        'inner': self.inner_expression.to_match(),
        'operator': match_operator,
    }
    return template % args
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    # Validate before emitting code, consistent with to_match() and with the
    # to_gremlin() implementations of the other expression types.
    self.validate()

    # Maps supported unary operators to their Gremlin (Groovy) spellings.
    translation_table = {
        u'size': u'count()',
    }
    gremlin_operator = translation_table.get(self.operator)
    if not gremlin_operator:
        raise AssertionError(u'Unrecognized operator used: '
                             u'{} {}'.format(self.operator, self))

    template = u'{inner}.{operator}'
    args = {
        'inner': self.inner_expression.to_gremlin(),
        'operator': gremlin_operator,
    }
    return template.format(**args)
Validate that the BinaryComposition is correctly representable.
def validate(self):
    """Validate that the BinaryComposition is correctly representable.

    Raises:
        TypeError: if either operand is not an Expression
        GraphQLCompilationError: if the operator is not supported
    """
    _validate_operator_name(self.operator, BinaryComposition.SUPPORTED_OPERATORS)

    if not isinstance(self.left, Expression):
        raise TypeError(u'Expected Expression left, got: {} {} {}'.format(
            type(self.left).__name__, self.left, self))

    if not isinstance(self.right, Expression):
        raise TypeError(u'Expected Expression right, got: {} {}'.format(
            type(self.right).__name__, self.right))
Create an updated version (if needed) of BinaryComposition via the visitor pattern.
def visit_and_update(self, visitor_fn):
    """Create an updated version (if needed) of BinaryComposition via the visitor pattern."""
    new_left = self.left.visit_and_update(visitor_fn)
    new_right = self.right.visit_and_update(visitor_fn)

    if new_left is not self.left or new_right is not self.right:
        # At least one operand changed; rebuild this node before visiting it.
        return visitor_fn(BinaryComposition(self.operator, new_left, new_right))
    else:
        return visitor_fn(self)
Return a unicode object with the MATCH representation of this BinaryComposition.
def to_match(self):
    """Return a unicode object with the MATCH representation of this BinaryComposition."""
    self.validate()

    # The MATCH versions of some operators require an inverted order of arguments.
    # pylint: disable=unused-variable
    regular_operator_format = '(%(left)s %(operator)s %(right)s)'
    inverted_operator_format = '(%(right)s %(operator)s %(left)s)'      # noqa
    intersects_operator_format = '(%(operator)s(%(left)s, %(right)s).asList().size() > 0)'
    # pylint: enable=unused-variable

    # Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'.
    if any((isinstance(self.left, Literal) and self.left.value is None,
            isinstance(self.right, Literal) and self.right.value is None)):
        translation_table = {
            u'=': (u'IS', regular_operator_format),
            u'!=': (u'IS NOT', regular_operator_format),
        }
    else:
        translation_table = {
            u'=': (u'=', regular_operator_format),
            u'!=': (u'<>', regular_operator_format),
            u'>=': (u'>=', regular_operator_format),
            u'<=': (u'<=', regular_operator_format),
            u'>': (u'>', regular_operator_format),
            u'<': (u'<', regular_operator_format),
            u'+': (u'+', regular_operator_format),
            u'||': (u'OR', regular_operator_format),
            u'&&': (u'AND', regular_operator_format),
            u'contains': (u'CONTAINS', regular_operator_format),
            u'intersects': (u'intersect', intersects_operator_format),
            u'has_substring': (None, None),  # must be lowered into compatible form using LIKE

            # MATCH-specific operators
            u'LIKE': (u'LIKE', regular_operator_format),
            u'INSTANCEOF': (u'INSTANCEOF', regular_operator_format),
        }

    match_operator, format_spec = translation_table.get(self.operator, (None, None))
    if not match_operator:
        # Either the operator is unknown, or (like 'has_substring') it has no direct
        # MATCH equivalent and should have been lowered away before code emission.
        raise AssertionError(u'Unrecognized operator used: '
                             u'{} {}'.format(self.operator, self))

    return format_spec % dict(operator=match_operator,
                              left=self.left.to_match(),
                              right=self.right.to_match())
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    self.validate()

    # The three shapes a Gremlin binary operation can take:
    # infix ('a op b'), method-call ('a.op(b)'), and negated-empty intersection.
    immediate_operator_format = u'({left} {operator} {right})'
    dotted_operator_format = u'{left}.{operator}({right})'
    intersects_operator_format = u'(!{left}.{operator}({right}).empty)'

    # Maps the compiler's operator names to their (Gremlin operator, format) pairs.
    translation_table = {
        u'=': (u'==', immediate_operator_format),
        u'!=': (u'!=', immediate_operator_format),
        u'>=': (u'>=', immediate_operator_format),
        u'<=': (u'<=', immediate_operator_format),
        u'>': (u'>', immediate_operator_format),
        u'<': (u'<', immediate_operator_format),
        u'+': (u'+', immediate_operator_format),
        u'||': (u'||', immediate_operator_format),
        u'&&': (u'&&', immediate_operator_format),
        u'contains': (u'contains', dotted_operator_format),
        u'intersects': (u'intersect', intersects_operator_format),
        u'has_substring': (u'contains', dotted_operator_format),
    }

    gremlin_operator, format_spec = translation_table.get(self.operator, (None, None))
    if not gremlin_operator:
        raise AssertionError(u'Unrecognized operator used: '
                             u'{} {}'.format(self.operator, self))

    return format_spec.format(operator=gremlin_operator,
                              left=self.left.to_gremlin(),
                              right=self.right.to_gremlin())
Validate that the TernaryConditional is correctly representable.
def validate(self):
    """Validate that the TernaryConditional is correctly representable.

    Raises:
        TypeError: if any of predicate, if_true or if_false is not an Expression
    """
    if not isinstance(self.predicate, Expression):
        raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
            type(self.predicate).__name__, self.predicate))
    if not isinstance(self.if_true, Expression):
        raise TypeError(u'Expected Expression if_true, got: {} {}'.format(
            type(self.if_true).__name__, self.if_true))
    if not isinstance(self.if_false, Expression):
        raise TypeError(u'Expected Expression if_false, got: {} {}'.format(
            type(self.if_false).__name__, self.if_false))
Create an updated version (if needed) of TernaryConditional via the visitor pattern.
def visit_and_update(self, visitor_fn):
    """Create an updated version (if needed) of TernaryConditional via the visitor pattern."""
    new_predicate = self.predicate.visit_and_update(visitor_fn)
    new_if_true = self.if_true.visit_and_update(visitor_fn)
    new_if_false = self.if_false.visit_and_update(visitor_fn)

    if any((new_predicate is not self.predicate,
            new_if_true is not self.if_true,
            new_if_false is not self.if_false)):
        # At least one child changed; rebuild this node before visiting it.
        return visitor_fn(TernaryConditional(new_predicate, new_if_true, new_if_false))
    else:
        return visitor_fn(self)
Return a unicode object with the MATCH representation of this TernaryConditional.
def to_match(self):
    """Return a unicode object with the MATCH representation of this TernaryConditional."""
    self.validate()

    # For MATCH, an additional validation step is needed -- we currently do not support
    # emitting MATCH code for TernaryConditional that contains another TernaryConditional
    # anywhere within the predicate expression. This is because the predicate expression
    # must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work.
    def visitor_fn(expression):
        """Visitor function that ensures the predicate does not contain TernaryConditionals."""
        if isinstance(expression, TernaryConditional):
            raise ValueError(u'Cannot emit MATCH code for TernaryConditional that contains '
                             u'in its predicate another TernaryConditional: '
                             u'{} {}'.format(expression, self))
        return expression

    self.predicate.visit_and_update(visitor_fn)

    format_spec = u'if(eval("%(predicate)s"), %(if_true)s, %(if_false)s)'
    predicate_string = self.predicate.to_match()
    if u'"' in predicate_string:
        # The predicate is embedded inside double quotes within the eval() call, so a
        # double-quote inside it would terminate the quoted string prematurely.
        raise AssertionError(u'Found a double-quote within the predicate string, this would '
                             u'have terminated the if(eval()) early and should be fixed: '
                             u'{} {}'.format(predicate_string, self))

    return format_spec % dict(predicate=predicate_string,
                              if_true=self.if_true.to_match(),
                              if_false=self.if_false.to_match())
Return a unicode object with the Gremlin representation of this expression.
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression."""
    self.validate()

    # Groovy supports the usual ternary operator directly, so emit each child
    # and combine them in a parenthesized '?:' expression.
    predicate_code = self.predicate.to_gremlin()
    if_true_code = self.if_true.to_gremlin()
    if_false_code = self.if_false.to_gremlin()

    return u'({predicate} ? {if_true} : {if_false})'.format(
        predicate=predicate_code,
        if_true=if_true_code,
        if_false=if_false_code)
Assert that IR blocks originating from the frontend do not have nonsensical structure. Args: ir_blocks: list of BasicBlocks representing the IR to sanity-check Raises: AssertionError, if the IR has unexpected structure. If the IR produced by the front-end cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug, this is the method that should catch the problem.
def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table):
    """Assert that IR blocks originating from the frontend do not have nonsensical structure.

    Args:
        ir_blocks: list of BasicBlocks representing the IR to sanity-check
        query_metadata_table: QueryMetadataTable object that records location metadata
                              for the query the IR was compiled from

    Raises:
        AssertionError, if the IR has unexpected structure. If the IR produced by the front-end
        cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug,
        this is the method that should catch the problem.
    """
    if not ir_blocks:
        raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks))

    # Each helper below checks one structural invariant of the IR.
    _sanity_check_fold_scope_locations_are_unique(ir_blocks)
    _sanity_check_no_nested_folds(ir_blocks)
    _sanity_check_query_root_block(ir_blocks)
    _sanity_check_output_source_follower_blocks(ir_blocks)
    _sanity_check_block_pairwise_constraints(ir_blocks)
    _sanity_check_mark_location_preceding_optional_traverse(ir_blocks)
    _sanity_check_every_location_is_marked(ir_blocks)
    _sanity_check_coerce_type_outside_of_fold(ir_blocks)
    _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table)
    _sanity_check_registered_locations_parent_locations(query_metadata_table)
Assert that all registered locations' parent locations are also registered.
def _sanity_check_registered_locations_parent_locations(query_metadata_table):
    """Assert that all registered locations' parent locations are also registered."""
    for location, location_info in query_metadata_table.registered_locations:
        if (location != query_metadata_table.root_location and
                not query_metadata_table.root_location.is_revisited_at(location)):
            # If the location is not the root location and is not a revisit of the root,
            # then it must have a parent location.
            if location_info.parent_location is None:
                raise AssertionError(u'Found a location that is not the root location of the '
                                     u'query or a revisit of the root, but does not have a '
                                     u'parent: {} {}'.format(location, location_info))

        if location_info.parent_location is not None:
            # Make sure the parent_location is also registered.
            # If the location is not registered, the following line will raise an error.
            query_metadata_table.get_location_info(location_info.parent_location)
Assert that all locations in MarkLocation blocks have registered and valid metadata.
def _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table):
    """Assert that all locations in MarkLocation blocks have registered and valid metadata."""
    # The locations appearing in MarkLocation blocks must be exactly the locations
    # registered in the QueryMetadataTable -- no more, no fewer.
    marked_locations = set()
    for block in ir_blocks:
        if isinstance(block, MarkLocation):
            marked_locations.add(block.location)

    registered_locations = set()
    for location, _ in query_metadata_table.registered_locations:
        registered_locations.add(location)

    marked_but_not_registered = marked_locations - registered_locations
    if marked_but_not_registered:
        raise AssertionError(u'IR blocks unexpectedly contain locations not registered in the '
                             u'QueryMetadataTable: {}'.format(marked_but_not_registered))

    registered_but_not_marked = registered_locations - marked_locations
    if registered_but_not_marked:
        raise AssertionError(u'QueryMetadataTable unexpectedly contains registered locations that '
                             u'never appear in the IR blocks: '
                             u'{}'.format(registered_but_not_marked))
Assert that every FoldScopeLocation that exists on a Fold block is unique.
def _sanity_check_fold_scope_locations_are_unique(ir_blocks):
    """Assert that every FoldScopeLocation that exists on a Fold block is unique."""
    # Maps each FoldScopeLocation to the first Fold block seen with that location.
    first_block_at_location = dict()
    for block in ir_blocks:
        if not isinstance(block, Fold):
            continue

        earlier_block = first_block_at_location.setdefault(block.fold_scope_location, block)
        if earlier_block is not block:
            raise AssertionError(u'Found two Fold blocks with identical FoldScopeLocations: '
                                 u'{} {} {}'.format(earlier_block, block, ir_blocks))
Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold.
def _sanity_check_no_nested_folds(ir_blocks):
    """Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold."""
    inside_fold = False
    for block in ir_blocks:
        if isinstance(block, Fold):
            # Opening a fold while one is already open means the folds are nested.
            if inside_fold:
                raise AssertionError(u'Found a nested Fold contexts: {}'.format(ir_blocks))
            inside_fold = True
        elif isinstance(block, Unfold):
            # Closing a fold that was never opened means the Unfold is unmatched.
            if not inside_fold:
                raise AssertionError(u'Found an Unfold block without a matching Fold: '
                                     u'{}'.format(ir_blocks))
            inside_fold = False
Assert that QueryRoot is always the first block, and only the first block.
def _sanity_check_query_root_block(ir_blocks):
    """Assert that QueryRoot is always the first block, and only the first block."""
    first_block, later_blocks = ir_blocks[0], ir_blocks[1:]
    if not isinstance(first_block, QueryRoot):
        raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks))

    if any(isinstance(block, QueryRoot) for block in later_blocks):
        raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks))
Assert that ConstructResult is always the last block, and only the last block.
def _sanity_check_construct_result_block(ir_blocks):
    """Assert that ConstructResult is always the last block, and only the last block."""
    last_block, earlier_blocks = ir_blocks[-1], ir_blocks[:-1]
    if not isinstance(last_block, ConstructResult):
        raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks))

    if any(isinstance(block, ConstructResult) for block in earlier_blocks):
        raise AssertionError(u'Found ConstructResult before the last block: '
                             u'{}'.format(ir_blocks))
Ensure there are no Traverse / Backtrack / Recurse blocks after an OutputSource block.
def _sanity_check_output_source_follower_blocks(ir_blocks):
    """Ensure there are no Traverse / Backtrack / Recurse blocks after an OutputSource block."""
    blocks_forbidden_after_output_source = (Backtrack, Traverse, Recurse)
    output_source_encountered = False
    for block in ir_blocks:
        if isinstance(block, OutputSource):
            output_source_encountered = True
        elif output_source_encountered and isinstance(block, blocks_forbidden_after_output_source):
            raise AssertionError(u'Found Backtrack / Traverse / Recurse '
                                 u'after OutputSource block: '
                                 u'{}'.format(ir_blocks))
Assert that adjacent blocks obey all invariants.
def _sanity_check_block_pairwise_constraints(ir_blocks):
    """Assert that adjacent blocks obey all invariants."""
    for previous_block, current_block in pairwise(ir_blocks):
        # Filtering always happens before marking a location, never after it.
        if isinstance(previous_block, MarkLocation) and isinstance(current_block, Filter):
            raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks))

        # Marking the same location twice in a row is pointless.
        if isinstance(previous_block, MarkLocation) and isinstance(current_block, MarkLocation):
            raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks))

        # An optional Traverse must be immediately followed
        # by a MarkLocation, CoerceType or Filter block.
        if isinstance(previous_block, Traverse) and previous_block.optional:
            if not isinstance(current_block, (MarkLocation, CoerceType, Filter)):
                raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse '
                                     u'with optional=True. Found: {}'.format(ir_blocks))

        # An optional Backtrack must be immediately followed by a MarkLocation block.
        if isinstance(previous_block, Backtrack) and previous_block.optional:
            if not isinstance(current_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, '
                                     u'but none was found: {}'.format(ir_blocks))

        # A Recurse must be immediately preceded by a MarkLocation or Backtrack block.
        if isinstance(current_block, Recurse):
            if not isinstance(previous_block, (MarkLocation, Backtrack)):
                raise AssertionError(u'Expected MarkLocation or Backtrack before Recurse, but none '
                                     u'was found: {}'.format(ir_blocks))
Ensure that every new location is marked with a MarkLocation block.
def _sanity_check_every_location_is_marked(ir_blocks):
    """Ensure that every new location is marked with a MarkLocation block."""
    # Exactly one MarkLocation block must appear between any block that starts an interval
    # of blocks all affecting the same query position, and the first subsequent block that
    # affects a different position. Examples of such intervals include:
    #   - from Fold to Unfold
    #   - from QueryRoot to Traverse/Recurse
    #   - from one Traverse to the next Traverse
    #   - from Traverse to Backtrack
    interval_opening_types = (QueryRoot, Traverse, Recurse, Fold)
    interval_closing_types = (Backtrack, ConstructResult, Recurse, Traverse, Unfold)

    interval_is_open = False
    marks_in_interval = 0
    for block in ir_blocks:
        # Close any open interval first: Traverse and Recurse both terminate the
        # previous interval and then open a new one below.
        if interval_is_open and isinstance(block, interval_closing_types):
            interval_is_open = False
            if marks_in_interval != 1:
                raise AssertionError(u'Expected 1 MarkLocation block between traversals, found: '
                                     u'{} {}'.format(marks_in_interval, ir_blocks))

        if isinstance(block, MarkLocation):
            marks_in_interval += 1
        elif isinstance(block, interval_opening_types):
            interval_is_open = True
            marks_in_interval = 0
Assert that optional Traverse blocks are preceded by a MarkLocation.
def _sanity_check_mark_location_preceding_optional_traverse(ir_blocks):
    """Assert that optional Traverse blocks are preceded by a MarkLocation."""
    # Strip out all fold scopes first; the check applies to the remaining blocks,
    # where each optional Traverse must directly follow a MarkLocation block.
    _, blocks_without_folds = extract_folds_from_ir_blocks(ir_blocks)

    for preceding_block, block in pairwise(blocks_without_folds):
        is_optional_traverse = isinstance(block, Traverse) and block.optional
        if is_optional_traverse and not isinstance(preceding_block, MarkLocation):
            raise AssertionError(u'Expected MarkLocation before Traverse with optional=True, '
                                 u'but none was found: {}'.format(ir_blocks))
Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block.
def _sanity_check_coerce_type_outside_of_fold(ir_blocks):
    """Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block."""
    in_fold_scope = False
    for current_block, next_block in pairwise(ir_blocks):
        if isinstance(current_block, Fold):
            in_fold_scope = True

        # Outside a fold, a CoerceType block must be immediately
        # followed by either a MarkLocation or a Filter block.
        if isinstance(current_block, CoerceType) and not in_fold_scope:
            if not isinstance(next_block, (MarkLocation, Filter)):
                raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, '
                                     u'but none was found: {}'.format(ir_blocks))

        if isinstance(next_block, Unfold):
            in_fold_scope = False
Ensure that the given property type_id is supported by the graph.
def validate_supported_property_type_id(property_name, property_type_id):
    """Ensure that the given property type_id is supported by the graph."""
    type_id_is_known = property_type_id in PROPERTY_TYPE_ID_TO_NAME
    if not type_id_is_known:
        raise AssertionError(u'Property "{}" has unsupported property type id: '
                             u'{}'.format(property_name, property_type_id))
Parse and return the default value for a boolean property.
def _parse_bool_default_value(property_name, default_value_string): """Parse and return the default value for a boolean property.""" lowercased_value_string = default_value_string.lower() if lowercased_value_string in {'0', 'false'}: return False elif lowercased_value_string in {'1', 'true'}: return True else: raise AssertionError(u'Unsupported default value for boolean property "{}": ' u'{}'.format(property_name, default_value_string))
Parse and return the default value for a datetime property.
def _parse_datetime_default_value(property_name, default_value_string):
    """Parse and return the default value for a datetime property."""
    # OrientDB does not emit ISO-8601 datetimes, so we parse with the OrientDB-specific
    # format string and build a datetime object from the resulting struct_time.
    # strptime() raises if the provided value does not match the expected format.
    time_fields = time.strptime(default_value_string, ORIENTDB_DATETIME_FORMAT)
    return datetime.datetime(
        time_fields.tm_year, time_fields.tm_mon, time_fields.tm_mday,
        time_fields.tm_hour, time_fields.tm_min, time_fields.tm_sec,
        0, None)  # microsecond=0, tzinfo=None: second precision, naive datetime
Parse and return the default value for a date property.
def _parse_date_default_value(property_name, default_value_string):
    """Parse and return the default value for a date property."""
    # OrientDB does not emit ISO-8601 dates, so we parse with the OrientDB-specific
    # format string and build a date object from the resulting struct_time.
    # strptime() raises if the provided value does not match the expected format.
    time_fields = time.strptime(default_value_string, ORIENTDB_DATE_FORMAT)
    return datetime.date(time_fields.tm_year, time_fields.tm_mon, time_fields.tm_mday)
Parse the default value string into its proper form given the property type ID. Args: property_name: string, the name of the property whose default value is being parsed. Used primarily to construct meaningful error messages, should the default value prove invalid. property_type_id: int, one of the property type ID constants defined in this file that OrientDB uses to designate the native type of a given property. default_value_string: string, the textual representation of the default value for for the property, as returned by OrientDB's schema introspection code. Returns: an object of type matching the property that can be used as the property's default value. For example, if the property is of string type, the return type will be a string, and if the property is of list type, the return type will be a list. Raises: AssertionError, if the default value is not supported or does not match the property's declared type (e.g. if a default of "[]" is set on an integer property).
def parse_default_property_value(property_name, property_type_id, default_value_string):
    """Parse the default value string into its proper form given the property type ID.

    Args:
        property_name: string, the name of the property whose default value is being parsed.
                       Used primarily to construct meaningful error messages,
                       should the default value prove invalid.
        property_type_id: int, one of the property type ID constants defined in this file that
                          OrientDB uses to designate the native type of a given property.
        default_value_string: string, the textual representation of the default value for
                              for the property, as returned by OrientDB's schema
                              introspection code.

    Returns:
        an object of type matching the property that can be used as the property's default value.
        For example, if the property is of string type, the return type will be a string, and if
        the property is of list type, the return type will be a list.

    Raises:
        AssertionError, if the default value is not supported or does not match the
        property's declared type (e.g. if a default of "[]" is set on an integer property).
    """
    # Collection and string defaults are handled inline; only the specific
    # literal forms below are supported.
    if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}':
        return set()

    if property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]':
        return list()

    if (property_type_id == PROPERTY_TYPE_STRING_ID and
            isinstance(default_value_string, six.string_types)):
        return default_value_string

    # Scalar types with non-trivial parsing are dispatched to dedicated helpers.
    parser_for_type_id = {
        PROPERTY_TYPE_BOOLEAN_ID: _parse_bool_default_value,
        PROPERTY_TYPE_DATETIME_ID: _parse_datetime_default_value,
        PROPERTY_TYPE_DATE_ID: _parse_date_default_value,
    }
    parser = parser_for_type_id.get(property_type_id)
    if parser is None:
        raise AssertionError(u'Unsupported default value for property "{}" with type id {}: '
                             u'{}'.format(property_name, property_type_id, default_value_string))
    return parser(property_name, default_value_string)
Compile the GraphQL input using the schema into a MATCH query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_string: the GraphQL query to compile to MATCH, as a string type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: a CompilationResult object
def compile_graphql_to_match(schema, graphql_string, type_equivalence_hints=None):
    """Compile the GraphQL input using the schema into a MATCH query and associated metadata.

    Args:
        schema: GraphQL schema object describing the schema of the graph to be queried
        graphql_string: the GraphQL query to compile to MATCH, as a string
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                *****
                                Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated.
                                *****

    Returns:
        a CompilationResult object
    """
    # MATCH needs no target-specific compiler metadata, hence the trailing None.
    return _compile_graphql_generic(
        MATCH_LANGUAGE, ir_lowering_match.lower_ir, emit_match.emit_code_from_ir,
        schema, graphql_string, type_equivalence_hints, None)
Compile the GraphQL input using the schema into a Gremlin query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_string: the GraphQL query to compile to Gremlin, as a string type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: a CompilationResult object
def compile_graphql_to_gremlin(schema, graphql_string, type_equivalence_hints=None):
    """Compile the GraphQL input using the schema into a Gremlin query and associated metadata.

    Args:
        schema: GraphQL schema object describing the schema of the graph to be queried
        graphql_string: the GraphQL query to compile to Gremlin, as a string
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                *****
                                Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated.
                                *****

    Returns:
        a CompilationResult object
    """
    # Gremlin needs no target-specific compiler metadata, hence the trailing None.
    return _compile_graphql_generic(
        GREMLIN_LANGUAGE, ir_lowering_gremlin.lower_ir, emit_gremlin.emit_code_from_ir,
        schema, graphql_string, type_equivalence_hints, None)
Compile the GraphQL input using the schema into a SQL query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_string: the GraphQL query to compile to SQL, as a string compiler_metadata: SQLAlchemy metadata containing tables for use during compilation. type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: a CompilationResult object
def compile_graphql_to_sql(schema, graphql_string, compiler_metadata, type_equivalence_hints=None):
    """Compile the GraphQL input using the schema into a SQL query and associated metadata.

    Args:
        schema: GraphQL schema object describing the schema of the graph to be queried
        graphql_string: the GraphQL query to compile to SQL, as a string
        compiler_metadata: SQLAlchemy metadata containing tables for use during compilation.
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                *****
                                Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated.
                                *****

    Returns:
        a CompilationResult object
    """
    # Unlike MATCH and Gremlin, the SQL emitter requires SQLAlchemy metadata.
    return _compile_graphql_generic(
        SQL_LANGUAGE, ir_lowering_sql.lower_ir, emit_sql.emit_code_from_ir,
        schema, graphql_string, type_equivalence_hints, compiler_metadata)
Compile the GraphQL input, lowering and emitting the query using the given functions. Args: language: string indicating the target language to compile to. lowering_func: Function to lower the compiler IR into a compatible form for the target language backend. query_emitter_func: Function that emits a query in the target language from the lowered IR. schema: GraphQL schema object describing the schema of the graph to be queried. graphql_string: the GraphQL query to compile to the target language, as a string. type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. compiler_metadata: optional target specific metadata for usage by the query_emitter_func. Returns: a CompilationResult object
def _compile_graphql_generic(language, lowering_func, query_emitter_func,
                             schema, graphql_string, type_equivalence_hints, compiler_metadata):
    """Compile the GraphQL input, lowering and emitting the query using the given functions.

    Args:
        language: string indicating the target language to compile to.
        lowering_func: Function to lower the compiler IR into a compatible form for the
                       target language backend.
        query_emitter_func: Function that emits a query in the target language from the
                            lowered IR.
        schema: GraphQL schema object describing the schema of the graph to be queried.
        graphql_string: the GraphQL query to compile to the target language, as a string.
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
        compiler_metadata: optional target specific metadata for usage by the
                           query_emitter_func.

    Returns:
        a CompilationResult object
    """
    # Stage 1: front-end -- parse the GraphQL into compiler IR plus metadata.
    ir_and_metadata = graphql_to_ir(
        schema, graphql_string, type_equivalence_hints=type_equivalence_hints)

    # Stage 2: lower the IR into a form the target language backend can emit.
    lowered_ir_blocks = lowering_func(
        ir_and_metadata.ir_blocks, ir_and_metadata.query_metadata_table,
        type_equivalence_hints=type_equivalence_hints)

    # Stage 3: emit the target-language query text.
    emitted_query = query_emitter_func(lowered_ir_blocks, compiler_metadata)

    return CompilationResult(
        query=emitted_query,
        language=language,
        output_metadata=ir_and_metadata.output_metadata,
        input_metadata=ir_and_metadata.input_metadata)
Ensure the filter function is only applied to scalar leaf types.
def scalar_leaf_only(operator):
    """Ensure the filter function is only applied to scalar leaf types."""
    def decorator(f):
        """Decorate the supplied function with the "scalar_leaf_only" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, context, parameters, *args, **kwargs):
            """Check that the type on which the operator operates is a scalar leaf type."""
            if 'operator' in kwargs:
                current_operator = kwargs['operator']
            else:
                # Because "operator" is from an enclosing scope, it is immutable in Python 2.x.
                current_operator = operator

            if not is_leaf_type(filter_operation_info.field_type):
                # Bug fix: the message previously rendered as "...non-leaf type<info>"
                # with no separator; add ": " for consistency with vertex_field_only().
                raise GraphQLCompilationError(u'Cannot apply "{}" filter to non-leaf type: '
                                              u'{}'.format(current_operator,
                                                           filter_operation_info))
            return f(filter_operation_info, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
Ensure the filter function is only applied to vertex field types.
def vertex_field_only(operator):
    """Ensure the filter function is only applied to vertex field types."""
    def decorator(f):
        """Decorate the supplied function with the "vertex_field_only" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, context, parameters, *args, **kwargs):
            """Check that the type on which the operator operates is a vertex field type."""
            # Because "operator" is from an enclosing scope, it is immutable in Python 2.x,
            # so callers may override it via an "operator" keyword argument instead.
            current_operator = kwargs.get('operator', operator)

            if not is_vertex_field_type(filter_operation_info.field_type):
                raise GraphQLCompilationError(
                    u'Cannot apply "{}" filter to non-vertex field: '
                    u'{}'.format(current_operator, filter_operation_info.field_name))
            return f(filter_operation_info, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
Ensure the filter function has "count" parameters specified.
def takes_parameters(count):
    """Ensure the filter function has "count" parameters specified."""
    def decorator(f):
        """Decorate the supplied function with the "takes_parameters" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, location, context, parameters, *args, **kwargs):
            """Check that the supplied number of parameters equals the expected number."""
            actual_count = len(parameters)
            if actual_count != count:
                raise GraphQLCompilationError(u'Incorrect number of parameters, expected {} got '
                                              u'{}: {}'.format(count, actual_count, parameters))

            return f(filter_operation_info, location, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
Return a two-element tuple that represents the argument to the directive being processed. Args: directive_location: Location where the directive is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! argument: string, the name of the argument to the directive inferred_type: GraphQL type object specifying the inferred type of the argument Returns: (argument_expression, non_existence_expression) - argument_expression: an Expression object that captures the semantics of the argument - non_existence_expression: None or Expression object; If the current block is not optional, this is set to None. Otherwise, it is an expression that will evaluate to True if the argument is skipped as optional and therefore not present, and False otherwise.
def _represent_argument(directive_location, context, argument, inferred_type):
    """Return a two-element tuple that represents the argument to the directive being processed.

    Args:
        directive_location: Location where the directive is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        argument: string, the name of the argument to the directive
        inferred_type: GraphQL type object specifying the inferred type of the argument

    Returns:
        (argument_expression, non_existence_expression)
            - argument_expression: an Expression object that captures the semantics of the argument
            - non_existence_expression: None or Expression object;
              If the current block is not optional, this is set to None. Otherwise, it is an
              expression that will evaluate to True if the argument is skipped as optional
              and therefore not present, and False otherwise.
    """
    # Regardless of what kind of variable we are dealing with,
    # we want to ensure its name is valid.
    # NOTE: argument[1:] strips the leading sigil ('$' or '%') -- the exact
    # prefix convention is defined by is_variable_argument / is_tag_argument.
    argument_name = argument[1:]
    validate_safe_string(argument_name)

    if is_variable_argument(argument):
        # Runtime variable argument: record its inferred type in context['inputs'],
        # ensuring it is consistent with any type inferred for it elsewhere in the query.
        existing_type = context['inputs'].get(argument_name, inferred_type)
        if not inferred_type.is_same_type(existing_type):
            raise GraphQLCompilationError(u'Incompatible types inferred for argument {}. '
                                          u'The argument cannot simultaneously be '
                                          u'{} and {}.'.format(argument, existing_type,
                                                               inferred_type))
        context['inputs'][argument_name] = inferred_type

        # Variables always exist at runtime, so no non-existence expression is needed.
        return (expressions.Variable(argument, inferred_type), None)
    elif is_tag_argument(argument):
        # Tag argument: the value comes from a previously @tag-marked field in the query.
        argument_context = context['tags'].get(argument_name, None)
        if argument_context is None:
            raise GraphQLCompilationError(u'Undeclared argument used: {}'.format(argument))

        location = argument_context['location']
        optional = argument_context['optional']
        tag_inferred_type = argument_context['type']

        if location is None:
            raise AssertionError(u'Argument declared without location: {}'.format(argument_name))

        if location.field is None:
            raise AssertionError(u'Argument location is not a property field: {}'.format(location))

        if not inferred_type.is_same_type(tag_inferred_type):
            raise GraphQLCompilationError(u'The inferred type of the matching @tag directive does '
                                          u'not match the inferred required type for this filter: '
                                          u'{} vs {}'.format(tag_inferred_type, inferred_type))

        # Check whether the argument is a field on the vertex on which the directive is applied.
        field_is_local = directive_location.at_vertex() == location.at_vertex()

        non_existence_expression = None
        if optional:
            if field_is_local:
                # A local field always "exists" in its own scope, so the
                # non-existence check is simply the constant False.
                non_existence_expression = expressions.FalseLiteral
            else:
                # The tagged field lives in an optional scope elsewhere: it is absent
                # exactly when the ContextFieldExistence check evaluates to False.
                non_existence_expression = expressions.BinaryComposition(
                    u'=',
                    expressions.ContextFieldExistence(location.at_vertex()),
                    expressions.FalseLiteral)

        # Reference the tagged value either as a local field or via the saved context.
        if field_is_local:
            representation = expressions.LocalField(argument_name)
        else:
            representation = expressions.ContextField(location, tag_inferred_type)

        return (representation, non_existence_expression)
    else:
        # If we want to support literal arguments, add them here.
        raise GraphQLCompilationError(u'Non-argument type found: {}'.format(argument))
Return a Filter basic block that performs the given comparison against the property field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to perform the comparison against; if the parameter is optional and missing, the check will return True operator: unicode, a comparison operator, like '=', '!=', '>=' etc. This is a kwarg only to preserve the same positional arguments in the function signature, to ease validation. Returns: a Filter basic block that performs the requested comparison
def _process_comparison_filter_directive(filter_operation_info, location, context,
                                         parameters, operator=None):
    """Return a Filter basic block that performs the given comparison against the property field.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, containing the value to perform the comparison against;
                    if the parameter is optional and missing, the check will return True
        operator: unicode, a comparison operator, like '=', '!=', '>=' etc. This is a kwarg only
                  to preserve the same positional arguments in the function signature, to ease
                  validation.

    Returns:
        a Filter basic block that performs the requested comparison
    """
    allowed_operators = {u'=', u'!=', u'>', u'<', u'>=', u'<='}
    if operator not in allowed_operators:
        raise AssertionError(u'Expected a valid comparison operator ({}), but got '
                             u'{}'.format(allowed_operators, operator))

    field_name = filter_operation_info.field_name
    # The argument's type is the filtered field's type, minus any non-null wrapper.
    value_type = strip_non_null_from_type(filter_operation_info.field_type)
    value_expression, missing_check = _represent_argument(
        location, context, parameters[0], value_type)

    predicate = expressions.BinaryComposition(
        operator, expressions.LocalField(field_name), value_expression)
    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)
Return a Filter basic block that checks the degree of the edge to the given vertex field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the edge degree against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check
def _process_has_edge_degree_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks the degree of the edge to the given vertex field.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, containing the value to check the edge degree against;
                    if the parameter is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the check
    """
    if isinstance(filter_operation_info.field_ast, InlineFragment):
        raise AssertionError(u'Received InlineFragment AST node in "has_edge_degree" filter '
                             u'handler. This should have been caught earlier: '
                             u'{}'.format(filter_operation_info.field_ast))

    filtered_field_name = filter_operation_info.field_name
    if filtered_field_name is None or not is_vertex_field_name(filtered_field_name):
        raise AssertionError(u'Invalid value for "filtered_field_name" in "has_edge_degree" '
                             u'filter: {}'.format(filtered_field_name))
    if not is_vertex_field_type(filter_operation_info.field_type):
        raise AssertionError(u'Invalid value for "filter_operation_info.field_type" in '
                             u'"has_edge_degree" filter: {}'.format(filter_operation_info))

    argument = parameters[0]
    if not is_variable_argument(argument):
        # Fixed: the two message fragments previously joined without a separating space,
        # producing "...not supported.Argument name: ...".
        raise GraphQLCompilationError(u'The "has_edge_degree" filter only supports runtime '
                                      u'variable arguments. Tagged values are not supported. '
                                      u'Argument name: {}'.format(argument))

    argument_inferred_type = GraphQLInt
    argument_expression, non_existence_expression = _represent_argument(
        location, context, argument, argument_inferred_type)
    if non_existence_expression is not None:
        raise AssertionError(u'Since we do not support tagged values, non_existence_expression '
                             u'should have been None. However, it was: '
                             u'{}'.format(non_existence_expression))

    # If no edges to the vertex field exist, the edges' field in the database may be "null".
    # We also don't know ahead of time whether the supplied argument is zero or not.
    # We have to accommodate these facts in our generated comparison code.
    # Case 1, zero edge degree: ({argument} == 0) && (edge_field == null)
    argument_is_zero = expressions.BinaryComposition(
        u'=', argument_expression, expressions.ZeroLiteral)
    edge_field_is_null = expressions.BinaryComposition(
        u'=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
    edge_degree_is_zero = expressions.BinaryComposition(
        u'&&', argument_is_zero, edge_field_is_null)

    # Case 2, non-zero edge degree equal to the argument:
    # (edge_field != null) && (edge_field.size() == {argument})
    edge_field_is_not_null = expressions.BinaryComposition(
        u'!=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
    edge_degree = expressions.UnaryTransformation(
        u'size', expressions.LocalField(filtered_field_name))
    edge_degree_matches_argument = expressions.BinaryComposition(
        u'=', edge_degree, argument_expression)
    edge_degree_is_non_zero = expressions.BinaryComposition(
        u'&&', edge_field_is_not_null, edge_degree_matches_argument)

    # We combine the two cases with a logical-or to handle both situations:
    filter_predicate = expressions.BinaryComposition(
        u'||', edge_degree_is_zero, edge_degree_is_non_zero)
    return blocks.Filter(filter_predicate)
Return a Filter basic block that checks for a match against an Entity's name or alias. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the name or alias against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check against the name or alias
def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks for a match against an Entity's name or alias.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, containing the value to check the name or alias against;
                    if the parameter is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the check against the name or alias
    """
    entity_type = filter_operation_info.field_type
    if isinstance(entity_type, GraphQLUnionType):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type '
                                      u'{}'.format(entity_type))

    # Both a "name" and an "alias" field must exist on the filtered type.
    fields_by_name = entity_type.fields
    name_field = fields_by_name.get('name', None)
    alias_field = fields_by_name.get('alias', None)
    if not name_field or not alias_field:
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a '
                                      u'"name" or "alias" field.'.format(entity_type))

    name_type = strip_non_null_from_type(name_field.type)
    alias_type = strip_non_null_from_type(alias_field.type)
    if not isinstance(name_type, GraphQLScalarType):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" '
                                      u'field is not a scalar.'.format(entity_type))
    if not isinstance(alias_type, GraphQLList):
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its '
                                      u'"alias" field is not a list.'.format(entity_type))

    alias_element_type = strip_non_null_from_type(alias_type.of_type)
    if alias_element_type != name_type:
        raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the '
                                      u'"name" field and the inner type of the "alias" field '
                                      u'do not match: {} vs {}'.format(entity_type, name_type,
                                                                       alias_element_type))

    argument_expression, missing_check = _represent_argument(
        location, context, parameters[0], name_type)

    name_matches = expressions.BinaryComposition(
        u'=', expressions.LocalField('name'), argument_expression)
    alias_matches = expressions.BinaryComposition(
        u'contains', expressions.LocalField('alias'), argument_expression)
    predicate = expressions.BinaryComposition(u'||', name_matches, alias_matches)

    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)
Return a Filter basic block that checks that a field is between two values, inclusive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 2 elements, specifying the time range in which the data must lie; if either of the elements is optional and missing, their side of the check is assumed to be True Returns: a Filter basic block that performs the range check
def _process_between_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks that a field is between two values, inclusive.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 2 elements, specifying the inclusive lower and upper bounds of the
                    range in which the data must lie; if either of the elements is optional
                    and missing, their side of the check is assumed to be True

    Returns:
        a Filter basic block that performs the range check
    """
    field_name = filter_operation_info.field_name
    bound_type = strip_non_null_from_type(filter_operation_info.field_type)

    lower_expression, lower_missing = _represent_argument(
        location, context, parameters[0], bound_type)
    upper_expression, upper_missing = _represent_argument(
        location, context, parameters[1], bound_type)

    lower_check = expressions.BinaryComposition(
        u'>=', expressions.LocalField(field_name), lower_expression)
    if lower_missing is not None:
        # The argument is optional, and if it doesn't exist, this side of the check should pass.
        lower_check = expressions.BinaryComposition(u'||', lower_missing, lower_check)

    upper_check = expressions.BinaryComposition(
        u'<=', expressions.LocalField(field_name), upper_expression)
    if upper_missing is not None:
        # The argument is optional, and if it doesn't exist, this side of the check should pass.
        upper_check = expressions.BinaryComposition(u'||', upper_missing, upper_check)

    return blocks.Filter(
        expressions.BinaryComposition(u'&&', lower_check, upper_check))
Return a Filter basic block that checks for a value's existence in a collection. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the collection existence check
def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks for a value's existence in a collection.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the collection in which the value must exist;
                    if the collection is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the collection existence check
    """
    field_name = filter_operation_info.field_name
    # The supplied argument must be a list whose elements match the filtered field's type.
    element_type = strip_non_null_from_type(filter_operation_info.field_type)
    collection_expression, missing_check = _represent_argument(
        location, context, parameters[0], GraphQLList(element_type))

    predicate = expressions.BinaryComposition(
        u'contains', collection_expression, expressions.LocalField(field_name))
    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)
Return a Filter basic block that checks if the directive arg is a substring of the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the substring to check for; if the substring argument is optional and missing, the check will return True Returns: a Filter basic block that performs the substring check
def _process_has_substring_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks if the directive arg is a substring of the field.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the substring to check for; if the substring
                    argument is optional and missing, the check will return True

    Returns:
        a Filter basic block that performs the substring check
    """
    field_type = filter_operation_info.field_type
    if not strip_non_null_from_type(field_type).is_same_type(GraphQLString):
        raise GraphQLCompilationError(u'Cannot apply "has_substring" to non-string '
                                      u'type {}'.format(field_type))

    substring_expression, missing_check = _represent_argument(
        location, context, parameters[0], GraphQLString)

    predicate = expressions.BinaryComposition(
        u'has_substring',
        expressions.LocalField(filter_operation_info.field_name),
        substring_expression)
    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)
Return a Filter basic block that checks if the directive arg is contained in the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the value whose presence in the list field is checked; if the value argument is optional and missing, the check will return True Returns: a Filter basic block that performs the contains check
def _process_contains_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks if the directive arg is contained in the field.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the value whose presence in the list field is
                    checked; if the value argument is optional and missing, the check will
                    return True

    Returns:
        a Filter basic block that performs the contains check
    """
    field_type = filter_operation_info.field_type
    list_type = strip_non_null_from_type(field_type)
    if not isinstance(list_type, GraphQLList):
        raise GraphQLCompilationError(u'Cannot apply "contains" to non-list '
                                      u'type {}'.format(field_type))

    # The argument's type is the list's element type, minus any non-null wrapper.
    element_type = strip_non_null_from_type(list_type.of_type)
    element_expression, missing_check = _represent_argument(
        location, context, parameters[0], element_type)

    predicate = expressions.BinaryComposition(
        u'contains',
        expressions.LocalField(filter_operation_info.field_name),
        element_expression)
    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)
Return a Filter basic block that checks if the directive arg and the field intersect. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection to intersect with the list field; if the collection argument is optional and missing, the check will return True Returns: a Filter basic block that performs the intersects check
def _process_intersects_filter_directive(filter_operation_info, location, context, parameters):
    """Return a Filter basic block that checks if the directive arg and the field intersect.

    Args:
        filter_operation_info: FilterOperationInfo object, containing the directive and field info
                               of the field where the filter is to be applied.
        location: Location where this filter is used.
        context: dict, various per-compilation data (e.g. declared tags, whether the current block
                 is optional, etc.). May be mutated in-place in this function!
        parameters: list of 1 element, specifying the collection to intersect with the list
                    field; if the collection argument is optional and missing, the check will
                    return True

    Returns:
        a Filter basic block that performs the intersects check
    """
    field_type = filter_operation_info.field_type
    list_type = strip_non_null_from_type(field_type)
    if not isinstance(list_type, GraphQLList):
        raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list '
                                      u'type {}'.format(field_type))

    # The argument must have the same list type as the filtered field.
    collection_expression, missing_check = _represent_argument(
        location, context, parameters[0], list_type)

    predicate = expressions.BinaryComposition(
        u'intersects',
        expressions.LocalField(filter_operation_info.field_name),
        collection_expression)
    if missing_check is not None:
        # The argument comes from an optional block and might not exist,
        # in which case the filter expression should evaluate to True.
        predicate = expressions.BinaryComposition(u'||', missing_check, predicate)

    return blocks.Filter(predicate)