repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/gremlin_formatting.py | insert_arguments_into_gremlin_query | def insert_arguments_into_gremlin_query(compilation_result, arguments):
"""Insert the arguments into the compiled Gremlin query to form a complete query.
The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output.
Double-quoted strings allow inline interpolation with the $ symbol, see here for details:
http://www.groovy-lang.org/syntax.html#all-strings
If the compiler needs to emit a literal '$' character as part of the Gremlin query,
it must be doubled ('$$') to avoid being interpreted as a query parameter.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a Gremlin query with inserted argument data
"""
if compilation_result.language != GREMLIN_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
argument_types = compilation_result.input_metadata
# The arguments are assumed to have already been validated against the query.
sanitized_arguments = {
key: _safe_gremlin_argument(argument_types[key], value)
for key, value in six.iteritems(arguments)
}
return Template(base_query).substitute(sanitized_arguments) | python | def insert_arguments_into_gremlin_query(compilation_result, arguments):
"""Insert the arguments into the compiled Gremlin query to form a complete query.
The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output.
Double-quoted strings allow inline interpolation with the $ symbol, see here for details:
http://www.groovy-lang.org/syntax.html#all-strings
If the compiler needs to emit a literal '$' character as part of the Gremlin query,
it must be doubled ('$$') to avoid being interpreted as a query parameter.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a Gremlin query with inserted argument data
"""
if compilation_result.language != GREMLIN_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
argument_types = compilation_result.input_metadata
# The arguments are assumed to have already been validated against the query.
sanitized_arguments = {
key: _safe_gremlin_argument(argument_types[key], value)
for key, value in six.iteritems(arguments)
}
return Template(base_query).substitute(sanitized_arguments) | [
"def",
"insert_arguments_into_gremlin_query",
"(",
"compilation_result",
",",
"arguments",
")",
":",
"if",
"compilation_result",
".",
"language",
"!=",
"GREMLIN_LANGUAGE",
":",
"raise",
"AssertionError",
"(",
"u'Unexpected query output language: {}'",
".",
"format",
"(",
... | Insert the arguments into the compiled Gremlin query to form a complete query.
The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output.
Double-quoted strings allow inline interpolation with the $ symbol, see here for details:
http://www.groovy-lang.org/syntax.html#all-strings
If the compiler needs to emit a literal '$' character as part of the Gremlin query,
it must be doubled ('$$') to avoid being interpreted as a query parameter.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a Gremlin query with inserted argument data | [
"Insert",
"the",
"arguments",
"into",
"the",
"compiled",
"Gremlin",
"query",
"to",
"form",
"a",
"complete",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L138-L167 | train | 227,800 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _get_vertex_location_name | def _get_vertex_location_name(location):
"""Get the location name from a location that is expected to point to a vertex."""
mark_name, field_name = location.get_location_name()
if field_name is not None:
raise AssertionError(u'Location unexpectedly pointed to a field: {}'.format(location))
return mark_name | python | def _get_vertex_location_name(location):
"""Get the location name from a location that is expected to point to a vertex."""
mark_name, field_name = location.get_location_name()
if field_name is not None:
raise AssertionError(u'Location unexpectedly pointed to a field: {}'.format(location))
return mark_name | [
"def",
"_get_vertex_location_name",
"(",
"location",
")",
":",
"mark_name",
",",
"field_name",
"=",
"location",
".",
"get_location_name",
"(",
")",
"if",
"field_name",
"is",
"not",
"None",
":",
"raise",
"AssertionError",
"(",
"u'Location unexpectedly pointed to a fiel... | Get the location name from a location that is expected to point to a vertex. | [
"Get",
"the",
"location",
"name",
"from",
"a",
"location",
"that",
"is",
"expected",
"to",
"point",
"to",
"a",
"vertex",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L12-L18 | train | 227,801 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _first_step_to_match | def _first_step_to_match(match_step):
"""Transform the very first MATCH step into a MATCH query string."""
parts = []
if match_step.root_block is not None:
if not isinstance(match_step.root_block, QueryRoot):
raise AssertionError(u'Expected None or QueryRoot root block, received: '
u'{} {}'.format(match_step.root_block, match_step))
match_step.root_block.validate()
start_class = get_only_element_from_collection(match_step.root_block.start_class)
parts.append(u'class: %s' % (start_class,))
# MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'.
if match_step.coerce_type_block is not None:
raise AssertionError(u'Invalid MATCH step: {}'.format(match_step))
if match_step.where_block:
match_step.where_block.validate()
parts.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),))
if match_step.as_block is None:
raise AssertionError(u'Found a MATCH step without a corresponding Location. '
u'This should never happen: {}'.format(match_step))
else:
match_step.as_block.validate()
parts.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),))
return u'{{ %s }}' % (u', '.join(parts),) | python | def _first_step_to_match(match_step):
"""Transform the very first MATCH step into a MATCH query string."""
parts = []
if match_step.root_block is not None:
if not isinstance(match_step.root_block, QueryRoot):
raise AssertionError(u'Expected None or QueryRoot root block, received: '
u'{} {}'.format(match_step.root_block, match_step))
match_step.root_block.validate()
start_class = get_only_element_from_collection(match_step.root_block.start_class)
parts.append(u'class: %s' % (start_class,))
# MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'.
if match_step.coerce_type_block is not None:
raise AssertionError(u'Invalid MATCH step: {}'.format(match_step))
if match_step.where_block:
match_step.where_block.validate()
parts.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),))
if match_step.as_block is None:
raise AssertionError(u'Found a MATCH step without a corresponding Location. '
u'This should never happen: {}'.format(match_step))
else:
match_step.as_block.validate()
parts.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),))
return u'{{ %s }}' % (u', '.join(parts),) | [
"def",
"_first_step_to_match",
"(",
"match_step",
")",
":",
"parts",
"=",
"[",
"]",
"if",
"match_step",
".",
"root_block",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"match_step",
".",
"root_block",
",",
"QueryRoot",
")",
":",
"raise",
"Asse... | Transform the very first MATCH step into a MATCH query string. | [
"Transform",
"the",
"very",
"first",
"MATCH",
"step",
"into",
"a",
"MATCH",
"query",
"string",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L21-L50 | train | 227,802 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _represent_match_traversal | def _represent_match_traversal(match_traversal):
"""Emit MATCH query code for an entire MATCH traversal sequence."""
output = []
output.append(_first_step_to_match(match_traversal[0]))
for step in match_traversal[1:]:
output.append(_subsequent_step_to_match(step))
return u''.join(output) | python | def _represent_match_traversal(match_traversal):
"""Emit MATCH query code for an entire MATCH traversal sequence."""
output = []
output.append(_first_step_to_match(match_traversal[0]))
for step in match_traversal[1:]:
output.append(_subsequent_step_to_match(step))
return u''.join(output) | [
"def",
"_represent_match_traversal",
"(",
"match_traversal",
")",
":",
"output",
"=",
"[",
"]",
"output",
".",
"append",
"(",
"_first_step_to_match",
"(",
"match_traversal",
"[",
"0",
"]",
")",
")",
"for",
"step",
"in",
"match_traversal",
"[",
"1",
":",
"]",... | Emit MATCH query code for an entire MATCH traversal sequence. | [
"Emit",
"MATCH",
"query",
"code",
"for",
"an",
"entire",
"MATCH",
"traversal",
"sequence",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L94-L102 | train | 227,803 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _represent_fold | def _represent_fold(fold_location, fold_ir_blocks):
"""Emit a LET clause corresponding to the IR blocks for a @fold scope."""
start_let_template = u'$%(mark_name)s = %(base_location)s'
traverse_edge_template = u'.%(direction)s("%(edge_name)s")'
base_template = start_let_template + traverse_edge_template
edge_direction, edge_name = fold_location.get_first_folded_edge()
mark_name, _ = fold_location.get_location_name()
base_location_name, _ = fold_location.base_location.get_location_name()
validate_safe_string(mark_name)
validate_safe_string(base_location_name)
validate_safe_string(edge_direction)
validate_safe_string(edge_name)
template_data = {
'mark_name': mark_name,
'base_location': base_location_name,
'direction': edge_direction,
'edge_name': edge_name,
}
final_string = base_template % template_data
for block in fold_ir_blocks:
if isinstance(block, Filter):
final_string += u'[' + block.predicate.to_match() + u']'
elif isinstance(block, Traverse):
template_data = {
'direction': block.direction,
'edge_name': block.edge_name,
}
final_string += traverse_edge_template % template_data
elif isinstance(block, MarkLocation):
# MarkLocation blocks inside a fold do not result in any MATCH output.
pass
else:
raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: '
u'{} {} {}'.format(type(block), block, fold_ir_blocks))
# Workaround for OrientDB's inconsistent return type when filtering a list.
# https://github.com/orientechnologies/orientdb/issues/7811
final_string += '.asList()'
return final_string | python | def _represent_fold(fold_location, fold_ir_blocks):
"""Emit a LET clause corresponding to the IR blocks for a @fold scope."""
start_let_template = u'$%(mark_name)s = %(base_location)s'
traverse_edge_template = u'.%(direction)s("%(edge_name)s")'
base_template = start_let_template + traverse_edge_template
edge_direction, edge_name = fold_location.get_first_folded_edge()
mark_name, _ = fold_location.get_location_name()
base_location_name, _ = fold_location.base_location.get_location_name()
validate_safe_string(mark_name)
validate_safe_string(base_location_name)
validate_safe_string(edge_direction)
validate_safe_string(edge_name)
template_data = {
'mark_name': mark_name,
'base_location': base_location_name,
'direction': edge_direction,
'edge_name': edge_name,
}
final_string = base_template % template_data
for block in fold_ir_blocks:
if isinstance(block, Filter):
final_string += u'[' + block.predicate.to_match() + u']'
elif isinstance(block, Traverse):
template_data = {
'direction': block.direction,
'edge_name': block.edge_name,
}
final_string += traverse_edge_template % template_data
elif isinstance(block, MarkLocation):
# MarkLocation blocks inside a fold do not result in any MATCH output.
pass
else:
raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: '
u'{} {} {}'.format(type(block), block, fold_ir_blocks))
# Workaround for OrientDB's inconsistent return type when filtering a list.
# https://github.com/orientechnologies/orientdb/issues/7811
final_string += '.asList()'
return final_string | [
"def",
"_represent_fold",
"(",
"fold_location",
",",
"fold_ir_blocks",
")",
":",
"start_let_template",
"=",
"u'$%(mark_name)s = %(base_location)s'",
"traverse_edge_template",
"=",
"u'.%(direction)s(\"%(edge_name)s\")'",
"base_template",
"=",
"start_let_template",
"+",
"traverse_e... | Emit a LET clause corresponding to the IR blocks for a @fold scope. | [
"Emit",
"a",
"LET",
"clause",
"corresponding",
"to",
"the",
"IR",
"blocks",
"for",
"a"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L105-L147 | train | 227,804 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _construct_output_to_match | def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (
u'%s AS `%s`' % (output_block.fields[key].to_match(), key)
for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order.
)
return u'SELECT %s FROM' % (u', '.join(selections),) | python | def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (
u'%s AS `%s`' % (output_block.fields[key].to_match(), key)
for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order.
)
return u'SELECT %s FROM' % (u', '.join(selections),) | [
"def",
"_construct_output_to_match",
"(",
"output_block",
")",
":",
"output_block",
".",
"validate",
"(",
")",
"selections",
"=",
"(",
"u'%s AS `%s`'",
"%",
"(",
"output_block",
".",
"fields",
"[",
"key",
"]",
".",
"to_match",
"(",
")",
",",
"key",
")",
"f... | Transform a ConstructResult block into a MATCH query string. | [
"Transform",
"a",
"ConstructResult",
"block",
"into",
"a",
"MATCH",
"query",
"string",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L150-L159 | train | 227,805 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | _construct_where_to_match | def _construct_where_to_match(where_block):
"""Transform a Filter block into a MATCH query string."""
if where_block.predicate == TrueLiteral:
raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}'
.format(where_block))
return u'WHERE ' + where_block.predicate.to_match() | python | def _construct_where_to_match(where_block):
"""Transform a Filter block into a MATCH query string."""
if where_block.predicate == TrueLiteral:
raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}'
.format(where_block))
return u'WHERE ' + where_block.predicate.to_match() | [
"def",
"_construct_where_to_match",
"(",
"where_block",
")",
":",
"if",
"where_block",
".",
"predicate",
"==",
"TrueLiteral",
":",
"raise",
"AssertionError",
"(",
"u'Received WHERE block with TrueLiteral predicate: {}'",
".",
"format",
"(",
"where_block",
")",
")",
"ret... | Transform a Filter block into a MATCH query string. | [
"Transform",
"a",
"Filter",
"block",
"into",
"a",
"MATCH",
"query",
"string",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L162-L167 | train | 227,806 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | emit_code_from_multiple_match_queries | def emit_code_from_multiple_match_queries(match_queries):
"""Return a MATCH query string from a list of MatchQuery namedtuples."""
optional_variable_base_name = '$optional__'
union_variable_name = '$result'
query_data = deque([u'SELECT EXPAND(', union_variable_name, u')', u' LET '])
optional_variables = []
sub_queries = [emit_code_from_single_match_query(match_query)
for match_query in match_queries]
for (i, sub_query) in enumerate(sub_queries):
variable_name = optional_variable_base_name + str(i)
variable_assignment = variable_name + u' = ('
sub_query_end = u'),'
query_data.append(variable_assignment)
query_data.append(sub_query)
query_data.append(sub_query_end)
optional_variables.append(variable_name)
query_data.append(union_variable_name)
query_data.append(u' = UNIONALL(')
query_data.append(u', '.join(optional_variables))
query_data.append(u')')
return u' '.join(query_data) | python | def emit_code_from_multiple_match_queries(match_queries):
"""Return a MATCH query string from a list of MatchQuery namedtuples."""
optional_variable_base_name = '$optional__'
union_variable_name = '$result'
query_data = deque([u'SELECT EXPAND(', union_variable_name, u')', u' LET '])
optional_variables = []
sub_queries = [emit_code_from_single_match_query(match_query)
for match_query in match_queries]
for (i, sub_query) in enumerate(sub_queries):
variable_name = optional_variable_base_name + str(i)
variable_assignment = variable_name + u' = ('
sub_query_end = u'),'
query_data.append(variable_assignment)
query_data.append(sub_query)
query_data.append(sub_query_end)
optional_variables.append(variable_name)
query_data.append(union_variable_name)
query_data.append(u' = UNIONALL(')
query_data.append(u', '.join(optional_variables))
query_data.append(u')')
return u' '.join(query_data) | [
"def",
"emit_code_from_multiple_match_queries",
"(",
"match_queries",
")",
":",
"optional_variable_base_name",
"=",
"'$optional__'",
"union_variable_name",
"=",
"'$result'",
"query_data",
"=",
"deque",
"(",
"[",
"u'SELECT EXPAND('",
",",
"union_variable_name",
",",
"u')'",
... | Return a MATCH query string from a list of MatchQuery namedtuples. | [
"Return",
"a",
"MATCH",
"query",
"string",
"from",
"a",
"list",
"of",
"MatchQuery",
"namedtuples",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L218-L241 | train | 227,807 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_match.py | emit_code_from_ir | def emit_code_from_ir(compound_match_query, compiler_metadata):
"""Return a MATCH query string from a CompoundMatchQuery."""
# If the compound match query contains only one match query,
# just call `emit_code_from_single_match_query`
# If there are multiple match queries, construct the query string for each
# individual query and combine them as follows.
#
# SELECT EXPAND($result)
# LET
# $optional__0 = (
# <query_string_0>
# ),
# $optional__1 = (
# <query_string_1>
# ),
# $optional__2 = (
# <query_string_2>
# ),
#
# . . .
#
# $result = UNIONALL($optional__0, $optional__1, . . . )
match_queries = compound_match_query.match_queries
if len(match_queries) == 1:
query_string = emit_code_from_single_match_query(match_queries[0])
elif len(match_queries) > 1:
query_string = emit_code_from_multiple_match_queries(match_queries)
else:
raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQueries: '
u'{}'.format(match_queries))
return query_string | python | def emit_code_from_ir(compound_match_query, compiler_metadata):
"""Return a MATCH query string from a CompoundMatchQuery."""
# If the compound match query contains only one match query,
# just call `emit_code_from_single_match_query`
# If there are multiple match queries, construct the query string for each
# individual query and combine them as follows.
#
# SELECT EXPAND($result)
# LET
# $optional__0 = (
# <query_string_0>
# ),
# $optional__1 = (
# <query_string_1>
# ),
# $optional__2 = (
# <query_string_2>
# ),
#
# . . .
#
# $result = UNIONALL($optional__0, $optional__1, . . . )
match_queries = compound_match_query.match_queries
if len(match_queries) == 1:
query_string = emit_code_from_single_match_query(match_queries[0])
elif len(match_queries) > 1:
query_string = emit_code_from_multiple_match_queries(match_queries)
else:
raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQueries: '
u'{}'.format(match_queries))
return query_string | [
"def",
"emit_code_from_ir",
"(",
"compound_match_query",
",",
"compiler_metadata",
")",
":",
"# If the compound match query contains only one match query,",
"# just call `emit_code_from_single_match_query`",
"# If there are multiple match queries, construct the query string for each",
"# indiv... | Return a MATCH query string from a CompoundMatchQuery. | [
"Return",
"a",
"MATCH",
"query",
"string",
"from",
"a",
"CompoundMatchQuery",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L244-L276 | train | 227,808 |
kensho-technologies/graphql-compiler | graphql_compiler/schema.py | _serialize_date | def _serialize_date(value):
"""Serialize a Date object to its proper ISO-8601 representation."""
if not isinstance(value, date):
raise ValueError(u'The received object was not a date: '
u'{} {}'.format(type(value), value))
return value.isoformat() | python | def _serialize_date(value):
"""Serialize a Date object to its proper ISO-8601 representation."""
if not isinstance(value, date):
raise ValueError(u'The received object was not a date: '
u'{} {}'.format(type(value), value))
return value.isoformat() | [
"def",
"_serialize_date",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"date",
")",
":",
"raise",
"ValueError",
"(",
"u'The received object was not a date: '",
"u'{} {}'",
".",
"format",
"(",
"type",
"(",
"value",
")",
",",
"value",
... | Serialize a Date object to its proper ISO-8601 representation. | [
"Serialize",
"a",
"Date",
"object",
"to",
"its",
"proper",
"ISO",
"-",
"8601",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L201-L206 | train | 227,809 |
kensho-technologies/graphql-compiler | graphql_compiler/schema.py | _serialize_datetime | def _serialize_datetime(value):
"""Serialize a DateTime object to its proper ISO-8601 representation."""
if not isinstance(value, (datetime, arrow.Arrow)):
raise ValueError(u'The received object was not a datetime: '
u'{} {}'.format(type(value), value))
return value.isoformat() | python | def _serialize_datetime(value):
"""Serialize a DateTime object to its proper ISO-8601 representation."""
if not isinstance(value, (datetime, arrow.Arrow)):
raise ValueError(u'The received object was not a datetime: '
u'{} {}'.format(type(value), value))
return value.isoformat() | [
"def",
"_serialize_datetime",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"(",
"datetime",
",",
"arrow",
".",
"Arrow",
")",
")",
":",
"raise",
"ValueError",
"(",
"u'The received object was not a datetime: '",
"u'{} {}'",
".",
"format",
... | Serialize a DateTime object to its proper ISO-8601 representation. | [
"Serialize",
"a",
"DateTime",
"object",
"to",
"its",
"proper",
"ISO",
"-",
"8601",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L214-L219 | train | 227,810 |
kensho-technologies/graphql-compiler | graphql_compiler/schema.py | _parse_datetime_value | def _parse_datetime_value(value):
"""Deserialize a DateTime object from its proper ISO-8601 representation."""
if value.endswith('Z'):
# Arrow doesn't support the "Z" literal to denote UTC time.
# Strip the "Z" and add an explicit time zone instead.
value = value[:-1] + '+00:00'
return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime | python | def _parse_datetime_value(value):
"""Deserialize a DateTime object from its proper ISO-8601 representation."""
if value.endswith('Z'):
# Arrow doesn't support the "Z" literal to denote UTC time.
# Strip the "Z" and add an explicit time zone instead.
value = value[:-1] + '+00:00'
return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime | [
"def",
"_parse_datetime_value",
"(",
"value",
")",
":",
"if",
"value",
".",
"endswith",
"(",
"'Z'",
")",
":",
"# Arrow doesn't support the \"Z\" literal to denote UTC time.",
"# Strip the \"Z\" and add an explicit time zone instead.",
"value",
"=",
"value",
"[",
":",
"-",
... | Deserialize a DateTime object from its proper ISO-8601 representation. | [
"Deserialize",
"a",
"DateTime",
"object",
"from",
"its",
"proper",
"ISO",
"-",
"8601",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L222-L229 | train | 227,811 |
kensho-technologies/graphql-compiler | graphql_compiler/schema.py | insert_meta_fields_into_existing_schema | def insert_meta_fields_into_existing_schema(graphql_schema):
"""Add compiler-specific meta-fields into all interfaces and types of the specified schema.
It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject
the meta-fields during the initial process of building the schema, as that approach
is more robust. This function does its best to not mutate unexpected definitions, but
may break unexpectedly as the GraphQL standard is extended and the underlying
GraphQL library is updated.
Use this function at your own risk. Don't say you haven't been warned.
Properties added include:
- "_x_count", which allows filtering folds based on the number of elements they capture.
Args:
graphql_schema: GraphQLSchema object describing the schema that is going to be used with
the compiler. N.B.: MUTATED IN-PLACE in this method.
"""
root_type_name = graphql_schema.get_query_type().name
for type_name, type_obj in six.iteritems(graphql_schema.get_type_map()):
if type_name.startswith('__') or type_name == root_type_name:
# Ignore the types that are built into GraphQL itself, as well as the root query type.
continue
if not isinstance(type_obj, (GraphQLObjectType, GraphQLInterfaceType)):
# Ignore definitions that are not interfaces or types.
continue
for meta_field_name, meta_field in six.iteritems(EXTENDED_META_FIELD_DEFINITIONS):
if meta_field_name in type_obj.fields:
raise AssertionError(u'Unexpectedly encountered an existing field named {} while '
u'attempting to add a meta-field of the same name. Make sure '
u'you are not attempting to add meta-fields twice.'
.format(meta_field_name))
type_obj.fields[meta_field_name] = meta_field | python | def insert_meta_fields_into_existing_schema(graphql_schema):
"""Add compiler-specific meta-fields into all interfaces and types of the specified schema.
It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject
the meta-fields during the initial process of building the schema, as that approach
is more robust. This function does its best to not mutate unexpected definitions, but
may break unexpectedly as the GraphQL standard is extended and the underlying
GraphQL library is updated.
Use this function at your own risk. Don't say you haven't been warned.
Properties added include:
- "_x_count", which allows filtering folds based on the number of elements they capture.
Args:
graphql_schema: GraphQLSchema object describing the schema that is going to be used with
the compiler. N.B.: MUTATED IN-PLACE in this method.
"""
root_type_name = graphql_schema.get_query_type().name
for type_name, type_obj in six.iteritems(graphql_schema.get_type_map()):
if type_name.startswith('__') or type_name == root_type_name:
# Ignore the types that are built into GraphQL itself, as well as the root query type.
continue
if not isinstance(type_obj, (GraphQLObjectType, GraphQLInterfaceType)):
# Ignore definitions that are not interfaces or types.
continue
for meta_field_name, meta_field in six.iteritems(EXTENDED_META_FIELD_DEFINITIONS):
if meta_field_name in type_obj.fields:
raise AssertionError(u'Unexpectedly encountered an existing field named {} while '
u'attempting to add a meta-field of the same name. Make sure '
u'you are not attempting to add meta-fields twice.'
.format(meta_field_name))
type_obj.fields[meta_field_name] = meta_field | [
"def",
"insert_meta_fields_into_existing_schema",
"(",
"graphql_schema",
")",
":",
"root_type_name",
"=",
"graphql_schema",
".",
"get_query_type",
"(",
")",
".",
"name",
"for",
"type_name",
",",
"type_obj",
"in",
"six",
".",
"iteritems",
"(",
"graphql_schema",
".",
... | Add compiler-specific meta-fields into all interfaces and types of the specified schema.
It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject
the meta-fields during the initial process of building the schema, as that approach
is more robust. This function does its best to not mutate unexpected definitions, but
may break unexpectedly as the GraphQL standard is extended and the underlying
GraphQL library is updated.
Use this function at your own risk. Don't say you haven't been warned.
Properties added include:
- "_x_count", which allows filtering folds based on the number of elements they capture.
Args:
graphql_schema: GraphQLSchema object describing the schema that is going to be used with
the compiler. N.B.: MUTATED IN-PLACE in this method. | [
"Add",
"compiler",
"-",
"specific",
"meta",
"-",
"fields",
"into",
"all",
"interfaces",
"and",
"types",
"of",
"the",
"specified",
"schema",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L302-L338 | train | 227,812 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/context_helpers.py | validate_context_for_visiting_vertex_field | def validate_context_for_visiting_vertex_field(parent_location, vertex_field_name, context):
"""Ensure that the current context allows for visiting a vertex field."""
if is_in_fold_innermost_scope(context):
raise GraphQLCompilationError(
u'Traversing inside a @fold block after filtering on {} or outputting fields '
u'is not supported! Parent location: {}, vertex field name: {}'
.format(COUNT_META_FIELD_NAME, parent_location, vertex_field_name)) | python | def validate_context_for_visiting_vertex_field(parent_location, vertex_field_name, context):
"""Ensure that the current context allows for visiting a vertex field."""
if is_in_fold_innermost_scope(context):
raise GraphQLCompilationError(
u'Traversing inside a @fold block after filtering on {} or outputting fields '
u'is not supported! Parent location: {}, vertex field name: {}'
.format(COUNT_META_FIELD_NAME, parent_location, vertex_field_name)) | [
"def",
"validate_context_for_visiting_vertex_field",
"(",
"parent_location",
",",
"vertex_field_name",
",",
"context",
")",
":",
"if",
"is_in_fold_innermost_scope",
"(",
"context",
")",
":",
"raise",
"GraphQLCompilationError",
"(",
"u'Traversing inside a @fold block after filte... | Ensure that the current context allows for visiting a vertex field. | [
"Ensure",
"that",
"the",
"current",
"context",
"allows",
"for",
"visiting",
"a",
"vertex",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/context_helpers.py#L95-L101 | train | 227,813 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/graphql_formatting.py | pretty_print_graphql | def pretty_print_graphql(query, use_four_spaces=True):
"""Take a GraphQL query, pretty print it, and return it."""
# Use our custom visitor, which fixes directive argument order
# to get the canonical representation
output = visit(parse(query), CustomPrintingVisitor())
# Using four spaces for indentation makes it easier to edit in
# Python source files.
if use_four_spaces:
return fix_indentation_depth(output)
return output | python | def pretty_print_graphql(query, use_four_spaces=True):
"""Take a GraphQL query, pretty print it, and return it."""
# Use our custom visitor, which fixes directive argument order
# to get the canonical representation
output = visit(parse(query), CustomPrintingVisitor())
# Using four spaces for indentation makes it easier to edit in
# Python source files.
if use_four_spaces:
return fix_indentation_depth(output)
return output | [
"def",
"pretty_print_graphql",
"(",
"query",
",",
"use_four_spaces",
"=",
"True",
")",
":",
"# Use our custom visitor, which fixes directive argument order",
"# to get the canonical representation",
"output",
"=",
"visit",
"(",
"parse",
"(",
"query",
")",
",",
"CustomPrinti... | Take a GraphQL query, pretty print it, and return it. | [
"Take",
"a",
"GraphQL",
"query",
"pretty",
"print",
"it",
"and",
"return",
"it",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L10-L20 | train | 227,814 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/graphql_formatting.py | fix_indentation_depth | def fix_indentation_depth(query):
"""Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses."""
lines = query.split('\n')
final_lines = []
for line in lines:
consecutive_spaces = 0
for char in line:
if char == ' ':
consecutive_spaces += 1
else:
break
if consecutive_spaces % 2 != 0:
raise AssertionError(u'Indentation was not a multiple of two: '
u'{}'.format(consecutive_spaces))
final_lines.append((' ' * consecutive_spaces) + line[consecutive_spaces:])
return '\n'.join(final_lines) | python | def fix_indentation_depth(query):
"""Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses."""
lines = query.split('\n')
final_lines = []
for line in lines:
consecutive_spaces = 0
for char in line:
if char == ' ':
consecutive_spaces += 1
else:
break
if consecutive_spaces % 2 != 0:
raise AssertionError(u'Indentation was not a multiple of two: '
u'{}'.format(consecutive_spaces))
final_lines.append((' ' * consecutive_spaces) + line[consecutive_spaces:])
return '\n'.join(final_lines) | [
"def",
"fix_indentation_depth",
"(",
"query",
")",
":",
"lines",
"=",
"query",
".",
"split",
"(",
"'\\n'",
")",
"final_lines",
"=",
"[",
"]",
"for",
"line",
"in",
"lines",
":",
"consecutive_spaces",
"=",
"0",
"for",
"char",
"in",
"line",
":",
"if",
"ch... | Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses. | [
"Make",
"indentation",
"use",
"4",
"spaces",
"rather",
"than",
"the",
"2",
"spaces",
"GraphQL",
"normally",
"uses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L67-L86 | train | 227,815 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/graphql_formatting.py | CustomPrintingVisitor.leave_Directive | def leave_Directive(self, node, *args):
"""Call when exiting a directive node in the ast."""
name_to_arg_value = {
# Taking [0] is ok here because the GraphQL parser checks for the
# existence of ':' in directive arguments.
arg.split(':', 1)[0]: arg
for arg in node.arguments
}
ordered_args = node.arguments
directive = DIRECTIVES_BY_NAME.get(node.name)
if directive:
sorted_args = []
encountered_argument_names = set()
# Iterate through all defined arguments in the directive schema.
for defined_arg_name in six.iterkeys(directive.args):
if defined_arg_name in name_to_arg_value:
# The argument was present in the query, print it in the correct order.
encountered_argument_names.add(defined_arg_name)
sorted_args.append(name_to_arg_value[defined_arg_name])
# Get all the arguments that weren't defined in the directive schema.
# They will be printed after all the arguments that were in the schema.
unsorted_args = [
value
for name, value in six.iteritems(name_to_arg_value)
if name not in encountered_argument_names
]
ordered_args = sorted_args + unsorted_args
return '@' + node.name + wrap('(', join(ordered_args, ', '), ')') | python | def leave_Directive(self, node, *args):
"""Call when exiting a directive node in the ast."""
name_to_arg_value = {
# Taking [0] is ok here because the GraphQL parser checks for the
# existence of ':' in directive arguments.
arg.split(':', 1)[0]: arg
for arg in node.arguments
}
ordered_args = node.arguments
directive = DIRECTIVES_BY_NAME.get(node.name)
if directive:
sorted_args = []
encountered_argument_names = set()
# Iterate through all defined arguments in the directive schema.
for defined_arg_name in six.iterkeys(directive.args):
if defined_arg_name in name_to_arg_value:
# The argument was present in the query, print it in the correct order.
encountered_argument_names.add(defined_arg_name)
sorted_args.append(name_to_arg_value[defined_arg_name])
# Get all the arguments that weren't defined in the directive schema.
# They will be printed after all the arguments that were in the schema.
unsorted_args = [
value
for name, value in six.iteritems(name_to_arg_value)
if name not in encountered_argument_names
]
ordered_args = sorted_args + unsorted_args
return '@' + node.name + wrap('(', join(ordered_args, ', '), ')') | [
"def",
"leave_Directive",
"(",
"self",
",",
"node",
",",
"*",
"args",
")",
":",
"name_to_arg_value",
"=",
"{",
"# Taking [0] is ok here because the GraphQL parser checks for the",
"# existence of ':' in directive arguments.",
"arg",
".",
"split",
"(",
"':'",
",",
"1",
"... | Call when exiting a directive node in the ast. | [
"Call",
"when",
"exiting",
"a",
"directive",
"node",
"in",
"the",
"ast",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L32-L64 | train | 227,816 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/__init__.py | lower_ir | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR into an IR form that can be represented in MATCH queries.
Args:
ir_blocks: list of IR blocks to lower into MATCH-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
MatchQuery object containing the IR blocks organized in a MATCH-like structure
"""
sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)
# Construct the mapping of each location to its corresponding GraphQL type.
location_types = {
location: location_info.type
for location, location_info in query_metadata_table.registered_locations
}
# Compute the set of all locations that have associated type coercions.
coerced_locations = {
location
for location, location_info in query_metadata_table.registered_locations
if location_info.coerced_from_type is not None
}
# Extract information for both simple and complex @optional traverses
location_to_optional_results = extract_optional_location_root_info(ir_blocks)
complex_optional_roots, location_to_optional_roots = location_to_optional_results
simple_optional_root_info = extract_simple_optional_location_info(
ir_blocks, complex_optional_roots, location_to_optional_roots)
ir_blocks = remove_end_optionals(ir_blocks)
# Append global operation block(s) to filter out incorrect results
# from simple optional match traverses (using a WHERE statement)
if len(simple_optional_root_info) > 0:
where_filter_predicate = construct_where_filter_predicate(
query_metadata_table, simple_optional_root_info)
ir_blocks.insert(-1, GlobalOperationsStart())
ir_blocks.insert(-1, Filter(where_filter_predicate))
# These lowering / optimization passes work on IR blocks.
ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks)
ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
ir_blocks = lower_has_substring_binary_compositions(ir_blocks)
ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table)
# Here, we lower from raw IR blocks into a MatchQuery object.
# From this point on, the lowering / optimization passes work on the MatchQuery representation.
match_query = convert_to_match_query(ir_blocks)
match_query = lower_comparisons_to_between(match_query)
match_query = lower_backtrack_blocks(match_query, location_types)
match_query = truncate_repeated_single_step_traversals(match_query)
match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query)
# Optimize and lower the IR blocks inside @fold scopes.
new_folds = {
key: merge_consecutive_filter_clauses(
remove_backtrack_blocks_from_fold(
lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks)
)
)
for key, folded_ir_blocks in six.iteritems(match_query.folds)
}
match_query = match_query._replace(folds=new_folds)
compound_match_query = convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots)
compound_match_query = prune_non_existent_outputs(compound_match_query)
compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query)
compound_match_query = lower_context_field_expressions(compound_match_query)
compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries(
compound_match_query)
compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points(
compound_match_query, location_types, coerced_locations)
return compound_match_query | python | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR into an IR form that can be represented in MATCH queries.
Args:
ir_blocks: list of IR blocks to lower into MATCH-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
MatchQuery object containing the IR blocks organized in a MATCH-like structure
"""
sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)
# Construct the mapping of each location to its corresponding GraphQL type.
location_types = {
location: location_info.type
for location, location_info in query_metadata_table.registered_locations
}
# Compute the set of all locations that have associated type coercions.
coerced_locations = {
location
for location, location_info in query_metadata_table.registered_locations
if location_info.coerced_from_type is not None
}
# Extract information for both simple and complex @optional traverses
location_to_optional_results = extract_optional_location_root_info(ir_blocks)
complex_optional_roots, location_to_optional_roots = location_to_optional_results
simple_optional_root_info = extract_simple_optional_location_info(
ir_blocks, complex_optional_roots, location_to_optional_roots)
ir_blocks = remove_end_optionals(ir_blocks)
# Append global operation block(s) to filter out incorrect results
# from simple optional match traverses (using a WHERE statement)
if len(simple_optional_root_info) > 0:
where_filter_predicate = construct_where_filter_predicate(
query_metadata_table, simple_optional_root_info)
ir_blocks.insert(-1, GlobalOperationsStart())
ir_blocks.insert(-1, Filter(where_filter_predicate))
# These lowering / optimization passes work on IR blocks.
ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks)
ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
ir_blocks = lower_has_substring_binary_compositions(ir_blocks)
ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table)
# Here, we lower from raw IR blocks into a MatchQuery object.
# From this point on, the lowering / optimization passes work on the MatchQuery representation.
match_query = convert_to_match_query(ir_blocks)
match_query = lower_comparisons_to_between(match_query)
match_query = lower_backtrack_blocks(match_query, location_types)
match_query = truncate_repeated_single_step_traversals(match_query)
match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query)
# Optimize and lower the IR blocks inside @fold scopes.
new_folds = {
key: merge_consecutive_filter_clauses(
remove_backtrack_blocks_from_fold(
lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks)
)
)
for key, folded_ir_blocks in six.iteritems(match_query.folds)
}
match_query = match_query._replace(folds=new_folds)
compound_match_query = convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots)
compound_match_query = prune_non_existent_outputs(compound_match_query)
compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query)
compound_match_query = lower_context_field_expressions(compound_match_query)
compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries(
compound_match_query)
compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points(
compound_match_query, location_types, coerced_locations)
return compound_match_query | [
"def",
"lower_ir",
"(",
"ir_blocks",
",",
"query_metadata_table",
",",
"type_equivalence_hints",
"=",
"None",
")",
":",
"sanity_check_ir_blocks_from_frontend",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
"# Construct the mapping of each location to its corresponding Graph... | Lower the IR into an IR form that can be represented in MATCH queries.
Args:
ir_blocks: list of IR blocks to lower into MATCH-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
MatchQuery object containing the IR blocks organized in a MATCH-like structure | [
"Lower",
"the",
"IR",
"into",
"an",
"IR",
"form",
"that",
"can",
"be",
"represented",
"in",
"MATCH",
"queries",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/__init__.py#L31-L128 | train | 227,817 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/utils.py | toposort_classes | def toposort_classes(classes):
"""Sort class metadatas so that a superclass is always before the subclass"""
def get_class_topolist(class_name, name_to_class, processed_classes, current_trace):
"""Return a topologically sorted list of this class's dependencies and class itself
Args:
class_name: string, name of the class to process
name_to_class: dict, class_name -> descriptor
processed_classes: set of strings, a set of classes that have already been processed
current_trace: list of strings, list of classes traversed during the recursion
Returns:
list of dicts, list of classes sorted in topological order
"""
# Check if this class has already been handled
if class_name in processed_classes:
return []
if class_name in current_trace:
raise AssertionError(
'Encountered self-reference in dependency chain of {}'.format(class_name))
cls = name_to_class[class_name]
# Collect the dependency classes
# These are bases and classes from linked properties
dependencies = _list_superclasses(cls)
# Recursively process linked edges
properties = cls['properties'] if 'properties' in cls else []
for prop in properties:
if 'linkedClass' in prop:
dependencies.append(prop['linkedClass'])
class_list = []
# Recursively process superclasses
current_trace.add(class_name)
for dependency in dependencies:
class_list.extend(get_class_topolist(
dependency, name_to_class, processed_classes, current_trace))
current_trace.remove(class_name)
# Do the bookkeeping
class_list.append(name_to_class[class_name])
processed_classes.add(class_name)
return class_list
# Map names to classes
class_map = {c['name']: c for c in classes}
seen_classes = set()
toposorted = []
for name in class_map.keys():
toposorted.extend(get_class_topolist(name, class_map, seen_classes, set()))
return toposorted | python | def toposort_classes(classes):
"""Sort class metadatas so that a superclass is always before the subclass"""
def get_class_topolist(class_name, name_to_class, processed_classes, current_trace):
"""Return a topologically sorted list of this class's dependencies and class itself
Args:
class_name: string, name of the class to process
name_to_class: dict, class_name -> descriptor
processed_classes: set of strings, a set of classes that have already been processed
current_trace: list of strings, list of classes traversed during the recursion
Returns:
list of dicts, list of classes sorted in topological order
"""
# Check if this class has already been handled
if class_name in processed_classes:
return []
if class_name in current_trace:
raise AssertionError(
'Encountered self-reference in dependency chain of {}'.format(class_name))
cls = name_to_class[class_name]
# Collect the dependency classes
# These are bases and classes from linked properties
dependencies = _list_superclasses(cls)
# Recursively process linked edges
properties = cls['properties'] if 'properties' in cls else []
for prop in properties:
if 'linkedClass' in prop:
dependencies.append(prop['linkedClass'])
class_list = []
# Recursively process superclasses
current_trace.add(class_name)
for dependency in dependencies:
class_list.extend(get_class_topolist(
dependency, name_to_class, processed_classes, current_trace))
current_trace.remove(class_name)
# Do the bookkeeping
class_list.append(name_to_class[class_name])
processed_classes.add(class_name)
return class_list
# Map names to classes
class_map = {c['name']: c for c in classes}
seen_classes = set()
toposorted = []
for name in class_map.keys():
toposorted.extend(get_class_topolist(name, class_map, seen_classes, set()))
return toposorted | [
"def",
"toposort_classes",
"(",
"classes",
")",
":",
"def",
"get_class_topolist",
"(",
"class_name",
",",
"name_to_class",
",",
"processed_classes",
",",
"current_trace",
")",
":",
"\"\"\"Return a topologically sorted list of this class's dependencies and class itself\n\n ... | Sort class metadatas so that a superclass is always before the subclass | [
"Sort",
"class",
"metadatas",
"so",
"that",
"a",
"superclass",
"is",
"always",
"before",
"the",
"subclass"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/utils.py#L11-L63 | train | 227,818 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/utils.py | _list_superclasses | def _list_superclasses(class_def):
"""Return a list of the superclasses of the given class"""
superclasses = class_def.get('superClasses', [])
if superclasses:
# Make sure to duplicate the list
return list(superclasses)
sup = class_def.get('superClass', None)
if sup:
return [sup]
else:
return [] | python | def _list_superclasses(class_def):
"""Return a list of the superclasses of the given class"""
superclasses = class_def.get('superClasses', [])
if superclasses:
# Make sure to duplicate the list
return list(superclasses)
sup = class_def.get('superClass', None)
if sup:
return [sup]
else:
return [] | [
"def",
"_list_superclasses",
"(",
"class_def",
")",
":",
"superclasses",
"=",
"class_def",
".",
"get",
"(",
"'superClasses'",
",",
"[",
"]",
")",
"if",
"superclasses",
":",
"# Make sure to duplicate the list",
"return",
"list",
"(",
"superclasses",
")",
"sup",
"... | Return a list of the superclasses of the given class | [
"Return",
"a",
"list",
"of",
"the",
"superclasses",
"of",
"the",
"given",
"class"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/utils.py#L66-L77 | train | 227,819 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _construct_location_stack_entry | def _construct_location_stack_entry(location, num_traverses):
"""Return a LocationStackEntry namedtuple with the specified parameters."""
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "num_traverses" {}. This is not allowed.'
.format(num_traverses))
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "location" {}. This is not allowed.'
.format(location))
return LocationStackEntry(location=location, num_traverses=num_traverses) | python | def _construct_location_stack_entry(location, num_traverses):
"""Return a LocationStackEntry namedtuple with the specified parameters."""
if not isinstance(num_traverses, int) or num_traverses < 0:
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "num_traverses" {}. This is not allowed.'
.format(num_traverses))
if not isinstance(location, Location):
raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid '
u'value for "location" {}. This is not allowed.'
.format(location))
return LocationStackEntry(location=location, num_traverses=num_traverses) | [
"def",
"_construct_location_stack_entry",
"(",
"location",
",",
"num_traverses",
")",
":",
"if",
"not",
"isinstance",
"(",
"num_traverses",
",",
"int",
")",
"or",
"num_traverses",
"<",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Attempted to create a LocationStackEnt... | Return a LocationStackEntry namedtuple with the specified parameters. | [
"Return",
"a",
"LocationStackEntry",
"namedtuple",
"with",
"the",
"specified",
"parameters",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L103-L113 | train | 227,820 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _get_fields | def _get_fields(ast):
"""Return a list of vertex fields, and a list of property fields, for the given AST node.
Also verifies that all property fields for the AST node appear before all vertex fields,
raising GraphQLCompilationError if that is not the case.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
tuple of two lists
- the first list contains ASTs for vertex fields
- the second list contains ASTs for property fields
"""
if not ast.selection_set:
# There are no child fields.
return [], []
property_fields = []
vertex_fields = []
seen_field_names = set()
switched_to_vertices = False # Ensures that all property fields are before all vertex fields.
for field_ast in ast.selection_set.selections:
if not isinstance(field_ast, Field):
# We are getting Fields only, ignore everything else.
continue
name = get_ast_field_name(field_ast)
if name in seen_field_names:
# If we ever allow repeated field names,
# then we have to change the Location naming scheme to reflect the repetitions
# and disambiguate between Recurse and Traverse visits to a Location.
raise GraphQLCompilationError(u'Encountered repeated field name: {}'.format(name))
seen_field_names.add(name)
# Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex.
if is_vertex_field_name(name):
switched_to_vertices = True
vertex_fields.append(field_ast)
else:
if switched_to_vertices:
raise GraphQLCompilationError(u'Encountered property field {} '
u'after vertex fields!'.format(name))
property_fields.append(field_ast)
return vertex_fields, property_fields | python | def _get_fields(ast):
"""Return a list of vertex fields, and a list of property fields, for the given AST node.
Also verifies that all property fields for the AST node appear before all vertex fields,
raising GraphQLCompilationError if that is not the case.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
tuple of two lists
- the first list contains ASTs for vertex fields
- the second list contains ASTs for property fields
"""
if not ast.selection_set:
# There are no child fields.
return [], []
property_fields = []
vertex_fields = []
seen_field_names = set()
switched_to_vertices = False # Ensures that all property fields are before all vertex fields.
for field_ast in ast.selection_set.selections:
if not isinstance(field_ast, Field):
# We are getting Fields only, ignore everything else.
continue
name = get_ast_field_name(field_ast)
if name in seen_field_names:
# If we ever allow repeated field names,
# then we have to change the Location naming scheme to reflect the repetitions
# and disambiguate between Recurse and Traverse visits to a Location.
raise GraphQLCompilationError(u'Encountered repeated field name: {}'.format(name))
seen_field_names.add(name)
# Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex.
if is_vertex_field_name(name):
switched_to_vertices = True
vertex_fields.append(field_ast)
else:
if switched_to_vertices:
raise GraphQLCompilationError(u'Encountered property field {} '
u'after vertex fields!'.format(name))
property_fields.append(field_ast)
return vertex_fields, property_fields | [
"def",
"_get_fields",
"(",
"ast",
")",
":",
"if",
"not",
"ast",
".",
"selection_set",
":",
"# There are no child fields.",
"return",
"[",
"]",
",",
"[",
"]",
"property_fields",
"=",
"[",
"]",
"vertex_fields",
"=",
"[",
"]",
"seen_field_names",
"=",
"set",
... | Return a list of vertex fields, and a list of property fields, for the given AST node.
Also verifies that all property fields for the AST node appear before all vertex fields,
raising GraphQLCompilationError if that is not the case.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
tuple of two lists
- the first list contains ASTs for vertex fields
- the second list contains ASTs for property fields | [
"Return",
"a",
"list",
"of",
"vertex",
"fields",
"and",
"a",
"list",
"of",
"property",
"fields",
"for",
"the",
"given",
"AST",
"node",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L142-L187 | train | 227,821 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _get_inline_fragment | def _get_inline_fragment(ast):
"""Return the inline fragment at the current AST node, or None if no fragment exists."""
if not ast.selection_set:
# There is nothing selected here, so no fragment.
return None
fragments = [
ast_node
for ast_node in ast.selection_set.selections
if isinstance(ast_node, InlineFragment)
]
if not fragments:
return None
if len(fragments) > 1:
raise GraphQLCompilationError(u'Cannot compile GraphQL with more than one fragment in '
u'a given selection set.')
return fragments[0] | python | def _get_inline_fragment(ast):
"""Return the inline fragment at the current AST node, or None if no fragment exists."""
if not ast.selection_set:
# There is nothing selected here, so no fragment.
return None
fragments = [
ast_node
for ast_node in ast.selection_set.selections
if isinstance(ast_node, InlineFragment)
]
if not fragments:
return None
if len(fragments) > 1:
raise GraphQLCompilationError(u'Cannot compile GraphQL with more than one fragment in '
u'a given selection set.')
return fragments[0] | [
"def",
"_get_inline_fragment",
"(",
"ast",
")",
":",
"if",
"not",
"ast",
".",
"selection_set",
":",
"# There is nothing selected here, so no fragment.",
"return",
"None",
"fragments",
"=",
"[",
"ast_node",
"for",
"ast_node",
"in",
"ast",
".",
"selection_set",
".",
... | Return the inline fragment at the current AST node, or None if no fragment exists. | [
"Return",
"the",
"inline",
"fragment",
"at",
"the",
"current",
"AST",
"node",
"or",
"None",
"if",
"no",
"fragment",
"exists",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L190-L209 | train | 227,822 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _process_output_source_directive | def _process_output_source_directive(schema, current_schema_type, ast,
location, context, local_unique_directives):
"""Process the output_source directive, modifying the context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
local_unique_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
Returns:
an OutputSource block, if one should be emitted, or None otherwise
"""
# The 'ast' variable is only for function signature uniformity, and is currently not used.
output_source_directive = local_unique_directives.get('output_source', None)
if output_source_directive:
if has_encountered_output_source(context):
raise GraphQLCompilationError(u'Cannot have more than one output source!')
if is_in_optional_scope(context):
raise GraphQLCompilationError(u'Cannot have the output source in an optional block!')
set_output_source_data(context, location)
return blocks.OutputSource()
else:
return None | python | def _process_output_source_directive(schema, current_schema_type, ast,
location, context, local_unique_directives):
"""Process the output_source directive, modifying the context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
local_unique_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
Returns:
an OutputSource block, if one should be emitted, or None otherwise
"""
# The 'ast' variable is only for function signature uniformity, and is currently not used.
output_source_directive = local_unique_directives.get('output_source', None)
if output_source_directive:
if has_encountered_output_source(context):
raise GraphQLCompilationError(u'Cannot have more than one output source!')
if is_in_optional_scope(context):
raise GraphQLCompilationError(u'Cannot have the output source in an optional block!')
set_output_source_data(context, location)
return blocks.OutputSource()
else:
return None | [
"def",
"_process_output_source_directive",
"(",
"schema",
",",
"current_schema_type",
",",
"ast",
",",
"location",
",",
"context",
",",
"local_unique_directives",
")",
":",
"# The 'ast' variable is only for function signature uniformity, and is currently not used.",
"output_source_... | Process the output_source directive, modifying the context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
local_unique_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
Returns:
an OutputSource block, if one should be emitted, or None otherwise | [
"Process",
"the",
"output_source",
"directive",
"modifying",
"the",
"context",
"as",
"appropriate",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L217-L244 | train | 227,823 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _compile_property_ast | def _compile_property_ast(schema, current_schema_type, ast, location,
context, unique_local_directives):
"""Process property directives at this AST node, updating the query context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library. Only for function signature
uniformity at the moment -- it is currently not used.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
unique_local_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
"""
validate_property_directives(unique_local_directives)
if location.field == COUNT_META_FIELD_NAME:
# Verify that uses of this field are within a @fold scope.
if not is_in_fold_scope(context):
raise GraphQLCompilationError(u'Cannot use the "{}" meta field when not within a @fold '
u'vertex field, as counting elements only makes sense '
u'in a fold. Location: {}'
.format(COUNT_META_FIELD_NAME, location))
# step P-2: process property-only directives
tag_directive = unique_local_directives.get('tag', None)
if tag_directive:
if is_in_fold_scope(context):
raise GraphQLCompilationError(u'Tagging values within a @fold vertex field is '
u'not allowed! Location: {}'.format(location))
if location.field == COUNT_META_FIELD_NAME:
raise AssertionError(u'Tags are prohibited within @fold, but unexpectedly found use of '
u'a tag on the {} meta field that is only allowed within a @fold!'
u'Location: {}'
.format(COUNT_META_FIELD_NAME, location))
# Schema validation has ensured that the fields below exist.
tag_name = tag_directive.arguments[0].value.value
if tag_name in context['tags']:
raise GraphQLCompilationError(u'Cannot reuse tag name: {}'.format(tag_name))
validate_safe_string(tag_name)
context['tags'][tag_name] = {
'location': location,
'optional': is_in_optional_scope(context),
'type': strip_non_null_from_type(current_schema_type),
}
context['metadata'].record_tag_info(tag_name, TagInfo(location=location))
output_directive = unique_local_directives.get('output', None)
if output_directive:
# Schema validation has ensured that the fields below exist.
output_name = output_directive.arguments[0].value.value
if output_name in context['outputs']:
raise GraphQLCompilationError(u'Cannot reuse output name: '
u'{}, {}'.format(output_name, context))
validate_safe_string(output_name)
validate_output_name(output_name)
graphql_type = strip_non_null_from_type(current_schema_type)
if is_in_fold_scope(context):
# Fold outputs are only allowed at the last level of traversal.
set_fold_innermost_scope(context)
if location.field != COUNT_META_FIELD_NAME:
graphql_type = GraphQLList(graphql_type)
context['outputs'][output_name] = {
'location': location,
'optional': is_in_optional_scope(context),
'type': graphql_type,
'fold': context.get('fold', None),
} | python | def _compile_property_ast(schema, current_schema_type, ast, location,
context, unique_local_directives):
"""Process property directives at this AST node, updating the query context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library. Only for function signature
uniformity at the moment -- it is currently not used.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
unique_local_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
"""
validate_property_directives(unique_local_directives)
if location.field == COUNT_META_FIELD_NAME:
# Verify that uses of this field are within a @fold scope.
if not is_in_fold_scope(context):
raise GraphQLCompilationError(u'Cannot use the "{}" meta field when not within a @fold '
u'vertex field, as counting elements only makes sense '
u'in a fold. Location: {}'
.format(COUNT_META_FIELD_NAME, location))
# step P-2: process property-only directives
tag_directive = unique_local_directives.get('tag', None)
if tag_directive:
if is_in_fold_scope(context):
raise GraphQLCompilationError(u'Tagging values within a @fold vertex field is '
u'not allowed! Location: {}'.format(location))
if location.field == COUNT_META_FIELD_NAME:
raise AssertionError(u'Tags are prohibited within @fold, but unexpectedly found use of '
u'a tag on the {} meta field that is only allowed within a @fold!'
u'Location: {}'
.format(COUNT_META_FIELD_NAME, location))
# Schema validation has ensured that the fields below exist.
tag_name = tag_directive.arguments[0].value.value
if tag_name in context['tags']:
raise GraphQLCompilationError(u'Cannot reuse tag name: {}'.format(tag_name))
validate_safe_string(tag_name)
context['tags'][tag_name] = {
'location': location,
'optional': is_in_optional_scope(context),
'type': strip_non_null_from_type(current_schema_type),
}
context['metadata'].record_tag_info(tag_name, TagInfo(location=location))
output_directive = unique_local_directives.get('output', None)
if output_directive:
# Schema validation has ensured that the fields below exist.
output_name = output_directive.arguments[0].value.value
if output_name in context['outputs']:
raise GraphQLCompilationError(u'Cannot reuse output name: '
u'{}, {}'.format(output_name, context))
validate_safe_string(output_name)
validate_output_name(output_name)
graphql_type = strip_non_null_from_type(current_schema_type)
if is_in_fold_scope(context):
# Fold outputs are only allowed at the last level of traversal.
set_fold_innermost_scope(context)
if location.field != COUNT_META_FIELD_NAME:
graphql_type = GraphQLList(graphql_type)
context['outputs'][output_name] = {
'location': location,
'optional': is_in_optional_scope(context),
'type': graphql_type,
'fold': context.get('fold', None),
} | [
"def",
"_compile_property_ast",
"(",
"schema",
",",
"current_schema_type",
",",
"ast",
",",
"location",
",",
"context",
",",
"unique_local_directives",
")",
":",
"validate_property_directives",
"(",
"unique_local_directives",
")",
"if",
"location",
".",
"field",
"==",... | Process property directives at this AST node, updating the query context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library. Only for function signature
uniformity at the moment -- it is currently not used.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
unique_local_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only* | [
"Process",
"property",
"directives",
"at",
"this",
"AST",
"node",
"updating",
"the",
"query",
"context",
"as",
"appropriate",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L247-L320 | train | 227,824 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _get_recurse_directive_depth | def _get_recurse_directive_depth(field_name, field_directives):
"""Validate and return the depth parameter of the recurse directive."""
recurse_directive = field_directives['recurse']
optional_directive = field_directives.get('optional', None)
if optional_directive:
raise GraphQLCompilationError(u'Found both @optional and @recurse on '
u'the same vertex field: {}'.format(field_name))
recurse_args = get_uniquely_named_objects_by_name(recurse_directive.arguments)
recurse_depth = int(recurse_args['depth'].value.value)
if recurse_depth < 1:
raise GraphQLCompilationError(u'Found recurse directive with disallowed depth: '
u'{}'.format(recurse_depth))
return recurse_depth | python | def _get_recurse_directive_depth(field_name, field_directives):
"""Validate and return the depth parameter of the recurse directive."""
recurse_directive = field_directives['recurse']
optional_directive = field_directives.get('optional', None)
if optional_directive:
raise GraphQLCompilationError(u'Found both @optional and @recurse on '
u'the same vertex field: {}'.format(field_name))
recurse_args = get_uniquely_named_objects_by_name(recurse_directive.arguments)
recurse_depth = int(recurse_args['depth'].value.value)
if recurse_depth < 1:
raise GraphQLCompilationError(u'Found recurse directive with disallowed depth: '
u'{}'.format(recurse_depth))
return recurse_depth | [
"def",
"_get_recurse_directive_depth",
"(",
"field_name",
",",
"field_directives",
")",
":",
"recurse_directive",
"=",
"field_directives",
"[",
"'recurse'",
"]",
"optional_directive",
"=",
"field_directives",
".",
"get",
"(",
"'optional'",
",",
"None",
")",
"if",
"o... | Validate and return the depth parameter of the recurse directive. | [
"Validate",
"and",
"return",
"the",
"depth",
"parameter",
"of",
"the",
"recurse",
"directive",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L323-L338 | train | 227,825 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _validate_recurse_directive_types | def _validate_recurse_directive_types(current_schema_type, field_schema_type, context):
"""Perform type checks on the enclosing type and the recursed type for a recurse directive.
Args:
current_schema_type: GraphQLType, the schema type at the current location
field_schema_type: GraphQLType, the schema type at the inner scope
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
"""
# Get the set of all allowed types in the current scope.
type_hints = context['type_equivalence_hints'].get(field_schema_type)
type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type)
allowed_current_types = {field_schema_type}
if type_hints and isinstance(type_hints, GraphQLUnionType):
allowed_current_types.update(type_hints.types)
if type_hints_inverse and isinstance(type_hints_inverse, GraphQLUnionType):
allowed_current_types.update(type_hints_inverse.types)
# The current scope must be of the same type as the field scope, or an acceptable subtype.
current_scope_is_allowed = current_schema_type in allowed_current_types
is_implemented_interface = (
isinstance(field_schema_type, GraphQLInterfaceType) and
isinstance(current_schema_type, GraphQLObjectType) and
field_schema_type in current_schema_type.interfaces
)
if not any((current_scope_is_allowed, is_implemented_interface)):
raise GraphQLCompilationError(u'Edges expanded with a @recurse directive must either '
u'be of the same type as their enclosing scope, a supertype '
u'of the enclosing scope, or be of an interface type that is '
u'implemented by the type of their enclosing scope. '
u'Enclosing scope type: {}, edge type: '
u'{}'.format(current_schema_type, field_schema_type)) | python | def _validate_recurse_directive_types(current_schema_type, field_schema_type, context):
"""Perform type checks on the enclosing type and the recursed type for a recurse directive.
Args:
current_schema_type: GraphQLType, the schema type at the current location
field_schema_type: GraphQLType, the schema type at the inner scope
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
"""
# Get the set of all allowed types in the current scope.
type_hints = context['type_equivalence_hints'].get(field_schema_type)
type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type)
allowed_current_types = {field_schema_type}
if type_hints and isinstance(type_hints, GraphQLUnionType):
allowed_current_types.update(type_hints.types)
if type_hints_inverse and isinstance(type_hints_inverse, GraphQLUnionType):
allowed_current_types.update(type_hints_inverse.types)
# The current scope must be of the same type as the field scope, or an acceptable subtype.
current_scope_is_allowed = current_schema_type in allowed_current_types
is_implemented_interface = (
isinstance(field_schema_type, GraphQLInterfaceType) and
isinstance(current_schema_type, GraphQLObjectType) and
field_schema_type in current_schema_type.interfaces
)
if not any((current_scope_is_allowed, is_implemented_interface)):
raise GraphQLCompilationError(u'Edges expanded with a @recurse directive must either '
u'be of the same type as their enclosing scope, a supertype '
u'of the enclosing scope, or be of an interface type that is '
u'implemented by the type of their enclosing scope. '
u'Enclosing scope type: {}, edge type: '
u'{}'.format(current_schema_type, field_schema_type)) | [
"def",
"_validate_recurse_directive_types",
"(",
"current_schema_type",
",",
"field_schema_type",
",",
"context",
")",
":",
"# Get the set of all allowed types in the current scope.",
"type_hints",
"=",
"context",
"[",
"'type_equivalence_hints'",
"]",
".",
"get",
"(",
"field_... | Perform type checks on the enclosing type and the recursed type for a recurse directive.
Args:
current_schema_type: GraphQLType, the schema type at the current location
field_schema_type: GraphQLType, the schema type at the inner scope
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function! | [
"Perform",
"type",
"checks",
"on",
"the",
"enclosing",
"type",
"and",
"the",
"recursed",
"type",
"for",
"a",
"recurse",
"directive",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L341-L376 | train | 227,826 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _compile_fragment_ast | def _compile_fragment_ast(schema, current_schema_type, ast, location, context):
"""Return a list of basic blocks corresponding to the inline fragment at this AST node.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks, the compiled output of the vertex AST node
"""
query_metadata_table = context['metadata']
# step F-2. Emit a type coercion block if appropriate,
# then recurse into the fragment's selection.
coerces_to_type_name = ast.type_condition.name.value
coerces_to_type_obj = schema.get_type(coerces_to_type_name)
basic_blocks = []
# Check if the coercion is necessary.
# No coercion is necessary if coercing to the current type of the scope,
# or if the scope is of union type, to the base type of the union as defined by
# the type_equivalence_hints compilation parameter.
is_same_type_as_scope = current_schema_type.is_same_type(coerces_to_type_obj)
equivalent_union_type = context['type_equivalence_hints'].get(coerces_to_type_obj, None)
is_base_type_of_union = (
isinstance(current_schema_type, GraphQLUnionType) and
current_schema_type.is_same_type(equivalent_union_type)
)
if not (is_same_type_as_scope or is_base_type_of_union):
# Coercion is required.
query_metadata_table.record_coercion_at_location(location, coerces_to_type_obj)
basic_blocks.append(blocks.CoerceType({coerces_to_type_name}))
inner_basic_blocks = _compile_ast_node_to_ir(
schema, coerces_to_type_obj, ast, location, context)
basic_blocks.extend(inner_basic_blocks)
return basic_blocks | python | def _compile_fragment_ast(schema, current_schema_type, ast, location, context):
"""Return a list of basic blocks corresponding to the inline fragment at this AST node.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks, the compiled output of the vertex AST node
"""
query_metadata_table = context['metadata']
# step F-2. Emit a type coercion block if appropriate,
# then recurse into the fragment's selection.
coerces_to_type_name = ast.type_condition.name.value
coerces_to_type_obj = schema.get_type(coerces_to_type_name)
basic_blocks = []
# Check if the coercion is necessary.
# No coercion is necessary if coercing to the current type of the scope,
# or if the scope is of union type, to the base type of the union as defined by
# the type_equivalence_hints compilation parameter.
is_same_type_as_scope = current_schema_type.is_same_type(coerces_to_type_obj)
equivalent_union_type = context['type_equivalence_hints'].get(coerces_to_type_obj, None)
is_base_type_of_union = (
isinstance(current_schema_type, GraphQLUnionType) and
current_schema_type.is_same_type(equivalent_union_type)
)
if not (is_same_type_as_scope or is_base_type_of_union):
# Coercion is required.
query_metadata_table.record_coercion_at_location(location, coerces_to_type_obj)
basic_blocks.append(blocks.CoerceType({coerces_to_type_name}))
inner_basic_blocks = _compile_ast_node_to_ir(
schema, coerces_to_type_obj, ast, location, context)
basic_blocks.extend(inner_basic_blocks)
return basic_blocks | [
"def",
"_compile_fragment_ast",
"(",
"schema",
",",
"current_schema_type",
",",
"ast",
",",
"location",
",",
"context",
")",
":",
"query_metadata_table",
"=",
"context",
"[",
"'metadata'",
"]",
"# step F-2. Emit a type coercion block if appropriate,",
"# then recu... | Return a list of basic blocks corresponding to the inline fragment at this AST node.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library.
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks, the compiled output of the vertex AST node | [
"Return",
"a",
"list",
"of",
"basic",
"blocks",
"corresponding",
"to",
"the",
"inline",
"fragment",
"at",
"this",
"AST",
"node",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L583-L626 | train | 227,827 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _compile_ast_node_to_ir | def _compile_ast_node_to_ir(schema, current_schema_type, ast, location, context):
"""Compile the given GraphQL AST node into a list of basic blocks.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: the current GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks corresponding to this GraphQL AST node
"""
basic_blocks = []
# step 0: preprocessing
local_unique_directives = get_unique_directives(ast)
fields = _get_fields(ast)
vertex_fields, property_fields = fields
fragment = _get_inline_fragment(ast)
filter_operations = get_local_filter_directives(
ast, current_schema_type, vertex_fields)
# We don't support type coercion while at the same time selecting fields.
# Either there are no fields, or there is no fragment, otherwise we raise a compilation error.
fragment_exists = fragment is not None
fields_exist = vertex_fields or property_fields
if fragment_exists and fields_exist:
raise GraphQLCompilationError(u'Cannot compile GraphQL that has inline fragment and '
u'selected fields in the same selection. Please move the '
u'selected fields inside the inline fragment.')
if location.field is not None: # we're at a property field
# sanity-check: cannot have an inline fragment at a property field
if fragment_exists:
raise AssertionError(u'Found inline fragment at a property field: '
u'{} {}'.format(location, fragment))
# sanity-check: locations at properties don't have their own property locations
if len(property_fields) > 0:
raise AssertionError(u'Found property fields on a property field: '
u'{} {}'.format(location, property_fields))
# step 1: apply local filter, if any
for filter_operation_info in filter_operations:
filter_block = process_filter_directive(filter_operation_info, location, context)
if isinstance(location, FoldScopeLocation) and location.field == COUNT_META_FIELD_NAME:
# Filtering on the fold count field is only allowed at the innermost scope of a fold.
set_fold_innermost_scope(context)
# This Filter is going in the global operations section of the query, so it cannot
# use LocalField expressions since there is no "local" location to use.
# Rewrite it so that all references of data at a location instead use ContextFields.
expected_field = expressions.LocalField(COUNT_META_FIELD_NAME)
replacement_field = expressions.FoldedContextField(location, GraphQLInt)
visitor_fn = expressions.make_replacement_visitor(expected_field, replacement_field)
filter_block = filter_block.visit_and_update_expressions(visitor_fn)
visitor_fn = expressions.make_type_replacement_visitor(
expressions.ContextField,
lambda context_field: expressions.GlobalContextField(
context_field.location, context_field.field_type))
filter_block = filter_block.visit_and_update_expressions(visitor_fn)
set_fold_count_filter(context)
context['global_filters'].append(filter_block)
else:
basic_blocks.append(filter_block)
if location.field is not None:
# The location is at a property, compile the property data following P-steps.
_compile_property_ast(schema, current_schema_type, ast,
location, context, local_unique_directives)
else:
# The location is at a vertex.
if fragment_exists:
# Compile the fragment data following F-steps.
# N.B.: Note that the "fragment" variable is the fragment's AST. Since we've asserted
# that the fragment is the only part of the selection set at the current AST node,
# we pass the "fragment" in the AST parameter of the _compile_fragment_ast()
# function, rather than the current AST node as in the other compilation steps.
basic_blocks.extend(
_compile_fragment_ast(schema, current_schema_type, fragment, location, context))
else:
# Compile the vertex data following V-steps.
basic_blocks.extend(
_compile_vertex_ast(schema, current_schema_type, ast,
location, context, local_unique_directives, fields))
return basic_blocks | python | def _compile_ast_node_to_ir(schema, current_schema_type, ast, location, context):
"""Compile the given GraphQL AST node into a list of basic blocks.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: the current GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks corresponding to this GraphQL AST node
"""
basic_blocks = []
# step 0: preprocessing
local_unique_directives = get_unique_directives(ast)
fields = _get_fields(ast)
vertex_fields, property_fields = fields
fragment = _get_inline_fragment(ast)
filter_operations = get_local_filter_directives(
ast, current_schema_type, vertex_fields)
# We don't support type coercion while at the same time selecting fields.
# Either there are no fields, or there is no fragment, otherwise we raise a compilation error.
fragment_exists = fragment is not None
fields_exist = vertex_fields or property_fields
if fragment_exists and fields_exist:
raise GraphQLCompilationError(u'Cannot compile GraphQL that has inline fragment and '
u'selected fields in the same selection. Please move the '
u'selected fields inside the inline fragment.')
if location.field is not None: # we're at a property field
# sanity-check: cannot have an inline fragment at a property field
if fragment_exists:
raise AssertionError(u'Found inline fragment at a property field: '
u'{} {}'.format(location, fragment))
# sanity-check: locations at properties don't have their own property locations
if len(property_fields) > 0:
raise AssertionError(u'Found property fields on a property field: '
u'{} {}'.format(location, property_fields))
# step 1: apply local filter, if any
for filter_operation_info in filter_operations:
filter_block = process_filter_directive(filter_operation_info, location, context)
if isinstance(location, FoldScopeLocation) and location.field == COUNT_META_FIELD_NAME:
# Filtering on the fold count field is only allowed at the innermost scope of a fold.
set_fold_innermost_scope(context)
# This Filter is going in the global operations section of the query, so it cannot
# use LocalField expressions since there is no "local" location to use.
# Rewrite it so that all references of data at a location instead use ContextFields.
expected_field = expressions.LocalField(COUNT_META_FIELD_NAME)
replacement_field = expressions.FoldedContextField(location, GraphQLInt)
visitor_fn = expressions.make_replacement_visitor(expected_field, replacement_field)
filter_block = filter_block.visit_and_update_expressions(visitor_fn)
visitor_fn = expressions.make_type_replacement_visitor(
expressions.ContextField,
lambda context_field: expressions.GlobalContextField(
context_field.location, context_field.field_type))
filter_block = filter_block.visit_and_update_expressions(visitor_fn)
set_fold_count_filter(context)
context['global_filters'].append(filter_block)
else:
basic_blocks.append(filter_block)
if location.field is not None:
# The location is at a property, compile the property data following P-steps.
_compile_property_ast(schema, current_schema_type, ast,
location, context, local_unique_directives)
else:
# The location is at a vertex.
if fragment_exists:
# Compile the fragment data following F-steps.
# N.B.: Note that the "fragment" variable is the fragment's AST. Since we've asserted
# that the fragment is the only part of the selection set at the current AST node,
# we pass the "fragment" in the AST parameter of the _compile_fragment_ast()
# function, rather than the current AST node as in the other compilation steps.
basic_blocks.extend(
_compile_fragment_ast(schema, current_schema_type, fragment, location, context))
else:
# Compile the vertex data following V-steps.
basic_blocks.extend(
_compile_vertex_ast(schema, current_schema_type, ast,
location, context, local_unique_directives, fields))
return basic_blocks | [
"def",
"_compile_ast_node_to_ir",
"(",
"schema",
",",
"current_schema_type",
",",
"ast",
",",
"location",
",",
"context",
")",
":",
"basic_blocks",
"=",
"[",
"]",
"# step 0: preprocessing",
"local_unique_directives",
"=",
"get_unique_directives",
"(",
"ast",
")",
"f... | Compile the given GraphQL AST node into a list of basic blocks.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: the current GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
list of basic blocks corresponding to this GraphQL AST node | [
"Compile",
"the",
"given",
"GraphQL",
"AST",
"node",
"into",
"a",
"list",
"of",
"basic",
"blocks",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L629-L720 | train | 227,828 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _validate_all_tags_are_used | def _validate_all_tags_are_used(metadata):
"""Ensure all tags are used in some filter."""
tag_names = set([tag_name for tag_name, _ in metadata.tags])
filter_arg_names = set()
for location, _ in metadata.registered_locations:
for filter_info in metadata.get_filter_infos(location):
for filter_arg in filter_info.args:
if is_tag_argument(filter_arg):
filter_arg_names.add(get_directive_argument_name(filter_arg))
unused_tags = tag_names - filter_arg_names
if unused_tags:
raise GraphQLCompilationError(u'This GraphQL query contains @tag directives whose values '
u'are not used: {}. This is not allowed. Please either use '
u'them in a filter or remove them entirely.'
.format(unused_tags)) | python | def _validate_all_tags_are_used(metadata):
"""Ensure all tags are used in some filter."""
tag_names = set([tag_name for tag_name, _ in metadata.tags])
filter_arg_names = set()
for location, _ in metadata.registered_locations:
for filter_info in metadata.get_filter_infos(location):
for filter_arg in filter_info.args:
if is_tag_argument(filter_arg):
filter_arg_names.add(get_directive_argument_name(filter_arg))
unused_tags = tag_names - filter_arg_names
if unused_tags:
raise GraphQLCompilationError(u'This GraphQL query contains @tag directives whose values '
u'are not used: {}. This is not allowed. Please either use '
u'them in a filter or remove them entirely.'
.format(unused_tags)) | [
"def",
"_validate_all_tags_are_used",
"(",
"metadata",
")",
":",
"tag_names",
"=",
"set",
"(",
"[",
"tag_name",
"for",
"tag_name",
",",
"_",
"in",
"metadata",
".",
"tags",
"]",
")",
"filter_arg_names",
"=",
"set",
"(",
")",
"for",
"location",
",",
"_",
"... | Ensure all tags are used in some filter. | [
"Ensure",
"all",
"tags",
"are",
"used",
"in",
"some",
"filter",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L723-L738 | train | 227,829 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _compile_output_step | def _compile_output_step(outputs):
"""Construct the final ConstructResult basic block that defines the output format of the query.
Args:
outputs: dict, output name (string) -> output data dict, specifying the location
from where to get the data, and whether the data is optional (and therefore
may be missing); missing optional data is replaced with 'null'
Returns:
a ConstructResult basic block that constructs appropriate outputs for the query
"""
if not outputs:
raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least '
u'one field with the @output directive.')
output_fields = {}
for output_name, output_context in six.iteritems(outputs):
location = output_context['location']
optional = output_context['optional']
graphql_type = output_context['type']
expression = None
existence_check = None
# pylint: disable=redefined-variable-type
if isinstance(location, FoldScopeLocation):
if optional:
raise AssertionError(u'Unreachable state reached, optional in fold: '
u'{}'.format(output_context))
if location.field == COUNT_META_FIELD_NAME:
expression = expressions.FoldCountContextField(location)
else:
expression = expressions.FoldedContextField(location, graphql_type)
else:
expression = expressions.OutputContextField(location, graphql_type)
if optional:
existence_check = expressions.ContextFieldExistence(location.at_vertex())
if existence_check:
expression = expressions.TernaryConditional(
existence_check, expression, expressions.NullLiteral)
# pylint: enable=redefined-variable-type
output_fields[output_name] = expression
return blocks.ConstructResult(output_fields) | python | def _compile_output_step(outputs):
"""Construct the final ConstructResult basic block that defines the output format of the query.
Args:
outputs: dict, output name (string) -> output data dict, specifying the location
from where to get the data, and whether the data is optional (and therefore
may be missing); missing optional data is replaced with 'null'
Returns:
a ConstructResult basic block that constructs appropriate outputs for the query
"""
if not outputs:
raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least '
u'one field with the @output directive.')
output_fields = {}
for output_name, output_context in six.iteritems(outputs):
location = output_context['location']
optional = output_context['optional']
graphql_type = output_context['type']
expression = None
existence_check = None
# pylint: disable=redefined-variable-type
if isinstance(location, FoldScopeLocation):
if optional:
raise AssertionError(u'Unreachable state reached, optional in fold: '
u'{}'.format(output_context))
if location.field == COUNT_META_FIELD_NAME:
expression = expressions.FoldCountContextField(location)
else:
expression = expressions.FoldedContextField(location, graphql_type)
else:
expression = expressions.OutputContextField(location, graphql_type)
if optional:
existence_check = expressions.ContextFieldExistence(location.at_vertex())
if existence_check:
expression = expressions.TernaryConditional(
existence_check, expression, expressions.NullLiteral)
# pylint: enable=redefined-variable-type
output_fields[output_name] = expression
return blocks.ConstructResult(output_fields) | [
"def",
"_compile_output_step",
"(",
"outputs",
")",
":",
"if",
"not",
"outputs",
":",
"raise",
"GraphQLCompilationError",
"(",
"u'No fields were selected for output! Please mark at least '",
"u'one field with the @output directive.'",
")",
"output_fields",
"=",
"{",
"}",
"for... | Construct the final ConstructResult basic block that defines the output format of the query.
Args:
outputs: dict, output name (string) -> output data dict, specifying the location
from where to get the data, and whether the data is optional (and therefore
may be missing); missing optional data is replaced with 'null'
Returns:
a ConstructResult basic block that constructs appropriate outputs for the query | [
"Construct",
"the",
"final",
"ConstructResult",
"basic",
"block",
"that",
"defines",
"the",
"output",
"format",
"of",
"the",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L866-L912 | train | 227,830 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | _validate_schema_and_ast | def _validate_schema_and_ast(schema, ast):
"""Validate the supplied graphql schema and ast.
This method wraps around graphql-core's validation to enforce a stricter requirement of the
schema -- all directives supported by the compiler must be declared by the schema, regardless of
whether each directive is used in the query or not.
Args:
schema: GraphQL schema object, created using the GraphQL library
ast: abstract syntax tree representation of a graphql query
Returns:
list containing schema and/or query validation errors
"""
core_graphql_errors = validate(schema, ast)
# The following directives appear in the core-graphql library, but are not supported by the
# graphql compiler.
unsupported_default_directives = frozenset([
frozenset([
'include',
frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
frozenset(['if'])
]),
frozenset([
'skip',
frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
frozenset(['if'])
]),
frozenset([
'deprecated',
frozenset(['ENUM_VALUE', 'FIELD_DEFINITION']),
frozenset(['reason'])
])
])
# Directives expected by the graphql compiler.
expected_directives = {
frozenset([
directive.name,
frozenset(directive.locations),
frozenset(six.viewkeys(directive.args))
])
for directive in DIRECTIVES
}
# Directives provided in the parsed graphql schema.
actual_directives = {
frozenset([
directive.name,
frozenset(directive.locations),
frozenset(six.viewkeys(directive.args))
])
for directive in schema.get_directives()
}
# Directives missing from the actual directives provided.
missing_directives = expected_directives - actual_directives
if missing_directives:
missing_message = (u'The following directives were missing from the '
u'provided schema: {}'.format(missing_directives))
core_graphql_errors.append(missing_message)
# Directives that are not specified by the core graphql library. Note that Graphql-core
# automatically injects default directives into the schema, regardless of whether
# the schema supports said directives. Hence, while the directives contained in
# unsupported_default_directives are incompatible with the graphql-compiler, we allow them to
# be present in the parsed schema string.
extra_directives = actual_directives - expected_directives - unsupported_default_directives
if extra_directives:
extra_message = (u'The following directives were supplied in the given schema, but are not '
u'not supported by the GraphQL compiler: {}'.format(extra_directives))
core_graphql_errors.append(extra_message)
return core_graphql_errors | python | def _validate_schema_and_ast(schema, ast):
"""Validate the supplied graphql schema and ast.
This method wraps around graphql-core's validation to enforce a stricter requirement of the
schema -- all directives supported by the compiler must be declared by the schema, regardless of
whether each directive is used in the query or not.
Args:
schema: GraphQL schema object, created using the GraphQL library
ast: abstract syntax tree representation of a graphql query
Returns:
list containing schema and/or query validation errors
"""
core_graphql_errors = validate(schema, ast)
# The following directives appear in the core-graphql library, but are not supported by the
# graphql compiler.
unsupported_default_directives = frozenset([
frozenset([
'include',
frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
frozenset(['if'])
]),
frozenset([
'skip',
frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']),
frozenset(['if'])
]),
frozenset([
'deprecated',
frozenset(['ENUM_VALUE', 'FIELD_DEFINITION']),
frozenset(['reason'])
])
])
# Directives expected by the graphql compiler.
expected_directives = {
frozenset([
directive.name,
frozenset(directive.locations),
frozenset(six.viewkeys(directive.args))
])
for directive in DIRECTIVES
}
# Directives provided in the parsed graphql schema.
actual_directives = {
frozenset([
directive.name,
frozenset(directive.locations),
frozenset(six.viewkeys(directive.args))
])
for directive in schema.get_directives()
}
# Directives missing from the actual directives provided.
missing_directives = expected_directives - actual_directives
if missing_directives:
missing_message = (u'The following directives were missing from the '
u'provided schema: {}'.format(missing_directives))
core_graphql_errors.append(missing_message)
# Directives that are not specified by the core graphql library. Note that Graphql-core
# automatically injects default directives into the schema, regardless of whether
# the schema supports said directives. Hence, while the directives contained in
# unsupported_default_directives are incompatible with the graphql-compiler, we allow them to
# be present in the parsed schema string.
extra_directives = actual_directives - expected_directives - unsupported_default_directives
if extra_directives:
extra_message = (u'The following directives were supplied in the given schema, but are not '
u'not supported by the GraphQL compiler: {}'.format(extra_directives))
core_graphql_errors.append(extra_message)
return core_graphql_errors | [
"def",
"_validate_schema_and_ast",
"(",
"schema",
",",
"ast",
")",
":",
"core_graphql_errors",
"=",
"validate",
"(",
"schema",
",",
"ast",
")",
"# The following directives appear in the core-graphql library, but are not supported by the",
"# graphql compiler.",
"unsupported_defau... | Validate the supplied graphql schema and ast.
This method wraps around graphql-core's validation to enforce a stricter requirement of the
schema -- all directives supported by the compiler must be declared by the schema, regardless of
whether each directive is used in the query or not.
Args:
schema: GraphQL schema object, created using the GraphQL library
ast: abstract syntax tree representation of a graphql query
Returns:
list containing schema and/or query validation errors | [
"Validate",
"the",
"supplied",
"graphql",
"schema",
"and",
"ast",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L922-L996 | train | 227,831 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/compiler_frontend.py | graphql_to_ir | def graphql_to_ir(schema, graphql_string, type_equivalence_hints=None):
"""Convert the given GraphQL string into compiler IR, using the given schema object.
Args:
schema: GraphQL schema object, created using the GraphQL library
graphql_string: string containing the GraphQL to compile to compiler IR
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
IrAndMetadata named tuple, containing fields:
- ir_blocks: a list of IR basic block objects
- input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type
- output_metadata: a dict of output name (string) -> OutputMetadata object
- query_metadata_table: a QueryMetadataTable object containing location metadata
Raises flavors of GraphQLError in the following cases:
- if the query is invalid GraphQL (GraphQLParsingError);
- if the query doesn't match the schema (GraphQLValidationError);
- if the query has more than one definition block (GraphQLValidationError);
- if the query has more than one selection in the root object (GraphQLCompilationError);
- if the query does not obey directive usage rules (GraphQLCompilationError);
- if the query provides invalid / disallowed / wrong number of arguments
for a directive (GraphQLCompilationError).
In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError.
"""
graphql_string = _preprocess_graphql_string(graphql_string)
try:
ast = parse(graphql_string)
except GraphQLSyntaxError as e:
raise GraphQLParsingError(e)
validation_errors = _validate_schema_and_ast(schema, ast)
if validation_errors:
raise GraphQLValidationError(u'String does not validate: {}'.format(validation_errors))
if len(ast.definitions) != 1:
raise AssertionError(u'Unsupported graphql string with multiple definitions, should have '
u'been caught in validation: \n{}\n{}'.format(graphql_string, ast))
base_ast = ast.definitions[0]
return _compile_root_ast_to_ir(schema, base_ast, type_equivalence_hints=type_equivalence_hints) | python | def graphql_to_ir(schema, graphql_string, type_equivalence_hints=None):
"""Convert the given GraphQL string into compiler IR, using the given schema object.
Args:
schema: GraphQL schema object, created using the GraphQL library
graphql_string: string containing the GraphQL to compile to compiler IR
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
IrAndMetadata named tuple, containing fields:
- ir_blocks: a list of IR basic block objects
- input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type
- output_metadata: a dict of output name (string) -> OutputMetadata object
- query_metadata_table: a QueryMetadataTable object containing location metadata
Raises flavors of GraphQLError in the following cases:
- if the query is invalid GraphQL (GraphQLParsingError);
- if the query doesn't match the schema (GraphQLValidationError);
- if the query has more than one definition block (GraphQLValidationError);
- if the query has more than one selection in the root object (GraphQLCompilationError);
- if the query does not obey directive usage rules (GraphQLCompilationError);
- if the query provides invalid / disallowed / wrong number of arguments
for a directive (GraphQLCompilationError).
In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError.
"""
graphql_string = _preprocess_graphql_string(graphql_string)
try:
ast = parse(graphql_string)
except GraphQLSyntaxError as e:
raise GraphQLParsingError(e)
validation_errors = _validate_schema_and_ast(schema, ast)
if validation_errors:
raise GraphQLValidationError(u'String does not validate: {}'.format(validation_errors))
if len(ast.definitions) != 1:
raise AssertionError(u'Unsupported graphql string with multiple definitions, should have '
u'been caught in validation: \n{}\n{}'.format(graphql_string, ast))
base_ast = ast.definitions[0]
return _compile_root_ast_to_ir(schema, base_ast, type_equivalence_hints=type_equivalence_hints) | [
"def",
"graphql_to_ir",
"(",
"schema",
",",
"graphql_string",
",",
"type_equivalence_hints",
"=",
"None",
")",
":",
"graphql_string",
"=",
"_preprocess_graphql_string",
"(",
"graphql_string",
")",
"try",
":",
"ast",
"=",
"parse",
"(",
"graphql_string",
")",
"excep... | Convert the given GraphQL string into compiler IR, using the given schema object.
Args:
schema: GraphQL schema object, created using the GraphQL library
graphql_string: string containing the GraphQL to compile to compiler IR
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
IrAndMetadata named tuple, containing fields:
- ir_blocks: a list of IR basic block objects
- input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type
- output_metadata: a dict of output name (string) -> OutputMetadata object
- query_metadata_table: a QueryMetadataTable object containing location metadata
Raises flavors of GraphQLError in the following cases:
- if the query is invalid GraphQL (GraphQLParsingError);
- if the query doesn't match the schema (GraphQLValidationError);
- if the query has more than one definition block (GraphQLValidationError);
- if the query has more than one selection in the root object (GraphQLCompilationError);
- if the query does not obey directive usage rules (GraphQLCompilationError);
- if the query provides invalid / disallowed / wrong number of arguments
for a directive (GraphQLCompilationError).
In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError. | [
"Convert",
"the",
"given",
"GraphQL",
"string",
"into",
"compiler",
"IR",
"using",
"the",
"given",
"schema",
"object",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L1003-L1059 | train | 227,832 |
kensho-technologies/graphql-compiler | graphql_compiler/debugging_utils.py | pretty_print_gremlin | def pretty_print_gremlin(gremlin):
"""Return a human-readable representation of a gremlin command string."""
gremlin = remove_custom_formatting(gremlin)
too_many_parts = re.split(r'([)}]|scatter)[ ]?\.', gremlin)
# Put the ) and } back on.
parts = [
too_many_parts[i] + too_many_parts[i + 1]
for i in six.moves.xrange(0, len(too_many_parts) - 1, 2)
]
parts.append(too_many_parts[-1])
# Put the . back on.
for i in six.moves.xrange(1, len(parts)):
parts[i] = '.' + parts[i]
indentation = 0
indentation_increment = 4
output = []
for current_part in parts:
if any([current_part.startswith('.out'),
current_part.startswith('.in'),
current_part.startswith('.ifThenElse')]):
indentation += indentation_increment
elif current_part.startswith('.back') or current_part.startswith('.optional'):
indentation -= indentation_increment
if indentation < 0:
raise AssertionError(u'Indentation became negative: {}'.format(indentation))
output.append((' ' * indentation) + current_part)
return '\n'.join(output).strip() | python | def pretty_print_gremlin(gremlin):
"""Return a human-readable representation of a gremlin command string."""
gremlin = remove_custom_formatting(gremlin)
too_many_parts = re.split(r'([)}]|scatter)[ ]?\.', gremlin)
# Put the ) and } back on.
parts = [
too_many_parts[i] + too_many_parts[i + 1]
for i in six.moves.xrange(0, len(too_many_parts) - 1, 2)
]
parts.append(too_many_parts[-1])
# Put the . back on.
for i in six.moves.xrange(1, len(parts)):
parts[i] = '.' + parts[i]
indentation = 0
indentation_increment = 4
output = []
for current_part in parts:
if any([current_part.startswith('.out'),
current_part.startswith('.in'),
current_part.startswith('.ifThenElse')]):
indentation += indentation_increment
elif current_part.startswith('.back') or current_part.startswith('.optional'):
indentation -= indentation_increment
if indentation < 0:
raise AssertionError(u'Indentation became negative: {}'.format(indentation))
output.append((' ' * indentation) + current_part)
return '\n'.join(output).strip() | [
"def",
"pretty_print_gremlin",
"(",
"gremlin",
")",
":",
"gremlin",
"=",
"remove_custom_formatting",
"(",
"gremlin",
")",
"too_many_parts",
"=",
"re",
".",
"split",
"(",
"r'([)}]|scatter)[ ]?\\.'",
",",
"gremlin",
")",
"# Put the ) and } back on.",
"parts",
"=",
"["... | Return a human-readable representation of a gremlin command string. | [
"Return",
"a",
"human",
"-",
"readable",
"representation",
"of",
"a",
"gremlin",
"command",
"string",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/debugging_utils.py#L13-L44 | train | 227,833 |
kensho-technologies/graphql-compiler | graphql_compiler/debugging_utils.py | pretty_print_match | def pretty_print_match(match, parameterized=True):
"""Return a human-readable representation of a parameterized MATCH query string."""
left_curly = '{{' if parameterized else '{'
right_curly = '}}' if parameterized else '}'
match = remove_custom_formatting(match)
parts = re.split('({}|{})'.format(left_curly, right_curly), match)
inside_braces = False
indent_size = 4
indent = ' ' * indent_size
output = [parts[0]]
for current_index, current_part in enumerate(parts[1:]):
if current_part == left_curly:
if inside_braces:
raise AssertionError(u'Found open-braces pair while already inside braces: '
u'{} {} {}'.format(current_index, parts, match))
inside_braces = True
output.append(current_part + '\n')
elif current_part == right_curly:
if not inside_braces:
raise AssertionError(u'Found close-braces pair while not inside braces: '
u'{} {} {}'.format(current_index, parts, match))
inside_braces = False
output.append(current_part)
else:
if not inside_braces:
stripped_part = current_part.lstrip()
if stripped_part.startswith('.'):
# Strip whitespace before traversal steps.
output.append(stripped_part)
else:
# Do not strip whitespace before e.g. the RETURN keyword.
output.append(current_part)
else:
# Split out the keywords, initially getting rid of commas.
separate_keywords = re.split(', ([a-z]+:)', current_part)
# The first item in the separated list is the full first "keyword: value" pair.
# For every subsequent item, the keyword and value are separated; join them
# back together, outputting the comma, newline and indentation before them.
output.append(indent + separate_keywords[0].lstrip())
for i in six.moves.xrange(1, len(separate_keywords) - 1, 2):
output.append(',\n{indent}{keyword} {value}'.format(
keyword=separate_keywords[i].strip(),
value=separate_keywords[i + 1].strip(),
indent=indent))
output.append('\n')
return ''.join(output).strip() | python | def pretty_print_match(match, parameterized=True):
"""Return a human-readable representation of a parameterized MATCH query string."""
left_curly = '{{' if parameterized else '{'
right_curly = '}}' if parameterized else '}'
match = remove_custom_formatting(match)
parts = re.split('({}|{})'.format(left_curly, right_curly), match)
inside_braces = False
indent_size = 4
indent = ' ' * indent_size
output = [parts[0]]
for current_index, current_part in enumerate(parts[1:]):
if current_part == left_curly:
if inside_braces:
raise AssertionError(u'Found open-braces pair while already inside braces: '
u'{} {} {}'.format(current_index, parts, match))
inside_braces = True
output.append(current_part + '\n')
elif current_part == right_curly:
if not inside_braces:
raise AssertionError(u'Found close-braces pair while not inside braces: '
u'{} {} {}'.format(current_index, parts, match))
inside_braces = False
output.append(current_part)
else:
if not inside_braces:
stripped_part = current_part.lstrip()
if stripped_part.startswith('.'):
# Strip whitespace before traversal steps.
output.append(stripped_part)
else:
# Do not strip whitespace before e.g. the RETURN keyword.
output.append(current_part)
else:
# Split out the keywords, initially getting rid of commas.
separate_keywords = re.split(', ([a-z]+:)', current_part)
# The first item in the separated list is the full first "keyword: value" pair.
# For every subsequent item, the keyword and value are separated; join them
# back together, outputting the comma, newline and indentation before them.
output.append(indent + separate_keywords[0].lstrip())
for i in six.moves.xrange(1, len(separate_keywords) - 1, 2):
output.append(',\n{indent}{keyword} {value}'.format(
keyword=separate_keywords[i].strip(),
value=separate_keywords[i + 1].strip(),
indent=indent))
output.append('\n')
return ''.join(output).strip() | [
"def",
"pretty_print_match",
"(",
"match",
",",
"parameterized",
"=",
"True",
")",
":",
"left_curly",
"=",
"'{{'",
"if",
"parameterized",
"else",
"'{'",
"right_curly",
"=",
"'}}'",
"if",
"parameterized",
"else",
"'}'",
"match",
"=",
"remove_custom_formatting",
"... | Return a human-readable representation of a parameterized MATCH query string. | [
"Return",
"a",
"human",
"-",
"readable",
"representation",
"of",
"a",
"parameterized",
"MATCH",
"query",
"string",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/debugging_utils.py#L47-L96 | train | 227,834 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/representations.py | represent_float_as_str | def represent_float_as_str(value):
"""Represent a float as a string without losing precision."""
# In Python 2, calling str() on a float object loses precision:
#
# In [1]: 1.23456789012345678
# Out[1]: 1.2345678901234567
#
# In [2]: 1.2345678901234567
# Out[2]: 1.2345678901234567
#
# In [3]: str(1.2345678901234567)
# Out[3]: '1.23456789012'
#
# The best way to ensure precision is not lost is to convert to string via Decimal:
# https://github.com/mogui/pyorient/pull/226/files
if not isinstance(value, float):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: '
u'{}'.format(value))
with decimal.localcontext() as ctx:
ctx.prec = 20 # floats are max 80-bits wide = 20 significant digits
return u'{:f}'.format(decimal.Decimal(value)) | python | def represent_float_as_str(value):
"""Represent a float as a string without losing precision."""
# In Python 2, calling str() on a float object loses precision:
#
# In [1]: 1.23456789012345678
# Out[1]: 1.2345678901234567
#
# In [2]: 1.2345678901234567
# Out[2]: 1.2345678901234567
#
# In [3]: str(1.2345678901234567)
# Out[3]: '1.23456789012'
#
# The best way to ensure precision is not lost is to convert to string via Decimal:
# https://github.com/mogui/pyorient/pull/226/files
if not isinstance(value, float):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: '
u'{}'.format(value))
with decimal.localcontext() as ctx:
ctx.prec = 20 # floats are max 80-bits wide = 20 significant digits
return u'{:f}'.format(decimal.Decimal(value)) | [
"def",
"represent_float_as_str",
"(",
"value",
")",
":",
"# In Python 2, calling str() on a float object loses precision:",
"#",
"# In [1]: 1.23456789012345678",
"# Out[1]: 1.2345678901234567",
"#",
"# In [2]: 1.2345678901234567",
"# Out[2]: 1.2345678901234567",
"#",
"# In [3]: str(1.234... | Represent a float as a string without losing precision. | [
"Represent",
"a",
"float",
"as",
"a",
"string",
"without",
"losing",
"precision",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/representations.py#L8-L29 | train | 227,835 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/representations.py | coerce_to_decimal | def coerce_to_decimal(value):
"""Attempt to coerce the value to a Decimal, or raise an error if unable to do so."""
if isinstance(value, decimal.Decimal):
return value
else:
try:
return decimal.Decimal(value)
except decimal.InvalidOperation as e:
raise GraphQLInvalidArgumentError(e) | python | def coerce_to_decimal(value):
"""Attempt to coerce the value to a Decimal, or raise an error if unable to do so."""
if isinstance(value, decimal.Decimal):
return value
else:
try:
return decimal.Decimal(value)
except decimal.InvalidOperation as e:
raise GraphQLInvalidArgumentError(e) | [
"def",
"coerce_to_decimal",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"decimal",
".",
"Decimal",
")",
":",
"return",
"value",
"else",
":",
"try",
":",
"return",
"decimal",
".",
"Decimal",
"(",
"value",
")",
"except",
"decimal",
".",
... | Attempt to coerce the value to a Decimal, or raise an error if unable to do so. | [
"Attempt",
"to",
"coerce",
"the",
"value",
"to",
"a",
"Decimal",
"or",
"raise",
"an",
"error",
"if",
"unable",
"to",
"do",
"so",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/representations.py#L41-L49 | train | 227,836 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | make_replacement_visitor | def make_replacement_visitor(find_expression, replace_expression):
"""Return a visitor function that replaces every instance of one expression with another one."""
def visitor_fn(expression):
"""Return the replacement if this expression matches the expression we're looking for."""
if expression == find_expression:
return replace_expression
else:
return expression
return visitor_fn | python | def make_replacement_visitor(find_expression, replace_expression):
"""Return a visitor function that replaces every instance of one expression with another one."""
def visitor_fn(expression):
"""Return the replacement if this expression matches the expression we're looking for."""
if expression == find_expression:
return replace_expression
else:
return expression
return visitor_fn | [
"def",
"make_replacement_visitor",
"(",
"find_expression",
",",
"replace_expression",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Return the replacement if this expression matches the expression we're looking for.\"\"\"",
"if",
"expression",
"==",
"find_exp... | Return a visitor function that replaces every instance of one expression with another one. | [
"Return",
"a",
"visitor",
"function",
"that",
"replaces",
"every",
"instance",
"of",
"one",
"expression",
"with",
"another",
"one",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L29-L38 | train | 227,837 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | make_type_replacement_visitor | def make_type_replacement_visitor(find_types, replacement_func):
"""Return a visitor function that replaces expressions of a given type with new expressions."""
def visitor_fn(expression):
"""Return a replacement expression if the original expression is of the correct type."""
if isinstance(expression, find_types):
return replacement_func(expression)
else:
return expression
return visitor_fn | python | def make_type_replacement_visitor(find_types, replacement_func):
"""Return a visitor function that replaces expressions of a given type with new expressions."""
def visitor_fn(expression):
"""Return a replacement expression if the original expression is of the correct type."""
if isinstance(expression, find_types):
return replacement_func(expression)
else:
return expression
return visitor_fn | [
"def",
"make_type_replacement_visitor",
"(",
"find_types",
",",
"replacement_func",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Return a replacement expression if the original expression is of the correct type.\"\"\"",
"if",
"isinstance",
"(",
"expression",... | Return a visitor function that replaces expressions of a given type with new expressions. | [
"Return",
"a",
"visitor",
"function",
"that",
"replaces",
"expressions",
"of",
"a",
"given",
"type",
"with",
"new",
"expressions",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L41-L50 | train | 227,838 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | _validate_operator_name | def _validate_operator_name(operator, supported_operators):
"""Ensure the named operator is valid and supported."""
if not isinstance(operator, six.text_type):
raise TypeError(u'Expected operator as unicode string, got: {} {}'.format(
type(operator).__name__, operator))
if operator not in supported_operators:
raise GraphQLCompilationError(u'Unrecognized operator: {}'.format(operator)) | python | def _validate_operator_name(operator, supported_operators):
"""Ensure the named operator is valid and supported."""
if not isinstance(operator, six.text_type):
raise TypeError(u'Expected operator as unicode string, got: {} {}'.format(
type(operator).__name__, operator))
if operator not in supported_operators:
raise GraphQLCompilationError(u'Unrecognized operator: {}'.format(operator)) | [
"def",
"_validate_operator_name",
"(",
"operator",
",",
"supported_operators",
")",
":",
"if",
"not",
"isinstance",
"(",
"operator",
",",
"six",
".",
"text_type",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected operator as unicode string, got: {} {}'",
".",
"format",... | Ensure the named operator is valid and supported. | [
"Ensure",
"the",
"named",
"operator",
"is",
"valid",
"and",
"supported",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L665-L672 | train | 227,839 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | Literal.validate | def validate(self):
"""Validate that the Literal is correctly representable."""
# Literals representing boolean values or None are correctly representable and supported.
if self.value is None or self.value is True or self.value is False:
return
# Literal safe strings are correctly representable and supported.
if isinstance(self.value, six.string_types):
validate_safe_string(self.value)
return
# Literal ints are correctly representable and supported.
if isinstance(self.value, int):
return
# Literal empty lists, and non-empty lists of safe strings, are
# correctly representable and supported.
if isinstance(self.value, list):
if len(self.value) > 0:
for x in self.value:
validate_safe_string(x)
return
raise GraphQLCompilationError(u'Cannot represent literal: {}'.format(self.value)) | python | def validate(self):
"""Validate that the Literal is correctly representable."""
# Literals representing boolean values or None are correctly representable and supported.
if self.value is None or self.value is True or self.value is False:
return
# Literal safe strings are correctly representable and supported.
if isinstance(self.value, six.string_types):
validate_safe_string(self.value)
return
# Literal ints are correctly representable and supported.
if isinstance(self.value, int):
return
# Literal empty lists, and non-empty lists of safe strings, are
# correctly representable and supported.
if isinstance(self.value, list):
if len(self.value) > 0:
for x in self.value:
validate_safe_string(x)
return
raise GraphQLCompilationError(u'Cannot represent literal: {}'.format(self.value)) | [
"def",
"validate",
"(",
"self",
")",
":",
"# Literals representing boolean values or None are correctly representable and supported.",
"if",
"self",
".",
"value",
"is",
"None",
"or",
"self",
".",
"value",
"is",
"True",
"or",
"self",
".",
"value",
"is",
"False",
":",... | Validate that the Literal is correctly representable. | [
"Validate",
"that",
"the",
"Literal",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L72-L95 | train | 227,840 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | Variable.validate | def validate(self):
"""Validate that the Variable is correctly representable."""
# Get the first letter, or empty string if it doesn't exist.
if not self.variable_name.startswith(u'$'):
raise GraphQLCompilationError(u'Expected variable name to start with $, but was: '
u'{}'.format(self.variable_name))
if self.variable_name in RESERVED_MATCH_KEYWORDS:
raise GraphQLCompilationError(u'Cannot use reserved MATCH keyword {} as variable '
u'name!'.format(self.variable_name))
validate_safe_string(self.variable_name[1:])
if not is_graphql_type(self.inferred_type):
raise ValueError(u'Invalid value of "inferred_type": {}'.format(self.inferred_type))
if isinstance(self.inferred_type, GraphQLNonNull):
raise ValueError(u'GraphQL non-null types are not supported as "inferred_type": '
u'{}'.format(self.inferred_type))
if isinstance(self.inferred_type, GraphQLList):
inner_type = strip_non_null_from_type(self.inferred_type.of_type)
if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
# This is a compilation error rather than a ValueError as
# it can be caused by an invalid GraphQL query on an otherwise valid schema.
# In other words, it's an error in writing the GraphQL query, rather than
# a programming error within the library.
raise GraphQLCompilationError(
u'Lists of Date or DateTime cannot currently be represented as '
u'Variable objects: {}'.format(self.inferred_type)) | python | def validate(self):
"""Validate that the Variable is correctly representable."""
# Get the first letter, or empty string if it doesn't exist.
if not self.variable_name.startswith(u'$'):
raise GraphQLCompilationError(u'Expected variable name to start with $, but was: '
u'{}'.format(self.variable_name))
if self.variable_name in RESERVED_MATCH_KEYWORDS:
raise GraphQLCompilationError(u'Cannot use reserved MATCH keyword {} as variable '
u'name!'.format(self.variable_name))
validate_safe_string(self.variable_name[1:])
if not is_graphql_type(self.inferred_type):
raise ValueError(u'Invalid value of "inferred_type": {}'.format(self.inferred_type))
if isinstance(self.inferred_type, GraphQLNonNull):
raise ValueError(u'GraphQL non-null types are not supported as "inferred_type": '
u'{}'.format(self.inferred_type))
if isinstance(self.inferred_type, GraphQLList):
inner_type = strip_non_null_from_type(self.inferred_type.of_type)
if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
# This is a compilation error rather than a ValueError as
# it can be caused by an invalid GraphQL query on an otherwise valid schema.
# In other words, it's an error in writing the GraphQL query, rather than
# a programming error within the library.
raise GraphQLCompilationError(
u'Lists of Date or DateTime cannot currently be represented as '
u'Variable objects: {}'.format(self.inferred_type)) | [
"def",
"validate",
"(",
"self",
")",
":",
"# Get the first letter, or empty string if it doesn't exist.",
"if",
"not",
"self",
".",
"variable_name",
".",
"startswith",
"(",
"u'$'",
")",
":",
"raise",
"GraphQLCompilationError",
"(",
"u'Expected variable name to start with $,... | Validate that the Variable is correctly representable. | [
"Validate",
"that",
"the",
"Variable",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L154-L183 | train | 227,841 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | Variable.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this Variable."""
self.validate()
# We don't want the dollar sign as part of the variable name.
variable_with_no_dollar_sign = self.variable_name[1:]
match_variable_name = '{%s}' % (six.text_type(variable_with_no_dollar_sign),)
# We can't directly pass a Date or DateTime object, so we have to pass it as a string
# and then parse it inline. For date format parameter meanings, see:
# http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html
# For the semantics of the date() OrientDB SQL function, see:
# http://orientdb.com/docs/last/SQL-Functions.html#date
if GraphQLDate.is_same_type(self.inferred_type):
return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATE_FORMAT)
elif GraphQLDateTime.is_same_type(self.inferred_type):
return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATETIME_FORMAT)
else:
return match_variable_name | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this Variable."""
self.validate()
# We don't want the dollar sign as part of the variable name.
variable_with_no_dollar_sign = self.variable_name[1:]
match_variable_name = '{%s}' % (six.text_type(variable_with_no_dollar_sign),)
# We can't directly pass a Date or DateTime object, so we have to pass it as a string
# and then parse it inline. For date format parameter meanings, see:
# http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html
# For the semantics of the date() OrientDB SQL function, see:
# http://orientdb.com/docs/last/SQL-Functions.html#date
if GraphQLDate.is_same_type(self.inferred_type):
return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATE_FORMAT)
elif GraphQLDateTime.is_same_type(self.inferred_type):
return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATETIME_FORMAT)
else:
return match_variable_name | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"# We don't want the dollar sign as part of the variable name.",
"variable_with_no_dollar_sign",
"=",
"self",
".",
"variable_name",
"[",
"1",
":",
"]",
"match_variable_name",
"=",
"'{%s}'",
"... | Return a unicode object with the MATCH representation of this Variable. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"Variable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L185-L204 | train | 227,842 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | GlobalContextField.validate | def validate(self):
"""Validate that the GlobalContextField is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'
.format(type(self.location).__name__, self.location))
if self.location.field is None:
raise AssertionError(u'Received Location without a field: {}'
.format(self.location))
if not is_graphql_type(self.field_type):
raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type)) | python | def validate(self):
"""Validate that the GlobalContextField is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'
.format(type(self.location).__name__, self.location))
if self.location.field is None:
raise AssertionError(u'Received Location without a field: {}'
.format(self.location))
if not is_graphql_type(self.field_type):
raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"location",
",",
"Location",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Location location, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"location",
")... | Validate that the GlobalContextField is correctly representable. | [
"Validate",
"that",
"the",
"GlobalContextField",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L289-L300 | train | 227,843 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | GlobalContextField.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this GlobalContextField."""
self.validate()
mark_name, field_name = self.location.get_location_name()
validate_safe_string(mark_name)
validate_safe_string(field_name)
return u'%s.%s' % (mark_name, field_name) | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this GlobalContextField."""
self.validate()
mark_name, field_name = self.location.get_location_name()
validate_safe_string(mark_name)
validate_safe_string(field_name)
return u'%s.%s' % (mark_name, field_name) | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"mark_name",
",",
"field_name",
"=",
"self",
".",
"location",
".",
"get_location_name",
"(",
")",
"validate_safe_string",
"(",
"mark_name",
")",
"validate_safe_string",
"(",
"field_na... | Return a unicode object with the MATCH representation of this GlobalContextField. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"GlobalContextField",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L302-L310 | train | 227,844 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | ContextField.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this ContextField."""
self.validate()
mark_name, field_name = self.location.get_location_name()
validate_safe_string(mark_name)
if field_name is None:
return u'$matched.%s' % (mark_name,)
else:
validate_safe_string(field_name)
return u'$matched.%s.%s' % (mark_name, field_name) | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this ContextField."""
self.validate()
mark_name, field_name = self.location.get_location_name()
validate_safe_string(mark_name)
if field_name is None:
return u'$matched.%s' % (mark_name,)
else:
validate_safe_string(field_name)
return u'$matched.%s.%s' % (mark_name, field_name) | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"mark_name",
",",
"field_name",
"=",
"self",
".",
"location",
".",
"get_location_name",
"(",
")",
"validate_safe_string",
"(",
"mark_name",
")",
"if",
"field_name",
"is",
"None",
... | Return a unicode object with the MATCH representation of this ContextField. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"ContextField",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L350-L361 | train | 227,845 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | OutputContextField.validate | def validate(self):
"""Validate that the OutputContextField is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'.format(
type(self.location).__name__, self.location))
if not self.location.field:
raise ValueError(u'Expected Location object that points to a field, got: '
u'{}'.format(self.location))
if not is_graphql_type(self.field_type):
raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))
stripped_field_type = strip_non_null_from_type(self.field_type)
if isinstance(stripped_field_type, GraphQLList):
inner_type = strip_non_null_from_type(stripped_field_type.of_type)
if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
# This is a compilation error rather than a ValueError as
# it can be caused by an invalid GraphQL query on an otherwise valid schema.
# In other words, it's an error in writing the GraphQL query, rather than
# a programming error within the library.
raise GraphQLCompilationError(
u'Lists of Date or DateTime cannot currently be represented as '
u'OutputContextField objects: {}'.format(self.field_type)) | python | def validate(self):
"""Validate that the OutputContextField is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'.format(
type(self.location).__name__, self.location))
if not self.location.field:
raise ValueError(u'Expected Location object that points to a field, got: '
u'{}'.format(self.location))
if not is_graphql_type(self.field_type):
raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))
stripped_field_type = strip_non_null_from_type(self.field_type)
if isinstance(stripped_field_type, GraphQLList):
inner_type = strip_non_null_from_type(stripped_field_type.of_type)
if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
# This is a compilation error rather than a ValueError as
# it can be caused by an invalid GraphQL query on an otherwise valid schema.
# In other words, it's an error in writing the GraphQL query, rather than
# a programming error within the library.
raise GraphQLCompilationError(
u'Lists of Date or DateTime cannot currently be represented as '
u'OutputContextField objects: {}'.format(self.field_type)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"location",
",",
"Location",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Location location, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"location",
")... | Validate that the OutputContextField is correctly representable. | [
"Validate",
"that",
"the",
"OutputContextField",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L404-L427 | train | 227,846 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | FoldedContextField.validate | def validate(self):
"""Validate that the FoldedContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
if self.fold_scope_location.field is None:
raise ValueError(u'Expected FoldScopeLocation at a field, but got: {}'
.format(self.fold_scope_location))
if self.fold_scope_location.field == COUNT_META_FIELD_NAME:
if not GraphQLInt.is_same_type(self.field_type):
raise TypeError(u'Expected the _x_count meta-field to be of GraphQLInt type, but '
u'encountered type {} instead: {}'
.format(self.field_type, self.fold_scope_location))
else:
if not isinstance(self.field_type, GraphQLList):
raise ValueError(u'Invalid value of "field_type" for a field that is not '
u'a meta-field, expected a list type but got: {} {}'
.format(self.field_type, self.fold_scope_location))
inner_type = strip_non_null_from_type(self.field_type.of_type)
if isinstance(inner_type, GraphQLList):
raise GraphQLCompilationError(
u'Outputting list-valued fields in a @fold context is currently not supported: '
u'{} {}'.format(self.fold_scope_location, self.field_type.of_type)) | python | def validate(self):
"""Validate that the FoldedContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
if self.fold_scope_location.field is None:
raise ValueError(u'Expected FoldScopeLocation at a field, but got: {}'
.format(self.fold_scope_location))
if self.fold_scope_location.field == COUNT_META_FIELD_NAME:
if not GraphQLInt.is_same_type(self.field_type):
raise TypeError(u'Expected the _x_count meta-field to be of GraphQLInt type, but '
u'encountered type {} instead: {}'
.format(self.field_type, self.fold_scope_location))
else:
if not isinstance(self.field_type, GraphQLList):
raise ValueError(u'Invalid value of "field_type" for a field that is not '
u'a meta-field, expected a list type but got: {} {}'
.format(self.field_type, self.fold_scope_location))
inner_type = strip_non_null_from_type(self.field_type.of_type)
if isinstance(inner_type, GraphQLList):
raise GraphQLCompilationError(
u'Outputting list-valued fields in a @fold context is currently not supported: '
u'{} {}'.format(self.fold_scope_location, self.field_type.of_type)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fold_scope_location",
",",
"FoldScopeLocation",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected FoldScopeLocation fold_scope_location, got: {} {}'",
".",
"format",
"(",
"type",
... | Validate that the FoldedContextField is correctly representable. | [
"Validate",
"that",
"the",
"FoldedContextField",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L505-L530 | train | 227,847 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | FoldCountContextField.validate | def validate(self):
"""Validate that the FoldCountContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
if self.fold_scope_location.field != COUNT_META_FIELD_NAME:
raise AssertionError(u'Unexpected field in the FoldScopeLocation of this '
u'FoldCountContextField object: {} {}'
.format(self.fold_scope_location, self)) | python | def validate(self):
"""Validate that the FoldCountContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
if self.fold_scope_location.field != COUNT_META_FIELD_NAME:
raise AssertionError(u'Unexpected field in the FoldScopeLocation of this '
u'FoldCountContextField object: {} {}'
.format(self.fold_scope_location, self)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fold_scope_location",
",",
"FoldScopeLocation",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected FoldScopeLocation fold_scope_location, got: {} {}'",
".",
"format",
"(",
"type",
... | Validate that the FoldCountContextField is correctly representable. | [
"Validate",
"that",
"the",
"FoldCountContextField",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L596-L605 | train | 227,848 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | ContextFieldExistence.validate | def validate(self):
"""Validate that the ContextFieldExistence is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'.format(
type(self.location).__name__, self.location))
if self.location.field:
raise ValueError(u'Expected location to point to a vertex, '
u'but found a field: {}'.format(self.location)) | python | def validate(self):
"""Validate that the ContextFieldExistence is correctly representable."""
if not isinstance(self.location, Location):
raise TypeError(u'Expected Location location, got: {} {}'.format(
type(self.location).__name__, self.location))
if self.location.field:
raise ValueError(u'Expected location to point to a vertex, '
u'but found a field: {}'.format(self.location)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"location",
",",
"Location",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Location location, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"location",
")... | Validate that the ContextFieldExistence is correctly representable. | [
"Validate",
"that",
"the",
"ContextFieldExistence",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L646-L654 | train | 227,849 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | UnaryTransformation.validate | def validate(self):
"""Validate that the UnaryTransformation is correctly representable."""
_validate_operator_name(self.operator, UnaryTransformation.SUPPORTED_OPERATORS)
if not isinstance(self.inner_expression, Expression):
raise TypeError(u'Expected Expression inner_expression, got {} {}'.format(
type(self.inner_expression).__name__, self.inner_expression)) | python | def validate(self):
"""Validate that the UnaryTransformation is correctly representable."""
_validate_operator_name(self.operator, UnaryTransformation.SUPPORTED_OPERATORS)
if not isinstance(self.inner_expression, Expression):
raise TypeError(u'Expected Expression inner_expression, got {} {}'.format(
type(self.inner_expression).__name__, self.inner_expression)) | [
"def",
"validate",
"(",
"self",
")",
":",
"_validate_operator_name",
"(",
"self",
".",
"operator",
",",
"UnaryTransformation",
".",
"SUPPORTED_OPERATORS",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"inner_expression",
",",
"Expression",
")",
":",
"raise",... | Validate that the UnaryTransformation is correctly representable. | [
"Validate",
"that",
"the",
"UnaryTransformation",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L688-L694 | train | 227,850 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | UnaryTransformation.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this UnaryTransformation."""
self.validate()
translation_table = {
u'size': u'size()',
}
match_operator = translation_table.get(self.operator)
if not match_operator:
raise AssertionError(u'Unrecognized operator used: '
u'{} {}'.format(self.operator, self))
template = u'%(inner)s.%(operator)s'
args = {
'inner': self.inner_expression.to_match(),
'operator': match_operator,
}
return template % args | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this UnaryTransformation."""
self.validate()
translation_table = {
u'size': u'size()',
}
match_operator = translation_table.get(self.operator)
if not match_operator:
raise AssertionError(u'Unrecognized operator used: '
u'{} {}'.format(self.operator, self))
template = u'%(inner)s.%(operator)s'
args = {
'inner': self.inner_expression.to_match(),
'operator': match_operator,
}
return template % args | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"translation_table",
"=",
"{",
"u'size'",
":",
"u'size()'",
",",
"}",
"match_operator",
"=",
"translation_table",
".",
"get",
"(",
"self",
".",
"operator",
")",
"if",
"not",
"ma... | Return a unicode object with the MATCH representation of this UnaryTransformation. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"UnaryTransformation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L705-L722 | train | 227,851 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | BinaryComposition.validate | def validate(self):
"""Validate that the BinaryComposition is correctly representable."""
_validate_operator_name(self.operator, BinaryComposition.SUPPORTED_OPERATORS)
if not isinstance(self.left, Expression):
raise TypeError(u'Expected Expression left, got: {} {} {}'.format(
type(self.left).__name__, self.left, self))
if not isinstance(self.right, Expression):
raise TypeError(u'Expected Expression right, got: {} {}'.format(
type(self.right).__name__, self.right)) | python | def validate(self):
"""Validate that the BinaryComposition is correctly representable."""
_validate_operator_name(self.operator, BinaryComposition.SUPPORTED_OPERATORS)
if not isinstance(self.left, Expression):
raise TypeError(u'Expected Expression left, got: {} {} {}'.format(
type(self.left).__name__, self.left, self))
if not isinstance(self.right, Expression):
raise TypeError(u'Expected Expression right, got: {} {}'.format(
type(self.right).__name__, self.right)) | [
"def",
"validate",
"(",
"self",
")",
":",
"_validate_operator_name",
"(",
"self",
".",
"operator",
",",
"BinaryComposition",
".",
"SUPPORTED_OPERATORS",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"left",
",",
"Expression",
")",
":",
"raise",
"TypeError"... | Validate that the BinaryComposition is correctly representable. | [
"Validate",
"that",
"the",
"BinaryComposition",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L769-L779 | train | 227,852 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | BinaryComposition.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this BinaryComposition."""
self.validate()
# The MATCH versions of some operators require an inverted order of arguments.
# pylint: disable=unused-variable
regular_operator_format = '(%(left)s %(operator)s %(right)s)'
inverted_operator_format = '(%(right)s %(operator)s %(left)s)' # noqa
intersects_operator_format = '(%(operator)s(%(left)s, %(right)s).asList().size() > 0)'
# pylint: enable=unused-variable
# Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'.
if any((isinstance(self.left, Literal) and self.left.value is None,
isinstance(self.right, Literal) and self.right.value is None)):
translation_table = {
u'=': (u'IS', regular_operator_format),
u'!=': (u'IS NOT', regular_operator_format),
}
else:
translation_table = {
u'=': (u'=', regular_operator_format),
u'!=': (u'<>', regular_operator_format),
u'>=': (u'>=', regular_operator_format),
u'<=': (u'<=', regular_operator_format),
u'>': (u'>', regular_operator_format),
u'<': (u'<', regular_operator_format),
u'+': (u'+', regular_operator_format),
u'||': (u'OR', regular_operator_format),
u'&&': (u'AND', regular_operator_format),
u'contains': (u'CONTAINS', regular_operator_format),
u'intersects': (u'intersect', intersects_operator_format),
u'has_substring': (None, None), # must be lowered into compatible form using LIKE
# MATCH-specific operators
u'LIKE': (u'LIKE', regular_operator_format),
u'INSTANCEOF': (u'INSTANCEOF', regular_operator_format),
}
match_operator, format_spec = translation_table.get(self.operator, (None, None))
if not match_operator:
raise AssertionError(u'Unrecognized operator used: '
u'{} {}'.format(self.operator, self))
return format_spec % dict(operator=match_operator,
left=self.left.to_match(),
right=self.right.to_match()) | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this BinaryComposition."""
self.validate()
# The MATCH versions of some operators require an inverted order of arguments.
# pylint: disable=unused-variable
regular_operator_format = '(%(left)s %(operator)s %(right)s)'
inverted_operator_format = '(%(right)s %(operator)s %(left)s)' # noqa
intersects_operator_format = '(%(operator)s(%(left)s, %(right)s).asList().size() > 0)'
# pylint: enable=unused-variable
# Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'.
if any((isinstance(self.left, Literal) and self.left.value is None,
isinstance(self.right, Literal) and self.right.value is None)):
translation_table = {
u'=': (u'IS', regular_operator_format),
u'!=': (u'IS NOT', regular_operator_format),
}
else:
translation_table = {
u'=': (u'=', regular_operator_format),
u'!=': (u'<>', regular_operator_format),
u'>=': (u'>=', regular_operator_format),
u'<=': (u'<=', regular_operator_format),
u'>': (u'>', regular_operator_format),
u'<': (u'<', regular_operator_format),
u'+': (u'+', regular_operator_format),
u'||': (u'OR', regular_operator_format),
u'&&': (u'AND', regular_operator_format),
u'contains': (u'CONTAINS', regular_operator_format),
u'intersects': (u'intersect', intersects_operator_format),
u'has_substring': (None, None), # must be lowered into compatible form using LIKE
# MATCH-specific operators
u'LIKE': (u'LIKE', regular_operator_format),
u'INSTANCEOF': (u'INSTANCEOF', regular_operator_format),
}
match_operator, format_spec = translation_table.get(self.operator, (None, None))
if not match_operator:
raise AssertionError(u'Unrecognized operator used: '
u'{} {}'.format(self.operator, self))
return format_spec % dict(operator=match_operator,
left=self.left.to_match(),
right=self.right.to_match()) | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"# The MATCH versions of some operators require an inverted order of arguments.",
"# pylint: disable=unused-variable",
"regular_operator_format",
"=",
"'(%(left)s %(operator)s %(right)s)'",
"inverted_operato... | Return a unicode object with the MATCH representation of this BinaryComposition. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"BinaryComposition",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L791-L836 | train | 227,853 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | TernaryConditional.validate | def validate(self):
"""Validate that the TernaryConditional is correctly representable."""
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate))
if not isinstance(self.if_true, Expression):
raise TypeError(u'Expected Expression if_true, got: {} {}'.format(
type(self.if_true).__name__, self.if_true))
if not isinstance(self.if_false, Expression):
raise TypeError(u'Expected Expression if_false, got: {} {}'.format(
type(self.if_false).__name__, self.if_false)) | python | def validate(self):
"""Validate that the TernaryConditional is correctly representable."""
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate))
if not isinstance(self.if_true, Expression):
raise TypeError(u'Expected Expression if_true, got: {} {}'.format(
type(self.if_true).__name__, self.if_true))
if not isinstance(self.if_false, Expression):
raise TypeError(u'Expected Expression if_false, got: {} {}'.format(
type(self.if_false).__name__, self.if_false)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"predicate",
",",
"Expression",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Expression predicate, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"predicate... | Validate that the TernaryConditional is correctly representable. | [
"Validate",
"that",
"the",
"TernaryConditional",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L893-L903 | train | 227,854 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/expressions.py | TernaryConditional.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this TernaryConditional."""
self.validate()
# For MATCH, an additional validation step is needed -- we currently do not support
# emitting MATCH code for TernaryConditional that contains another TernaryConditional
# anywhere within the predicate expression. This is because the predicate expression
# must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work.
def visitor_fn(expression):
"""Visitor function that ensures the predicate does not contain TernaryConditionals."""
if isinstance(expression, TernaryConditional):
raise ValueError(u'Cannot emit MATCH code for TernaryConditional that contains '
u'in its predicate another TernaryConditional: '
u'{} {}'.format(expression, self))
return expression
self.predicate.visit_and_update(visitor_fn)
format_spec = u'if(eval("%(predicate)s"), %(if_true)s, %(if_false)s)'
predicate_string = self.predicate.to_match()
if u'"' in predicate_string:
raise AssertionError(u'Found a double-quote within the predicate string, this would '
u'have terminated the if(eval()) early and should be fixed: '
u'{} {}'.format(predicate_string, self))
return format_spec % dict(predicate=predicate_string,
if_true=self.if_true.to_match(),
if_false=self.if_false.to_match()) | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this TernaryConditional."""
self.validate()
# For MATCH, an additional validation step is needed -- we currently do not support
# emitting MATCH code for TernaryConditional that contains another TernaryConditional
# anywhere within the predicate expression. This is because the predicate expression
# must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work.
def visitor_fn(expression):
"""Visitor function that ensures the predicate does not contain TernaryConditionals."""
if isinstance(expression, TernaryConditional):
raise ValueError(u'Cannot emit MATCH code for TernaryConditional that contains '
u'in its predicate another TernaryConditional: '
u'{} {}'.format(expression, self))
return expression
self.predicate.visit_and_update(visitor_fn)
format_spec = u'if(eval("%(predicate)s"), %(if_true)s, %(if_false)s)'
predicate_string = self.predicate.to_match()
if u'"' in predicate_string:
raise AssertionError(u'Found a double-quote within the predicate string, this would '
u'have terminated the if(eval()) early and should be fixed: '
u'{} {}'.format(predicate_string, self))
return format_spec % dict(predicate=predicate_string,
if_true=self.if_true.to_match(),
if_false=self.if_false.to_match()) | [
"def",
"to_match",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"# For MATCH, an additional validation step is needed -- we currently do not support",
"# emitting MATCH code for TernaryConditional that contains another TernaryConditional",
"# anywhere within the predicate expr... | Return a unicode object with the MATCH representation of this TernaryConditional. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"TernaryConditional",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L918-L945 | train | 227,855 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | sanity_check_ir_blocks_from_frontend | def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table):
"""Assert that IR blocks originating from the frontend do not have nonsensical structure.
Args:
ir_blocks: list of BasicBlocks representing the IR to sanity-check
Raises:
AssertionError, if the IR has unexpected structure. If the IR produced by the front-end
cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug,
this is the method that should catch the problem.
"""
if not ir_blocks:
raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks))
_sanity_check_fold_scope_locations_are_unique(ir_blocks)
_sanity_check_no_nested_folds(ir_blocks)
_sanity_check_query_root_block(ir_blocks)
_sanity_check_output_source_follower_blocks(ir_blocks)
_sanity_check_block_pairwise_constraints(ir_blocks)
_sanity_check_mark_location_preceding_optional_traverse(ir_blocks)
_sanity_check_every_location_is_marked(ir_blocks)
_sanity_check_coerce_type_outside_of_fold(ir_blocks)
_sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table)
_sanity_check_registered_locations_parent_locations(query_metadata_table) | python | def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table):
"""Assert that IR blocks originating from the frontend do not have nonsensical structure.
Args:
ir_blocks: list of BasicBlocks representing the IR to sanity-check
Raises:
AssertionError, if the IR has unexpected structure. If the IR produced by the front-end
cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug,
this is the method that should catch the problem.
"""
if not ir_blocks:
raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks))
_sanity_check_fold_scope_locations_are_unique(ir_blocks)
_sanity_check_no_nested_folds(ir_blocks)
_sanity_check_query_root_block(ir_blocks)
_sanity_check_output_source_follower_blocks(ir_blocks)
_sanity_check_block_pairwise_constraints(ir_blocks)
_sanity_check_mark_location_preceding_optional_traverse(ir_blocks)
_sanity_check_every_location_is_marked(ir_blocks)
_sanity_check_coerce_type_outside_of_fold(ir_blocks)
_sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table)
_sanity_check_registered_locations_parent_locations(query_metadata_table) | [
"def",
"sanity_check_ir_blocks_from_frontend",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"if",
"not",
"ir_blocks",
":",
"raise",
"AssertionError",
"(",
"u'Received no ir_blocks: {}'",
".",
"format",
"(",
"ir_blocks",
")",
")",
"_sanity_check_fold_scope_loca... | Assert that IR blocks originating from the frontend do not have nonsensical structure.
Args:
ir_blocks: list of BasicBlocks representing the IR to sanity-check
Raises:
AssertionError, if the IR has unexpected structure. If the IR produced by the front-end
cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug,
this is the method that should catch the problem. | [
"Assert",
"that",
"IR",
"blocks",
"originating",
"from",
"the",
"frontend",
"do",
"not",
"have",
"nonsensical",
"structure",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L13-L36 | train | 227,856 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_registered_locations_parent_locations | def _sanity_check_registered_locations_parent_locations(query_metadata_table):
"""Assert that all registered locations' parent locations are also registered."""
for location, location_info in query_metadata_table.registered_locations:
if (location != query_metadata_table.root_location and
not query_metadata_table.root_location.is_revisited_at(location)):
# If the location is not the root location and is not a revisit of the root,
# then it must have a parent location.
if location_info.parent_location is None:
raise AssertionError(u'Found a location that is not the root location of the query '
u'or a revisit of the root, but does not have a parent: '
u'{} {}'.format(location, location_info))
if location_info.parent_location is not None:
# Make sure the parent_location is also registered.
# If the location is not registered, the following line will raise an error.
query_metadata_table.get_location_info(location_info.parent_location) | python | def _sanity_check_registered_locations_parent_locations(query_metadata_table):
"""Assert that all registered locations' parent locations are also registered."""
for location, location_info in query_metadata_table.registered_locations:
if (location != query_metadata_table.root_location and
not query_metadata_table.root_location.is_revisited_at(location)):
# If the location is not the root location and is not a revisit of the root,
# then it must have a parent location.
if location_info.parent_location is None:
raise AssertionError(u'Found a location that is not the root location of the query '
u'or a revisit of the root, but does not have a parent: '
u'{} {}'.format(location, location_info))
if location_info.parent_location is not None:
# Make sure the parent_location is also registered.
# If the location is not registered, the following line will raise an error.
query_metadata_table.get_location_info(location_info.parent_location) | [
"def",
"_sanity_check_registered_locations_parent_locations",
"(",
"query_metadata_table",
")",
":",
"for",
"location",
",",
"location_info",
"in",
"query_metadata_table",
".",
"registered_locations",
":",
"if",
"(",
"location",
"!=",
"query_metadata_table",
".",
"root_loca... | Assert that all registered locations' parent locations are also registered. | [
"Assert",
"that",
"all",
"registered",
"locations",
"parent",
"locations",
"are",
"also",
"registered",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L39-L54 | train | 227,857 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_all_marked_locations_are_registered | def _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table):
"""Assert that all locations in MarkLocation blocks have registered and valid metadata."""
# Grab all the registered locations, then make sure that:
# - Any location that appears in a MarkLocation block is also registered.
# - There are no registered locations that do not appear in a MarkLocation block.
registered_locations = {
location
for location, _ in query_metadata_table.registered_locations
}
ir_encountered_locations = {
block.location
for block in ir_blocks
if isinstance(block, MarkLocation)
}
unregistered_locations = ir_encountered_locations - registered_locations
unencountered_locations = registered_locations - ir_encountered_locations
if unregistered_locations:
raise AssertionError(u'IR blocks unexpectedly contain locations not registered in the '
u'QueryMetadataTable: {}'.format(unregistered_locations))
if unencountered_locations:
raise AssertionError(u'QueryMetadataTable unexpectedly contains registered locations that '
u'never appear in the IR blocks: {}'.format(unencountered_locations)) | python | def _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table):
"""Assert that all locations in MarkLocation blocks have registered and valid metadata."""
# Grab all the registered locations, then make sure that:
# - Any location that appears in a MarkLocation block is also registered.
# - There are no registered locations that do not appear in a MarkLocation block.
registered_locations = {
location
for location, _ in query_metadata_table.registered_locations
}
ir_encountered_locations = {
block.location
for block in ir_blocks
if isinstance(block, MarkLocation)
}
unregistered_locations = ir_encountered_locations - registered_locations
unencountered_locations = registered_locations - ir_encountered_locations
if unregistered_locations:
raise AssertionError(u'IR blocks unexpectedly contain locations not registered in the '
u'QueryMetadataTable: {}'.format(unregistered_locations))
if unencountered_locations:
raise AssertionError(u'QueryMetadataTable unexpectedly contains registered locations that '
u'never appear in the IR blocks: {}'.format(unencountered_locations)) | [
"def",
"_sanity_check_all_marked_locations_are_registered",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"# Grab all the registered locations, then make sure that:",
"# - Any location that appears in a MarkLocation block is also registered.",
"# - There are no registered locations th... | Assert that all locations in MarkLocation blocks have registered and valid metadata. | [
"Assert",
"that",
"all",
"locations",
"in",
"MarkLocation",
"blocks",
"have",
"registered",
"and",
"valid",
"metadata",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L57-L80 | train | 227,858 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_fold_scope_locations_are_unique | def _sanity_check_fold_scope_locations_are_unique(ir_blocks):
"""Assert that every FoldScopeLocation that exists on a Fold block is unique."""
observed_locations = dict()
for block in ir_blocks:
if isinstance(block, Fold):
alternate = observed_locations.get(block.fold_scope_location, None)
if alternate is not None:
raise AssertionError(u'Found two Fold blocks with identical FoldScopeLocations: '
u'{} {} {}'.format(alternate, block, ir_blocks))
observed_locations[block.fold_scope_location] = block | python | def _sanity_check_fold_scope_locations_are_unique(ir_blocks):
"""Assert that every FoldScopeLocation that exists on a Fold block is unique."""
observed_locations = dict()
for block in ir_blocks:
if isinstance(block, Fold):
alternate = observed_locations.get(block.fold_scope_location, None)
if alternate is not None:
raise AssertionError(u'Found two Fold blocks with identical FoldScopeLocations: '
u'{} {} {}'.format(alternate, block, ir_blocks))
observed_locations[block.fold_scope_location] = block | [
"def",
"_sanity_check_fold_scope_locations_are_unique",
"(",
"ir_blocks",
")",
":",
"observed_locations",
"=",
"dict",
"(",
")",
"for",
"block",
"in",
"ir_blocks",
":",
"if",
"isinstance",
"(",
"block",
",",
"Fold",
")",
":",
"alternate",
"=",
"observed_locations"... | Assert that every FoldScopeLocation that exists on a Fold block is unique. | [
"Assert",
"that",
"every",
"FoldScopeLocation",
"that",
"exists",
"on",
"a",
"Fold",
"block",
"is",
"unique",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L83-L92 | train | 227,859 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_no_nested_folds | def _sanity_check_no_nested_folds(ir_blocks):
"""Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold."""
fold_seen = False
for block in ir_blocks:
if isinstance(block, Fold):
if fold_seen:
raise AssertionError(u'Found a nested Fold contexts: {}'.format(ir_blocks))
else:
fold_seen = True
elif isinstance(block, Unfold):
if not fold_seen:
raise AssertionError(u'Found an Unfold block without a matching Fold: '
u'{}'.format(ir_blocks))
else:
fold_seen = False | python | def _sanity_check_no_nested_folds(ir_blocks):
"""Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold."""
fold_seen = False
for block in ir_blocks:
if isinstance(block, Fold):
if fold_seen:
raise AssertionError(u'Found a nested Fold contexts: {}'.format(ir_blocks))
else:
fold_seen = True
elif isinstance(block, Unfold):
if not fold_seen:
raise AssertionError(u'Found an Unfold block without a matching Fold: '
u'{}'.format(ir_blocks))
else:
fold_seen = False | [
"def",
"_sanity_check_no_nested_folds",
"(",
"ir_blocks",
")",
":",
"fold_seen",
"=",
"False",
"for",
"block",
"in",
"ir_blocks",
":",
"if",
"isinstance",
"(",
"block",
",",
"Fold",
")",
":",
"if",
"fold_seen",
":",
"raise",
"AssertionError",
"(",
"u'Found a n... | Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold. | [
"Assert",
"that",
"there",
"are",
"no",
"nested",
"Fold",
"contexts",
"and",
"that",
"every",
"Fold",
"has",
"a",
"matching",
"Unfold",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L95-L109 | train | 227,860 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_query_root_block | def _sanity_check_query_root_block(ir_blocks):
"""Assert that QueryRoot is always the first block, and only the first block."""
if not isinstance(ir_blocks[0], QueryRoot):
raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks))
for block in ir_blocks[1:]:
if isinstance(block, QueryRoot):
raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks)) | python | def _sanity_check_query_root_block(ir_blocks):
"""Assert that QueryRoot is always the first block, and only the first block."""
if not isinstance(ir_blocks[0], QueryRoot):
raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks))
for block in ir_blocks[1:]:
if isinstance(block, QueryRoot):
raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks)) | [
"def",
"_sanity_check_query_root_block",
"(",
"ir_blocks",
")",
":",
"if",
"not",
"isinstance",
"(",
"ir_blocks",
"[",
"0",
"]",
",",
"QueryRoot",
")",
":",
"raise",
"AssertionError",
"(",
"u'The first block was not QueryRoot: {}'",
".",
"format",
"(",
"ir_blocks",
... | Assert that QueryRoot is always the first block, and only the first block. | [
"Assert",
"that",
"QueryRoot",
"is",
"always",
"the",
"first",
"block",
"and",
"only",
"the",
"first",
"block",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L112-L118 | train | 227,861 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_construct_result_block | def _sanity_check_construct_result_block(ir_blocks):
"""Assert that ConstructResult is always the last block, and only the last block."""
if not isinstance(ir_blocks[-1], ConstructResult):
raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks))
for block in ir_blocks[:-1]:
if isinstance(block, ConstructResult):
raise AssertionError(u'Found ConstructResult before the last block: '
u'{}'.format(ir_blocks)) | python | def _sanity_check_construct_result_block(ir_blocks):
"""Assert that ConstructResult is always the last block, and only the last block."""
if not isinstance(ir_blocks[-1], ConstructResult):
raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks))
for block in ir_blocks[:-1]:
if isinstance(block, ConstructResult):
raise AssertionError(u'Found ConstructResult before the last block: '
u'{}'.format(ir_blocks)) | [
"def",
"_sanity_check_construct_result_block",
"(",
"ir_blocks",
")",
":",
"if",
"not",
"isinstance",
"(",
"ir_blocks",
"[",
"-",
"1",
"]",
",",
"ConstructResult",
")",
":",
"raise",
"AssertionError",
"(",
"u'The last block was not ConstructResult: {}'",
".",
"format"... | Assert that ConstructResult is always the last block, and only the last block. | [
"Assert",
"that",
"ConstructResult",
"is",
"always",
"the",
"last",
"block",
"and",
"only",
"the",
"last",
"block",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L121-L128 | train | 227,862 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_sanity_checks.py | _sanity_check_block_pairwise_constraints | def _sanity_check_block_pairwise_constraints(ir_blocks):
"""Assert that adjacent blocks obey all invariants."""
for first_block, second_block in pairwise(ir_blocks):
# Always Filter before MarkLocation, never after.
if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter):
raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks))
# There's no point in marking the same location twice in a row.
if isinstance(first_block, MarkLocation) and isinstance(second_block, MarkLocation):
raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks))
# Traverse blocks with optional=True are immediately followed
# by a MarkLocation, CoerceType or Filter block.
if isinstance(first_block, Traverse) and first_block.optional:
if not isinstance(second_block, (MarkLocation, CoerceType, Filter)):
raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse '
u'with optional=True. Found: {}'.format(ir_blocks))
# Backtrack blocks with optional=True are immediately followed by a MarkLocation block.
if isinstance(first_block, Backtrack) and first_block.optional:
if not isinstance(second_block, MarkLocation):
raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, '
u'but none was found: {}'.format(ir_blocks))
# Recurse blocks are immediately preceded by a MarkLocation or Backtrack block.
if isinstance(second_block, Recurse):
if not (isinstance(first_block, MarkLocation) or isinstance(first_block, Backtrack)):
raise AssertionError(u'Expected MarkLocation or Backtrack before Recurse, but none '
def _sanity_check_block_pairwise_constraints(ir_blocks):
    """Assert that adjacent blocks obey all invariants."""
    for previous_block, current_block in pairwise(ir_blocks):
        # A Filter may never appear directly after a MarkLocation.
        if isinstance(current_block, Filter) and isinstance(previous_block, MarkLocation):
            raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks))

        # Marking the same location twice in a row is redundant and disallowed.
        if isinstance(current_block, MarkLocation) and isinstance(previous_block, MarkLocation):
            raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks))

        # An optional Traverse must be immediately followed by a
        # MarkLocation, CoerceType or Filter block.
        if isinstance(previous_block, Traverse) and previous_block.optional:
            if not isinstance(current_block, (MarkLocation, CoerceType, Filter)):
                raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse '
                                     u'with optional=True. Found: {}'.format(ir_blocks))

        # An optional Backtrack must be immediately followed by a MarkLocation block.
        if isinstance(previous_block, Backtrack) and previous_block.optional:
            if not isinstance(current_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, '
                                     u'but none was found: {}'.format(ir_blocks))

        # A Recurse must be immediately preceded by a MarkLocation or Backtrack block.
        if isinstance(current_block, Recurse):
            if not isinstance(previous_block, (MarkLocation, Backtrack)):
                raise AssertionError(u'Expected MarkLocation or Backtrack before Recurse, but none '
                                     u'was found: {}'.format(ir_blocks))
"def",
"_sanity_check_block_pairwise_constraints",
"(",
"ir_blocks",
")",
":",
"for",
"first_block",
",",
"second_block",
"in",
"pairwise",
"(",
"ir_blocks",
")",
":",
"# Always Filter before MarkLocation, never after.",
"if",
"isinstance",
"(",
"first_block",
",",
"MarkL... | Assert that adjacent blocks obey all invariants. | [
"Assert",
"that",
"adjacent",
"blocks",
"obey",
"all",
"invariants",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L144-L172 | train | 227,863 |
def _sanity_check_mark_location_preceding_optional_traverse(ir_blocks):
    """Assert that optional Traverse blocks are preceded by a MarkLocation."""
    # With all fold scopes stripped out, an optional Traverse is only valid
    # when the block directly before it is a MarkLocation.
    _, blocks_without_folds = extract_folds_from_ir_blocks(ir_blocks)
    for previous_block, current_block in pairwise(blocks_without_folds):
        if isinstance(current_block, Traverse) and current_block.optional:
            if not isinstance(previous_block, MarkLocation):
                raise AssertionError(u'Expected MarkLocation before Traverse with optional=True, '
                                     u'but none was found: {}'.format(ir_blocks))
"def",
"_sanity_check_mark_location_preceding_optional_traverse",
"(",
"ir_blocks",
")",
":",
"# Once all fold blocks are removed, each optional Traverse must have",
"# a MarkLocation block immediately before it.",
"_",
",",
"new_ir_blocks",
"=",
"extract_folds_from_ir_blocks",
"(",
"ir_... | Assert that optional Traverse blocks are preceded by a MarkLocation. | [
"Assert",
"that",
"optional",
"Traverse",
"blocks",
"are",
"preceded",
"by",
"a",
"MarkLocation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L175-L185 | train | 227,864 |
def _sanity_check_every_location_is_marked(ir_blocks):
    """Ensure that every new location is marked with a MarkLocation block."""
    # Between any block that opens an interval of blocks all acting on the same
    # query position, and the first later block that moves to a different
    # position, exactly one MarkLocation block must appear. Such intervals include:
    #   - from Fold to Unfold
    #   - from QueryRoot to Traverse/Recurse
    #   - from one Traverse to the next Traverse
    #   - from Traverse to Backtrack
    interval_openers = (QueryRoot, Traverse, Recurse, Fold)
    interval_closers = (Backtrack, ConstructResult, Recurse, Traverse, Unfold)

    inside_interval = False
    num_marks_seen = 0
    for block in ir_blocks:
        # First, close any open interval and validate its MarkLocation count.
        if isinstance(block, interval_closers) and inside_interval:
            inside_interval = False
            if num_marks_seen != 1:
                raise AssertionError(u'Expected 1 MarkLocation block between traversals, found: '
                                     u'{} {}'.format(num_marks_seen, ir_blocks))

        # Then, either count the MarkLocation or open a fresh interval.
        if isinstance(block, MarkLocation):
            num_marks_seen += 1
        elif isinstance(block, interval_openers):
            inside_interval = True
            num_marks_seen = 0
"def",
"_sanity_check_every_location_is_marked",
"(",
"ir_blocks",
")",
":",
"# Exactly one MarkLocation block is found between any block that starts an interval of blocks",
"# that all affect the same query position, and the first subsequent block that affects a",
"# different position in the query... | Ensure that every new location is marked with a MarkLocation block. | [
"Ensure",
"that",
"every",
"new",
"location",
"is",
"marked",
"with",
"a",
"MarkLocation",
"block",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L188-L216 | train | 227,865 |
def _sanity_check_coerce_type_outside_of_fold(ir_blocks):
    """Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block."""
    within_fold_scope = False
    for previous_block, current_block in pairwise(ir_blocks):
        if isinstance(previous_block, Fold):
            within_fold_scope = True

        # Outside of a fold scope, a CoerceType block must be followed
        # by either a MarkLocation or a Filter block.
        coerce_outside_fold = (
            not within_fold_scope and isinstance(previous_block, CoerceType))
        if coerce_outside_fold and not isinstance(current_block, (MarkLocation, Filter)):
            raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, '
                                 u'but none was found: {}'.format(ir_blocks))

        if isinstance(current_block, Unfold):
            within_fold_scope = False
"def",
"_sanity_check_coerce_type_outside_of_fold",
"(",
"ir_blocks",
")",
":",
"is_in_fold",
"=",
"False",
"for",
"first_block",
",",
"second_block",
"in",
"pairwise",
"(",
"ir_blocks",
")",
":",
"if",
"isinstance",
"(",
"first_block",
",",
"Fold",
")",
":",
"i... | Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block. | [
"Ensure",
"that",
"CoerceType",
"not",
"in",
"a"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L219-L232 | train | 227,866 |
def validate_supported_property_type_id(property_name, property_type_id):
    """Ensure that the given property type_id is supported by the graph."""
    is_supported = property_type_id in PROPERTY_TYPE_ID_TO_NAME
    if not is_supported:
        raise AssertionError(u'Property "{}" has unsupported property type id: '
                             u'{}'.format(property_name, property_type_id))
"def",
"validate_supported_property_type_id",
"(",
"property_name",
",",
"property_type_id",
")",
":",
"if",
"property_type_id",
"not",
"in",
"PROPERTY_TYPE_ID_TO_NAME",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" has unsupported property type id: '",
"u'{}'",
".",
... | Ensure that the given property type_id is supported by the graph. | [
"Ensure",
"that",
"the",
"given",
"property",
"type_id",
"is",
"supported",
"by",
"the",
"graph",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L96-L100 | train | 227,867 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_properties.py | _parse_bool_default_value | def _parse_bool_default_value(property_name, default_value_string):
"""Parse and return the default value for a boolean property."""
lowercased_value_string = default_value_string.lower()
if lowercased_value_string in {'0', 'false'}:
return False
elif lowercased_value_string in {'1', 'true'}:
return True
else:
raise AssertionError(u'Unsupported default value for boolean property "{}": '
u'{}'.format(property_name, default_value_string)) | python | def _parse_bool_default_value(property_name, default_value_string):
"""Parse and return the default value for a boolean property."""
lowercased_value_string = default_value_string.lower()
if lowercased_value_string in {'0', 'false'}:
return False
elif lowercased_value_string in {'1', 'true'}:
return True
else:
raise AssertionError(u'Unsupported default value for boolean property "{}": '
u'{}'.format(property_name, default_value_string)) | [
"def",
"_parse_bool_default_value",
"(",
"property_name",
",",
"default_value_string",
")",
":",
"lowercased_value_string",
"=",
"default_value_string",
".",
"lower",
"(",
")",
"if",
"lowercased_value_string",
"in",
"{",
"'0'",
",",
"'false'",
"}",
":",
"return",
"F... | Parse and return the default value for a boolean property. | [
"Parse",
"and",
"return",
"the",
"default",
"value",
"for",
"a",
"boolean",
"property",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L103-L112 | train | 227,868 |
def _parse_datetime_default_value(property_name, default_value_string):
    """Parse and return the default value for a datetime property.

    Args:
        property_name: string, name of the property whose default is being parsed.
                       Currently unused; kept for signature parity with the
                       other default-value parsers.
        default_value_string: string, OrientDB's textual representation of the default.

    Returns:
        naive datetime.datetime object equivalent to the parsed default value.

    Raises:
        ValueError, if the provided value cannot be parsed with the expected format.
    """
    # OrientDB doesn't use ISO-8601 datetime format, so parse with the
    # OrientDB-specific format string. datetime.strptime() raises ValueError on
    # malformed input and yields a naive datetime with microsecond=0 -- identical
    # to the previous manual time.strptime() + datetime() reconstruction, but in
    # a single idiomatic stdlib call.
    return datetime.datetime.strptime(default_value_string, ORIENTDB_DATETIME_FORMAT)
"def",
"_parse_datetime_default_value",
"(",
"property_name",
",",
"default_value_string",
")",
":",
"# OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually",
"# and then turn it into a python datetime object. strptime() will raise an exception",
"# if the provided valu... | Parse and return the default value for a datetime property. | [
"Parse",
"and",
"return",
"the",
"default",
"value",
"for",
"a",
"datetime",
"property",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L115-L123 | train | 227,869 |
def _parse_date_default_value(property_name, default_value_string):
    """Parse and return the default value for a date property.

    Args:
        property_name: string, name of the property whose default is being parsed.
                       Currently unused; kept for signature parity with the
                       other default-value parsers.
        default_value_string: string, OrientDB's textual representation of the default.

    Returns:
        datetime.date object equivalent to the parsed default value.

    Raises:
        ValueError, if the provided value cannot be parsed with the expected format.
    """
    # OrientDB doesn't use ISO-8601 date format, so parse with the
    # OrientDB-specific format string. datetime.strptime() raises ValueError on
    # malformed input; taking .date() matches the previous manual
    # time.strptime() + datetime.date() reconstruction exactly.
    return datetime.datetime.strptime(default_value_string, ORIENTDB_DATE_FORMAT).date()
"def",
"_parse_date_default_value",
"(",
"property_name",
",",
"default_value_string",
")",
":",
"# OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually",
"# and then turn it into a python datetime object. strptime() will raise an exception",
"# if the provided value ca... | Parse and return the default value for a date property. | [
"Parse",
"and",
"return",
"the",
"default",
"value",
"for",
"a",
"date",
"property",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L126-L132 | train | 227,870 |
def parse_default_property_value(property_name, property_type_id, default_value_string):
    """Parse the default value string into its proper form given the property type ID.

    Args:
        property_name: string, the name of the property whose default value is being parsed.
                       Used primarily to construct meaningful error messages, should the default
                       value prove invalid.
        property_type_id: int, one of the property type ID constants defined in this file that
                          OrientDB uses to designate the native type of a given property.
        default_value_string: string, the textual representation of the default value for
                              for the property, as returned by OrientDB's schema introspection code.

    Returns:
        an object of type matching the property that can be used as the property's default value.
        For example, if the property is of string type, the return type will be a string, and if
        the property is of list type, the return type will be a list.

    Raises:
        AssertionError, if the default value is not supported or does not match the
        property's declared type (e.g. if a default of "[]" is set on an integer property).
    """
    # Collection types only support the corresponding empty-collection default.
    if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}':
        return set()
    if property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]':
        return list()

    # String defaults are passed through unchanged.
    is_string_default = (property_type_id == PROPERTY_TYPE_STRING_ID and
                         isinstance(default_value_string, six.string_types))
    if is_string_default:
        return default_value_string

    # Scalar types with dedicated parsing helpers.
    if property_type_id == PROPERTY_TYPE_BOOLEAN_ID:
        return _parse_bool_default_value(property_name, default_value_string)
    if property_type_id == PROPERTY_TYPE_DATETIME_ID:
        return _parse_datetime_default_value(property_name, default_value_string)
    if property_type_id == PROPERTY_TYPE_DATE_ID:
        return _parse_date_default_value(property_name, default_value_string)

    raise AssertionError(u'Unsupported default value for property "{}" with type id {}: '
                         u'{}'.format(property_name, property_type_id, default_value_string))
"def",
"parse_default_property_value",
"(",
"property_name",
",",
"property_type_id",
",",
"default_value_string",
")",
":",
"if",
"property_type_id",
"==",
"PROPERTY_TYPE_EMBEDDED_SET_ID",
"and",
"default_value_string",
"==",
"'{}'",
":",
"return",
"set",
"(",
")",
"el... | Parse the default value string into its proper form given the property type ID.
Args:
property_name: string, the name of the property whose default value is being parsed.
Used primarily to construct meaningful error messages, should the default
value prove invalid.
property_type_id: int, one of the property type ID constants defined in this file that
OrientDB uses to designate the native type of a given property.
default_value_string: string, the textual representation of the default value for
for the property, as returned by OrientDB's schema introspection code.
Returns:
an object of type matching the property that can be used as the property's default value.
For example, if the property is of string type, the return type will be a string, and if
the property is of list type, the return type will be a list.
Raises:
AssertionError, if the default value is not supported or does not match the
property's declared type (e.g. if a default of "[]" is set on an integer property). | [
"Parse",
"the",
"default",
"value",
"string",
"into",
"its",
"proper",
"form",
"given",
"the",
"property",
"type",
"ID",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L135-L171 | train | 227,871 |
def _compile_graphql_generic(language, lowering_func, query_emitter_func,
                             schema, graphql_string, type_equivalence_hints, compiler_metadata):
    """Compile the GraphQL input, lowering and emitting the query using the given functions.

    Args:
        language: string indicating the target language to compile to.
        lowering_func: Function to lower the compiler IR into a compatible form for the target
                       language backend.
        query_emitter_func: Function that emits a query in the target language from the lowered IR.
        schema: GraphQL schema object describing the schema of the graph to be queried.
        graphql_string: the GraphQL query to compile to the target language, as a string.
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
        compiler_metadata: optional target specific metadata for usage by the query_emitter_func.

    Returns:
        a CompilationResult object
    """
    # Front-end: parse and validate the GraphQL, producing compiler IR plus metadata.
    compilation_ir = graphql_to_ir(
        schema, graphql_string, type_equivalence_hints=type_equivalence_hints)

    # Lower the IR into a form the target language backend can handle.
    lowered_blocks = lowering_func(
        compilation_ir.ir_blocks, compilation_ir.query_metadata_table,
        type_equivalence_hints=type_equivalence_hints)

    # Back-end: emit the actual query text for the target language.
    emitted_query = query_emitter_func(lowered_blocks, compiler_metadata)

    return CompilationResult(
        query=emitted_query,
        language=language,
        output_metadata=compilation_ir.output_metadata,
        input_metadata=compilation_ir.input_metadata)
"def",
"_compile_graphql_generic",
"(",
"language",
",",
"lowering_func",
",",
"query_emitter_func",
",",
"schema",
",",
"graphql_string",
",",
"type_equivalence_hints",
",",
"compiler_metadata",
")",
":",
"ir_and_metadata",
"=",
"graphql_to_ir",
"(",
"schema",
",",
"... | Compile the GraphQL input, lowering and emitting the query using the given functions.
Args:
language: string indicating the target language to compile to.
lowering_func: Function to lower the compiler IR into a compatible form for the target
language backend.
query_emitter_func: Function that emits a query in the target language from the lowered IR.
schema: GraphQL schema object describing the schema of the graph to be queried.
graphql_string: the GraphQL query to compile to the target language, as a string.
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
compiler_metadata: optional target specific metadata for usage by the query_emitter_func.
Returns:
a CompilationResult object | [
"Compile",
"the",
"GraphQL",
"input",
"lowering",
"and",
"emitting",
"the",
"query",
"using",
"the",
"given",
"functions",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/common.py#L122-L152 | train | 227,872 |
def scalar_leaf_only(operator):
    """Ensure the filter function is only applied to scalar leaf types.

    Args:
        operator: string, the name of the filter operator, used when constructing
                  the error message for a misapplied filter.

    Returns:
        decorator that wraps a filter-handling function and raises
        GraphQLCompilationError if the filtered field is not a scalar leaf type.
    """
    def decorator(f):
        """Decorate the supplied function with the "scalar_leaf_only" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, context, parameters, *args, **kwargs):
            """Check that the type on which the operator operates is a scalar leaf type."""
            if 'operator' in kwargs:
                current_operator = kwargs['operator']
            else:
                # Because "operator" is from an enclosing scope, it is immutable in Python 2.x.
                current_operator = operator

            if not is_leaf_type(filter_operation_info.field_type):
                # Bug fix: the two message fragments were previously concatenated
                # without any separator, producing "...non-leaf typeX". Add ": "
                # to match the message style of the vertex_field_only decorator.
                raise GraphQLCompilationError(u'Cannot apply "{}" filter to non-leaf type: '
                                              u'{}'.format(current_operator, filter_operation_info))
            return f(filter_operation_info, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
"def",
"scalar_leaf_only",
"(",
"operator",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"\"\"\"Decorate the supplied function with the \"scalar_leaf_only\" logic.\"\"\"",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"filter_operation_info",
",",
"context",
... | Ensure the filter function is only applied to scalar leaf types. | [
"Ensure",
"the",
"filter",
"function",
"is",
"only",
"applied",
"to",
"scalar",
"leaf",
"types",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L17-L37 | train | 227,873 |
def vertex_field_only(operator):
    """Ensure the filter function is only applied to vertex field types."""
    def decorator(f):
        """Decorate the supplied function with the "vertex_field_only" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, context, parameters, *args, **kwargs):
            """Check that the type on which the operator operates is a vertex field type."""
            # "operator" from the enclosing scope is immutable in Python 2.x,
            # so prefer an explicitly-passed keyword override when present.
            current_operator = kwargs.get('operator', operator)

            if not is_vertex_field_type(filter_operation_info.field_type):
                raise GraphQLCompilationError(
                    u'Cannot apply "{}" filter to non-vertex field: '
                    u'{}'.format(current_operator, filter_operation_info.field_name))
            return f(filter_operation_info, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
"def",
"vertex_field_only",
"(",
"operator",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"\"\"\"Decorate the supplied function with the \"vertex_field_only\" logic.\"\"\"",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"filter_operation_info",
",",
"context"... | Ensure the filter function is only applied to vertex field types. | [
"Ensure",
"the",
"filter",
"function",
"is",
"only",
"applied",
"to",
"vertex",
"field",
"types",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L40-L61 | train | 227,874 |
def takes_parameters(count):
    """Ensure the filter function has "count" parameters specified."""
    def decorator(f):
        """Decorate the supplied function with the "takes_parameters" logic."""
        @wraps(f)
        def wrapper(filter_operation_info, location, context, parameters, *args, **kwargs):
            """Check that the supplied number of parameters equals the expected number."""
            actual_count = len(parameters)
            if actual_count != count:
                raise GraphQLCompilationError(u'Incorrect number of parameters, expected {} got '
                                              u'{}: {}'.format(count, actual_count, parameters))
            return f(filter_operation_info, location, context, parameters, *args, **kwargs)
        return wrapper
    return decorator
"def",
"takes_parameters",
"(",
"count",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"\"\"\"Decorate the supplied function with the \"takes_parameters\" logic.\"\"\"",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"filter_operation_info",
",",
"location",
... | Ensure the filter function has "count" parameters specified. | [
"Ensure",
"the",
"filter",
"function",
"has",
"count",
"parameters",
"specified",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L64-L79 | train | 227,875 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _represent_argument | def _represent_argument(directive_location, context, argument, inferred_type):
"""Return a two-element tuple that represents the argument to the directive being processed.
Args:
directive_location: Location where the directive is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
argument: string, the name of the argument to the directive
inferred_type: GraphQL type object specifying the inferred type of the argument
Returns:
(argument_expression, non_existence_expression)
- argument_expression: an Expression object that captures the semantics of the argument
- non_existence_expression: None or Expression object;
If the current block is not optional, this is set to None. Otherwise, it is an
expression that will evaluate to True if the argument is skipped as optional and
therefore not present, and False otherwise.
"""
# Regardless of what kind of variable we are dealing with,
# we want to ensure its name is valid.
argument_name = argument[1:]
validate_safe_string(argument_name)
if is_variable_argument(argument):
existing_type = context['inputs'].get(argument_name, inferred_type)
if not inferred_type.is_same_type(existing_type):
raise GraphQLCompilationError(u'Incompatible types inferred for argument {}. '
u'The argument cannot simultaneously be '
u'{} and {}.'.format(argument, existing_type,
inferred_type))
context['inputs'][argument_name] = inferred_type
return (expressions.Variable(argument, inferred_type), None)
elif is_tag_argument(argument):
argument_context = context['tags'].get(argument_name, None)
if argument_context is None:
raise GraphQLCompilationError(u'Undeclared argument used: {}'.format(argument))
location = argument_context['location']
optional = argument_context['optional']
tag_inferred_type = argument_context['type']
if location is None:
raise AssertionError(u'Argument declared without location: {}'.format(argument_name))
if location.field is None:
raise AssertionError(u'Argument location is not a property field: {}'.format(location))
if not inferred_type.is_same_type(tag_inferred_type):
raise GraphQLCompilationError(u'The inferred type of the matching @tag directive does '
u'not match the inferred required type for this filter: '
u'{} vs {}'.format(tag_inferred_type, inferred_type))
# Check whether the argument is a field on the vertex on which the directive is applied.
field_is_local = directive_location.at_vertex() == location.at_vertex()
non_existence_expression = None
if optional:
if field_is_local:
non_existence_expression = expressions.FalseLiteral
else:
non_existence_expression = expressions.BinaryComposition(
u'=',
expressions.ContextFieldExistence(location.at_vertex()),
expressions.FalseLiteral)
if field_is_local:
representation = expressions.LocalField(argument_name)
else:
representation = expressions.ContextField(location, tag_inferred_type)
return (representation, non_existence_expression)
else:
# If we want to support literal arguments, add them here.
raise GraphQLCompilationError(u'Non-argument type found: {}'.format(argument)) | python | def _represent_argument(directive_location, context, argument, inferred_type):
"""Return a two-element tuple that represents the argument to the directive being processed.
Args:
directive_location: Location where the directive is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
argument: string, the name of the argument to the directive
inferred_type: GraphQL type object specifying the inferred type of the argument
Returns:
(argument_expression, non_existence_expression)
- argument_expression: an Expression object that captures the semantics of the argument
- non_existence_expression: None or Expression object;
If the current block is not optional, this is set to None. Otherwise, it is an
expression that will evaluate to True if the argument is skipped as optional and
therefore not present, and False otherwise.
"""
# Regardless of what kind of variable we are dealing with,
# we want to ensure its name is valid.
argument_name = argument[1:]
validate_safe_string(argument_name)
if is_variable_argument(argument):
existing_type = context['inputs'].get(argument_name, inferred_type)
if not inferred_type.is_same_type(existing_type):
raise GraphQLCompilationError(u'Incompatible types inferred for argument {}. '
u'The argument cannot simultaneously be '
u'{} and {}.'.format(argument, existing_type,
inferred_type))
context['inputs'][argument_name] = inferred_type
return (expressions.Variable(argument, inferred_type), None)
elif is_tag_argument(argument):
argument_context = context['tags'].get(argument_name, None)
if argument_context is None:
raise GraphQLCompilationError(u'Undeclared argument used: {}'.format(argument))
location = argument_context['location']
optional = argument_context['optional']
tag_inferred_type = argument_context['type']
if location is None:
raise AssertionError(u'Argument declared without location: {}'.format(argument_name))
if location.field is None:
raise AssertionError(u'Argument location is not a property field: {}'.format(location))
if not inferred_type.is_same_type(tag_inferred_type):
raise GraphQLCompilationError(u'The inferred type of the matching @tag directive does '
u'not match the inferred required type for this filter: '
u'{} vs {}'.format(tag_inferred_type, inferred_type))
# Check whether the argument is a field on the vertex on which the directive is applied.
field_is_local = directive_location.at_vertex() == location.at_vertex()
non_existence_expression = None
if optional:
if field_is_local:
non_existence_expression = expressions.FalseLiteral
else:
non_existence_expression = expressions.BinaryComposition(
u'=',
expressions.ContextFieldExistence(location.at_vertex()),
expressions.FalseLiteral)
if field_is_local:
representation = expressions.LocalField(argument_name)
else:
representation = expressions.ContextField(location, tag_inferred_type)
return (representation, non_existence_expression)
else:
# If we want to support literal arguments, add them here.
raise GraphQLCompilationError(u'Non-argument type found: {}'.format(argument)) | [
"def",
"_represent_argument",
"(",
"directive_location",
",",
"context",
",",
"argument",
",",
"inferred_type",
")",
":",
"# Regardless of what kind of variable we are dealing with,",
"# we want to ensure its name is valid.",
"argument_name",
"=",
"argument",
"[",
"1",
":",
"... | Return a two-element tuple that represents the argument to the directive being processed.
Args:
directive_location: Location where the directive is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
argument: string, the name of the argument to the directive
inferred_type: GraphQL type object specifying the inferred type of the argument
Returns:
(argument_expression, non_existence_expression)
- argument_expression: an Expression object that captures the semantics of the argument
- non_existence_expression: None or Expression object;
If the current block is not optional, this is set to None. Otherwise, it is an
expression that will evaluate to True if the argument is skipped as optional and
therefore not present, and False otherwise. | [
"Return",
"a",
"two",
"-",
"element",
"tuple",
"that",
"represents",
"the",
"argument",
"to",
"the",
"directive",
"being",
"processed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L82-L156 | train | 227,876 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_comparison_filter_directive | def _process_comparison_filter_directive(filter_operation_info, location,
context, parameters, operator=None):
"""Return a Filter basic block that performs the given comparison against the property field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to perform the comparison against;
if the parameter is optional and missing, the check will return True
operator: unicode, a comparison operator, like '=', '!=', '>=' etc.
This is a kwarg only to preserve the same positional arguments in the
function signature, to ease validation.
Returns:
a Filter basic block that performs the requested comparison
"""
comparison_operators = {u'=', u'!=', u'>', u'<', u'>=', u'<='}
if operator not in comparison_operators:
raise AssertionError(u'Expected a valid comparison operator ({}), but got '
u'{}'.format(comparison_operators, operator))
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
comparison_expression = expressions.BinaryComposition(
operator, expressions.LocalField(filtered_field_name), argument_expression)
final_expression = None
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
final_expression = expressions.BinaryComposition(
u'||', non_existence_expression, comparison_expression)
else:
final_expression = comparison_expression
return blocks.Filter(final_expression) | python | def _process_comparison_filter_directive(filter_operation_info, location,
context, parameters, operator=None):
"""Return a Filter basic block that performs the given comparison against the property field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to perform the comparison against;
if the parameter is optional and missing, the check will return True
operator: unicode, a comparison operator, like '=', '!=', '>=' etc.
This is a kwarg only to preserve the same positional arguments in the
function signature, to ease validation.
Returns:
a Filter basic block that performs the requested comparison
"""
comparison_operators = {u'=', u'!=', u'>', u'<', u'>=', u'<='}
if operator not in comparison_operators:
raise AssertionError(u'Expected a valid comparison operator ({}), but got '
u'{}'.format(comparison_operators, operator))
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
comparison_expression = expressions.BinaryComposition(
operator, expressions.LocalField(filtered_field_name), argument_expression)
final_expression = None
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
final_expression = expressions.BinaryComposition(
u'||', non_existence_expression, comparison_expression)
else:
final_expression = comparison_expression
return blocks.Filter(final_expression) | [
"def",
"_process_comparison_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
",",
"operator",
"=",
"None",
")",
":",
"comparison_operators",
"=",
"{",
"u'='",
",",
"u'!='",
",",
"u'>'",
",",
"u'<'",
",",
"u'>='... | Return a Filter basic block that performs the given comparison against the property field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to perform the comparison against;
if the parameter is optional and missing, the check will return True
operator: unicode, a comparison operator, like '=', '!=', '>=' etc.
This is a kwarg only to preserve the same positional arguments in the
function signature, to ease validation.
Returns:
a Filter basic block that performs the requested comparison | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"performs",
"the",
"given",
"comparison",
"against",
"the",
"property",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L161-L204 | train | 227,877 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_has_edge_degree_filter_directive | def _process_has_edge_degree_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks the degree of the edge to the given vertex field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the edge degree against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check
"""
if isinstance(filter_operation_info.field_ast, InlineFragment):
raise AssertionError(u'Received InlineFragment AST node in "has_edge_degree" filter '
u'handler. This should have been caught earlier: '
u'{}'.format(filter_operation_info.field_ast))
filtered_field_name = filter_operation_info.field_name
if filtered_field_name is None or not is_vertex_field_name(filtered_field_name):
raise AssertionError(u'Invalid value for "filtered_field_name" in "has_edge_degree" '
u'filter: {}'.format(filtered_field_name))
if not is_vertex_field_type(filter_operation_info.field_type):
raise AssertionError(u'Invalid value for "filter_operation_info.field_type" in '
u'"has_edge_degree" filter: {}'.format(filter_operation_info))
argument = parameters[0]
if not is_variable_argument(argument):
raise GraphQLCompilationError(u'The "has_edge_degree" filter only supports runtime '
u'variable arguments. Tagged values are not supported.'
u'Argument name: {}'.format(argument))
argument_inferred_type = GraphQLInt
argument_expression, non_existence_expression = _represent_argument(
location, context, argument, argument_inferred_type)
if non_existence_expression is not None:
raise AssertionError(u'Since we do not support tagged values, non_existence_expression '
u'should have been None. However, it was: '
u'{}'.format(non_existence_expression))
# If no edges to the vertex field exist, the edges' field in the database may be "null".
# We also don't know ahead of time whether the supplied argument is zero or not.
# We have to accommodate these facts in our generated comparison code.
# We construct the following expression to check if the edge degree is zero:
# ({argument} == 0) && (edge_field == null)
argument_is_zero = expressions.BinaryComposition(
u'=', argument_expression, expressions.ZeroLiteral)
edge_field_is_null = expressions.BinaryComposition(
u'=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
edge_degree_is_zero = expressions.BinaryComposition(
u'&&', argument_is_zero, edge_field_is_null)
# The following expression will check for a non-zero edge degree equal to the argument.
# (edge_field != null) && (edge_field.size() == {argument})
edge_field_is_not_null = expressions.BinaryComposition(
u'!=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
edge_degree = expressions.UnaryTransformation(
u'size', expressions.LocalField(filtered_field_name))
edge_degree_matches_argument = expressions.BinaryComposition(
u'=', edge_degree, argument_expression)
edge_degree_is_non_zero = expressions.BinaryComposition(
u'&&', edge_field_is_not_null, edge_degree_matches_argument)
# We combine the two cases with a logical-or to handle both situations:
filter_predicate = expressions.BinaryComposition(
u'||', edge_degree_is_zero, edge_degree_is_non_zero)
return blocks.Filter(filter_predicate) | python | def _process_has_edge_degree_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks the degree of the edge to the given vertex field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the edge degree against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check
"""
if isinstance(filter_operation_info.field_ast, InlineFragment):
raise AssertionError(u'Received InlineFragment AST node in "has_edge_degree" filter '
u'handler. This should have been caught earlier: '
u'{}'.format(filter_operation_info.field_ast))
filtered_field_name = filter_operation_info.field_name
if filtered_field_name is None or not is_vertex_field_name(filtered_field_name):
raise AssertionError(u'Invalid value for "filtered_field_name" in "has_edge_degree" '
u'filter: {}'.format(filtered_field_name))
if not is_vertex_field_type(filter_operation_info.field_type):
raise AssertionError(u'Invalid value for "filter_operation_info.field_type" in '
u'"has_edge_degree" filter: {}'.format(filter_operation_info))
argument = parameters[0]
if not is_variable_argument(argument):
raise GraphQLCompilationError(u'The "has_edge_degree" filter only supports runtime '
u'variable arguments. Tagged values are not supported.'
u'Argument name: {}'.format(argument))
argument_inferred_type = GraphQLInt
argument_expression, non_existence_expression = _represent_argument(
location, context, argument, argument_inferred_type)
if non_existence_expression is not None:
raise AssertionError(u'Since we do not support tagged values, non_existence_expression '
u'should have been None. However, it was: '
u'{}'.format(non_existence_expression))
# If no edges to the vertex field exist, the edges' field in the database may be "null".
# We also don't know ahead of time whether the supplied argument is zero or not.
# We have to accommodate these facts in our generated comparison code.
# We construct the following expression to check if the edge degree is zero:
# ({argument} == 0) && (edge_field == null)
argument_is_zero = expressions.BinaryComposition(
u'=', argument_expression, expressions.ZeroLiteral)
edge_field_is_null = expressions.BinaryComposition(
u'=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
edge_degree_is_zero = expressions.BinaryComposition(
u'&&', argument_is_zero, edge_field_is_null)
# The following expression will check for a non-zero edge degree equal to the argument.
# (edge_field != null) && (edge_field.size() == {argument})
edge_field_is_not_null = expressions.BinaryComposition(
u'!=', expressions.LocalField(filtered_field_name), expressions.NullLiteral)
edge_degree = expressions.UnaryTransformation(
u'size', expressions.LocalField(filtered_field_name))
edge_degree_matches_argument = expressions.BinaryComposition(
u'=', edge_degree, argument_expression)
edge_degree_is_non_zero = expressions.BinaryComposition(
u'&&', edge_field_is_not_null, edge_degree_matches_argument)
# We combine the two cases with a logical-or to handle both situations:
filter_predicate = expressions.BinaryComposition(
u'||', edge_degree_is_zero, edge_degree_is_non_zero)
return blocks.Filter(filter_predicate) | [
"def",
"_process_has_edge_degree_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"if",
"isinstance",
"(",
"filter_operation_info",
".",
"field_ast",
",",
"InlineFragment",
")",
":",
"raise",
"AssertionError"... | Return a Filter basic block that checks the degree of the edge to the given vertex field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the edge degree against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"the",
"degree",
"of",
"the",
"edge",
"to",
"the",
"given",
"vertex",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L209-L279 | train | 227,878 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_name_or_alias_filter_directive | def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks for a match against an Entity's name or alias.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the name or alias against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check against the name or alias
"""
filtered_field_type = filter_operation_info.field_type
if isinstance(filtered_field_type, GraphQLUnionType):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type '
u'{}'.format(filtered_field_type))
current_type_fields = filtered_field_type.fields
name_field = current_type_fields.get('name', None)
alias_field = current_type_fields.get('alias', None)
if not name_field or not alias_field:
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a '
u'"name" or "alias" field.'.format(filtered_field_type))
name_field_type = strip_non_null_from_type(name_field.type)
alias_field_type = strip_non_null_from_type(alias_field.type)
if not isinstance(name_field_type, GraphQLScalarType):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" '
u'field is not a scalar.'.format(filtered_field_type))
if not isinstance(alias_field_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its '
u'"alias" field is not a list.'.format(filtered_field_type))
alias_field_inner_type = strip_non_null_from_type(alias_field_type.of_type)
if alias_field_inner_type != name_field_type:
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the '
u'"name" field and the inner type of the "alias" field '
u'do not match: {} vs {}'.format(filtered_field_type,
name_field_type,
alias_field_inner_type))
argument_inferred_type = name_field_type
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
check_against_name = expressions.BinaryComposition(
u'=', expressions.LocalField('name'), argument_expression)
check_against_alias = expressions.BinaryComposition(
u'contains', expressions.LocalField('alias'), argument_expression)
filter_predicate = expressions.BinaryComposition(
u'||', check_against_name, check_against_alias)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | python | def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks for a match against an Entity's name or alias.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the name or alias against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check against the name or alias
"""
filtered_field_type = filter_operation_info.field_type
if isinstance(filtered_field_type, GraphQLUnionType):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type '
u'{}'.format(filtered_field_type))
current_type_fields = filtered_field_type.fields
name_field = current_type_fields.get('name', None)
alias_field = current_type_fields.get('alias', None)
if not name_field or not alias_field:
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a '
u'"name" or "alias" field.'.format(filtered_field_type))
name_field_type = strip_non_null_from_type(name_field.type)
alias_field_type = strip_non_null_from_type(alias_field.type)
if not isinstance(name_field_type, GraphQLScalarType):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" '
u'field is not a scalar.'.format(filtered_field_type))
if not isinstance(alias_field_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its '
u'"alias" field is not a list.'.format(filtered_field_type))
alias_field_inner_type = strip_non_null_from_type(alias_field_type.of_type)
if alias_field_inner_type != name_field_type:
raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the '
u'"name" field and the inner type of the "alias" field '
u'do not match: {} vs {}'.format(filtered_field_type,
name_field_type,
alias_field_inner_type))
argument_inferred_type = name_field_type
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
check_against_name = expressions.BinaryComposition(
u'=', expressions.LocalField('name'), argument_expression)
check_against_alias = expressions.BinaryComposition(
u'contains', expressions.LocalField('alias'), argument_expression)
filter_predicate = expressions.BinaryComposition(
u'||', check_against_name, check_against_alias)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | [
"def",
"_process_name_or_alias_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"if",
"isinstance",
"(",
"filtered_field_type",
",",
"Grap... | Return a Filter basic block that checks for a match against an Entity's name or alias.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, containing the value to check the name or alias against;
if the parameter is optional and missing, the check will return True
Returns:
a Filter basic block that performs the check against the name or alias | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"for",
"a",
"match",
"against",
"an",
"Entity",
"s",
"name",
"or",
"alias",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L284-L346 | train | 227,879 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_between_filter_directive | def _process_between_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks that a field is between two values, inclusive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 2 elements, specifying the time range in which the data must lie;
if either of the elements is optional and missing,
their side of the check is assumed to be True
Returns:
a Filter basic block that performs the range check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
arg1_expression, arg1_non_existence = _represent_argument(
location, context, parameters[0], argument_inferred_type)
arg2_expression, arg2_non_existence = _represent_argument(
location, context, parameters[1], argument_inferred_type)
lower_bound_clause = expressions.BinaryComposition(
u'>=', expressions.LocalField(filtered_field_name), arg1_expression)
if arg1_non_existence is not None:
# The argument is optional, and if it doesn't exist, this side of the check should pass.
lower_bound_clause = expressions.BinaryComposition(
u'||', arg1_non_existence, lower_bound_clause)
upper_bound_clause = expressions.BinaryComposition(
u'<=', expressions.LocalField(filtered_field_name), arg2_expression)
if arg2_non_existence is not None:
# The argument is optional, and if it doesn't exist, this side of the check should pass.
upper_bound_clause = expressions.BinaryComposition(
u'||', arg2_non_existence, upper_bound_clause)
filter_predicate = expressions.BinaryComposition(
u'&&', lower_bound_clause, upper_bound_clause)
return blocks.Filter(filter_predicate) | python | def _process_between_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks that a field is between two values, inclusive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 2 elements, specifying the time range in which the data must lie;
if either of the elements is optional and missing,
their side of the check is assumed to be True
Returns:
a Filter basic block that performs the range check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
arg1_expression, arg1_non_existence = _represent_argument(
location, context, parameters[0], argument_inferred_type)
arg2_expression, arg2_non_existence = _represent_argument(
location, context, parameters[1], argument_inferred_type)
lower_bound_clause = expressions.BinaryComposition(
u'>=', expressions.LocalField(filtered_field_name), arg1_expression)
if arg1_non_existence is not None:
# The argument is optional, and if it doesn't exist, this side of the check should pass.
lower_bound_clause = expressions.BinaryComposition(
u'||', arg1_non_existence, lower_bound_clause)
upper_bound_clause = expressions.BinaryComposition(
u'<=', expressions.LocalField(filtered_field_name), arg2_expression)
if arg2_non_existence is not None:
# The argument is optional, and if it doesn't exist, this side of the check should pass.
upper_bound_clause = expressions.BinaryComposition(
u'||', arg2_non_existence, upper_bound_clause)
filter_predicate = expressions.BinaryComposition(
u'&&', lower_bound_clause, upper_bound_clause)
return blocks.Filter(filter_predicate) | [
"def",
"_process_between_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"filtered_field_name",
"=",
"filter_operation_info",
".",
"field_n... | Return a Filter basic block that checks that a field is between two values, inclusive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 2 elements, specifying the time range in which the data must lie;
if either of the elements is optional and missing,
their side of the check is assumed to be True
Returns:
a Filter basic block that performs the range check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"that",
"a",
"field",
"is",
"between",
"two",
"values",
"inclusive",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L351-L392 | train | 227,880 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_in_collection_filter_directive | def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks for a value's existence in a collection.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the collection existence check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = GraphQLList(strip_non_null_from_type(filtered_field_type))
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'contains', argument_expression, expressions.LocalField(filtered_field_name))
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | python | def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks for a value's existence in a collection.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the collection existence check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = GraphQLList(strip_non_null_from_type(filtered_field_type))
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'contains', argument_expression, expressions.LocalField(filtered_field_name))
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | [
"def",
"_process_in_collection_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"filtered_field_name",
"=",
"filter_operation_info",
".",
"f... | Return a Filter basic block that checks for a value's existence in a collection.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the collection existence check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"for",
"a",
"value",
"s",
"existence",
"in",
"a",
"collection",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L397-L427 | train | 227,881 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_has_substring_filter_directive | def _process_has_substring_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg is a substring of the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the substring check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
if not strip_non_null_from_type(filtered_field_type).is_same_type(GraphQLString):
raise GraphQLCompilationError(u'Cannot apply "has_substring" to non-string '
u'type {}'.format(filtered_field_type))
argument_inferred_type = GraphQLString
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'has_substring', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | python | def _process_has_substring_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg is a substring of the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the substring check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
if not strip_non_null_from_type(filtered_field_type).is_same_type(GraphQLString):
raise GraphQLCompilationError(u'Cannot apply "has_substring" to non-string '
u'type {}'.format(filtered_field_type))
argument_inferred_type = GraphQLString
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'has_substring', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | [
"def",
"_process_has_substring_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"filtered_field_name",
"=",
"filter_operation_info",
".",
"f... | Return a Filter basic block that checks if the directive arg is a substring of the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the substring check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"if",
"the",
"directive",
"arg",
"is",
"a",
"substring",
"of",
"the",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L432-L466 | train | 227,882 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_contains_filter_directive | def _process_contains_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg is contained in the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the contains check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
base_field_type = strip_non_null_from_type(filtered_field_type)
if not isinstance(base_field_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "contains" to non-list '
u'type {}'.format(filtered_field_type))
argument_inferred_type = strip_non_null_from_type(base_field_type.of_type)
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'contains', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | python | def _process_contains_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg is contained in the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the contains check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
base_field_type = strip_non_null_from_type(filtered_field_type)
if not isinstance(base_field_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "contains" to non-list '
u'type {}'.format(filtered_field_type))
argument_inferred_type = strip_non_null_from_type(base_field_type.of_type)
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'contains', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | [
"def",
"_process_contains_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"filtered_field_name",
"=",
"filter_operation_info",
".",
"field_... | Return a Filter basic block that checks if the directive arg is contained in the field.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the contains check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"if",
"the",
"directive",
"arg",
"is",
"contained",
"in",
"the",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L470-L505 | train | 227,883 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | _process_intersects_filter_directive | def _process_intersects_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg and the field intersect.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the intersects check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
if not isinstance(argument_inferred_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list '
u'type {}'.format(filtered_field_type))
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'intersects', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | python | def _process_intersects_filter_directive(filter_operation_info, location, context, parameters):
"""Return a Filter basic block that checks if the directive arg and the field intersect.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the intersects check
"""
filtered_field_type = filter_operation_info.field_type
filtered_field_name = filter_operation_info.field_name
argument_inferred_type = strip_non_null_from_type(filtered_field_type)
if not isinstance(argument_inferred_type, GraphQLList):
raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list '
u'type {}'.format(filtered_field_type))
argument_expression, non_existence_expression = _represent_argument(
location, context, parameters[0], argument_inferred_type)
filter_predicate = expressions.BinaryComposition(
u'intersects', expressions.LocalField(filtered_field_name), argument_expression)
if non_existence_expression is not None:
# The argument comes from an optional block and might not exist,
# in which case the filter expression should evaluate to True.
filter_predicate = expressions.BinaryComposition(
u'||', non_existence_expression, filter_predicate)
return blocks.Filter(filter_predicate) | [
"def",
"_process_intersects_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
",",
"parameters",
")",
":",
"filtered_field_type",
"=",
"filter_operation_info",
".",
"field_type",
"filtered_field_name",
"=",
"filter_operation_info",
".",
"fiel... | Return a Filter basic block that checks if the directive arg and the field intersect.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
parameters: list of 1 element, specifying the collection in which the value must exist;
if the collection is optional and missing, the check will return True
Returns:
a Filter basic block that performs the intersects check | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"checks",
"if",
"the",
"directive",
"arg",
"and",
"the",
"field",
"intersect",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L509-L543 | train | 227,884 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | is_filter_with_outer_scope_vertex_field_operator | def is_filter_with_outer_scope_vertex_field_operator(directive):
"""Return True if we have a filter directive whose operator applies to the outer scope."""
if directive.name.value != 'filter':
return False
op_name, _ = _get_filter_op_name_and_values(directive)
return op_name in OUTER_SCOPE_VERTEX_FIELD_OPERATORS | python | def is_filter_with_outer_scope_vertex_field_operator(directive):
"""Return True if we have a filter directive whose operator applies to the outer scope."""
if directive.name.value != 'filter':
return False
op_name, _ = _get_filter_op_name_and_values(directive)
return op_name in OUTER_SCOPE_VERTEX_FIELD_OPERATORS | [
"def",
"is_filter_with_outer_scope_vertex_field_operator",
"(",
"directive",
")",
":",
"if",
"directive",
".",
"name",
".",
"value",
"!=",
"'filter'",
":",
"return",
"False",
"op_name",
",",
"_",
"=",
"_get_filter_op_name_and_values",
"(",
"directive",
")",
"return"... | Return True if we have a filter directive whose operator applies to the outer scope. | [
"Return",
"True",
"if",
"we",
"have",
"a",
"filter",
"directive",
"whose",
"operator",
"applies",
"to",
"the",
"outer",
"scope",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L598-L604 | train | 227,885 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/filters.py | process_filter_directive | def process_filter_directive(filter_operation_info, location, context):
"""Return a Filter basic block that corresponds to the filter operation in the directive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
a Filter basic block that performs the requested filtering operation
"""
op_name, operator_params = _get_filter_op_name_and_values(filter_operation_info.directive)
non_comparison_filters = {
u'name_or_alias': _process_name_or_alias_filter_directive,
u'between': _process_between_filter_directive,
u'in_collection': _process_in_collection_filter_directive,
u'has_substring': _process_has_substring_filter_directive,
u'contains': _process_contains_filter_directive,
u'intersects': _process_intersects_filter_directive,
u'has_edge_degree': _process_has_edge_degree_filter_directive,
}
all_recognized_filters = frozenset(non_comparison_filters.keys()) | COMPARISON_OPERATORS
if all_recognized_filters != ALL_OPERATORS:
unrecognized_filters = ALL_OPERATORS - all_recognized_filters
raise AssertionError(u'Some filtering operators are defined but do not have an associated '
u'processing function. This is a bug: {}'.format(unrecognized_filters))
if op_name in COMPARISON_OPERATORS:
process_func = partial(_process_comparison_filter_directive, operator=op_name)
else:
process_func = non_comparison_filters.get(op_name, None)
if process_func is None:
raise GraphQLCompilationError(u'Unknown op_name for filter directive: {}'.format(op_name))
# Operators that do not affect the inner scope require a field name to which they apply.
# There is no field name on InlineFragment ASTs, which is why only operators that affect
# the inner scope make semantic sense when applied to InlineFragments.
# Here, we ensure that we either have a field name to which the filter applies,
# or that the operator affects the inner scope.
if (filter_operation_info.field_name is None and
op_name not in INNER_SCOPE_VERTEX_FIELD_OPERATORS):
raise GraphQLCompilationError(u'The filter with op_name "{}" must be applied on a field. '
u'It may not be applied on a type coercion.'.format(op_name))
fields = ((filter_operation_info.field_name,) if op_name != 'name_or_alias'
else ('name', 'alias'))
context['metadata'].record_filter_info(
location,
FilterInfo(fields=fields, op_name=op_name, args=tuple(operator_params))
)
return process_func(filter_operation_info, location, context, operator_params) | python | def process_filter_directive(filter_operation_info, location, context):
"""Return a Filter basic block that corresponds to the filter operation in the directive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
a Filter basic block that performs the requested filtering operation
"""
op_name, operator_params = _get_filter_op_name_and_values(filter_operation_info.directive)
non_comparison_filters = {
u'name_or_alias': _process_name_or_alias_filter_directive,
u'between': _process_between_filter_directive,
u'in_collection': _process_in_collection_filter_directive,
u'has_substring': _process_has_substring_filter_directive,
u'contains': _process_contains_filter_directive,
u'intersects': _process_intersects_filter_directive,
u'has_edge_degree': _process_has_edge_degree_filter_directive,
}
all_recognized_filters = frozenset(non_comparison_filters.keys()) | COMPARISON_OPERATORS
if all_recognized_filters != ALL_OPERATORS:
unrecognized_filters = ALL_OPERATORS - all_recognized_filters
raise AssertionError(u'Some filtering operators are defined but do not have an associated '
u'processing function. This is a bug: {}'.format(unrecognized_filters))
if op_name in COMPARISON_OPERATORS:
process_func = partial(_process_comparison_filter_directive, operator=op_name)
else:
process_func = non_comparison_filters.get(op_name, None)
if process_func is None:
raise GraphQLCompilationError(u'Unknown op_name for filter directive: {}'.format(op_name))
# Operators that do not affect the inner scope require a field name to which they apply.
# There is no field name on InlineFragment ASTs, which is why only operators that affect
# the inner scope make semantic sense when applied to InlineFragments.
# Here, we ensure that we either have a field name to which the filter applies,
# or that the operator affects the inner scope.
if (filter_operation_info.field_name is None and
op_name not in INNER_SCOPE_VERTEX_FIELD_OPERATORS):
raise GraphQLCompilationError(u'The filter with op_name "{}" must be applied on a field. '
u'It may not be applied on a type coercion.'.format(op_name))
fields = ((filter_operation_info.field_name,) if op_name != 'name_or_alias'
else ('name', 'alias'))
context['metadata'].record_filter_info(
location,
FilterInfo(fields=fields, op_name=op_name, args=tuple(operator_params))
)
return process_func(filter_operation_info, location, context, operator_params) | [
"def",
"process_filter_directive",
"(",
"filter_operation_info",
",",
"location",
",",
"context",
")",
":",
"op_name",
",",
"operator_params",
"=",
"_get_filter_op_name_and_values",
"(",
"filter_operation_info",
".",
"directive",
")",
"non_comparison_filters",
"=",
"{",
... | Return a Filter basic block that corresponds to the filter operation in the directive.
Args:
filter_operation_info: FilterOperationInfo object, containing the directive and field info
of the field where the filter is to be applied.
location: Location where this filter is used.
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
Returns:
a Filter basic block that performs the requested filtering operation | [
"Return",
"a",
"Filter",
"basic",
"block",
"that",
"corresponds",
"to",
"the",
"filter",
"operation",
"in",
"the",
"directive",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L607-L663 | train | 227,886 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/sql_context_helpers.py | get_schema_type_name | def get_schema_type_name(node, context):
"""Return the GraphQL type name of a node."""
query_path = node.query_path
if query_path not in context.query_path_to_location_info:
raise AssertionError(
u'Unable to find type name for query path {} with context {}.'.format(
query_path, context))
location_info = context.query_path_to_location_info[query_path]
return location_info.type.name | python | def get_schema_type_name(node, context):
"""Return the GraphQL type name of a node."""
query_path = node.query_path
if query_path not in context.query_path_to_location_info:
raise AssertionError(
u'Unable to find type name for query path {} with context {}.'.format(
query_path, context))
location_info = context.query_path_to_location_info[query_path]
return location_info.type.name | [
"def",
"get_schema_type_name",
"(",
"node",
",",
"context",
")",
":",
"query_path",
"=",
"node",
".",
"query_path",
"if",
"query_path",
"not",
"in",
"context",
".",
"query_path_to_location_info",
":",
"raise",
"AssertionError",
"(",
"u'Unable to find type name for que... | Return the GraphQL type name of a node. | [
"Return",
"the",
"GraphQL",
"type",
"name",
"of",
"a",
"node",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L5-L13 | train | 227,887 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/sql_context_helpers.py | get_node_at_path | def get_node_at_path(query_path, context):
"""Return the SqlNode associated with the query path."""
if query_path not in context.query_path_to_node:
raise AssertionError(
u'Unable to find SqlNode for query path {} with context {}.'.format(
query_path, context))
node = context.query_path_to_node[query_path]
return node | python | def get_node_at_path(query_path, context):
"""Return the SqlNode associated with the query path."""
if query_path not in context.query_path_to_node:
raise AssertionError(
u'Unable to find SqlNode for query path {} with context {}.'.format(
query_path, context))
node = context.query_path_to_node[query_path]
return node | [
"def",
"get_node_at_path",
"(",
"query_path",
",",
"context",
")",
":",
"if",
"query_path",
"not",
"in",
"context",
".",
"query_path_to_node",
":",
"raise",
"AssertionError",
"(",
"u'Unable to find SqlNode for query path {} with context {}.'",
".",
"format",
"(",
"query... | Return the SqlNode associated with the query path. | [
"Return",
"the",
"SqlNode",
"associated",
"with",
"the",
"query",
"path",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L27-L34 | train | 227,888 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/sql_context_helpers.py | try_get_column | def try_get_column(column_name, node, context):
"""Attempt to get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
Optional[column], the SQLAlchemy column if found, None otherwise.
"""
selectable = get_node_selectable(node, context)
if not hasattr(selectable, 'c'):
raise AssertionError(
u'Selectable "{}" does not have a column collection. Context is {}.'.format(
selectable, context))
return selectable.c.get(column_name, None) | python | def try_get_column(column_name, node, context):
"""Attempt to get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
Optional[column], the SQLAlchemy column if found, None otherwise.
"""
selectable = get_node_selectable(node, context)
if not hasattr(selectable, 'c'):
raise AssertionError(
u'Selectable "{}" does not have a column collection. Context is {}.'.format(
selectable, context))
return selectable.c.get(column_name, None) | [
"def",
"try_get_column",
"(",
"column_name",
",",
"node",
",",
"context",
")",
":",
"selectable",
"=",
"get_node_selectable",
"(",
"node",
",",
"context",
")",
"if",
"not",
"hasattr",
"(",
"selectable",
",",
"'c'",
")",
":",
"raise",
"AssertionError",
"(",
... | Attempt to get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
Optional[column], the SQLAlchemy column if found, None otherwise. | [
"Attempt",
"to",
"get",
"a",
"column",
"by",
"name",
"from",
"the",
"selectable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L37-L53 | train | 227,889 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/sql_context_helpers.py | get_column | def get_column(column_name, node, context):
"""Get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
"""
column = try_get_column(column_name, node, context)
if column is None:
selectable = get_node_selectable(node, context)
raise AssertionError(
u'Column "{}" not found in selectable "{}". Columns present are {}. '
u'Context is {}.'.format(column_name, selectable.original,
[col.name for col in selectable.c], context))
return column | python | def get_column(column_name, node, context):
"""Get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
"""
column = try_get_column(column_name, node, context)
if column is None:
selectable = get_node_selectable(node, context)
raise AssertionError(
u'Column "{}" not found in selectable "{}". Columns present are {}. '
u'Context is {}.'.format(column_name, selectable.original,
[col.name for col in selectable.c], context))
return column | [
"def",
"get_column",
"(",
"column_name",
",",
"node",
",",
"context",
")",
":",
"column",
"=",
"try_get_column",
"(",
"column_name",
",",
"node",
",",
"context",
")",
"if",
"column",
"is",
"None",
":",
"selectable",
"=",
"get_node_selectable",
"(",
"node",
... | Get a column by name from the selectable.
Args:
column_name: str, name of the column to retrieve.
node: SqlNode, the node the column is being retrieved for.
context: CompilationContext, compilation specific metadata.
Returns:
column, the SQLAlchemy column if found. Raises an AssertionError otherwise. | [
"Get",
"a",
"column",
"by",
"name",
"from",
"the",
"selectable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L56-L74 | train | 227,890 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | get_unique_directives | def get_unique_directives(ast):
"""Return a dict of directive name to directive object for the given AST node.
Any directives that are allowed to exist more than once on any AST node are ignored.
For any directives that can only exist up to once, we verify that they are not duplicated
raising GraphQLCompilationError in case we find them more than once on the AST node.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
dict of string to directive object
"""
if not ast.directives:
return dict()
result = dict()
for directive_obj in ast.directives:
directive_name = directive_obj.name.value
if directive_name in ALLOWED_DUPLICATED_DIRECTIVES:
pass # We don't return these.
elif directive_name in result:
raise GraphQLCompilationError(u'Directive was unexpectedly applied twice in the same '
u'location: {} {}'.format(directive_name, ast.directives))
else:
result[directive_name] = directive_obj
return result | python | def get_unique_directives(ast):
"""Return a dict of directive name to directive object for the given AST node.
Any directives that are allowed to exist more than once on any AST node are ignored.
For any directives that can only exist up to once, we verify that they are not duplicated
raising GraphQLCompilationError in case we find them more than once on the AST node.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
dict of string to directive object
"""
if not ast.directives:
return dict()
result = dict()
for directive_obj in ast.directives:
directive_name = directive_obj.name.value
if directive_name in ALLOWED_DUPLICATED_DIRECTIVES:
pass # We don't return these.
elif directive_name in result:
raise GraphQLCompilationError(u'Directive was unexpectedly applied twice in the same '
u'location: {} {}'.format(directive_name, ast.directives))
else:
result[directive_name] = directive_obj
return result | [
"def",
"get_unique_directives",
"(",
"ast",
")",
":",
"if",
"not",
"ast",
".",
"directives",
":",
"return",
"dict",
"(",
")",
"result",
"=",
"dict",
"(",
")",
"for",
"directive_obj",
"in",
"ast",
".",
"directives",
":",
"directive_name",
"=",
"directive_ob... | Return a dict of directive name to directive object for the given AST node.
Any directives that are allowed to exist more than once on any AST node are ignored.
For any directives that can only exist up to once, we verify that they are not duplicated
raising GraphQLCompilationError in case we find them more than once on the AST node.
Args:
ast: GraphQL AST node, obtained from the graphql library
Returns:
dict of string to directive object | [
"Return",
"a",
"dict",
"of",
"directive",
"name",
"to",
"directive",
"object",
"for",
"the",
"given",
"AST",
"node",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L27-L54 | train | 227,891 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | get_local_filter_directives | def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields):
"""Get all filter directives that apply to the current field.
This helper abstracts away the fact that some vertex field filtering operators apply on the
inner scope (the scope of the inner vertex field on which they are applied), whereas some apply
on the outer scope (the scope that contains the inner vertex field).
See filters.py for more information.
Args:
ast: a GraphQL AST object for which to load local filters, from the graphql library
current_schema_type: GraphQLType, the schema type at the current AST location
inner_vertex_fields: a list of inner AST objects representing vertex fields that are within
the current field. If currently processing a property field (i.e.
there are no inner vertex fields), this argument may be set to None.
Returns:
list of FilterOperationInfo objects.
If the field_ast field is of type InlineFragment, the field_name field is set to None.
"""
result = []
if ast.directives: # it'll be None if the AST has no directives at that node
for directive_obj in ast.directives:
# Of all filters that appear *on the field itself*, only the ones that apply
# to the outer scope are not considered "local" and are not to be returned.
if directive_obj.name.value == 'filter':
filtered_field_name = get_ast_field_name_or_none(ast)
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
# We found a filter that affects the outer scope vertex. Let's make sure
# we are at a vertex field. If we are actually at a property field,
# that is a compilation error.
if not is_vertex_field_type(current_schema_type):
raise GraphQLCompilationError(
u'Found disallowed filter on a property field: {} {} '
u'{}'.format(directive_obj, current_schema_type, filtered_field_name))
elif isinstance(ast, InlineFragment):
raise GraphQLCompilationError(
u'Found disallowed filter on a type coercion: {} '
u'{}'.format(directive_obj, current_schema_type))
else:
# The filter is valid and non-local, since it is applied at this AST node
# but affects the outer scope vertex field. Skip over it.
pass
else:
operation = FilterOperationInfo(
directive=directive_obj, field_name=filtered_field_name,
field_type=current_schema_type, field_ast=ast)
result.append(operation)
if inner_vertex_fields: # allow the argument to be None
for inner_ast in inner_vertex_fields:
for directive_obj in inner_ast.directives:
# Of all filters that appear on an inner vertex field, only the ones that apply
# to the outer scope are "local" to the outer field and therefore to be returned.
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
# The inner AST must not be an InlineFragment, so it must have a field name.
filtered_field_name = get_ast_field_name(inner_ast)
filtered_field_type = get_vertex_field_type(
current_schema_type, filtered_field_name)
operation = FilterOperationInfo(
directive=directive_obj, field_name=filtered_field_name,
field_type=filtered_field_type, field_ast=inner_ast)
result.append(operation)
return result | python | def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields):
"""Get all filter directives that apply to the current field.
This helper abstracts away the fact that some vertex field filtering operators apply on the
inner scope (the scope of the inner vertex field on which they are applied), whereas some apply
on the outer scope (the scope that contains the inner vertex field).
See filters.py for more information.
Args:
ast: a GraphQL AST object for which to load local filters, from the graphql library
current_schema_type: GraphQLType, the schema type at the current AST location
inner_vertex_fields: a list of inner AST objects representing vertex fields that are within
the current field. If currently processing a property field (i.e.
there are no inner vertex fields), this argument may be set to None.
Returns:
list of FilterOperationInfo objects.
If the field_ast field is of type InlineFragment, the field_name field is set to None.
"""
result = []
if ast.directives: # it'll be None if the AST has no directives at that node
for directive_obj in ast.directives:
# Of all filters that appear *on the field itself*, only the ones that apply
# to the outer scope are not considered "local" and are not to be returned.
if directive_obj.name.value == 'filter':
filtered_field_name = get_ast_field_name_or_none(ast)
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
# We found a filter that affects the outer scope vertex. Let's make sure
# we are at a vertex field. If we are actually at a property field,
# that is a compilation error.
if not is_vertex_field_type(current_schema_type):
raise GraphQLCompilationError(
u'Found disallowed filter on a property field: {} {} '
u'{}'.format(directive_obj, current_schema_type, filtered_field_name))
elif isinstance(ast, InlineFragment):
raise GraphQLCompilationError(
u'Found disallowed filter on a type coercion: {} '
u'{}'.format(directive_obj, current_schema_type))
else:
# The filter is valid and non-local, since it is applied at this AST node
# but affects the outer scope vertex field. Skip over it.
pass
else:
operation = FilterOperationInfo(
directive=directive_obj, field_name=filtered_field_name,
field_type=current_schema_type, field_ast=ast)
result.append(operation)
if inner_vertex_fields: # allow the argument to be None
for inner_ast in inner_vertex_fields:
for directive_obj in inner_ast.directives:
# Of all filters that appear on an inner vertex field, only the ones that apply
# to the outer scope are "local" to the outer field and therefore to be returned.
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
# The inner AST must not be an InlineFragment, so it must have a field name.
filtered_field_name = get_ast_field_name(inner_ast)
filtered_field_type = get_vertex_field_type(
current_schema_type, filtered_field_name)
operation = FilterOperationInfo(
directive=directive_obj, field_name=filtered_field_name,
field_type=filtered_field_type, field_ast=inner_ast)
result.append(operation)
return result | [
"def",
"get_local_filter_directives",
"(",
"ast",
",",
"current_schema_type",
",",
"inner_vertex_fields",
")",
":",
"result",
"=",
"[",
"]",
"if",
"ast",
".",
"directives",
":",
"# it'll be None if the AST has no directives at that node",
"for",
"directive_obj",
"in",
"... | Get all filter directives that apply to the current field.
This helper abstracts away the fact that some vertex field filtering operators apply on the
inner scope (the scope of the inner vertex field on which they are applied), whereas some apply
on the outer scope (the scope that contains the inner vertex field).
See filters.py for more information.
Args:
ast: a GraphQL AST object for which to load local filters, from the graphql library
current_schema_type: GraphQLType, the schema type at the current AST location
inner_vertex_fields: a list of inner AST objects representing vertex fields that are within
the current field. If currently processing a property field (i.e.
there are no inner vertex fields), this argument may be set to None.
Returns:
list of FilterOperationInfo objects.
If the field_ast field is of type InlineFragment, the field_name field is set to None. | [
"Get",
"all",
"filter",
"directives",
"that",
"apply",
"to",
"the",
"current",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L57-L121 | train | 227,892 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | validate_property_directives | def validate_property_directives(directives):
"""Validate the directives that appear at a property field."""
for directive_name in six.iterkeys(directives):
if directive_name in VERTEX_ONLY_DIRECTIVES:
raise GraphQLCompilationError(
u'Found vertex-only directive {} set on property.'.format(directive_name)) | python | def validate_property_directives(directives):
"""Validate the directives that appear at a property field."""
for directive_name in six.iterkeys(directives):
if directive_name in VERTEX_ONLY_DIRECTIVES:
raise GraphQLCompilationError(
u'Found vertex-only directive {} set on property.'.format(directive_name)) | [
"def",
"validate_property_directives",
"(",
"directives",
")",
":",
"for",
"directive_name",
"in",
"six",
".",
"iterkeys",
"(",
"directives",
")",
":",
"if",
"directive_name",
"in",
"VERTEX_ONLY_DIRECTIVES",
":",
"raise",
"GraphQLCompilationError",
"(",
"u'Found verte... | Validate the directives that appear at a property field. | [
"Validate",
"the",
"directives",
"that",
"appear",
"at",
"a",
"property",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L124-L129 | train | 227,893 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | validate_vertex_directives | def validate_vertex_directives(directives):
"""Validate the directives that appear at a vertex field."""
for directive_name in six.iterkeys(directives):
if directive_name in PROPERTY_ONLY_DIRECTIVES:
raise GraphQLCompilationError(
u'Found property-only directive {} set on vertex.'.format(directive_name)) | python | def validate_vertex_directives(directives):
"""Validate the directives that appear at a vertex field."""
for directive_name in six.iterkeys(directives):
if directive_name in PROPERTY_ONLY_DIRECTIVES:
raise GraphQLCompilationError(
u'Found property-only directive {} set on vertex.'.format(directive_name)) | [
"def",
"validate_vertex_directives",
"(",
"directives",
")",
":",
"for",
"directive_name",
"in",
"six",
".",
"iterkeys",
"(",
"directives",
")",
":",
"if",
"directive_name",
"in",
"PROPERTY_ONLY_DIRECTIVES",
":",
"raise",
"GraphQLCompilationError",
"(",
"u'Found prope... | Validate the directives that appear at a vertex field. | [
"Validate",
"the",
"directives",
"that",
"appear",
"at",
"a",
"vertex",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L132-L137 | train | 227,894 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | validate_root_vertex_directives | def validate_root_vertex_directives(root_ast):
"""Validate the directives that appear at the root vertex field."""
directives_present_at_root = set()
for directive_obj in root_ast.directives:
directive_name = directive_obj.name.value
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
raise GraphQLCompilationError(u'Found a filter directive with an operator that is not'
u'allowed on the root vertex: {}'.format(directive_obj))
directives_present_at_root.add(directive_name)
disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT
if disallowed_directives:
raise GraphQLCompilationError(u'Found prohibited directives on root vertex: '
u'{}'.format(disallowed_directives)) | python | def validate_root_vertex_directives(root_ast):
"""Validate the directives that appear at the root vertex field."""
directives_present_at_root = set()
for directive_obj in root_ast.directives:
directive_name = directive_obj.name.value
if is_filter_with_outer_scope_vertex_field_operator(directive_obj):
raise GraphQLCompilationError(u'Found a filter directive with an operator that is not'
u'allowed on the root vertex: {}'.format(directive_obj))
directives_present_at_root.add(directive_name)
disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT
if disallowed_directives:
raise GraphQLCompilationError(u'Found prohibited directives on root vertex: '
u'{}'.format(disallowed_directives)) | [
"def",
"validate_root_vertex_directives",
"(",
"root_ast",
")",
":",
"directives_present_at_root",
"=",
"set",
"(",
")",
"for",
"directive_obj",
"in",
"root_ast",
".",
"directives",
":",
"directive_name",
"=",
"directive_obj",
".",
"name",
".",
"value",
"if",
"is_... | Validate the directives that appear at the root vertex field. | [
"Validate",
"the",
"directives",
"that",
"appear",
"at",
"the",
"root",
"vertex",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L140-L155 | train | 227,895 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | validate_vertex_field_directive_interactions | def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives):
"""Ensure that the specified vertex field directives are not mutually disallowed."""
fold_directive = directives.get('fold', None)
optional_directive = directives.get('optional', None)
output_source_directive = directives.get('output_source', None)
recurse_directive = directives.get('recurse', None)
if fold_directive and optional_directive:
raise GraphQLCompilationError(u'@fold and @optional may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if fold_directive and output_source_directive:
raise GraphQLCompilationError(u'@fold and @output_source may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if fold_directive and recurse_directive:
raise GraphQLCompilationError(u'@fold and @recurse may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and output_source_directive:
raise GraphQLCompilationError(u'@optional and @output_source may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and recurse_directive:
raise GraphQLCompilationError(u'@optional and @recurse may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name)) | python | def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives):
"""Ensure that the specified vertex field directives are not mutually disallowed."""
fold_directive = directives.get('fold', None)
optional_directive = directives.get('optional', None)
output_source_directive = directives.get('output_source', None)
recurse_directive = directives.get('recurse', None)
if fold_directive and optional_directive:
raise GraphQLCompilationError(u'@fold and @optional may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if fold_directive and output_source_directive:
raise GraphQLCompilationError(u'@fold and @output_source may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if fold_directive and recurse_directive:
raise GraphQLCompilationError(u'@fold and @recurse may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and output_source_directive:
raise GraphQLCompilationError(u'@optional and @output_source may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and recurse_directive:
raise GraphQLCompilationError(u'@optional and @recurse may not appear at the same '
u'vertex field! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name)) | [
"def",
"validate_vertex_field_directive_interactions",
"(",
"parent_location",
",",
"vertex_field_name",
",",
"directives",
")",
":",
"fold_directive",
"=",
"directives",
".",
"get",
"(",
"'fold'",
",",
"None",
")",
"optional_directive",
"=",
"directives",
".",
"get",... | Ensure that the specified vertex field directives are not mutually disallowed. | [
"Ensure",
"that",
"the",
"specified",
"vertex",
"field",
"directives",
"are",
"not",
"mutually",
"disallowed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L158-L188 | train | 227,896 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/directive_helpers.py | validate_vertex_field_directive_in_context | def validate_vertex_field_directive_in_context(parent_location, vertex_field_name,
directives, context):
"""Ensure that the specified vertex field directives are allowed in the current context."""
fold_directive = directives.get('fold', None)
optional_directive = directives.get('optional', None)
recurse_directive = directives.get('recurse', None)
output_source_directive = directives.get('output_source', None)
fold_context = 'fold' in context
optional_context = 'optional' in context
output_source_context = 'output_source' in context
if fold_directive and fold_context:
raise GraphQLCompilationError(u'@fold is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and fold_context:
raise GraphQLCompilationError(u'@optional is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if output_source_directive and fold_context:
raise GraphQLCompilationError(u'@output_source is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if recurse_directive and fold_context:
raise GraphQLCompilationError(u'@recurse is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if output_source_context and not fold_directive:
raise GraphQLCompilationError(u'Found non-fold vertex field after the vertex marked '
u'output source! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_context and fold_directive:
raise GraphQLCompilationError(u'@fold is not allowed within a @optional traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_context and output_source_directive:
raise GraphQLCompilationError(u'@output_source is not allowed within a @optional '
u'traversal! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name)) | python | def validate_vertex_field_directive_in_context(parent_location, vertex_field_name,
directives, context):
"""Ensure that the specified vertex field directives are allowed in the current context."""
fold_directive = directives.get('fold', None)
optional_directive = directives.get('optional', None)
recurse_directive = directives.get('recurse', None)
output_source_directive = directives.get('output_source', None)
fold_context = 'fold' in context
optional_context = 'optional' in context
output_source_context = 'output_source' in context
if fold_directive and fold_context:
raise GraphQLCompilationError(u'@fold is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_directive and fold_context:
raise GraphQLCompilationError(u'@optional is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if output_source_directive and fold_context:
raise GraphQLCompilationError(u'@output_source is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if recurse_directive and fold_context:
raise GraphQLCompilationError(u'@recurse is not allowed within a @fold traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if output_source_context and not fold_directive:
raise GraphQLCompilationError(u'Found non-fold vertex field after the vertex marked '
u'output source! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_context and fold_directive:
raise GraphQLCompilationError(u'@fold is not allowed within a @optional traversal! '
u'Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name))
if optional_context and output_source_directive:
raise GraphQLCompilationError(u'@output_source is not allowed within a @optional '
u'traversal! Parent location: {}, vertex field name: {}'
.format(parent_location, vertex_field_name)) | [
"def",
"validate_vertex_field_directive_in_context",
"(",
"parent_location",
",",
"vertex_field_name",
",",
"directives",
",",
"context",
")",
":",
"fold_directive",
"=",
"directives",
".",
"get",
"(",
"'fold'",
",",
"None",
")",
"optional_directive",
"=",
"directives... | Ensure that the specified vertex field directives are allowed in the current context. | [
"Ensure",
"that",
"the",
"specified",
"vertex",
"field",
"directives",
"are",
"allowed",
"in",
"the",
"current",
"context",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L191-L231 | train | 227,897 |
def _safe_match_string(value):
    """Sanitize and represent a string argument in MATCH.

    Args:
        value: str (or, in py3, bytes containing valid UTF-8) to be represented in the query

    Returns:
        string, a MATCH-safe representation of the input, with all special characters
        (e.g. newlines, backslashes, quotes) replaced by escape sequences

    Raises:
        GraphQLInvalidArgumentError: if the value is not a string, or is a bytes object
            that does not contain valid UTF-8
    """
    if not isinstance(value, six.string_types):
        if isinstance(value, bytes):  # should only happen in py3
            try:
                value = value.decode('utf-8')
            except UnicodeDecodeError as e:
                # Surface malformed bytes as the argument-validation error callers expect,
                # rather than letting the raw UnicodeDecodeError escape.
                raise GraphQLInvalidArgumentError(u'Attempting to convert non-UTF-8 bytes '
                                                  u'into a string: {}'.format(e))
        else:
            raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: '
                                              u'{}'.format(value))

    # Using JSON encoding means that all unicode literals and special chars
    # (e.g. newlines and backslashes) are replaced by appropriate escape sequences.
    # JSON has the same escaping rules as MATCH / SQL, so no further escaping is necessary.
    return json.dumps(value)
"def",
"_safe_match_string",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"# should only happen in py3",
"value",
"=",
"value",
".",
"d... | Sanitize and represent a string argument in MATCH. | [
"Sanitize",
"and",
"represent",
"a",
"string",
"argument",
"in",
"MATCH",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L17-L29 | train | 227,898 |
def _safe_match_date_and_datetime(graphql_type, expected_python_types, value):
    """Represent date and datetime objects as MATCH strings."""
    # datetime.datetime subclasses datetime.date, yet the two must not be treated as
    # interchangeable here, so compare the exact runtime type instead of using isinstance().
    actual_type = type(value)
    if all(actual_type != candidate for candidate in expected_python_types):
        raise GraphQLInvalidArgumentError(u'Expected value to be exactly one of '
                                          u'python types {}, but was {}: '
                                          u'{}'.format(expected_python_types, actual_type, value))

    # The serialize() method of GraphQLDate / GraphQLDateTime emits the ISO-8601 text that
    # MATCH expects; after serialization the value is handled like any other string argument.
    try:
        serialized = graphql_type.serialize(value)
    except ValueError as e:
        raise GraphQLInvalidArgumentError(e)

    return _safe_match_string(serialized)
return _safe_match_string(serialized_value) | [
"def",
"_safe_match_date_and_datetime",
"(",
"graphql_type",
",",
"expected_python_types",
",",
"value",
")",
":",
"# Python datetime.datetime is a subclass of datetime.date,",
"# but in this case, the two are not interchangeable.",
"# Rather than using isinstance, we will therefore check fo... | Represent date and datetime objects as MATCH strings. | [
"Represent",
"date",
"and",
"datetime",
"objects",
"as",
"MATCH",
"strings",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L32-L50 | train | 227,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.