| repo | path | func_name | original_string | language | code | code_tokens | docstring | docstring_tokens | sha | url | partition | idx |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/match_formatting.py | _safe_match_list | def _safe_match_list(inner_type, argument_value):
"""Represent the list of "inner_type" objects in MATCH form."""
stripped_type = strip_non_null_from_type(inner_type)
if isinstance(stripped_type, GraphQLList):
raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, '
u'but inner type was {}: '
u'{}'.format(inner_type, argument_value))
if not isinstance(argument_value, list):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
u'{}'.format(argument_value))
components = (
_safe_match_argument(stripped_type, x)
for x in argument_value
)
return u'[' + u','.join(components) + u']' | python | def _safe_match_list(inner_type, argument_value):
"""Represent the list of "inner_type" objects in MATCH form."""
stripped_type = strip_non_null_from_type(inner_type)
if isinstance(stripped_type, GraphQLList):
raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, '
u'but inner type was {}: '
u'{}'.format(inner_type, argument_value))
if not isinstance(argument_value, list):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
u'{}'.format(argument_value))
components = (
_safe_match_argument(stripped_type, x)
for x in argument_value
)
return u'[' + u','.join(components) + u']' | [
"def",
"_safe_match_list",
"(",
"inner_type",
",",
"argument_value",
")",
":",
"stripped_type",
"=",
"strip_non_null_from_type",
"(",
"inner_type",
")",
"if",
"isinstance",
"(",
"stripped_type",
",",
"GraphQLList",
")",
":",
"raise",
"GraphQLInvalidArgumentError",
"("... | Represent the list of "inner_type" objects in MATCH form. | [
"Represent",
"the",
"list",
"of",
"inner_type",
"objects",
"in",
"MATCH",
"form",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L59-L75 | train | 227,900 |
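The function above maps each list element through a per-type formatter and joins the results into a MATCH list literal. A minimal standalone sketch of that pattern, using a simplified stand-in formatter rather than the compiler's `_safe_match_argument`:

```python
# Simplified per-element formatter; the real compiler dispatches on the
# GraphQL inner type rather than the Python type of the value.
def format_element(value):
    if isinstance(value, str):
        return u'"{}"'.format(value.replace(u'"', u'\\"'))
    return str(value)

def format_match_list(values):
    if not isinstance(values, list):
        raise ValueError(u'Attempting to represent a non-list as a list: {}'.format(values))
    components = (format_element(x) for x in values)
    return u'[' + u','.join(components) + u']'

print(format_match_list([1, 2, 3]))     # [1,2,3]
print(format_match_list([u'a', u'b']))  # ["a","b"]
```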
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/match_formatting.py | insert_arguments_into_match_query | def insert_arguments_into_match_query(compilation_result, arguments):
"""Insert the arguments into the compiled MATCH query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a MATCH query with inserted argument data
"""
if compilation_result.language != MATCH_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
argument_types = compilation_result.input_metadata
# The arguments are assumed to have already been validated against the query.
sanitized_arguments = {
key: _safe_match_argument(argument_types[key], value)
for key, value in six.iteritems(arguments)
}
return base_query.format(**sanitized_arguments) | python | def insert_arguments_into_match_query(compilation_result, arguments):
"""Insert the arguments into the compiled MATCH query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a MATCH query with inserted argument data
"""
if compilation_result.language != MATCH_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
argument_types = compilation_result.input_metadata
# The arguments are assumed to have already been validated against the query.
sanitized_arguments = {
key: _safe_match_argument(argument_types[key], value)
for key, value in six.iteritems(arguments)
}
return base_query.format(**sanitized_arguments) | [
"def",
"insert_arguments_into_match_query",
"(",
"compilation_result",
",",
"arguments",
")",
":",
"if",
"compilation_result",
".",
"language",
"!=",
"MATCH_LANGUAGE",
":",
"raise",
"AssertionError",
"(",
"u'Unexpected query output language: {}'",
".",
"format",
"(",
"com... | Insert the arguments into the compiled MATCH query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a MATCH query with inserted argument data | [
"Insert",
"the",
"arguments",
"into",
"the",
"compiled",
"MATCH",
"query",
"to",
"form",
"a",
"complete",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L120-L142 | train | 227,901 |
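A minimal sketch of the substitution step performed by `insert_arguments_into_match_query`: the compiled query is a template with `{parameter_name}` placeholders, and each argument is sanitized before `str.format` fills it in. The query text and the sanitizer below are invented for illustration only.

```python
# Placeholder sanitizer; the compiler's _safe_match_argument is GraphQL-type-aware.
def sanitize(value):
    return u'"{}"'.format(value) if isinstance(value, str) else str(value)

base_query = u'SELECT FROM Animal WHERE name = {animal_name} AND age >= {min_age}'
arguments = {'animal_name': u'Hedwig', 'min_age': 3}

sanitized_arguments = {key: sanitize(value) for key, value in arguments.items()}
print(base_query.format(**sanitized_arguments))
# SELECT FROM Animal WHERE name = "Hedwig" AND age >= 3
```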
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/metadata.py | SqlMetadata.get_table | def get_table(self, schema_type):
"""Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name."""
table_name = schema_type.lower()
if not self.has_table(table_name):
raise exceptions.GraphQLCompilationError(
'No Table found in SQLAlchemy metadata for table name "{}"'.format(table_name)
)
return self.table_name_to_table[table_name] | python | def get_table(self, schema_type):
"""Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name."""
table_name = schema_type.lower()
if not self.has_table(table_name):
raise exceptions.GraphQLCompilationError(
'No Table found in SQLAlchemy metadata for table name "{}"'.format(table_name)
)
return self.table_name_to_table[table_name] | [
"def",
"get_table",
"(",
"self",
",",
"schema_type",
")",
":",
"table_name",
"=",
"schema_type",
".",
"lower",
"(",
")",
"if",
"not",
"self",
".",
"has_table",
"(",
"table_name",
")",
":",
"raise",
"exceptions",
".",
"GraphQLCompilationError",
"(",
"'No Tabl... | Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name. | [
"Retrieve",
"a",
"SQLAlchemy",
"table",
"based",
"on",
"the",
"supplied",
"GraphQL",
"schema",
"type",
"name",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/metadata.py#L27-L34 | train | 227,902 |
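A hedged sketch of the lookup `get_table` performs, assuming SQLAlchemy is installed; the `animal` table and the `Animal` GraphQL type name are invented for the example. The GraphQL type name is lower-cased and used to find the matching table in the metadata.

```python
from sqlalchemy import Column, Integer, MetaData, String, Table

metadata = MetaData()
Table('animal', metadata,
      Column('id', Integer, primary_key=True),
      Column('name', String(40)))

# Case-insensitive lookup: GraphQL type name -> SQLAlchemy Table.
table_name_to_table = {name.lower(): table for name, table in metadata.tables.items()}

schema_type = 'Animal'
print(table_name_to_table[schema_type.lower()].name)  # animal
```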
kensho-technologies/graphql-compiler | graphql_compiler/compiler/match_query.py | _per_location_tuple_to_step | def _per_location_tuple_to_step(ir_tuple):
"""Construct a MatchStep from a tuple of its constituent blocks."""
root_block = ir_tuple[0]
if not isinstance(root_block, root_block_types):
raise AssertionError(u'Unexpected root block type for MatchStep: '
u'{} {}'.format(root_block, ir_tuple))
coerce_type_block = None
where_block = None
as_block = None
for block in ir_tuple[1:]:
if isinstance(block, CoerceType):
if coerce_type_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "class" clause: '
u'{} {} {}'.format(block, coerce_type_block, ir_tuple))
coerce_type_block = block
elif isinstance(block, MarkLocation):
if as_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "as" clause: '
u'{} {} {}'.format(block, as_block, ir_tuple))
as_block = block
elif isinstance(block, Filter):
if where_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "where" clause: '
u'{} {} {}'.format(block, as_block, ir_tuple))
# Filter always comes before MarkLocation in a given MatchStep.
if as_block is not None:
raise AssertionError(u'Unexpectedly found MarkLocation before Filter in '
u'MatchStep: {} {} {}'.format(block, where_block, ir_tuple))
where_block = block
else:
raise AssertionError(u'Unexpected block encountered: {} {}'.format(block, ir_tuple))
step = MatchStep(root_block=root_block,
coerce_type_block=coerce_type_block,
where_block=where_block,
as_block=as_block)
# MatchSteps with Backtrack as the root block should only contain MarkLocation,
# and not do filtering or type coercion.
if isinstance(root_block, Backtrack):
if where_block is not None or coerce_type_block is not None:
raise AssertionError(u'Unexpected blocks in Backtrack-based MatchStep: {}'.format(step))
return step | python | def _per_location_tuple_to_step(ir_tuple):
"""Construct a MatchStep from a tuple of its constituent blocks."""
root_block = ir_tuple[0]
if not isinstance(root_block, root_block_types):
raise AssertionError(u'Unexpected root block type for MatchStep: '
u'{} {}'.format(root_block, ir_tuple))
coerce_type_block = None
where_block = None
as_block = None
for block in ir_tuple[1:]:
if isinstance(block, CoerceType):
if coerce_type_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "class" clause: '
u'{} {} {}'.format(block, coerce_type_block, ir_tuple))
coerce_type_block = block
elif isinstance(block, MarkLocation):
if as_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "as" clause: '
u'{} {} {}'.format(block, as_block, ir_tuple))
as_block = block
elif isinstance(block, Filter):
if where_block is not None:
raise AssertionError(u'Unexpectedly found two blocks eligible for "where" clause: '
u'{} {} {}'.format(block, as_block, ir_tuple))
# Filter always comes before MarkLocation in a given MatchStep.
if as_block is not None:
raise AssertionError(u'Unexpectedly found MarkLocation before Filter in '
u'MatchStep: {} {} {}'.format(block, where_block, ir_tuple))
where_block = block
else:
raise AssertionError(u'Unexpected block encountered: {} {}'.format(block, ir_tuple))
step = MatchStep(root_block=root_block,
coerce_type_block=coerce_type_block,
where_block=where_block,
as_block=as_block)
# MatchSteps with Backtrack as the root block should only contain MarkLocation,
# and not do filtering or type coercion.
if isinstance(root_block, Backtrack):
if where_block is not None or coerce_type_block is not None:
raise AssertionError(u'Unexpected blocks in Backtrack-based MatchStep: {}'.format(step))
return step | [
"def",
"_per_location_tuple_to_step",
"(",
"ir_tuple",
")",
":",
"root_block",
"=",
"ir_tuple",
"[",
"0",
"]",
"if",
"not",
"isinstance",
"(",
"root_block",
",",
"root_block_types",
")",
":",
"raise",
"AssertionError",
"(",
"u'Unexpected root block type for MatchStep:... | Construct a MatchStep from a tuple of its constituent blocks. | [
"Construct",
"a",
"MatchStep",
"from",
"a",
"tuple",
"of",
"its",
"constituent",
"blocks",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L39-L85 | train | 227,903 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/match_query.py | _split_ir_into_match_steps | def _split_ir_into_match_steps(pruned_ir_blocks):
"""Split a list of IR blocks into per-location MATCH steps.
Args:
pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step.
Returns:
list of MatchStep namedtuples, each of which contains all basic blocks that correspond
to a single MATCH step.
"""
output = []
current_tuple = None
for block in pruned_ir_blocks:
if isinstance(block, OutputSource):
# OutputSource blocks do not require any MATCH code, and only serve to help
# optimizations and debugging. Simply omit them at this stage.
continue
elif isinstance(block, root_block_types):
if current_tuple is not None:
output.append(current_tuple)
current_tuple = (block,)
elif isinstance(block, (CoerceType, Filter, MarkLocation)):
current_tuple += (block,)
else:
raise AssertionError(u'Unexpected block type when converting to MATCH query: '
u'{} {}'.format(block, pruned_ir_blocks))
if current_tuple is None:
raise AssertionError(u'current_tuple was unexpectedly None: {}'.format(pruned_ir_blocks))
output.append(current_tuple)
return [_per_location_tuple_to_step(x) for x in output] | python | def _split_ir_into_match_steps(pruned_ir_blocks):
"""Split a list of IR blocks into per-location MATCH steps.
Args:
pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step.
Returns:
list of MatchStep namedtuples, each of which contains all basic blocks that correspond
to a single MATCH step.
"""
output = []
current_tuple = None
for block in pruned_ir_blocks:
if isinstance(block, OutputSource):
# OutputSource blocks do not require any MATCH code, and only serve to help
# optimizations and debugging. Simply omit them at this stage.
continue
elif isinstance(block, root_block_types):
if current_tuple is not None:
output.append(current_tuple)
current_tuple = (block,)
elif isinstance(block, (CoerceType, Filter, MarkLocation)):
current_tuple += (block,)
else:
raise AssertionError(u'Unexpected block type when converting to MATCH query: '
u'{} {}'.format(block, pruned_ir_blocks))
if current_tuple is None:
raise AssertionError(u'current_tuple was unexpectedly None: {}'.format(pruned_ir_blocks))
output.append(current_tuple)
return [_per_location_tuple_to_step(x) for x in output] | [
"def",
"_split_ir_into_match_steps",
"(",
"pruned_ir_blocks",
")",
":",
"output",
"=",
"[",
"]",
"current_tuple",
"=",
"None",
"for",
"block",
"in",
"pruned_ir_blocks",
":",
"if",
"isinstance",
"(",
"block",
",",
"OutputSource",
")",
":",
"# OutputSource blocks do... | Split a list of IR blocks into per-location MATCH steps.
Args:
pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step.
Returns:
list of MatchStep namedtuples, each of which contains all basic blocks that correspond
to a single MATCH step. | [
"Split",
"a",
"list",
"of",
"IR",
"blocks",
"into",
"per",
"-",
"location",
"MATCH",
"steps",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L88-L119 | train | 227,904 |
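This split and the traversal split shown in the next row follow the same grouping idiom: scan a flat list and open a new group whenever a designated "root" element appears, attaching everything else to the current group. A toy version over strings:

```python
def split_on_roots(items, is_root):
    output, current = [], None
    for item in items:
        if is_root(item):
            if current is not None:
                output.append(current)
            current = [item]
        elif current is not None:
            current.append(item)
        else:
            raise AssertionError('encountered {} before any root item'.format(item))
    if current is None:
        raise AssertionError('no root item found: {}'.format(items))
    output.append(current)
    return output

blocks = ['QueryRoot', 'Filter', 'MarkLocation', 'Traverse', 'MarkLocation', 'Backtrack']
print(split_on_roots(blocks, lambda b: b in {'QueryRoot', 'Traverse', 'Backtrack'}))
# [['QueryRoot', 'Filter', 'MarkLocation'], ['Traverse', 'MarkLocation'], ['Backtrack']]
```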
kensho-technologies/graphql-compiler | graphql_compiler/compiler/match_query.py | _split_match_steps_into_match_traversals | def _split_match_steps_into_match_traversals(match_steps):
"""Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal."""
output = []
current_list = None
for step in match_steps:
if isinstance(step.root_block, QueryRoot):
if current_list is not None:
output.append(current_list)
current_list = [step]
else:
current_list.append(step)
if current_list is None:
raise AssertionError(u'current_list was unexpectedly None: {}'.format(match_steps))
output.append(current_list)
return output | python | def _split_match_steps_into_match_traversals(match_steps):
"""Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal."""
output = []
current_list = None
for step in match_steps:
if isinstance(step.root_block, QueryRoot):
if current_list is not None:
output.append(current_list)
current_list = [step]
else:
current_list.append(step)
if current_list is None:
raise AssertionError(u'current_list was unexpectedly None: {}'.format(match_steps))
output.append(current_list)
return output | [
"def",
"_split_match_steps_into_match_traversals",
"(",
"match_steps",
")",
":",
"output",
"=",
"[",
"]",
"current_list",
"=",
"None",
"for",
"step",
"in",
"match_steps",
":",
"if",
"isinstance",
"(",
"step",
".",
"root_block",
",",
"QueryRoot",
")",
":",
"if"... | Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal. | [
"Split",
"a",
"list",
"of",
"MatchSteps",
"into",
"multiple",
"lists",
"each",
"denoting",
"a",
"single",
"MATCH",
"traversal",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L122-L138 | train | 227,905 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/match_query.py | convert_to_match_query | def convert_to_match_query(ir_blocks):
"""Convert the list of IR blocks into a MatchQuery object, for easier manipulation."""
output_block = ir_blocks[-1]
if not isinstance(output_block, ConstructResult):
raise AssertionError(u'Expected last IR block to be ConstructResult, found: '
u'{} {}'.format(output_block, ir_blocks))
ir_except_output = ir_blocks[:-1]
folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output)
# Extract WHERE Filter
global_operation_ir_blocks_tuple = _extract_global_operations(ir_except_output_and_folds)
global_operation_blocks, pruned_ir_blocks = global_operation_ir_blocks_tuple
if len(global_operation_blocks) > 1:
raise AssertionError(u'Received IR blocks with multiple global operation blocks. Only one '
u'is allowed: {} {}'.format(global_operation_blocks, ir_blocks))
if len(global_operation_blocks) == 1:
if not isinstance(global_operation_blocks[0], Filter):
raise AssertionError(u'Received non-Filter global operation block. {}'
.format(global_operation_blocks[0]))
where_block = global_operation_blocks[0]
else:
where_block = None
match_steps = _split_ir_into_match_steps(pruned_ir_blocks)
match_traversals = _split_match_steps_into_match_traversals(match_steps)
return MatchQuery(
match_traversals=match_traversals,
folds=folds,
output_block=output_block,
where_block=where_block,
) | python | def convert_to_match_query(ir_blocks):
"""Convert the list of IR blocks into a MatchQuery object, for easier manipulation."""
output_block = ir_blocks[-1]
if not isinstance(output_block, ConstructResult):
raise AssertionError(u'Expected last IR block to be ConstructResult, found: '
u'{} {}'.format(output_block, ir_blocks))
ir_except_output = ir_blocks[:-1]
folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output)
# Extract WHERE Filter
global_operation_ir_blocks_tuple = _extract_global_operations(ir_except_output_and_folds)
global_operation_blocks, pruned_ir_blocks = global_operation_ir_blocks_tuple
if len(global_operation_blocks) > 1:
raise AssertionError(u'Received IR blocks with multiple global operation blocks. Only one '
u'is allowed: {} {}'.format(global_operation_blocks, ir_blocks))
if len(global_operation_blocks) == 1:
if not isinstance(global_operation_blocks[0], Filter):
raise AssertionError(u'Received non-Filter global operation block. {}'
.format(global_operation_blocks[0]))
where_block = global_operation_blocks[0]
else:
where_block = None
match_steps = _split_ir_into_match_steps(pruned_ir_blocks)
match_traversals = _split_match_steps_into_match_traversals(match_steps)
return MatchQuery(
match_traversals=match_traversals,
folds=folds,
output_block=output_block,
where_block=where_block,
) | [
"def",
"convert_to_match_query",
"(",
"ir_blocks",
")",
":",
"output_block",
"=",
"ir_blocks",
"[",
"-",
"1",
"]",
"if",
"not",
"isinstance",
"(",
"output_block",
",",
"ConstructResult",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected last IR block to be Const... | Convert the list of IR blocks into a MatchQuery object, for easier manipulation. | [
"Convert",
"the",
"list",
"of",
"IR",
"blocks",
"into",
"a",
"MatchQuery",
"object",
"for",
"easier",
"manipulation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L178-L211 | train | 227,906 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/sql_formatting.py | insert_arguments_into_sql_query | def insert_arguments_into_sql_query(compilation_result, arguments):
"""Insert the arguments into the compiled SQL query to form a complete query.
Args:
compilation_result: CompilationResult, compilation result from the GraphQL compiler.
arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects.
Returns:
        SQLAlchemy Selectable, an executable SQL query with parameters bound.
"""
if compilation_result.language != SQL_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
return base_query.params(**arguments) | python | def insert_arguments_into_sql_query(compilation_result, arguments):
"""Insert the arguments into the compiled SQL query to form a complete query.
Args:
compilation_result: CompilationResult, compilation result from the GraphQL compiler.
arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects.
Returns:
        SQLAlchemy Selectable, an executable SQL query with parameters bound.
"""
if compilation_result.language != SQL_LANGUAGE:
raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))
base_query = compilation_result.query
return base_query.params(**arguments) | [
"def",
"insert_arguments_into_sql_query",
"(",
"compilation_result",
",",
"arguments",
")",
":",
"if",
"compilation_result",
".",
"language",
"!=",
"SQL_LANGUAGE",
":",
"raise",
"AssertionError",
"(",
"u'Unexpected query output language: {}'",
".",
"format",
"(",
"compila... | Insert the arguments into the compiled SQL query to form a complete query.
Args:
compilation_result: CompilationResult, compilation result from the GraphQL compiler.
arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects.
Returns:
        SQLAlchemy Selectable, an executable SQL query with parameters bound. | [
"Insert",
"the",
"arguments",
"into",
"the",
"compiled",
"SQL",
"query",
"to",
"form",
"a",
"complete",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/sql_formatting.py#L10-L23 | train | 227,907 |
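A hedged sketch of the binding step above using plain SQLAlchemy (1.x-style `select([...])`, matching the era of this code); the table and parameter name are invented. `.params()` returns a copy of the statement with the named bind parameter filled in.

```python
from sqlalchemy import Column, Integer, MetaData, String, Table, bindparam, select

metadata = MetaData()
animal = Table('animal', metadata,
               Column('id', Integer, primary_key=True),
               Column('name', String(40)))

base_query = select([animal.c.name]).where(animal.c.name == bindparam('animal_name'))
bound_query = base_query.params(animal_name=u'Hedwig')
print(bound_query.compile().params)  # {'animal_name': 'Hedwig'}
```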
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | convert_coerce_type_to_instanceof_filter | def convert_coerce_type_to_instanceof_filter(coerce_type_block):
"""Create an "INSTANCEOF" Filter block from a CoerceType block."""
coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class)
# INSTANCEOF requires the target class to be passed in as a string,
# so we make the target class a string literal.
new_predicate = BinaryComposition(
u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target))
return Filter(new_predicate) | python | def convert_coerce_type_to_instanceof_filter(coerce_type_block):
"""Create an "INSTANCEOF" Filter block from a CoerceType block."""
coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class)
# INSTANCEOF requires the target class to be passed in as a string,
# so we make the target class a string literal.
new_predicate = BinaryComposition(
u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target))
return Filter(new_predicate) | [
"def",
"convert_coerce_type_to_instanceof_filter",
"(",
"coerce_type_block",
")",
":",
"coerce_type_target",
"=",
"get_only_element_from_collection",
"(",
"coerce_type_block",
".",
"target_class",
")",
"# INSTANCEOF requires the target class to be passed in as a string,",
"# so we make... | Create an "INSTANCEOF" Filter block from a CoerceType block. | [
"Create",
"an",
"INSTANCEOF",
"Filter",
"block",
"from",
"a",
"CoerceType",
"block",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L15-L24 | train | 227,908 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | convert_coerce_type_and_add_to_where_block | def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block):
"""Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any."""
instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block)
if where_block:
# There was already a Filter block -- we'll merge the two predicates together.
return Filter(BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate))
else:
return instanceof_filter | python | def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block):
"""Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any."""
instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block)
if where_block:
# There was already a Filter block -- we'll merge the two predicates together.
return Filter(BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate))
else:
return instanceof_filter | [
"def",
"convert_coerce_type_and_add_to_where_block",
"(",
"coerce_type_block",
",",
"where_block",
")",
":",
"instanceof_filter",
"=",
"convert_coerce_type_to_instanceof_filter",
"(",
"coerce_type_block",
")",
"if",
"where_block",
":",
"# There was already a Filter block -- we'll m... | Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any. | [
"Create",
"an",
"INSTANCEOF",
"Filter",
"from",
"a",
"CoerceType",
"adding",
"to",
"an",
"existing",
"Filter",
"if",
"any",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L27-L35 | train | 227,909 |
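To make the two lowering helpers above concrete, here is a toy version that works on plain predicate strings instead of the compiler's IR objects; the string shapes are illustrative only.

```python
def instanceof_predicate(target_class):
    # A type coercion becomes an INSTANCEOF check against the current vertex.
    return u"(@this INSTANCEOF '{}')".format(target_class)

def add_to_where(coercion_predicate, existing_predicate=None):
    # If a filter already exists, AND the two predicates together.
    if existing_predicate is None:
        return coercion_predicate
    return u'({} && {})'.format(coercion_predicate, existing_predicate)

print(add_to_where(instanceof_predicate(u'Animal')))
# (@this INSTANCEOF 'Animal')
print(add_to_where(instanceof_predicate(u'Animal'), u'(name = "Hedwig")'))
# ((@this INSTANCEOF 'Animal') && (name = "Hedwig"))
```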
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | expression_list_to_conjunction | def expression_list_to_conjunction(expression_list):
"""Convert a list of expressions to an Expression that is the conjunction of all of them."""
if not isinstance(expression_list, list):
raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list))
if len(expression_list) == 0:
return TrueLiteral
if not isinstance(expression_list[0], Expression):
raise AssertionError(u'Non-Expression object {} found in expression_list'
.format(expression_list[0]))
if len(expression_list) == 1:
return expression_list[0]
else:
return BinaryComposition(u'&&',
expression_list_to_conjunction(expression_list[1:]),
expression_list[0]) | python | def expression_list_to_conjunction(expression_list):
"""Convert a list of expressions to an Expression that is the conjunction of all of them."""
if not isinstance(expression_list, list):
raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list))
if len(expression_list) == 0:
return TrueLiteral
if not isinstance(expression_list[0], Expression):
raise AssertionError(u'Non-Expression object {} found in expression_list'
.format(expression_list[0]))
if len(expression_list) == 1:
return expression_list[0]
else:
return BinaryComposition(u'&&',
expression_list_to_conjunction(expression_list[1:]),
expression_list[0]) | [
"def",
"expression_list_to_conjunction",
"(",
"expression_list",
")",
":",
"if",
"not",
"isinstance",
"(",
"expression_list",
",",
"list",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected `list`, Received {}.'",
".",
"format",
"(",
"expression_list",
")",
")",
... | Convert a list of expressions to an Expression that is the conjunction of all of them. | [
"Convert",
"a",
"list",
"of",
"expressions",
"to",
"an",
"Expression",
"that",
"is",
"the",
"conjunction",
"of",
"all",
"of",
"them",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L38-L54 | train | 227,910 |
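The same recursion written over plain strings, to show the shape of the result: an empty list collapses to a true literal, a single expression is returned unchanged, and longer lists fold into nested `&&` compositions with the list head as the right operand.

```python
def conjunction(expression_list):
    if len(expression_list) == 0:
        return u'true'
    if len(expression_list) == 1:
        return expression_list[0]
    return u'({} && {})'.format(conjunction(expression_list[1:]), expression_list[0])

print(conjunction([]))                  # true
print(conjunction([u'a']))              # a
print(conjunction([u'a', u'b', u'c']))  # ((c && b) && a)
```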
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | construct_where_filter_predicate | def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
"""Return an Expression that is True if and only if each simple optional filter is True.
Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
Return an Expression that evaluates to True if and only if *all* of the aforementioned filters
evaluate to True (conjunction).
Args:
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
containing keys
                                   - 'inner_location_name': Location object corresponding to the
unique MarkLocation present within a
simple @optional (one that does not
                                                            expand vertex fields) scope
- 'edge_field': string representing the optional edge being
traversed
where simple_optional_root_to_inner_location is the location
preceding the @optional scope
Returns:
a new Expression object
"""
inner_location_name_to_where_filter = {}
for root_location, root_info_dict in six.iteritems(simple_optional_root_info):
inner_location_name = root_info_dict['inner_location_name']
edge_field = root_info_dict['edge_field']
optional_edge_location = root_location.navigate_to_field(edge_field)
optional_edge_where_filter = _filter_orientdb_simple_optional_edge(
query_metadata_table, optional_edge_location, inner_location_name)
inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter
# Sort expressions by inner_location_name to obtain deterministic order
where_filter_expressions = [
inner_location_name_to_where_filter[key]
for key in sorted(inner_location_name_to_where_filter.keys())
]
return expression_list_to_conjunction(where_filter_expressions) | python | def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info):
"""Return an Expression that is True if and only if each simple optional filter is True.
Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
Return an Expression that evaluates to True if and only if *all* of the aforementioned filters
evaluate to True (conjunction).
Args:
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
containing keys
                                   - 'inner_location_name': Location object corresponding to the
unique MarkLocation present within a
simple @optional (one that does not
                                                            expand vertex fields) scope
- 'edge_field': string representing the optional edge being
traversed
where simple_optional_root_to_inner_location is the location
preceding the @optional scope
Returns:
a new Expression object
"""
inner_location_name_to_where_filter = {}
for root_location, root_info_dict in six.iteritems(simple_optional_root_info):
inner_location_name = root_info_dict['inner_location_name']
edge_field = root_info_dict['edge_field']
optional_edge_location = root_location.navigate_to_field(edge_field)
optional_edge_where_filter = _filter_orientdb_simple_optional_edge(
query_metadata_table, optional_edge_location, inner_location_name)
inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter
# Sort expressions by inner_location_name to obtain deterministic order
where_filter_expressions = [
inner_location_name_to_where_filter[key]
for key in sorted(inner_location_name_to_where_filter.keys())
]
return expression_list_to_conjunction(where_filter_expressions) | [
"def",
"construct_where_filter_predicate",
"(",
"query_metadata_table",
",",
"simple_optional_root_info",
")",
":",
"inner_location_name_to_where_filter",
"=",
"{",
"}",
"for",
"root_location",
",",
"root_info_dict",
"in",
"six",
".",
"iteritems",
"(",
"simple_optional_root... | Return an Expression that is True if and only if each simple optional filter is True.
Construct filters for each simple optional, that are True if and only if `edge_field` does
not exist in the `simple_optional_root_location` OR the `inner_location` is not defined.
Return an Expression that evaluates to True if and only if *all* of the aforementioned filters
evaluate to True (conjunction).
Args:
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
simple_optional_root_info: dict mapping from simple_optional_root_location -> dict
containing keys
                                   - 'inner_location_name': Location object corresponding to the
unique MarkLocation present within a
simple @optional (one that does not
                                                            expand vertex fields) scope
- 'edge_field': string representing the optional edge being
traversed
where simple_optional_root_to_inner_location is the location
preceding the @optional scope
Returns:
a new Expression object | [
"Return",
"an",
"Expression",
"that",
"is",
"True",
"if",
"and",
"only",
"if",
"each",
"simple",
"optional",
"filter",
"is",
"True",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L192-L233 | train | 227,911 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | construct_optional_traversal_tree | def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots):
"""Return a tree of complex optional root locations.
Args:
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional Traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
OptionalTraversalTree object representing the tree of complex optional roots
"""
tree = OptionalTraversalTree(complex_optional_roots)
for optional_root_locations_stack in six.itervalues(location_to_optional_roots):
tree.insert(list(optional_root_locations_stack))
return tree | python | def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots):
"""Return a tree of complex optional root locations.
Args:
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional Traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
OptionalTraversalTree object representing the tree of complex optional roots
"""
tree = OptionalTraversalTree(complex_optional_roots)
for optional_root_locations_stack in six.itervalues(location_to_optional_roots):
tree.insert(list(optional_root_locations_stack))
return tree | [
"def",
"construct_optional_traversal_tree",
"(",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"tree",
"=",
"OptionalTraversalTree",
"(",
"complex_optional_roots",
")",
"for",
"optional_root_locations_stack",
"in",
"six",
".",
"itervalues",
"(",
"lo... | Return a tree of complex optional root locations.
Args:
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional Traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
OptionalTraversalTree object representing the tree of complex optional roots | [
"Return",
"a",
"tree",
"of",
"complex",
"optional",
"root",
"locations",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L337-L355 | train | 227,912 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | BetweenClause.validate | def validate(self):
"""Validate that the Between Expression is correctly representable."""
if not isinstance(self.field, LocalField):
raise TypeError(u'Expected LocalField field, got: {} {}'.format(
type(self.field).__name__, self.field))
if not isinstance(self.lower_bound, Expression):
raise TypeError(u'Expected Expression lower_bound, got: {} {}'.format(
type(self.lower_bound).__name__, self.lower_bound))
if not isinstance(self.upper_bound, Expression):
raise TypeError(u'Expected Expression upper_bound, got: {} {}'.format(
type(self.upper_bound).__name__, self.upper_bound)) | python | def validate(self):
"""Validate that the Between Expression is correctly representable."""
if not isinstance(self.field, LocalField):
raise TypeError(u'Expected LocalField field, got: {} {}'.format(
type(self.field).__name__, self.field))
if not isinstance(self.lower_bound, Expression):
raise TypeError(u'Expected Expression lower_bound, got: {} {}'.format(
type(self.lower_bound).__name__, self.lower_bound))
if not isinstance(self.upper_bound, Expression):
raise TypeError(u'Expected Expression upper_bound, got: {} {}'.format(
type(self.upper_bound).__name__, self.upper_bound)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"field",
",",
"LocalField",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected LocalField field, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"field",
")",
... | Validate that the Between Expression is correctly representable. | [
"Validate",
"that",
"the",
"Between",
"Expression",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L77-L89 | train | 227,913 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | BetweenClause.to_match | def to_match(self):
"""Return a unicode object with the MATCH representation of this BetweenClause."""
template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'
return template.format(
field_name=self.field.to_match(),
lower_bound=self.lower_bound.to_match(),
upper_bound=self.upper_bound.to_match()) | python | def to_match(self):
"""Return a unicode object with the MATCH representation of this BetweenClause."""
template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'
return template.format(
field_name=self.field.to_match(),
lower_bound=self.lower_bound.to_match(),
upper_bound=self.upper_bound.to_match()) | [
"def",
"to_match",
"(",
"self",
")",
":",
"template",
"=",
"u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'",
"return",
"template",
".",
"format",
"(",
"field_name",
"=",
"self",
".",
"field",
".",
"to_match",
"(",
")",
",",
"lower_bound",
"=",
"self",
... | Return a unicode object with the MATCH representation of this BetweenClause. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"MATCH",
"representation",
"of",
"this",
"BetweenClause",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L101-L107 | train | 227,914 |
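For reference, the shape of the emitted clause with illustrative field and bound names; the `{...}` placeholders stand for runtime parameters that the argument-insertion step shown earlier would later fill in.

```python
template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'
print(template.format(field_name=u'net_worth',
                      lower_bound=u'{min_worth}',
                      upper_bound=u'{max_worth}'))
# (net_worth BETWEEN {min_worth} AND {max_worth})
```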
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/utils.py | OptionalTraversalTree.insert | def insert(self, optional_root_locations_path):
"""Insert a path of optional Locations into the tree.
Each OptionalTraversalTree object contains child Location objects as keys mapping to
other OptionalTraversalTree objects.
Args:
optional_root_locations_path: list of optional root Locations all except the last
of which must be present in complex_optional_roots
"""
encountered_simple_optional = False
parent_location = self._root_location
for optional_root_location in optional_root_locations_path:
if encountered_simple_optional:
                raise AssertionError(u'Encountered simple optional root location {} in path, but '
u'further locations are present. This should not happen: {}'
.format(optional_root_location, optional_root_locations_path))
if optional_root_location not in self._location_to_children:
# Simple optionals are ignored.
# There should be no complex optionals after a simple optional.
encountered_simple_optional = True
else:
self._location_to_children[parent_location].add(optional_root_location)
parent_location = optional_root_location | python | def insert(self, optional_root_locations_path):
"""Insert a path of optional Locations into the tree.
Each OptionalTraversalTree object contains child Location objects as keys mapping to
other OptionalTraversalTree objects.
Args:
optional_root_locations_path: list of optional root Locations all except the last
of which must be present in complex_optional_roots
"""
encountered_simple_optional = False
parent_location = self._root_location
for optional_root_location in optional_root_locations_path:
if encountered_simple_optional:
                raise AssertionError(u'Encountered simple optional root location {} in path, but '
u'further locations are present. This should not happen: {}'
.format(optional_root_location, optional_root_locations_path))
if optional_root_location not in self._location_to_children:
# Simple optionals are ignored.
# There should be no complex optionals after a simple optional.
encountered_simple_optional = True
else:
self._location_to_children[parent_location].add(optional_root_location)
parent_location = optional_root_location | [
"def",
"insert",
"(",
"self",
",",
"optional_root_locations_path",
")",
":",
"encountered_simple_optional",
"=",
"False",
"parent_location",
"=",
"self",
".",
"_root_location",
"for",
"optional_root_location",
"in",
"optional_root_locations_path",
":",
"if",
"encountered_... | Insert a path of optional Locations into the tree.
Each OptionalTraversalTree object contains child Location objects as keys mapping to
other OptionalTraversalTree objects.
Args:
optional_root_locations_path: list of optional root Locations all except the last
of which must be present in complex_optional_roots | [
"Insert",
"a",
"path",
"of",
"optional",
"Locations",
"into",
"the",
"tree",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L261-L285 | train | 227,915 |
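A minimal sketch of the insert logic with strings standing in for Location objects: only roots listed as complex optionals get tree nodes, and a path stops contributing at the first simple-optional root it encounters.

```python
complex_optional_roots = [u'a', u'b', u'c']
# None plays the role of the implicit tree root.
location_to_children = {root: set() for root in complex_optional_roots}
location_to_children[None] = set()

def insert_path(optional_root_locations_path):
    parent = None
    for root in optional_root_locations_path:
        if root not in location_to_children:
            break  # simple optional: ignored, and nothing may legally follow it
        location_to_children[parent].add(root)
        parent = root

insert_path([u'a', u'b'])
insert_path([u'a', u'c'])
insert_path([u'a', u'simple_optional'])  # 'simple_optional' is not a complex root

print(location_to_children[None])  # {'a'}
print(location_to_children[u'a'])  # {'b', 'c'} (set print order may vary)
```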
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | emit_code_from_ir | def emit_code_from_ir(sql_query_tree, compiler_metadata):
"""Return a SQLAlchemy Query from a passed SqlQueryTree.
Args:
sql_query_tree: SqlQueryTree, tree representation of the query to emit.
compiler_metadata: SqlMetadata, SQLAlchemy specific metadata.
Returns:
SQLAlchemy Query
"""
context = CompilationContext(
query_path_to_selectable=dict(),
query_path_to_location_info=sql_query_tree.query_path_to_location_info,
query_path_to_output_fields=sql_query_tree.query_path_to_output_fields,
query_path_to_filters=sql_query_tree.query_path_to_filters,
query_path_to_node=sql_query_tree.query_path_to_node,
compiler_metadata=compiler_metadata,
)
return _query_tree_to_query(sql_query_tree.root, context) | python | def emit_code_from_ir(sql_query_tree, compiler_metadata):
"""Return a SQLAlchemy Query from a passed SqlQueryTree.
Args:
sql_query_tree: SqlQueryTree, tree representation of the query to emit.
compiler_metadata: SqlMetadata, SQLAlchemy specific metadata.
Returns:
SQLAlchemy Query
"""
context = CompilationContext(
query_path_to_selectable=dict(),
query_path_to_location_info=sql_query_tree.query_path_to_location_info,
query_path_to_output_fields=sql_query_tree.query_path_to_output_fields,
query_path_to_filters=sql_query_tree.query_path_to_filters,
query_path_to_node=sql_query_tree.query_path_to_node,
compiler_metadata=compiler_metadata,
)
return _query_tree_to_query(sql_query_tree.root, context) | [
"def",
"emit_code_from_ir",
"(",
"sql_query_tree",
",",
"compiler_metadata",
")",
":",
"context",
"=",
"CompilationContext",
"(",
"query_path_to_selectable",
"=",
"dict",
"(",
")",
",",
"query_path_to_location_info",
"=",
"sql_query_tree",
".",
"query_path_to_location_inf... | Return a SQLAlchemy Query from a passed SqlQueryTree.
Args:
sql_query_tree: SqlQueryTree, tree representation of the query to emit.
compiler_metadata: SqlMetadata, SQLAlchemy specific metadata.
Returns:
SQLAlchemy Query | [
"Return",
"a",
"SQLAlchemy",
"Query",
"from",
"a",
"passed",
"SqlQueryTree",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L39-L58 | train | 227,916 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _create_table_and_update_context | def _create_table_and_update_context(node, context):
"""Create an aliased table for a SqlNode.
Updates the relevant Selectable global context.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Table, the newly aliased SQLAlchemy table.
"""
schema_type_name = sql_context_helpers.get_schema_type_name(node, context)
table = context.compiler_metadata.get_table(schema_type_name).alias()
context.query_path_to_selectable[node.query_path] = table
return table | python | def _create_table_and_update_context(node, context):
"""Create an aliased table for a SqlNode.
Updates the relevant Selectable global context.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Table, the newly aliased SQLAlchemy table.
"""
schema_type_name = sql_context_helpers.get_schema_type_name(node, context)
table = context.compiler_metadata.get_table(schema_type_name).alias()
context.query_path_to_selectable[node.query_path] = table
return table | [
"def",
"_create_table_and_update_context",
"(",
"node",
",",
"context",
")",
":",
"schema_type_name",
"=",
"sql_context_helpers",
".",
"get_schema_type_name",
"(",
"node",
",",
"context",
")",
"table",
"=",
"context",
".",
"compiler_metadata",
".",
"get_table",
"(",... | Create an aliased table for a SqlNode.
Updates the relevant Selectable global context.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Table, the newly aliased SQLAlchemy table. | [
"Create",
"an",
"aliased",
"table",
"for",
"a",
"SqlNode",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L75-L90 | train | 227,917 |
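A hedged illustration of why the alias matters: the same underlying table can back two different GraphQL scopes in one query, so each scope needs its own alias. The table and query are invented, and `select([...])` is the 1.x-style call used by this codebase.

```python
from sqlalchemy import Column, Integer, MetaData, Table, select

metadata = MetaData()
animal = Table('animal', metadata,
               Column('id', Integer, primary_key=True),
               Column('parent_id', Integer))

parent_scope = animal.alias()
child_scope = animal.alias()
query = (select([parent_scope.c.id, child_scope.c.id])
         .where(child_scope.c.parent_id == parent_scope.c.id))
print(query)  # both aliases render under distinct generated names, e.g. animal_1 / animal_2
```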
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _create_query | def _create_query(node, context):
"""Create a query from a SqlNode.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Selectable, selectable of the generated query.
"""
visited_nodes = [node]
output_columns = _get_output_columns(visited_nodes, context)
filters = _get_filters(visited_nodes, context)
selectable = sql_context_helpers.get_node_selectable(node, context)
query = select(output_columns).select_from(selectable).where(and_(*filters))
return query | python | def _create_query(node, context):
"""Create a query from a SqlNode.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Selectable, selectable of the generated query.
"""
visited_nodes = [node]
output_columns = _get_output_columns(visited_nodes, context)
filters = _get_filters(visited_nodes, context)
selectable = sql_context_helpers.get_node_selectable(node, context)
query = select(output_columns).select_from(selectable).where(and_(*filters))
return query | [
"def",
"_create_query",
"(",
"node",
",",
"context",
")",
":",
"visited_nodes",
"=",
"[",
"node",
"]",
"output_columns",
"=",
"_get_output_columns",
"(",
"visited_nodes",
",",
"context",
")",
"filters",
"=",
"_get_filters",
"(",
"visited_nodes",
",",
"context",
... | Create a query from a SqlNode.
Args:
node: SqlNode, the current node.
context: CompilationContext, global compilation state and metadata.
Returns:
Selectable, selectable of the generated query. | [
"Create",
"a",
"query",
"from",
"a",
"SqlNode",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L93-L108 | train | 227,918 |
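A hedged end-to-end sketch of the assembly step, with an invented table and filters; it mirrors the `select(output_columns).select_from(selectable).where(and_(*filters))` call shown in the row above.

```python
from sqlalchemy import Column, Integer, MetaData, String, Table, and_, select

metadata = MetaData()
animal = Table('animal', metadata,
               Column('id', Integer, primary_key=True),
               Column('name', String(40)),
               Column('age', Integer))

selectable = animal.alias()
output_columns = [selectable.c.name.label('animal_name')]
filters = [selectable.c.age >= 3, selectable.c.name != u'unnamed']

query = select(output_columns).select_from(selectable).where(and_(*filters))
print(query)
```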
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _get_output_columns | def _get_output_columns(nodes, context):
"""Get the output columns for a list of SqlNodes.
Args:
nodes: List[SqlNode], the nodes to get output columns from.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Column], list of SqlAlchemy Columns to output for this query.
"""
columns = []
for node in nodes:
for sql_output in sql_context_helpers.get_outputs(node, context):
field_name = sql_output.field_name
column = sql_context_helpers.get_column(field_name, node, context)
column = column.label(sql_output.output_name)
columns.append(column)
return columns | python | def _get_output_columns(nodes, context):
"""Get the output columns for a list of SqlNodes.
Args:
nodes: List[SqlNode], the nodes to get output columns from.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Column], list of SqlAlchemy Columns to output for this query.
"""
columns = []
for node in nodes:
for sql_output in sql_context_helpers.get_outputs(node, context):
field_name = sql_output.field_name
column = sql_context_helpers.get_column(field_name, node, context)
column = column.label(sql_output.output_name)
columns.append(column)
return columns | [
"def",
"_get_output_columns",
"(",
"nodes",
",",
"context",
")",
":",
"columns",
"=",
"[",
"]",
"for",
"node",
"in",
"nodes",
":",
"for",
"sql_output",
"in",
"sql_context_helpers",
".",
"get_outputs",
"(",
"node",
",",
"context",
")",
":",
"field_name",
"=... | Get the output columns for a list of SqlNodes.
Args:
nodes: List[SqlNode], the nodes to get output columns from.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Column], list of SqlAlchemy Columns to output for this query. | [
"Get",
"the",
"output",
"columns",
"for",
"a",
"list",
"of",
"SqlNodes",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L111-L128 | train | 227,919 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _get_filters | def _get_filters(nodes, context):
"""Get filters to apply to a list of SqlNodes.
Args:
nodes: List[SqlNode], the SqlNodes to get filters for.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Expression], list of SQLAlchemy expressions.
"""
filters = []
for node in nodes:
for filter_block in sql_context_helpers.get_filters(node, context):
filter_sql_expression = _transform_filter_to_sql(filter_block, node, context)
filters.append(filter_sql_expression)
return filters | python | def _get_filters(nodes, context):
"""Get filters to apply to a list of SqlNodes.
Args:
nodes: List[SqlNode], the SqlNodes to get filters for.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Expression], list of SQLAlchemy expressions.
"""
filters = []
for node in nodes:
for filter_block in sql_context_helpers.get_filters(node, context):
filter_sql_expression = _transform_filter_to_sql(filter_block, node, context)
filters.append(filter_sql_expression)
return filters | [
"def",
"_get_filters",
"(",
"nodes",
",",
"context",
")",
":",
"filters",
"=",
"[",
"]",
"for",
"node",
"in",
"nodes",
":",
"for",
"filter_block",
"in",
"sql_context_helpers",
".",
"get_filters",
"(",
"node",
",",
"context",
")",
":",
"filter_sql_expression"... | Get filters to apply to a list of SqlNodes.
Args:
nodes: List[SqlNode], the SqlNodes to get filters for.
context: CompilationContext, global compilation state and metadata.
Returns:
List[Expression], list of SQLAlchemy expressions. | [
"Get",
"filters",
"to",
"apply",
"to",
"a",
"list",
"of",
"SqlNodes",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L131-L146 | train | 227,920 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _transform_filter_to_sql | def _transform_filter_to_sql(filter_block, node, context):
"""Transform a Filter block to its corresponding SQLAlchemy expression.
Args:
filter_block: Filter, the Filter block to transform.
node: SqlNode, the node Filter block applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression equivalent to the Filter.predicate expression.
"""
expression = filter_block.predicate
return _expression_to_sql(expression, node, context) | python | def _transform_filter_to_sql(filter_block, node, context):
"""Transform a Filter block to its corresponding SQLAlchemy expression.
Args:
filter_block: Filter, the Filter block to transform.
node: SqlNode, the node Filter block applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression equivalent to the Filter.predicate expression.
"""
expression = filter_block.predicate
return _expression_to_sql(expression, node, context) | [
"def",
"_transform_filter_to_sql",
"(",
"filter_block",
",",
"node",
",",
"context",
")",
":",
"expression",
"=",
"filter_block",
".",
"predicate",
"return",
"_expression_to_sql",
"(",
"expression",
",",
"node",
",",
"context",
")"
] | Transform a Filter block to its corresponding SQLAlchemy expression.
Args:
filter_block: Filter, the Filter block to transform.
node: SqlNode, the node Filter block applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression equivalent to the Filter.predicate expression. | [
"Transform",
"a",
"Filter",
"block",
"to",
"its",
"corresponding",
"SQLAlchemy",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L149-L161 | train | 227,921 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _expression_to_sql | def _expression_to_sql(expression, node, context):
"""Recursively transform a Filter block predicate to its SQLAlchemy expression representation.
Args:
expression: expression, the compiler expression to transform.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy Expression equivalent to the passed compiler expression.
"""
_expression_transformers = {
expressions.LocalField: _transform_local_field_to_expression,
expressions.Variable: _transform_variable_to_expression,
expressions.Literal: _transform_literal_to_expression,
expressions.BinaryComposition: _transform_binary_composition_to_expression,
}
expression_type = type(expression)
if expression_type not in _expression_transformers:
raise NotImplementedError(
u'Unsupported compiler expression "{}" of type "{}" cannot be converted to SQL '
u'expression.'.format(expression, type(expression)))
return _expression_transformers[expression_type](expression, node, context) | python | def _expression_to_sql(expression, node, context):
"""Recursively transform a Filter block predicate to its SQLAlchemy expression representation.
Args:
expression: expression, the compiler expression to transform.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy Expression equivalent to the passed compiler expression.
"""
_expression_transformers = {
expressions.LocalField: _transform_local_field_to_expression,
expressions.Variable: _transform_variable_to_expression,
expressions.Literal: _transform_literal_to_expression,
expressions.BinaryComposition: _transform_binary_composition_to_expression,
}
expression_type = type(expression)
if expression_type not in _expression_transformers:
raise NotImplementedError(
u'Unsupported compiler expression "{}" of type "{}" cannot be converted to SQL '
u'expression.'.format(expression, type(expression)))
return _expression_transformers[expression_type](expression, node, context) | [
"def",
"_expression_to_sql",
"(",
"expression",
",",
"node",
",",
"context",
")",
":",
"_expression_transformers",
"=",
"{",
"expressions",
".",
"LocalField",
":",
"_transform_local_field_to_expression",
",",
"expressions",
".",
"Variable",
":",
"_transform_variable_to_... | Recursively transform a Filter block predicate to its SQLAlchemy expression representation.
Args:
expression: expression, the compiler expression to transform.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy Expression equivalent to the passed compiler expression. | [
"Recursively",
"transform",
"a",
"Filter",
"block",
"predicate",
"to",
"its",
"SQLAlchemy",
"expression",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L164-L186 | train | 227,922 |
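The dispatch table above maps each compiler expression class to a dedicated transformer and raises NotImplementedError for anything it does not recognize. A minimal standalone sketch of the same dispatch-and-recurse pattern, using toy expression classes that are illustrative only (they are not graphql-compiler's IR types):

class Literal(object):
    def __init__(self, value):
        self.value = value

class Variable(object):
    def __init__(self, name):
        self.name = name

def _literal_to_sql(expression):
    # Literals render directly as their value.
    return repr(expression.value)

def _variable_to_sql(expression):
    # Variables render as named placeholders.
    return ':{}'.format(expression.name)

_TRANSFORMERS = {
    Literal: _literal_to_sql,
    Variable: _variable_to_sql,
}

def to_sql(expression):
    transformer = _TRANSFORMERS.get(type(expression))
    if transformer is None:
        raise NotImplementedError(u'Unsupported expression type: {}'.format(type(expression)))
    return transformer(expression)

print(to_sql(Literal(5)))         # 5
print(to_sql(Variable('limit')))  # :limit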
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _transform_binary_composition_to_expression | def _transform_binary_composition_to_expression(expression, node, context):
"""Transform a BinaryComposition compiler expression into a SQLAlchemy expression.
Recursively calls _expression_to_sql to convert its left and right sub-expressions.
Args:
expression: expression, BinaryComposition compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
if expression.operator not in constants.SUPPORTED_OPERATORS:
raise NotImplementedError(
u'Filter operation "{}" is not supported by the SQL backend.'.format(
expression.operator))
sql_operator = constants.SUPPORTED_OPERATORS[expression.operator]
left = _expression_to_sql(expression.left, node, context)
right = _expression_to_sql(expression.right, node, context)
if sql_operator.cardinality == constants.CARDINALITY_UNARY:
left, right = _get_column_and_bindparam(left, right, sql_operator)
clause = getattr(left, sql_operator.name)(right)
return clause
elif sql_operator.cardinality == constants.CARDINALITY_BINARY:
clause = getattr(sql_expressions, sql_operator.name)(left, right)
return clause
elif sql_operator.cardinality == constants.CARDINALITY_LIST_VALUED:
left, right = _get_column_and_bindparam(left, right, sql_operator)
# ensure that SQLAlchemy treats the right bind parameter as list valued
right.expanding = True
clause = getattr(left, sql_operator.name)(right)
return clause
raise AssertionError(u'Unreachable, operator cardinality {} for compiler expression {} is '
u'unknown'.format(sql_operator.cardinality, expression)) | python | def _transform_binary_composition_to_expression(expression, node, context):
"""Transform a BinaryComposition compiler expression into a SQLAlchemy expression.
Recursively calls _expression_to_sql to convert its left and right sub-expressions.
Args:
expression: expression, BinaryComposition compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
if expression.operator not in constants.SUPPORTED_OPERATORS:
raise NotImplementedError(
u'Filter operation "{}" is not supported by the SQL backend.'.format(
expression.operator))
sql_operator = constants.SUPPORTED_OPERATORS[expression.operator]
left = _expression_to_sql(expression.left, node, context)
right = _expression_to_sql(expression.right, node, context)
if sql_operator.cardinality == constants.CARDINALITY_UNARY:
left, right = _get_column_and_bindparam(left, right, sql_operator)
clause = getattr(left, sql_operator.name)(right)
return clause
elif sql_operator.cardinality == constants.CARDINALITY_BINARY:
clause = getattr(sql_expressions, sql_operator.name)(left, right)
return clause
elif sql_operator.cardinality == constants.CARDINALITY_LIST_VALUED:
left, right = _get_column_and_bindparam(left, right, sql_operator)
# ensure that SQLAlchemy treats the right bind parameter as list valued
right.expanding = True
clause = getattr(left, sql_operator.name)(right)
return clause
raise AssertionError(u'Unreachable, operator cardinality {} for compiler expression {} is '
u'unknown'.format(sql_operator.cardinality, expression)) | [
"def",
"_transform_binary_composition_to_expression",
"(",
"expression",
",",
"node",
",",
"context",
")",
":",
"if",
"expression",
".",
"operator",
"not",
"in",
"constants",
".",
"SUPPORTED_OPERATORS",
":",
"raise",
"NotImplementedError",
"(",
"u'Filter operation \"{}\... | Transform a BinaryComposition compiler expression into a SQLAlchemy expression.
Recursively calls _expression_to_sql to convert its left and right sub-expressions.
Args:
expression: expression, BinaryComposition compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression. | [
"Transform",
"a",
"BinaryComposition",
"compiler",
"expression",
"into",
"a",
"SQLAlchemy",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L189-L223 | train | 227,923 |
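The three cardinality branches above correspond to three different call shapes in SQLAlchemy: a method on the column (unary), a free-standing function over two clauses (binary), and a column method whose bind parameter must expand to a list (list-valued). A small sketch of those three shapes using plain SQLAlchemy core, assuming SQLAlchemy is installed; the column and parameter names are illustrative:

from sqlalchemy import and_, bindparam, column

name_col = column('name')
age_col = column('age')

# Unary cardinality: the operator is a method looked up on the column, e.g. __eq__.
name_filter = getattr(name_col, '__eq__')(bindparam('name'))

# Binary cardinality: the operator is a standalone function over two clauses, e.g. and_.
combined_filter = and_(name_filter, getattr(age_col, '__gt__')(bindparam('min_age')))

# List-valued cardinality: the bind parameter is marked as expanding so it accepts a list.
in_filter = name_col.in_(bindparam('acceptable_names', expanding=True))

print(combined_filter)
print(in_filter)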
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _transform_variable_to_expression | def _transform_variable_to_expression(expression, node, context):
"""Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
variable_name = expression.variable_name
if not variable_name.startswith(u'$'):
raise AssertionError(u'Unexpectedly received variable name {} that is not '
u'prefixed with "$"'.format(variable_name))
return bindparam(variable_name[1:]) | python | def _transform_variable_to_expression(expression, node, context):
"""Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
variable_name = expression.variable_name
if not variable_name.startswith(u'$'):
raise AssertionError(u'Unexpectedly received variable name {} that is not '
u'prefixed with "$"'.format(variable_name))
return bindparam(variable_name[1:]) | [
"def",
"_transform_variable_to_expression",
"(",
"expression",
",",
"node",
",",
"context",
")",
":",
"variable_name",
"=",
"expression",
".",
"variable_name",
"if",
"not",
"variable_name",
".",
"startswith",
"(",
"u'$'",
")",
":",
"raise",
"AssertionError",
"(",
... | Transform a Variable compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, Variable compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression. | [
"Transform",
"a",
"Variable",
"compiler",
"expression",
"into",
"its",
"SQLAlchemy",
"expression",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L255-L270 | train | 227,924 |
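Stripping the "$" prefix means a GraphQL argument such as $min_age becomes the SQLAlchemy bind parameter min_age, to be supplied at execution time. A brief sketch of the same convention, assuming SQLAlchemy is available (the variable name is illustrative):

from sqlalchemy import bindparam

def variable_to_bindparam(variable_name):
    # Compiler variables are always '$'-prefixed; the prefix is dropped for SQL.
    if not variable_name.startswith(u'$'):
        raise AssertionError(u'Expected a "$"-prefixed variable, got: {}'.format(variable_name))
    return bindparam(variable_name[1:])

parameter = variable_to_bindparam('$min_age')
print(parameter.key)  # min_age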
kensho-technologies/graphql-compiler | graphql_compiler/compiler/emit_sql.py | _transform_local_field_to_expression | def _transform_local_field_to_expression(expression, node, context):
"""Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
column_name = expression.field_name
column = sql_context_helpers.get_column(column_name, node, context)
return column | python | def _transform_local_field_to_expression(expression, node, context):
"""Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression.
"""
column_name = expression.field_name
column = sql_context_helpers.get_column(column_name, node, context)
return column | [
"def",
"_transform_local_field_to_expression",
"(",
"expression",
",",
"node",
",",
"context",
")",
":",
"column_name",
"=",
"expression",
".",
"field_name",
"column",
"=",
"sql_context_helpers",
".",
"get_column",
"(",
"column_name",
",",
"node",
",",
"context",
... | Transform a LocalField compiler expression into its SQLAlchemy expression representation.
Args:
expression: expression, LocalField compiler expression.
node: SqlNode, the SqlNode the expression applies to.
context: CompilationContext, global compilation state and metadata.
Returns:
Expression, SQLAlchemy expression. | [
"Transform",
"a",
"LocalField",
"compiler",
"expression",
"into",
"its",
"SQLAlchemy",
"expression",
"representation",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L273-L286 | train | 227,925 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_common.py | lower_context_field_existence | def lower_context_field_existence(ir_blocks, query_metadata_table):
"""Lower ContextFieldExistence expressions into lower-level expressions."""
def regular_visitor_fn(expression):
"""Expression visitor function that rewrites ContextFieldExistence expressions."""
if not isinstance(expression, ContextFieldExistence):
return expression
location_type = query_metadata_table.get_location_info(expression.location).type
# Since this function is only used in blocks that aren't ConstructResult,
# the location check is performed using a regular ContextField expression.
return BinaryComposition(
u'!=',
ContextField(expression.location, location_type),
NullLiteral)
def construct_result_visitor_fn(expression):
"""Expression visitor function that rewrites ContextFieldExistence expressions."""
if not isinstance(expression, ContextFieldExistence):
return expression
location_type = query_metadata_table.get_location_info(expression.location).type
# Since this function is only used in ConstructResult blocks,
# the location check is performed using the special OutputContextVertex expression.
return BinaryComposition(
u'!=',
OutputContextVertex(expression.location, location_type),
NullLiteral)
new_ir_blocks = []
for block in ir_blocks:
new_block = None
if isinstance(block, ConstructResult):
new_block = block.visit_and_update_expressions(construct_result_visitor_fn)
else:
new_block = block.visit_and_update_expressions(regular_visitor_fn)
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def lower_context_field_existence(ir_blocks, query_metadata_table):
"""Lower ContextFieldExistence expressions into lower-level expressions."""
def regular_visitor_fn(expression):
"""Expression visitor function that rewrites ContextFieldExistence expressions."""
if not isinstance(expression, ContextFieldExistence):
return expression
location_type = query_metadata_table.get_location_info(expression.location).type
# Since this function is only used in blocks that aren't ConstructResult,
# the location check is performed using a regular ContextField expression.
return BinaryComposition(
u'!=',
ContextField(expression.location, location_type),
NullLiteral)
def construct_result_visitor_fn(expression):
"""Expression visitor function that rewrites ContextFieldExistence expressions."""
if not isinstance(expression, ContextFieldExistence):
return expression
location_type = query_metadata_table.get_location_info(expression.location).type
# Since this function is only used in ConstructResult blocks,
# the location check is performed using the special OutputContextVertex expression.
return BinaryComposition(
u'!=',
OutputContextVertex(expression.location, location_type),
NullLiteral)
new_ir_blocks = []
for block in ir_blocks:
new_block = None
if isinstance(block, ConstructResult):
new_block = block.visit_and_update_expressions(construct_result_visitor_fn)
else:
new_block = block.visit_and_update_expressions(regular_visitor_fn)
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"lower_context_field_existence",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"def",
"regular_visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Expression visitor function that rewrites ContextFieldExistence expressions.\"\"\"",
"if",
"not",
"isinstance",
"(",
... | Lower ContextFieldExistence expressions into lower-level expressions. | [
"Lower",
"ContextFieldExistence",
"expressions",
"into",
"lower",
"-",
"level",
"expressions",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L56-L95 | train | 227,926 |
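Both visitor functions above implement the same rewrite: an existence check on an optional location becomes an explicit "location is not null" comparison, with the expression class chosen by whether the surrounding block is a ConstructResult. A simplified standalone sketch of the visit-and-rewrite idea, using toy classes that merely stand in for the compiler's IR:

class ExistenceCheck(object):
    def __init__(self, location):
        self.location = location

class NotNullComparison(object):
    def __init__(self, location):
        self.location = location
    def __repr__(self):
        return '({} != null)'.format(self.location)

def rewrite_existence_checks(expression):
    # Replace existence checks with explicit null comparisons; leave everything else untouched.
    if isinstance(expression, ExistenceCheck):
        return NotNullComparison(expression.location)
    return expression

expressions_in_block = [ExistenceCheck('Animal__out_Animal_FedAt__1'), 'unrelated expression']
print([rewrite_existence_checks(e) for e in expressions_in_block])
# [(Animal__out_Animal_FedAt__1 != null), 'unrelated expression']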
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_common.py | optimize_boolean_expression_comparisons | def optimize_boolean_expression_comparisons(ir_blocks):
"""Optimize comparisons of a boolean binary comparison expression against a boolean literal.
Rewriting example:
BinaryComposition(
'=',
BinaryComposition('!=', something, NullLiteral)
False)
The above is rewritten into:
BinaryComposition('=', something, NullLiteral)
Args:
ir_blocks: list of basic block objects
Returns:
a new list of basic block objects, with the optimization applied
"""
operator_inverses = {
u'=': u'!=',
u'!=': u'=',
}
def visitor_fn(expression):
"""Expression visitor function that performs the above rewriting."""
if not isinstance(expression, BinaryComposition):
return expression
left_is_binary_composition = isinstance(expression.left, BinaryComposition)
right_is_binary_composition = isinstance(expression.right, BinaryComposition)
if not left_is_binary_composition and not right_is_binary_composition:
# Nothing to rewrite, return the expression as-is.
return expression
identity_literal = None # The boolean literal for which we just use the inner expression.
inverse_literal = None # The boolean literal for which we negate the inner expression.
if expression.operator == u'=':
identity_literal = TrueLiteral
inverse_literal = FalseLiteral
elif expression.operator == u'!=':
identity_literal = FalseLiteral
inverse_literal = TrueLiteral
else:
return expression
expression_to_rewrite = None
if expression.left == identity_literal and right_is_binary_composition:
return expression.right
elif expression.right == identity_literal and left_is_binary_composition:
return expression.left
elif expression.left == inverse_literal and right_is_binary_composition:
expression_to_rewrite = expression.right
elif expression.right == inverse_literal and left_is_binary_composition:
expression_to_rewrite = expression.left
if expression_to_rewrite is None:
# We couldn't find anything to rewrite, return the expression as-is.
return expression
elif expression_to_rewrite.operator not in operator_inverses:
# We can't rewrite the inner expression since we don't know its inverse operator.
return expression
else:
return BinaryComposition(
operator_inverses[expression_to_rewrite.operator],
expression_to_rewrite.left,
expression_to_rewrite.right)
new_ir_blocks = []
for block in ir_blocks:
new_block = block.visit_and_update_expressions(visitor_fn)
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def optimize_boolean_expression_comparisons(ir_blocks):
"""Optimize comparisons of a boolean binary comparison expression against a boolean literal.
Rewriting example:
BinaryComposition(
'=',
BinaryComposition('!=', something, NullLiteral)
False)
The above is rewritten into:
BinaryComposition('=', something, NullLiteral)
Args:
ir_blocks: list of basic block objects
Returns:
a new list of basic block objects, with the optimization applied
"""
operator_inverses = {
u'=': u'!=',
u'!=': u'=',
}
def visitor_fn(expression):
"""Expression visitor function that performs the above rewriting."""
if not isinstance(expression, BinaryComposition):
return expression
left_is_binary_composition = isinstance(expression.left, BinaryComposition)
right_is_binary_composition = isinstance(expression.right, BinaryComposition)
if not left_is_binary_composition and not right_is_binary_composition:
# Nothing to rewrite, return the expression as-is.
return expression
identity_literal = None # The boolean literal for which we just use the inner expression.
inverse_literal = None # The boolean literal for which we negate the inner expression.
if expression.operator == u'=':
identity_literal = TrueLiteral
inverse_literal = FalseLiteral
elif expression.operator == u'!=':
identity_literal = FalseLiteral
inverse_literal = TrueLiteral
else:
return expression
expression_to_rewrite = None
if expression.left == identity_literal and right_is_binary_composition:
return expression.right
elif expression.right == identity_literal and left_is_binary_composition:
return expression.left
elif expression.left == inverse_literal and right_is_binary_composition:
expression_to_rewrite = expression.right
elif expression.right == inverse_literal and left_is_binary_composition:
expression_to_rewrite = expression.left
if expression_to_rewrite is None:
# We couldn't find anything to rewrite, return the expression as-is.
return expression
elif expression_to_rewrite.operator not in operator_inverses:
# We can't rewrite the inner expression since we don't know its inverse operator.
return expression
else:
return BinaryComposition(
operator_inverses[expression_to_rewrite.operator],
expression_to_rewrite.left,
expression_to_rewrite.right)
new_ir_blocks = []
for block in ir_blocks:
new_block = block.visit_and_update_expressions(visitor_fn)
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"optimize_boolean_expression_comparisons",
"(",
"ir_blocks",
")",
":",
"operator_inverses",
"=",
"{",
"u'='",
":",
"u'!='",
",",
"u'!='",
":",
"u'='",
",",
"}",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Expression visitor function that performs t... | Optimize comparisons of a boolean binary comparison expression against a boolean literal.
Rewriting example:
BinaryComposition(
'=',
BinaryComposition('!=', something, NullLiteral)
False)
The above is rewritten into:
BinaryComposition('=', something, NullLiteral)
Args:
ir_blocks: list of basic block objects
Returns:
a new list of basic block objects, with the optimization applied | [
"Optimize",
"comparisons",
"of",
"a",
"boolean",
"binary",
"comparison",
"expression",
"against",
"a",
"boolean",
"literal",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L98-L171 | train | 227,927 |
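In effect, comparing an inner comparison against True drops the outer comparison entirely, while comparing it against False flips the inner operator. A tiny worked sketch of the same rule over (operator, left, right) tuples instead of BinaryComposition objects (purely illustrative):

OPERATOR_INVERSES = {'=': '!=', '!=': '='}

def simplify_boolean_comparison(expression):
    # expression is an (operator, left, right) tuple; bools stand in for boolean literals.
    operator, left, right = expression
    if operator not in OPERATOR_INVERSES:
        return expression
    inner, literal = (left, right) if isinstance(right, bool) else (right, left)
    if not isinstance(literal, bool) or not isinstance(inner, tuple):
        return expression
    inner_operator, inner_left, inner_right = inner
    if inner_operator not in OPERATOR_INVERSES:
        return expression
    # For '=', comparing against True keeps the inner expression; against False inverts it.
    # For '!=', the roles of True and False are swapped.
    keep_inner_as_is = literal if operator == '=' else not literal
    if keep_inner_as_is:
        return inner
    return (OPERATOR_INVERSES[inner_operator], inner_left, inner_right)

print(simplify_boolean_comparison(('=', ('!=', 'x', None), False)))  # ('=', 'x', None)
print(simplify_boolean_comparison(('=', ('!=', 'x', None), True)))   # ('!=', 'x', None)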
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_common.py | extract_simple_optional_location_info | def extract_simple_optional_location_info(
ir_blocks, complex_optional_roots, location_to_optional_roots):
"""Construct a map from simple optional locations to their inner location and traversed edge.
Args:
ir_blocks: list of IR blocks to extract optional data from
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
dict mapping from simple_optional_root_location -> dict containing keys
            - 'inner_location_name': Location object corresponding to the unique MarkLocation present
within a simple optional (one that does not expand vertex fields)
scope
- 'edge_field': string representing the optional edge being traversed
where simple_optional_root_to_inner_location is the location preceding the @optional scope
"""
# Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots)
# We filter out the ones that are also present in complex_optional_roots.
location_to_preceding_optional_root_iteritems = six.iteritems({
location: optional_root_locations_stack[-1]
for location, optional_root_locations_stack in six.iteritems(location_to_optional_roots)
})
simple_optional_root_to_inner_location = {
optional_root_location: inner_location
for inner_location, optional_root_location in location_to_preceding_optional_root_iteritems
if optional_root_location not in complex_optional_roots
}
simple_optional_root_locations = set(simple_optional_root_to_inner_location.keys())
# Blocks within folded scopes should not be taken into account in this function.
_, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
simple_optional_root_info = {}
preceding_location = None
for current_block in non_folded_ir_blocks:
if isinstance(current_block, MarkLocation):
preceding_location = current_block.location
elif isinstance(current_block, Traverse) and current_block.optional:
if preceding_location in simple_optional_root_locations:
# The current optional Traverse is "simple"
# i.e. it does not contain any Traverses within.
inner_location = simple_optional_root_to_inner_location[preceding_location]
inner_location_name, _ = inner_location.get_location_name()
simple_optional_info_dict = {
'inner_location_name': inner_location_name,
'edge_field': current_block.get_field_name(),
}
simple_optional_root_info[preceding_location] = simple_optional_info_dict
return simple_optional_root_info | python | def extract_simple_optional_location_info(
ir_blocks, complex_optional_roots, location_to_optional_roots):
"""Construct a map from simple optional locations to their inner location and traversed edge.
Args:
ir_blocks: list of IR blocks to extract optional data from
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
dict mapping from simple_optional_root_location -> dict containing keys
            - 'inner_location_name': Location object corresponding to the unique MarkLocation present
within a simple optional (one that does not expand vertex fields)
scope
- 'edge_field': string representing the optional edge being traversed
where simple_optional_root_to_inner_location is the location preceding the @optional scope
"""
# Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots)
# We filter out the ones that are also present in complex_optional_roots.
location_to_preceding_optional_root_iteritems = six.iteritems({
location: optional_root_locations_stack[-1]
for location, optional_root_locations_stack in six.iteritems(location_to_optional_roots)
})
simple_optional_root_to_inner_location = {
optional_root_location: inner_location
for inner_location, optional_root_location in location_to_preceding_optional_root_iteritems
if optional_root_location not in complex_optional_roots
}
simple_optional_root_locations = set(simple_optional_root_to_inner_location.keys())
# Blocks within folded scopes should not be taken into account in this function.
_, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
simple_optional_root_info = {}
preceding_location = None
for current_block in non_folded_ir_blocks:
if isinstance(current_block, MarkLocation):
preceding_location = current_block.location
elif isinstance(current_block, Traverse) and current_block.optional:
if preceding_location in simple_optional_root_locations:
# The current optional Traverse is "simple"
# i.e. it does not contain any Traverses within.
inner_location = simple_optional_root_to_inner_location[preceding_location]
inner_location_name, _ = inner_location.get_location_name()
simple_optional_info_dict = {
'inner_location_name': inner_location_name,
'edge_field': current_block.get_field_name(),
}
simple_optional_root_info[preceding_location] = simple_optional_info_dict
return simple_optional_root_info | [
"def",
"extract_simple_optional_location_info",
"(",
"ir_blocks",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"# Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots)",
"# We filter out the ones that are also present in c... | Construct a map from simple optional locations to their inner location and traversed edge.
Args:
ir_blocks: list of IR blocks to extract optional data from
        complex_optional_roots: list of @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
dict mapping from simple_optional_root_location -> dict containing keys
            - 'inner_location_name': Location object corresponding to the unique MarkLocation present
within a simple optional (one that does not expand vertex fields)
scope
- 'edge_field': string representing the optional edge being traversed
where simple_optional_root_to_inner_location is the location preceding the @optional scope | [
"Construct",
"a",
"map",
"from",
"simple",
"optional",
"locations",
"to",
"their",
"inner",
"location",
"and",
"traversed",
"edge",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L283-L337 | train | 227,928 |
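The single pass above only needs two pieces of state: the most recently marked location, and the set of optional root locations already known to be "simple". A condensed sketch of that bookkeeping over toy block objects (these classes are stand-ins, not the compiler's IR):

class MarkLocation(object):
    def __init__(self, location):
        self.location = location

class OptionalTraverse(object):
    def __init__(self, edge_field):
        self.edge_field = edge_field

def collect_simple_optional_info(blocks, simple_root_locations, root_to_inner_name):
    info = {}
    preceding_location = None
    for block in blocks:
        if isinstance(block, MarkLocation):
            preceding_location = block.location
        elif isinstance(block, OptionalTraverse) and preceding_location in simple_root_locations:
            # Record the inner location and the optional edge being traversed.
            info[preceding_location] = {
                'inner_location_name': root_to_inner_name[preceding_location],
                'edge_field': block.edge_field,
            }
    return info

blocks = [MarkLocation('Animal__1'), OptionalTraverse('out_Animal_FedAt')]
print(collect_simple_optional_info(blocks, {'Animal__1'}, {'Animal__1': 'Animal__FedAt__1'}))
# {'Animal__1': {'inner_location_name': 'Animal__FedAt__1', 'edge_field': 'out_Animal_FedAt'}}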
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_common.py | remove_end_optionals | def remove_end_optionals(ir_blocks):
"""Return a list of IR blocks as a copy of the original, with EndOptional blocks removed."""
new_ir_blocks = []
for block in ir_blocks:
if not isinstance(block, EndOptional):
new_ir_blocks.append(block)
return new_ir_blocks | python | def remove_end_optionals(ir_blocks):
"""Return a list of IR blocks as a copy of the original, with EndOptional blocks removed."""
new_ir_blocks = []
for block in ir_blocks:
if not isinstance(block, EndOptional):
new_ir_blocks.append(block)
return new_ir_blocks | [
"def",
"remove_end_optionals",
"(",
"ir_blocks",
")",
":",
"new_ir_blocks",
"=",
"[",
"]",
"for",
"block",
"in",
"ir_blocks",
":",
"if",
"not",
"isinstance",
"(",
"block",
",",
"EndOptional",
")",
":",
"new_ir_blocks",
".",
"append",
"(",
"block",
")",
"re... | Return a list of IR blocks as a copy of the original, with EndOptional blocks removed. | [
"Return",
"a",
"list",
"of",
"IR",
"blocks",
"as",
"a",
"copy",
"of",
"the",
"original",
"with",
"EndOptional",
"blocks",
"removed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L340-L346 | train | 227,929 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_common.py | OutputContextVertex.validate | def validate(self):
"""Validate that the OutputContextVertex is correctly representable."""
super(OutputContextVertex, self).validate()
if self.location.field is not None:
raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location)) | python | def validate(self):
"""Validate that the OutputContextVertex is correctly representable."""
super(OutputContextVertex, self).validate()
if self.location.field is not None:
raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location)) | [
"def",
"validate",
"(",
"self",
")",
":",
"super",
"(",
"OutputContextVertex",
",",
"self",
")",
".",
"validate",
"(",
")",
"if",
"self",
".",
"location",
".",
"field",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"u'Expected location at a vertex, b... | Validate that the OutputContextVertex is correctly representable. | [
"Validate",
"that",
"the",
"OutputContextVertex",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L35-L40 | train | 227,930 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | lower_has_substring_binary_compositions | def lower_has_substring_binary_compositions(ir_blocks):
"""Lower Filter blocks that use the "has_substring" operation into MATCH-representable form."""
def visitor_fn(expression):
"""Rewrite BinaryComposition expressions with "has_substring" into representable form."""
# The implementation of "has_substring" must use the LIKE operator in MATCH, and must
# prepend and append "%" symbols to the substring being matched.
# We transform any structures that resemble the following:
# BinaryComposition(u'has_substring', X, Y)
# into the following:
# BinaryComposition(
# u'LIKE',
# X,
# BinaryComposition(
# u'+',
# Literal("%"),
# BinaryComposition(
# u'+',
# Y,
# Literal("%")
# )
# )
# )
if not isinstance(expression, BinaryComposition) or expression.operator != u'has_substring':
return expression
return BinaryComposition(
u'LIKE',
expression.left,
BinaryComposition(
u'+',
Literal('%'),
BinaryComposition(
u'+',
expression.right,
Literal('%')
)
)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | python | def lower_has_substring_binary_compositions(ir_blocks):
"""Lower Filter blocks that use the "has_substring" operation into MATCH-representable form."""
def visitor_fn(expression):
"""Rewrite BinaryComposition expressions with "has_substring" into representable form."""
# The implementation of "has_substring" must use the LIKE operator in MATCH, and must
# prepend and append "%" symbols to the substring being matched.
# We transform any structures that resemble the following:
# BinaryComposition(u'has_substring', X, Y)
# into the following:
# BinaryComposition(
# u'LIKE',
# X,
# BinaryComposition(
# u'+',
# Literal("%"),
# BinaryComposition(
# u'+',
# Y,
# Literal("%")
# )
# )
# )
if not isinstance(expression, BinaryComposition) or expression.operator != u'has_substring':
return expression
return BinaryComposition(
u'LIKE',
expression.left,
BinaryComposition(
u'+',
Literal('%'),
BinaryComposition(
u'+',
expression.right,
Literal('%')
)
)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | [
"def",
"lower_has_substring_binary_compositions",
"(",
"ir_blocks",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Rewrite BinaryComposition expressions with \"has_substring\" into representable form.\"\"\"",
"# The implementation of \"has_substring\" must use the LIKE ... | Lower Filter blocks that use the "has_substring" operation into MATCH-representable form. | [
"Lower",
"Filter",
"blocks",
"that",
"use",
"the",
"has_substring",
"operation",
"into",
"MATCH",
"-",
"representable",
"form",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L96-L140 | train | 227,931 |
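The nested BinaryComposition above ultimately produces a clause of the form X LIKE ('%' + Y + '%'), which is how substring matching is expressed in the generated MATCH query. The string shape is easy to see with plain formatting (illustrative only; the real pass builds expression objects, not strings):

def has_substring_as_like_clause(field_name, parameter_name):
    # The substring parameter is concatenated between two '%' wildcards,
    # so the clause matches any value containing the substring.
    return u"{} LIKE ('%' + {} + '%')".format(field_name, parameter_name)

print(has_substring_as_like_clause('name', '{wanted_substring}'))
# name LIKE ('%' + {wanted_substring} + '%')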
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | truncate_repeated_single_step_traversals | def truncate_repeated_single_step_traversals(match_query):
"""Truncate one-step traversals that overlap a previous traversal location."""
# Such traversals frequently happen as side-effects of the lowering process
# of Backtrack blocks, and needlessly complicate the executed queries.
new_match_traversals = []
visited_locations = set()
for current_match_traversal in match_query.match_traversals:
ignore_traversal = False
if len(current_match_traversal) == 1:
# Single-step traversal detected. If its location was visited already, ignore it.
single_step = current_match_traversal[0]
if single_step.as_block is None:
raise AssertionError(u'Unexpectedly found a single-step traversal with no as_block:'
u' {} {}'.format(current_match_traversal, match_query))
if single_step.as_block.location in visited_locations:
# This location was visited before, omit the traversal.
ignore_traversal = True
if not ignore_traversal:
# For each step in this traversal, mark its location as visited.
for step in current_match_traversal:
if step.as_block is not None:
visited_locations.add(step.as_block.location)
new_match_traversals.append(current_match_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def truncate_repeated_single_step_traversals(match_query):
"""Truncate one-step traversals that overlap a previous traversal location."""
# Such traversals frequently happen as side-effects of the lowering process
# of Backtrack blocks, and needlessly complicate the executed queries.
new_match_traversals = []
visited_locations = set()
for current_match_traversal in match_query.match_traversals:
ignore_traversal = False
if len(current_match_traversal) == 1:
# Single-step traversal detected. If its location was visited already, ignore it.
single_step = current_match_traversal[0]
if single_step.as_block is None:
raise AssertionError(u'Unexpectedly found a single-step traversal with no as_block:'
u' {} {}'.format(current_match_traversal, match_query))
if single_step.as_block.location in visited_locations:
# This location was visited before, omit the traversal.
ignore_traversal = True
if not ignore_traversal:
# For each step in this traversal, mark its location as visited.
for step in current_match_traversal:
if step.as_block is not None:
visited_locations.add(step.as_block.location)
new_match_traversals.append(current_match_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"truncate_repeated_single_step_traversals",
"(",
"match_query",
")",
":",
"# Such traversals frequently happen as side-effects of the lowering process",
"# of Backtrack blocks, and needlessly complicate the executed queries.",
"new_match_traversals",
"=",
"[",
"]",
"visited_locations"... | Truncate one-step traversals that overlap a previous traversal location. | [
"Truncate",
"one",
"-",
"step",
"traversals",
"that",
"overlap",
"a",
"previous",
"traversal",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L143-L171 | train | 227,932 |
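The truncation is essentially deduplication: a single-step traversal whose only location was already produced by an earlier traversal adds nothing to the query. A reduced sketch over lists of location names instead of MatchStep objects (toy data):

def truncate_single_step_duplicates(traversals):
    kept_traversals = []
    visited_locations = set()
    for traversal in traversals:
        if len(traversal) == 1 and traversal[0] in visited_locations:
            # A one-step traversal over an already-visited location is redundant.
            continue
        visited_locations.update(traversal)
        kept_traversals.append(traversal)
    return kept_traversals

traversals = [['Animal__1', 'Animal__out_Animal_ParentOf__1'], ['Animal__1']]
print(truncate_single_step_duplicates(traversals))
# [['Animal__1', 'Animal__out_Animal_ParentOf__1']]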
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | _flatten_location_translations | def _flatten_location_translations(location_translations):
"""If location A translates to B, and B to C, then make A translate directly to C.
Args:
location_translations: dict of Location -> Location, where the key translates to the value.
Mutated in place for efficiency and simplicity of implementation.
"""
sources_to_process = set(six.iterkeys(location_translations))
def _update_translation(source):
"""Return the proper (fully-flattened) translation for the given location."""
destination = location_translations[source]
if destination not in location_translations:
# "destination" cannot be translated, no further flattening required.
return destination
else:
# "destination" can itself be translated -- do so,
# and then flatten "source" to the final translation as well.
sources_to_process.discard(destination)
final_destination = _update_translation(destination)
location_translations[source] = final_destination
return final_destination
while sources_to_process:
_update_translation(sources_to_process.pop()) | python | def _flatten_location_translations(location_translations):
"""If location A translates to B, and B to C, then make A translate directly to C.
Args:
location_translations: dict of Location -> Location, where the key translates to the value.
Mutated in place for efficiency and simplicity of implementation.
"""
sources_to_process = set(six.iterkeys(location_translations))
def _update_translation(source):
"""Return the proper (fully-flattened) translation for the given location."""
destination = location_translations[source]
if destination not in location_translations:
# "destination" cannot be translated, no further flattening required.
return destination
else:
# "destination" can itself be translated -- do so,
# and then flatten "source" to the final translation as well.
sources_to_process.discard(destination)
final_destination = _update_translation(destination)
location_translations[source] = final_destination
return final_destination
while sources_to_process:
_update_translation(sources_to_process.pop()) | [
"def",
"_flatten_location_translations",
"(",
"location_translations",
")",
":",
"sources_to_process",
"=",
"set",
"(",
"six",
".",
"iterkeys",
"(",
"location_translations",
")",
")",
"def",
"_update_translation",
"(",
"source",
")",
":",
"\"\"\"Return the proper (fully... | If location A translates to B, and B to C, then make A translate directly to C.
Args:
location_translations: dict of Location -> Location, where the key translates to the value.
Mutated in place for efficiency and simplicity of implementation. | [
"If",
"location",
"A",
"translates",
"to",
"B",
"and",
"B",
"to",
"C",
"then",
"make",
"A",
"translate",
"directly",
"to",
"C",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L224-L248 | train | 227,933 |
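Flattening chained translations works like path compression: follow A -> B -> C until a location with no further translation is found, then point every location visited along the way directly at that terminal location. A self-contained sketch with string keys standing in for Location objects:

def flatten_translations(translations):
    # Mutates the dict so that every key maps straight to its final destination.
    pending_sources = set(translations)

    def resolve(source):
        destination = translations[source]
        if destination not in translations:
            return destination
        pending_sources.discard(destination)
        final_destination = resolve(destination)
        translations[source] = final_destination
        return final_destination

    while pending_sources:
        resolve(pending_sources.pop())
    return translations

print(flatten_translations({'A': 'B', 'B': 'C', 'C': 'D'}))
# {'A': 'D', 'B': 'D', 'C': 'D'}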
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | _translate_equivalent_locations | def _translate_equivalent_locations(match_query, location_translations):
"""Translate Location objects into their equivalent locations, based on the given dict."""
new_match_traversals = []
def visitor_fn(expression):
"""Expression visitor function used to rewrite expressions with updated Location data."""
if isinstance(expression, (ContextField, GlobalContextField)):
old_location = expression.location.at_vertex()
new_location = location_translations.get(old_location, old_location)
if expression.location.field is not None:
new_location = new_location.navigate_to_field(expression.location.field)
# The Expression could be one of many types, including:
# - ContextField
# - GlobalContextField
# We determine its exact class to make sure we return an object of the same class
# as the expression being replaced.
expression_cls = type(expression)
return expression_cls(new_location, expression.field_type)
elif isinstance(expression, ContextFieldExistence):
old_location = expression.location
new_location = location_translations.get(old_location, old_location)
return ContextFieldExistence(new_location)
elif isinstance(expression, FoldedContextField):
# Update the Location within FoldedContextField
old_location = expression.fold_scope_location.base_location
new_location = location_translations.get(old_location, old_location)
fold_path = expression.fold_scope_location.fold_path
fold_field = expression.fold_scope_location.field
new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)
field_type = expression.field_type
return FoldedContextField(new_fold_scope_location, field_type)
else:
return expression
# Rewrite the Locations in the steps of each MATCH traversal.
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
new_step = step
# If the root_block is a Backtrack, translate its Location if necessary.
if isinstance(new_step.root_block, Backtrack):
old_location = new_step.root_block.location
if old_location in location_translations:
new_location = location_translations[old_location]
new_step = new_step._replace(root_block=Backtrack(new_location))
# If the as_block exists, translate its Location if necessary.
if new_step.as_block is not None:
old_location = new_step.as_block.location
if old_location in location_translations:
new_location = location_translations[old_location]
new_step = new_step._replace(as_block=MarkLocation(new_location))
# If the where_block exists, update any Location objects in its predicate.
if new_step.where_block is not None:
new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn)
new_step = new_step._replace(where_block=new_where_block)
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
new_folds = {}
# Update the Location within each FoldScopeLocation
for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds):
fold_path = fold_scope_location.fold_path
fold_field = fold_scope_location.field
old_location = fold_scope_location.base_location
new_location = location_translations.get(old_location, old_location)
new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)
new_folds[new_fold_scope_location] = fold_ir_blocks
# Rewrite the Locations in the ConstructResult output block.
new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn)
# Rewrite the Locations in the global where block.
new_where_block = None
if match_query.where_block is not None:
new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn)
return match_query._replace(match_traversals=new_match_traversals, folds=new_folds,
output_block=new_output_block, where_block=new_where_block) | python | def _translate_equivalent_locations(match_query, location_translations):
"""Translate Location objects into their equivalent locations, based on the given dict."""
new_match_traversals = []
def visitor_fn(expression):
"""Expression visitor function used to rewrite expressions with updated Location data."""
if isinstance(expression, (ContextField, GlobalContextField)):
old_location = expression.location.at_vertex()
new_location = location_translations.get(old_location, old_location)
if expression.location.field is not None:
new_location = new_location.navigate_to_field(expression.location.field)
# The Expression could be one of many types, including:
# - ContextField
# - GlobalContextField
# We determine its exact class to make sure we return an object of the same class
# as the expression being replaced.
expression_cls = type(expression)
return expression_cls(new_location, expression.field_type)
elif isinstance(expression, ContextFieldExistence):
old_location = expression.location
new_location = location_translations.get(old_location, old_location)
return ContextFieldExistence(new_location)
elif isinstance(expression, FoldedContextField):
# Update the Location within FoldedContextField
old_location = expression.fold_scope_location.base_location
new_location = location_translations.get(old_location, old_location)
fold_path = expression.fold_scope_location.fold_path
fold_field = expression.fold_scope_location.field
new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)
field_type = expression.field_type
return FoldedContextField(new_fold_scope_location, field_type)
else:
return expression
# Rewrite the Locations in the steps of each MATCH traversal.
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
new_step = step
# If the root_block is a Backtrack, translate its Location if necessary.
if isinstance(new_step.root_block, Backtrack):
old_location = new_step.root_block.location
if old_location in location_translations:
new_location = location_translations[old_location]
new_step = new_step._replace(root_block=Backtrack(new_location))
# If the as_block exists, translate its Location if necessary.
if new_step.as_block is not None:
old_location = new_step.as_block.location
if old_location in location_translations:
new_location = location_translations[old_location]
new_step = new_step._replace(as_block=MarkLocation(new_location))
# If the where_block exists, update any Location objects in its predicate.
if new_step.where_block is not None:
new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn)
new_step = new_step._replace(where_block=new_where_block)
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
new_folds = {}
# Update the Location within each FoldScopeLocation
for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds):
fold_path = fold_scope_location.fold_path
fold_field = fold_scope_location.field
old_location = fold_scope_location.base_location
new_location = location_translations.get(old_location, old_location)
new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field)
new_folds[new_fold_scope_location] = fold_ir_blocks
# Rewrite the Locations in the ConstructResult output block.
new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn)
# Rewrite the Locations in the global where block.
new_where_block = None
if match_query.where_block is not None:
new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn)
return match_query._replace(match_traversals=new_match_traversals, folds=new_folds,
output_block=new_output_block, where_block=new_where_block) | [
"def",
"_translate_equivalent_locations",
"(",
"match_query",
",",
"location_translations",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Expression visitor function used to rewrite expressions with updated Location data... | Translate Location objects into their equivalent locations, based on the given dict. | [
"Translate",
"Location",
"objects",
"into",
"their",
"equivalent",
"locations",
"based",
"on",
"the",
"given",
"dict",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L251-L338 | train | 227,934 |
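Every rewrite in the function above reduces to the same lookup: replace a location with its translation when one is registered, and leave it alone otherwise, which is exactly what the repeated .get(old_location, old_location) idiom does. A minimal illustration with string locations (toy data):

location_translations = {'Animal__out_Animal_ParentOf__1': 'Animal__2'}

def translate(location):
    # Untranslated locations fall through unchanged.
    return location_translations.get(location, location)

print(translate('Animal__out_Animal_ParentOf__1'))  # Animal__2
print(translate('Animal__1'))                       # Animal__1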
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | lower_folded_coerce_types_into_filter_blocks | def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks):
"""Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks."""
new_folded_ir_blocks = []
for block in folded_ir_blocks:
if isinstance(block, CoerceType):
new_block = convert_coerce_type_to_instanceof_filter(block)
else:
new_block = block
new_folded_ir_blocks.append(new_block)
return new_folded_ir_blocks | python | def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks):
"""Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks."""
new_folded_ir_blocks = []
for block in folded_ir_blocks:
if isinstance(block, CoerceType):
new_block = convert_coerce_type_to_instanceof_filter(block)
else:
new_block = block
new_folded_ir_blocks.append(new_block)
return new_folded_ir_blocks | [
"def",
"lower_folded_coerce_types_into_filter_blocks",
"(",
"folded_ir_blocks",
")",
":",
"new_folded_ir_blocks",
"=",
"[",
"]",
"for",
"block",
"in",
"folded_ir_blocks",
":",
"if",
"isinstance",
"(",
"block",
",",
"CoerceType",
")",
":",
"new_block",
"=",
"convert_... | Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks. | [
"Lower",
"CoerceType",
"blocks",
"into",
"INSTANCEOF",
"Filter",
"blocks",
".",
"Indended",
"for",
"folded",
"IR",
"blocks",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L341-L352 | train | 227,935 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | remove_backtrack_blocks_from_fold | def remove_backtrack_blocks_from_fold(folded_ir_blocks):
"""Return a list of IR blocks with all Backtrack blocks removed."""
new_folded_ir_blocks = []
for block in folded_ir_blocks:
if not isinstance(block, Backtrack):
new_folded_ir_blocks.append(block)
return new_folded_ir_blocks | python | def remove_backtrack_blocks_from_fold(folded_ir_blocks):
"""Return a list of IR blocks with all Backtrack blocks removed."""
new_folded_ir_blocks = []
for block in folded_ir_blocks:
if not isinstance(block, Backtrack):
new_folded_ir_blocks.append(block)
return new_folded_ir_blocks | [
"def",
"remove_backtrack_blocks_from_fold",
"(",
"folded_ir_blocks",
")",
":",
"new_folded_ir_blocks",
"=",
"[",
"]",
"for",
"block",
"in",
"folded_ir_blocks",
":",
"if",
"not",
"isinstance",
"(",
"block",
",",
"Backtrack",
")",
":",
"new_folded_ir_blocks",
".",
"... | Return a list of IR blocks with all Backtrack blocks removed. | [
"Return",
"a",
"list",
"of",
"IR",
"blocks",
"with",
"all",
"Backtrack",
"blocks",
"removed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L355-L361 | train | 227,936 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/ir_lowering.py | truncate_repeated_single_step_traversals_in_sub_queries | def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query):
"""For each sub-query, remove one-step traversals that overlap a previous traversal location."""
lowered_match_queries = []
for match_query in compound_match_query.match_queries:
new_match_query = truncate_repeated_single_step_traversals(match_query)
lowered_match_queries.append(new_match_query)
return compound_match_query._replace(match_queries=lowered_match_queries) | python | def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query):
"""For each sub-query, remove one-step traversals that overlap a previous traversal location."""
lowered_match_queries = []
for match_query in compound_match_query.match_queries:
new_match_query = truncate_repeated_single_step_traversals(match_query)
lowered_match_queries.append(new_match_query)
return compound_match_query._replace(match_queries=lowered_match_queries) | [
"def",
"truncate_repeated_single_step_traversals_in_sub_queries",
"(",
"compound_match_query",
")",
":",
"lowered_match_queries",
"=",
"[",
"]",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"new_match_query",
"=",
"truncate_repeated_single_step... | For each sub-query, remove one-step traversals that overlap a previous traversal location. | [
"For",
"each",
"sub",
"-",
"query",
"remove",
"one",
"-",
"step",
"traversals",
"that",
"overlap",
"a",
"previous",
"traversal",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L364-L371 | train | 227,937 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _prune_traverse_using_omitted_locations | def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations,
complex_optional_roots, location_to_optional_roots):
"""Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
        complex_optional_roots: list of all @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed.
"""
new_match_traversal = []
for step in match_traversal:
new_step = step
if isinstance(step.root_block, Traverse) and step.root_block.optional:
current_location = step.as_block.location
optional_root_locations_stack = location_to_optional_roots.get(current_location, None)
optional_root_location = optional_root_locations_stack[-1]
if optional_root_location is None:
raise AssertionError(u'Found optional Traverse location {} that was not present '
u'in location_to_optional_roots dict: {}'
.format(current_location, location_to_optional_roots))
elif optional_root_location in omitted_locations:
                # Add filter to indicate that the omitted edge(s) should not exist
field_name = step.root_block.get_field_name()
new_predicate = filter_edge_field_non_existence(LocalField(field_name))
old_filter = new_match_traversal[-1].where_block
if old_filter is not None:
new_predicate = BinaryComposition(u'&&', old_filter.predicate, new_predicate)
new_match_step = new_match_traversal[-1]._replace(
where_block=Filter(new_predicate))
new_match_traversal[-1] = new_match_step
# Discard all steps following the omitted @optional traverse
new_step = None
elif optional_root_location in complex_optional_roots:
# Any non-omitted @optional traverse (that expands vertex fields)
# becomes a normal mandatory traverse (discard the optional flag).
new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name)
new_step = step._replace(root_block=new_root_block)
else:
# The current optional traverse is a "simple optional" (one that does not
# expand vertex fields). No further action is required since MATCH supports it.
pass
# If new_step was set to None,
# we have encountered a Traverse that is within an omitted location.
# We discard the remainder of the match traversal (everything following is also omitted).
if new_step is None:
break
else:
new_match_traversal.append(new_step)
return new_match_traversal | python | def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations,
complex_optional_roots, location_to_optional_roots):
"""Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
        complex_optional_roots: list of all @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed.
"""
new_match_traversal = []
for step in match_traversal:
new_step = step
if isinstance(step.root_block, Traverse) and step.root_block.optional:
current_location = step.as_block.location
optional_root_locations_stack = location_to_optional_roots.get(current_location, None)
optional_root_location = optional_root_locations_stack[-1]
if optional_root_location is None:
raise AssertionError(u'Found optional Traverse location {} that was not present '
u'in location_to_optional_roots dict: {}'
.format(current_location, location_to_optional_roots))
elif optional_root_location in omitted_locations:
# Add filter to indicate that the omitted edge(s) should not exist
field_name = step.root_block.get_field_name()
new_predicate = filter_edge_field_non_existence(LocalField(field_name))
old_filter = new_match_traversal[-1].where_block
if old_filter is not None:
new_predicate = BinaryComposition(u'&&', old_filter.predicate, new_predicate)
new_match_step = new_match_traversal[-1]._replace(
where_block=Filter(new_predicate))
new_match_traversal[-1] = new_match_step
# Discard all steps following the omitted @optional traverse
new_step = None
elif optional_root_location in complex_optional_roots:
# Any non-omitted @optional traverse (that expands vertex fields)
# becomes a normal mandatory traverse (discard the optional flag).
new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name)
new_step = step._replace(root_block=new_root_block)
else:
# The current optional traverse is a "simple optional" (one that does not
# expand vertex fields). No further action is required since MATCH supports it.
pass
# If new_step was set to None,
# we have encountered a Traverse that is within an omitted location.
# We discard the remainder of the match traversal (everything following is also omitted).
if new_step is None:
break
else:
new_match_traversal.append(new_step)
return new_match_traversal | [
"def",
"_prune_traverse_using_omitted_locations",
"(",
"match_traversal",
",",
"omitted_locations",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"new_match_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"match_traversal",
":",
"new_step",
"... | Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
complex_optional_roots: list of all @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed. | [
"Return",
"a",
"prefix",
"of",
"the",
"given",
"traverse",
"excluding",
"any",
"blocks",
"after",
"an",
"omitted",
"optional",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L18-L82 | train | 227,938 |
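The pruning step above keeps only the prefix of a traversal up to the first step whose optional root was omitted. Below is a minimal, self-contained sketch of that idea, using plain (location, optional_root) tuples as stand-ins for the compiler's MatchStep objects; the names and data are illustrative, not part of the library, and the real pass additionally attaches an edge-non-existence filter to the step preceding the omitted traverse, which is left out here.

    def prune_at_omitted_roots(steps, omitted_roots):
        """Keep steps up to, but not including, the first step inside an omitted optional scope."""
        kept = []
        for location, optional_root in steps:
            if optional_root is not None and optional_root in omitted_roots:
                # Everything after an omitted optional traverse is omitted as well.
                break
            kept.append((location, optional_root))
        return kept

    steps = [('Animal___1', None), ('Animal__out_Animal_ParentOf___1', 'Animal___1')]
    print(prune_at_omitted_roots(steps, {'Animal___1'}))  # [('Animal___1', None)]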
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | convert_optional_traversals_to_compound_match_query | def convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots):
"""Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed
"""
tree = construct_optional_traversal_tree(
complex_optional_roots, location_to_optional_roots)
rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()
omitted_location_subsets = [
set(complex_optional_roots) - set(subset)
for subset in rooted_optional_root_location_subsets
]
sorted_omitted_location_subsets = sorted(omitted_location_subsets)
compound_match_traversals = []
for omitted_locations in reversed(sorted_omitted_location_subsets):
new_match_traversals = []
for match_traversal in match_query.match_traversals:
location = match_traversal[0].as_block.location
optional_root_locations_stack = location_to_optional_roots.get(location, None)
if optional_root_locations_stack is not None:
optional_root_location = optional_root_locations_stack[-1]
else:
optional_root_location = None
if optional_root_location is None or optional_root_location not in omitted_locations:
new_match_traversal = _prune_traverse_using_omitted_locations(
match_traversal, set(omitted_locations),
complex_optional_roots, location_to_optional_roots)
new_match_traversals.append(new_match_traversal)
else:
# The root_block is within an omitted scope.
# Discard the entire match traversal (do not append to new_match_traversals)
pass
compound_match_traversals.append(new_match_traversals)
match_queries = [
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
for match_traversals in compound_match_traversals
]
return CompoundMatchQuery(match_queries=match_queries) | python | def convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots):
"""Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed
"""
tree = construct_optional_traversal_tree(
complex_optional_roots, location_to_optional_roots)
rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()
omitted_location_subsets = [
set(complex_optional_roots) - set(subset)
for subset in rooted_optional_root_location_subsets
]
sorted_omitted_location_subsets = sorted(omitted_location_subsets)
compound_match_traversals = []
for omitted_locations in reversed(sorted_omitted_location_subsets):
new_match_traversals = []
for match_traversal in match_query.match_traversals:
location = match_traversal[0].as_block.location
optional_root_locations_stack = location_to_optional_roots.get(location, None)
if optional_root_locations_stack is not None:
optional_root_location = optional_root_locations_stack[-1]
else:
optional_root_location = None
if optional_root_location is None or optional_root_location not in omitted_locations:
new_match_traversal = _prune_traverse_using_omitted_locations(
match_traversal, set(omitted_locations),
complex_optional_roots, location_to_optional_roots)
new_match_traversals.append(new_match_traversal)
else:
# The root_block is within an omitted scope.
# Discard the entire match traversal (do not append to new_match_traversals)
pass
compound_match_traversals.append(new_match_traversals)
match_queries = [
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
for match_traversals in compound_match_traversals
]
return CompoundMatchQuery(match_queries=match_queries) | [
"def",
"convert_optional_traversals_to_compound_match_query",
"(",
"match_query",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"tree",
"=",
"construct_optional_traversal_tree",
"(",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
"... | Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed | [
"Return",
"2^n",
"distinct",
"MatchQuery",
"objects",
"in",
"a",
"CompoundMatchQuery",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L85-L151 | train | 227,939 |
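The 2^n enumeration described above can be pictured with plain subset generation. The sketch below assumes independent optional roots purely for illustration; the real pass only enumerates rooted subtrees of the optional-traversal tree, so a nested optional is never followed without its parent.

    from itertools import combinations

    def all_subsets(items):
        """Return all 2^n subsets of items as sets."""
        items = list(items)
        return [set(combo)
                for size in range(len(items) + 1)
                for combo in combinations(items, size)]

    optional_roots = {'root_a', 'root_b'}
    followed_subsets = all_subsets(optional_roots)
    omitted_subsets = [optional_roots - followed for followed in followed_subsets]
    print(len(omitted_subsets))  # 4, i.e. 2^2 query variants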
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _get_present_locations | def _get_present_locations(match_traversals):
"""Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations.
"""
present_locations = set()
present_non_optional_locations = set()
for match_traversal in match_traversals:
for step in match_traversal:
if step.as_block is not None:
location_name, _ = step.as_block.location.get_location_name()
present_locations.add(location_name)
if isinstance(step.root_block, Traverse) and not step.root_block.optional:
present_non_optional_locations.add(location_name)
if not present_non_optional_locations.issubset(present_locations):
raise AssertionError(u'present_non_optional_locations {} was not a subset of '
u'present_locations {}. This should never happen.'
.format(present_non_optional_locations, present_locations))
return present_locations, present_non_optional_locations | python | def _get_present_locations(match_traversals):
"""Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations.
"""
present_locations = set()
present_non_optional_locations = set()
for match_traversal in match_traversals:
for step in match_traversal:
if step.as_block is not None:
location_name, _ = step.as_block.location.get_location_name()
present_locations.add(location_name)
if isinstance(step.root_block, Traverse) and not step.root_block.optional:
present_non_optional_locations.add(location_name)
if not present_non_optional_locations.issubset(present_locations):
raise AssertionError(u'present_non_optional_locations {} was not a subset of '
u'present_locations {}. This should never happen.'
.format(present_non_optional_locations, present_locations))
return present_locations, present_non_optional_locations | [
"def",
"_get_present_locations",
"(",
"match_traversals",
")",
":",
"present_locations",
"=",
"set",
"(",
")",
"present_non_optional_locations",
"=",
"set",
"(",
")",
"for",
"match_traversal",
"in",
"match_traversals",
":",
"for",
"step",
"in",
"match_traversal",
":... | Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations. | [
"Return",
"the",
"set",
"of",
"locations",
"and",
"non",
"-",
"optional",
"locations",
"present",
"in",
"the",
"given",
"match",
"traversals",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L154-L190 | train | 227,940 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | prune_non_existent_outputs | def prune_non_existent_outputs(compound_match_query):
"""Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks.
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects
"""
if len(compound_match_query.match_queries) == 1:
return compound_match_query
elif len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery with '
u'an empty list of MatchQuery objects.')
else:
match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
output_block = match_query.output_block
present_locations_tuple = _get_present_locations(match_traversals)
present_locations, present_non_optional_locations = present_locations_tuple
new_output_fields = {}
for output_name, expression in six.iteritems(output_block.fields):
if isinstance(expression, OutputContextField):
# An OutputContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
location_name, _ = expression.location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Non-optional output location {} was not found in '
u'present_locations: {}'
.format(expression.location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, FoldedContextField):
# A FoldedContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
base_location = expression.fold_scope_location.base_location
location_name, _ = base_location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Folded output location {} was not found in '
u'present_locations: {}'
.format(base_location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, TernaryConditional):
# A TernaryConditional indicates that this output is within some optional scope.
# This may be pruned away based on the contents of present_locations.
location_name, _ = expression.if_true.location.get_location_name()
if location_name in present_locations:
if location_name in present_non_optional_locations:
new_output_fields[output_name] = expression.if_true
else:
new_output_fields[output_name] = expression
else:
raise AssertionError(u'Invalid expression of type {} in output block: '
u'{}'.format(type(expression).__name__, output_block))
match_queries.append(
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=ConstructResult(new_output_fields),
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=match_queries) | python | def prune_non_existent_outputs(compound_match_query):
"""Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks.
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects
"""
if len(compound_match_query.match_queries) == 1:
return compound_match_query
elif len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery with '
u'an empty list of MatchQuery objects.')
else:
match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
output_block = match_query.output_block
present_locations_tuple = _get_present_locations(match_traversals)
present_locations, present_non_optional_locations = present_locations_tuple
new_output_fields = {}
for output_name, expression in six.iteritems(output_block.fields):
if isinstance(expression, OutputContextField):
# An OutputContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
location_name, _ = expression.location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Non-optional output location {} was not found in '
u'present_locations: {}'
.format(expression.location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, FoldedContextField):
# A FoldedContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
base_location = expression.fold_scope_location.base_location
location_name, _ = base_location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Folded output location {} was not found in '
u'present_locations: {}'
.format(base_location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, TernaryConditional):
# A TernaryConditional indicates that this output is within some optional scope.
# This may be pruned away based on the contents of present_locations.
location_name, _ = expression.if_true.location.get_location_name()
if location_name in present_locations:
if location_name in present_non_optional_locations:
new_output_fields[output_name] = expression.if_true
else:
new_output_fields[output_name] = expression
else:
raise AssertionError(u'Invalid expression of type {} in output block: '
u'{}'.format(type(expression).__name__, output_block))
match_queries.append(
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=ConstructResult(new_output_fields),
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=match_queries) | [
"def",
"prune_non_existent_outputs",
"(",
"compound_match_query",
")",
":",
"if",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"1",
":",
"return",
"compound_match_query",
"elif",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
... | Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks.
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects | [
"Remove",
"non",
"-",
"existent",
"outputs",
"from",
"each",
"MatchQuery",
"in",
"the",
"given",
"CompoundMatchQuery",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L193-L266 | train | 227,941 |
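For each output, the pruning above makes a three-way decision based on where its location ended up. Here is a simplified stand-in for the TernaryConditional case, using plain strings and sets instead of compiler objects; the return values are illustrative labels only.

    def prune_optional_output(location, present, present_non_optional):
        """Return None to drop the output, or a label for the value that is kept."""
        if location not in present:
            return None                     # location pruned away: drop the output entirely
        if location in present_non_optional:
            return 'if_true branch'         # optional became mandatory: unwrap the conditional
        return 'ternary conditional'        # still optional: keep the guarded expression

    print(prune_optional_output('Animal__out_Animal_ParentOf___1',
                                present={'Animal___1'},
                                present_non_optional={'Animal___1'}))  # None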
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _construct_location_to_filter_list | def _construct_location_to_filter_list(match_query):
"""Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location
"""
# For each location, all filters for that location should be applied at the first instance.
# This function collects a list of all filters corresponding to each location
# present in the given MatchQuery.
location_to_filters = {}
for match_traversal in match_query.match_traversals:
for match_step in match_traversal:
current_filter = match_step.where_block
if current_filter is not None:
current_location = match_step.as_block.location
location_to_filters.setdefault(current_location, []).append(
current_filter)
return location_to_filters | python | def _construct_location_to_filter_list(match_query):
"""Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location
"""
# For each location, all filters for that location should be applied at the first instance.
# This function collects a list of all filters corresponding to each location
# present in the given MatchQuery.
location_to_filters = {}
for match_traversal in match_query.match_traversals:
for match_step in match_traversal:
current_filter = match_step.where_block
if current_filter is not None:
current_location = match_step.as_block.location
location_to_filters.setdefault(current_location, []).append(
current_filter)
return location_to_filters | [
"def",
"_construct_location_to_filter_list",
"(",
"match_query",
")",
":",
"# For each location, all filters for that location should be applied at the first instance.",
"# This function collects a list of all filters corresponding to each location",
"# present in the given MatchQuery.",
"location... | Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location | [
"Return",
"a",
"dict",
"mapping",
"location",
"-",
">",
"list",
"of",
"filters",
"applied",
"at",
"that",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L269-L291 | train | 227,942 |
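The location-to-filters map above is a straightforward grouping pass. A self-contained sketch with strings standing in for Filter predicates:

    from collections import defaultdict

    def group_filters_by_location(steps):
        """Group filter predicates by the location they are applied at."""
        location_to_filters = defaultdict(list)
        for location, predicate in steps:
            if predicate is not None:
                location_to_filters[location].append(predicate)
        return dict(location_to_filters)

    steps = [('Animal___1', 'name = "Nate"'), ('Animal___2', None), ('Animal___1', 'color = "blue"')]
    print(group_filters_by_location(steps))
    # {'Animal___1': ['name = "Nate"', 'color = "blue"']}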
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _filter_list_to_conjunction_expression | def _filter_list_to_conjunction_expression(filter_list):
"""Convert a list of filters to an Expression that is the conjunction of all of them."""
if not isinstance(filter_list, list):
raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list))
if any((not isinstance(filter_block, Filter) for filter_block in filter_list)):
raise AssertionError(u'Expected list of Filter objects. Received: {}'.format(filter_list))
expression_list = [filter_block.predicate for filter_block in filter_list]
return expression_list_to_conjunction(expression_list) | python | def _filter_list_to_conjunction_expression(filter_list):
"""Convert a list of filters to an Expression that is the conjunction of all of them."""
if not isinstance(filter_list, list):
raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list))
if any((not isinstance(filter_block, Filter) for filter_block in filter_list)):
raise AssertionError(u'Expected list of Filter objects. Received: {}'.format(filter_list))
expression_list = [filter_block.predicate for filter_block in filter_list]
return expression_list_to_conjunction(expression_list) | [
"def",
"_filter_list_to_conjunction_expression",
"(",
"filter_list",
")",
":",
"if",
"not",
"isinstance",
"(",
"filter_list",
",",
"list",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected `list`, Received: {}.'",
".",
"format",
"(",
"filter_list",
")",
")",
"if... | Convert a list of filters to an Expression that is the conjunction of all of them. | [
"Convert",
"a",
"list",
"of",
"filters",
"to",
"an",
"Expression",
"that",
"is",
"the",
"conjunction",
"of",
"all",
"of",
"them",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L294-L302 | train | 227,943 |
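The helper above delegates to expression_list_to_conjunction, which presumably folds the predicate list into nested '&&' compositions. A rough equivalent over plain strings (not the compiler's Expression objects) looks like this:

    from functools import reduce

    def conjunction(predicates):
        """Fold a non-empty list of predicate strings into one '&&' expression."""
        if not predicates:
            raise ValueError('Expected a non-empty list of predicates.')
        return reduce(lambda left, right: '({} && {})'.format(left, right), predicates)

    print(conjunction(['name = "Nate"', 'age >= 3', 'color = "blue"']))
    # ((name = "Nate" && age >= 3) && color = "blue")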
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _apply_filters_to_first_location_occurrence | def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters,
already_filtered_locations):
"""Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location
"""
new_match_traversal = []
newly_filtered_locations = set()
for match_step in match_traversal:
# Apply all filters for a location to the first occurrence of that location
current_location = match_step.as_block.location
if current_location in newly_filtered_locations:
raise AssertionError(u'The same location {} was encountered twice in a single '
u'match traversal: {}. This should never happen.'
.format(current_location, match_traversal))
if all((current_location in location_to_filters,
current_location not in already_filtered_locations)):
where_block = Filter(
_filter_list_to_conjunction_expression(
location_to_filters[current_location]
)
)
# No further filters needed for this location. If the same location is found in
# another call to this function, no filters will be added.
newly_filtered_locations.add(current_location)
else:
where_block = None
new_match_step = MatchStep(
root_block=match_step.root_block,
coerce_type_block=match_step.coerce_type_block,
where_block=where_block,
as_block=match_step.as_block
)
new_match_traversal.append(new_match_step)
return new_match_traversal, newly_filtered_locations | python | def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters,
already_filtered_locations):
"""Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location
"""
new_match_traversal = []
newly_filtered_locations = set()
for match_step in match_traversal:
# Apply all filters for a location to the first occurrence of that location
current_location = match_step.as_block.location
if current_location in newly_filtered_locations:
raise AssertionError(u'The same location {} was encountered twice in a single '
u'match traversal: {}. This should never happen.'
.format(current_location, match_traversal))
if all((current_location in location_to_filters,
current_location not in already_filtered_locations)):
where_block = Filter(
_filter_list_to_conjunction_expression(
location_to_filters[current_location]
)
)
# No further filters needed for this location. If the same location is found in
# another call to this function, no filters will be added.
newly_filtered_locations.add(current_location)
else:
where_block = None
new_match_step = MatchStep(
root_block=match_step.root_block,
coerce_type_block=match_step.coerce_type_block,
where_block=where_block,
as_block=match_step.as_block
)
new_match_traversal.append(new_match_step)
return new_match_traversal, newly_filtered_locations | [
"def",
"_apply_filters_to_first_location_occurrence",
"(",
"match_traversal",
",",
"location_to_filters",
",",
"already_filtered_locations",
")",
":",
"new_match_traversal",
"=",
"[",
"]",
"newly_filtered_locations",
"=",
"set",
"(",
")",
"for",
"match_step",
"in",
"match... | Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location | [
"Apply",
"all",
"filters",
"for",
"a",
"specific",
"location",
"into",
"its",
"first",
"occurrence",
"in",
"a",
"given",
"traversal",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L305-L355 | train | 227,944 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | collect_filters_to_first_location_occurrence | def collect_filters_to_first_location_occurrence(compound_match_query):
"""Collect all filters for a particular location to the first instance of the location.
Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurrence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location.
"""
new_match_queries = []
# Each MatchQuery has a different set of locations, and associated Filters.
# Hence, each of them is processed independently.
for match_query in compound_match_query.match_queries:
# Construct mapping from location -> list of filter predicates applied at that location
location_to_filters = _construct_location_to_filter_list(match_query)
already_filtered_locations = set()
new_match_traversals = []
for match_traversal in match_query.match_traversals:
result = _apply_filters_to_first_location_occurrence(
match_traversal, location_to_filters, already_filtered_locations)
new_match_traversal, newly_filtered_locations = result
new_match_traversals.append(new_match_traversal)
already_filtered_locations.update(newly_filtered_locations)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | python | def collect_filters_to_first_location_occurrence(compound_match_query):
"""Collect all filters for a particular location to the first instance of the location.
Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurrence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location.
"""
new_match_queries = []
# Each MatchQuery has a different set of locations, and associated Filters.
# Hence, each of them is processed independently.
for match_query in compound_match_query.match_queries:
# Construct mapping from location -> list of filter predicates applied at that location
location_to_filters = _construct_location_to_filter_list(match_query)
already_filtered_locations = set()
new_match_traversals = []
for match_traversal in match_query.match_traversals:
result = _apply_filters_to_first_location_occurrence(
match_traversal, location_to_filters, already_filtered_locations)
new_match_traversal, newly_filtered_locations = result
new_match_traversals.append(new_match_traversal)
already_filtered_locations.update(newly_filtered_locations)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | [
"def",
"collect_filters_to_first_location_occurrence",
"(",
"compound_match_query",
")",
":",
"new_match_queries",
"=",
"[",
"]",
"# Each MatchQuery has a different set of locations, and associated Filters.",
"# Hence, each of them is processed independently.",
"for",
"match_query",
"in"... | Collect all filters for a particular location to the first instance of the location.
Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurrence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_predicate_list_to_where_block`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location. | [
"Collect",
"all",
"filters",
"for",
"a",
"particular",
"location",
"to",
"the",
"first",
"instance",
"of",
"the",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L358-L402 | train | 227,945 |
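The collection pass can be pictured over plain location lists: all filters for a location are merged into its first appearance, and later appearances carry no filter. An illustrative sketch, with strings instead of Filter objects:

    def collect_to_first_occurrence(traversal, location_to_filters):
        """Attach each location's combined filter at its first appearance only."""
        seen = set()
        lowered = []
        for location in traversal:
            if location in location_to_filters and location not in seen:
                lowered.append((location, ' && '.join(location_to_filters[location])))
                seen.add(location)
            else:
                lowered.append((location, None))
        return lowered

    filters = {'Animal___1': ['name = "Nate"', 'out_Animal_ParentOf IS NOT null']}
    print(collect_to_first_occurrence(['Animal___1', 'Animal___2', 'Animal___1'], filters))
    # [('Animal___1', 'name = "Nate" && out_Animal_ParentOf IS NOT null'), ('Animal___2', None), ('Animal___1', None)]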
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _update_context_field_binary_composition | def _update_context_field_binary_composition(present_locations, expression):
"""Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise
"""
if not any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} without any ContextField '
u'operands. This should never happen.'.format(expression))
if isinstance(expression.left, ContextField):
context_field = expression.left
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
if isinstance(expression.right, ContextField):
context_field = expression.right
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
return expression | python | def _update_context_field_binary_composition(present_locations, expression):
"""Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise
"""
if not any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} without any ContextField '
u'operands. This should never happen.'.format(expression))
if isinstance(expression.left, ContextField):
context_field = expression.left
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
if isinstance(expression.right, ContextField):
context_field = expression.right
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
return expression | [
"def",
"_update_context_field_binary_composition",
"(",
"present_locations",
",",
"expression",
")",
":",
"if",
"not",
"any",
"(",
"(",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
",",
"isinstance",
"(",
"expression",
".",
"right",
",... | Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise | [
"Lower",
"BinaryCompositions",
"involving",
"non",
"-",
"existent",
"ContextFields",
"to",
"True",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L405-L433 | train | 227,946 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _simplify_non_context_field_binary_composition | def _simplify_non_context_field_binary_composition(expression):
"""Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of its operands is a TrueLiteral,
and the original expression otherwise
"""
if any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} with a ContextField '
u'operand. This should never happen.'.format(expression))
if expression.operator == u'||':
if expression.left == TrueLiteral or expression.right == TrueLiteral:
return TrueLiteral
else:
return expression
elif expression.operator == u'&&':
if expression.left == TrueLiteral:
return expression.right
if expression.right == TrueLiteral:
return expression.left
else:
return expression
else:
return expression | python | def _simplify_non_context_field_binary_composition(expression):
"""Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of its operands is a TrueLiteral,
and the original expression otherwise
"""
if any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} with a ContextField '
u'operand. This should never happen.'.format(expression))
if expression.operator == u'||':
if expression.left == TrueLiteral or expression.right == TrueLiteral:
return TrueLiteral
else:
return expression
elif expression.operator == u'&&':
if expression.left == TrueLiteral:
return expression.right
if expression.right == TrueLiteral:
return expression.left
else:
return expression
else:
return expression | [
"def",
"_simplify_non_context_field_binary_composition",
"(",
"expression",
")",
":",
"if",
"any",
"(",
"(",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
",",
"isinstance",
"(",
"expression",
".",
"right",
",",
"ContextField",
")",
")"... | Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of its operands is a TrueLiteral,
and the original expression otherwise | [
"Return",
"a",
"simplified",
"BinaryComposition",
"if",
"either",
"operand",
"is",
"a",
"TrueLiteral",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L436-L465 | train | 227,947 |
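The simplification rules above amount to the standard boolean identities True || x == True and True && x == x. A small stand-alone illustration, using Python booleans and tuples in place of TrueLiteral and BinaryComposition objects:

    TRUE = True  # stand-in for TrueLiteral

    def simplify(operator, left, right):
        """Apply the 'operand is True' simplifications; otherwise return the composition unchanged."""
        if operator == '||' and (left is TRUE or right is TRUE):
            return TRUE
        if operator == '&&':
            if left is TRUE:
                return right
            if right is TRUE:
                return left
        return (operator, left, right)

    print(simplify('&&', TRUE, 'name = "Nate"'))   # name = "Nate"
    print(simplify('||', 'name = "Nate"', TRUE))   # True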
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _update_context_field_expression | def _update_context_field_expression(present_locations, expression):
"""Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result."""
no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable)
if isinstance(expression, BinaryComposition):
if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField):
return _update_context_field_binary_composition(present_locations, expression)
else:
return _simplify_non_context_field_binary_composition(expression)
elif isinstance(expression, TernaryConditional):
return _simplify_ternary_conditional(expression)
elif isinstance(expression, BetweenClause):
lower_bound = expression.lower_bound
upper_bound = expression.upper_bound
if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField):
raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. '
u'This should never happen: {}'.format(expression))
return expression
elif isinstance(expression, (OutputContextField, FoldedContextField)):
raise AssertionError(u'Found unexpected expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression))
elif isinstance(expression, no_op_blocks):
return expression
raise AssertionError(u'Found unhandled expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression)) | python | def _update_context_field_expression(present_locations, expression):
"""Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result."""
no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable)
if isinstance(expression, BinaryComposition):
if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField):
return _update_context_field_binary_composition(present_locations, expression)
else:
return _simplify_non_context_field_binary_composition(expression)
elif isinstance(expression, TernaryConditional):
return _simplify_ternary_conditional(expression)
elif isinstance(expression, BetweenClause):
lower_bound = expression.lower_bound
upper_bound = expression.upper_bound
if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField):
raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. '
u'This should never happen: {}'.format(expression))
return expression
elif isinstance(expression, (OutputContextField, FoldedContextField)):
raise AssertionError(u'Found unexpected expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression))
elif isinstance(expression, no_op_blocks):
return expression
raise AssertionError(u'Found unhandled expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression)) | [
"def",
"_update_context_field_expression",
"(",
"present_locations",
",",
"expression",
")",
":",
"no_op_blocks",
"=",
"(",
"ContextField",
",",
"Literal",
",",
"LocalField",
",",
"UnaryTransformation",
",",
"Variable",
")",
"if",
"isinstance",
"(",
"expression",
",... | Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result. | [
"Lower",
"Expressions",
"involving",
"non",
"-",
"existent",
"ContextFields",
"to",
"TrueLiteral",
"and",
"simplify",
"result",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L484-L508 | train | 227,948 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _lower_non_existent_context_field_filters | def _lower_non_existent_context_field_filters(match_traversals, visitor_fn):
"""Return new match traversals, lowering filters involving non-existent ContextFields.
Expressions involving non-existent ContextFields are evaluated to TrueLiteral.
BinaryCompositions, where one of the operands is lowered to a TrueLiteral,
are lowered appropriately based on the present operator (u'||' and u'&&' are affected).
TernaryConditionals, where the predicate is lowered to a TrueLiteral,
are replaced by their if_true predicate.
The `visitor_fn` implements these behaviors (see `_update_context_field_expression`).
Args:
match_traversals: list of match traversal entities to be lowered
visitor_fn: visit_and_update function for lowering expressions in given match traversal
Returns:
new list of match_traversals, with all filter expressions lowered
"""
new_match_traversals = []
for match_traversal in match_traversals:
new_match_traversal = []
for step in match_traversal:
if step.where_block is not None:
new_filter = step.where_block.visit_and_update_expressions(visitor_fn)
if new_filter.predicate == TrueLiteral:
new_filter = None
new_step = step._replace(where_block=new_filter)
else:
new_step = step
new_match_traversal.append(new_step)
new_match_traversals.append(new_match_traversal)
return new_match_traversals | python | def _lower_non_existent_context_field_filters(match_traversals, visitor_fn):
"""Return new match traversals, lowering filters involving non-existent ContextFields.
Expressions involving non-existent ContextFields are evaluated to TrueLiteral.
BinaryCompositions, where one of the operands is lowered to a TrueLiteral,
are lowered appropriately based on the present operator (u'||' and u'&&' are affected).
TernaryConditionals, where the predicate is lowered to a TrueLiteral,
are replaced by their if_true predicate.
The `visitor_fn` implements these behaviors (see `_update_context_field_expression`).
Args:
match_traversals: list of match traversal entities to be lowered
visitor_fn: visit_and_update function for lowering expressions in given match traversal
Returns:
new list of match_traversals, with all filter expressions lowered
"""
new_match_traversals = []
for match_traversal in match_traversals:
new_match_traversal = []
for step in match_traversal:
if step.where_block is not None:
new_filter = step.where_block.visit_and_update_expressions(visitor_fn)
if new_filter.predicate == TrueLiteral:
new_filter = None
new_step = step._replace(where_block=new_filter)
else:
new_step = step
new_match_traversal.append(new_step)
new_match_traversals.append(new_match_traversal)
return new_match_traversals | [
"def",
"_lower_non_existent_context_field_filters",
"(",
"match_traversals",
",",
"visitor_fn",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"for",
"match_traversal",
"in",
"match_traversals",
":",
"new_match_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"match_... | Return new match traversals, lowering filters involving non-existent ContextFields.
Expressions involving non-existent ContextFields are evaluated to TrueLiteral.
BinaryCompositions, where one of the operands is lowered to a TrueLiteral,
are lowered appropriately based on the present operator (u'||' and u'&&' are affected).
TernaryConditionals, where the predicate is lowered to a TrueLiteral,
are replaced by their if_true predicate.
The `visitor_fn` implements these behaviors (see `_update_context_field_expression`).
Args:
match_traversals: list of match traversal entities to be lowered
visitor_fn: visit_and_update function for lowering expressions in given match traversal
Returns:
new list of match_traversals, with all filter expressions lowered | [
"Return",
"new",
"match",
"traversals",
"lowering",
"filters",
"involving",
"non",
"-",
"existent",
"ContextFields",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L511-L544 | train | 227,949 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | lower_context_field_expressions | def lower_context_field_expressions(compound_match_query):
"""Lower Expressons involving non-existent ContextFields."""
if len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.'
.format(compound_match_query))
elif len(compound_match_query.match_queries) == 1:
# All ContextFields exist if there is only one MatchQuery
# because none of the traverses were omitted, and all locations exist (are defined).
return compound_match_query
else:
new_match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
present_locations, _ = _get_present_locations(match_traversals)
current_visitor_fn = partial(_update_context_field_expression, present_locations)
new_match_traversals = _lower_non_existent_context_field_filters(
match_traversals, current_visitor_fn)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | python | def lower_context_field_expressions(compound_match_query):
"""Lower Expressons involving non-existent ContextFields."""
if len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.'
.format(compound_match_query))
elif len(compound_match_query.match_queries) == 1:
# All ContextFields exist if there is only one MatchQuery
# because none of the traverses were omitted, and all locations exist (are defined).
return compound_match_query
else:
new_match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
present_locations, _ = _get_present_locations(match_traversals)
current_visitor_fn = partial(_update_context_field_expression, present_locations)
new_match_traversals = _lower_non_existent_context_field_filters(
match_traversals, current_visitor_fn)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | [
"def",
"lower_context_field_expressions",
"(",
"compound_match_query",
")",
":",
"if",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Received CompoundMatchQuery {} with no MatchQuery objects.'",
".",
"forma... | Lower Expressons involving non-existent ContextFields. | [
"Lower",
"Expressons",
"involving",
"non",
"-",
"existent",
"ContextFields",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L547-L574 | train | 227,950 |
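Taken together, the functions in this file form a lowering pipeline for @optional traversals that expand vertex fields. The sketch below shows one plausible composition; the actual call order is defined elsewhere in the compiler, so treat the chaining as an assumption. It presumes the four functions are imported from graphql_compiler/compiler/ir_lowering_match/optional_traversal.py, where they are defined.

    def lower_optional_traversals(match_query, complex_optional_roots, location_to_optional_roots):
        """Possible end-to-end use of the passes defined in this module (illustrative ordering)."""
        compound_query = convert_optional_traversals_to_compound_match_query(
            match_query, complex_optional_roots, location_to_optional_roots)
        compound_query = prune_non_existent_outputs(compound_query)
        compound_query = collect_filters_to_first_location_occurrence(compound_query)
        return lower_context_field_expressions(compound_query)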
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_edges_do_not_have_extra_links | def _validate_edges_do_not_have_extra_links(class_name, properties):
"""Validate that edges do not have properties of Link type that aren't the edge endpoints."""
for property_name, property_descriptor in six.iteritems(properties):
if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
continue
if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID:
raise IllegalSchemaStateError(u'Edge class "{}" has a property of type Link that is '
u'not an edge endpoint, this is not allowed: '
u'{}'.format(class_name, property_name)) | python | def _validate_edges_do_not_have_extra_links(class_name, properties):
"""Validate that edges do not have properties of Link type that aren't the edge endpoints."""
for property_name, property_descriptor in six.iteritems(properties):
if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
continue
if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID:
raise IllegalSchemaStateError(u'Edge class "{}" has a property of type Link that is '
u'not an edge endpoint, this is not allowed: '
u'{}'.format(class_name, property_name)) | [
"def",
"_validate_edges_do_not_have_extra_links",
"(",
"class_name",
",",
"properties",
")",
":",
"for",
"property_name",
",",
"property_descriptor",
"in",
"six",
".",
"iteritems",
"(",
"properties",
")",
":",
"if",
"property_name",
"in",
"{",
"EDGE_SOURCE_PROPERTY_NA... | Validate that edges do not have properties of Link type that aren't the edge endpoints. | [
"Validate",
"that",
"edges",
"do",
"not",
"have",
"properties",
"of",
"Link",
"type",
"that",
"aren",
"t",
"the",
"edge",
"endpoints",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L44-L53 | train | 227,951 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_property_names | def _validate_property_names(class_name, properties):
"""Validate that properties do not have names that may cause problems in the GraphQL schema."""
for property_name in properties:
if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES):
raise IllegalSchemaStateError(u'Class "{}" has a property with an illegal name: '
u'{}'.format(class_name, property_name)) | python | def _validate_property_names(class_name, properties):
"""Validate that properties do not have names that may cause problems in the GraphQL schema."""
for property_name in properties:
if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES):
raise IllegalSchemaStateError(u'Class "{}" has a property with an illegal name: '
u'{}'.format(class_name, property_name)) | [
"def",
"_validate_property_names",
"(",
"class_name",
",",
"properties",
")",
":",
"for",
"property_name",
"in",
"properties",
":",
"if",
"not",
"property_name",
"or",
"property_name",
".",
"startswith",
"(",
"ILLEGAL_PROPERTY_NAME_PREFIXES",
")",
":",
"raise",
"Ill... | Validate that properties do not have names that may cause problems in the GraphQL schema. | [
"Validate",
"that",
"properties",
"do",
"not",
"have",
"names",
"that",
"may",
"cause",
"problems",
"in",
"the",
"GraphQL",
"schema",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L56-L61 | train | 227,952 |
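The check above relies on `str.startswith` accepting a tuple of prefixes, which is how several illegal prefixes are tested at once. A quick sketch with assumed (not the library's actual) prefix values:

ILLEGAL_PROPERTY_NAME_PREFIXES = ('_', '@')  # hypothetical prefix values

assert '_private'.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES)
assert '@meta'.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES)
assert not 'name'.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES)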
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_collections_have_default_values | def _validate_collections_have_default_values(class_name, property_name, property_descriptor):
"""Validate that if the property is of collection type, it has a specified default value."""
# We don't want properties of collection type having "null" values, since that may cause
# unexpected errors during GraphQL query execution and other operations.
if property_descriptor.type_id in COLLECTION_PROPERTY_TYPES:
if property_descriptor.default is None:
raise IllegalSchemaStateError(u'Class "{}" has a property "{}" of collection type with '
u'no default value.'.format(class_name, property_name)) | python | def _validate_collections_have_default_values(class_name, property_name, property_descriptor):
"""Validate that if the property is of collection type, it has a specified default value."""
# We don't want properties of collection type having "null" values, since that may cause
# unexpected errors during GraphQL query execution and other operations.
if property_descriptor.type_id in COLLECTION_PROPERTY_TYPES:
if property_descriptor.default is None:
raise IllegalSchemaStateError(u'Class "{}" has a property "{}" of collection type with '
u'no default value.'.format(class_name, property_name)) | [
"def",
"_validate_collections_have_default_values",
"(",
"class_name",
",",
"property_name",
",",
"property_descriptor",
")",
":",
"# We don't want properties of collection type having \"null\" values, since that may cause",
"# unexpected errors during GraphQL query execution and other operati... | Validate that if the property is of collection type, it has a specified default value. | [
"Validate",
"that",
"if",
"the",
"property",
"is",
"of",
"collection",
"type",
"it",
"has",
"a",
"specified",
"default",
"value",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L64-L71 | train | 227,953 |
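A minimal sketch of the condition being enforced, using a stand-in descriptor type and assumed collection type ids rather than the library's real PropertyDescriptor and COLLECTION_PROPERTY_TYPES:

from collections import namedtuple

FakePropertyDescriptor = namedtuple('FakePropertyDescriptor', ('type_id', 'default'))
FAKE_COLLECTION_PROPERTY_TYPES = frozenset({10, 11, 12})  # assumed type ids

def has_missing_collection_default(descriptor):
    # Mirrors the check above: collection-typed properties must carry a default.
    return (descriptor.type_id in FAKE_COLLECTION_PROPERTY_TYPES and
            descriptor.default is None)

assert has_missing_collection_default(FakePropertyDescriptor(10, None))
assert not has_missing_collection_default(FakePropertyDescriptor(10, []))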
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | get_superclasses_from_class_definition | def get_superclasses_from_class_definition(class_definition):
"""Extract a list of all superclass names from a class definition dict."""
# New-style superclasses definition, supporting multiple-inheritance.
superclasses = class_definition.get('superClasses', None)
if superclasses:
return list(superclasses)
# Old-style superclass definition, single inheritance only.
superclass = class_definition.get('superClass', None)
if superclass:
return [superclass]
# No superclasses are present.
return [] | python | def get_superclasses_from_class_definition(class_definition):
"""Extract a list of all superclass names from a class definition dict."""
# New-style superclasses definition, supporting multiple-inheritance.
superclasses = class_definition.get('superClasses', None)
if superclasses:
return list(superclasses)
# Old-style superclass definition, single inheritance only.
superclass = class_definition.get('superClass', None)
if superclass:
return [superclass]
# No superclasses are present.
return [] | [
"def",
"get_superclasses_from_class_definition",
"(",
"class_definition",
")",
":",
"# New-style superclasses definition, supporting multiple-inheritance.",
"superclasses",
"=",
"class_definition",
".",
"get",
"(",
"'superClasses'",
",",
"None",
")",
"if",
"superclasses",
":",
... | Extract a list of all superclass names from a class definition dict. | [
"Extract",
"a",
"list",
"of",
"all",
"superclass",
"names",
"from",
"a",
"class",
"definition",
"dict",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L74-L88 | train | 227,954 |
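These hypothetical class-definition dicts illustrate the three input shapes the function distinguishes; the snippet assumes the graphql-compiler package is installed so the function can be imported from the module path shown above:

from graphql_compiler.schema_generation.schema_graph import (
    get_superclasses_from_class_definition
)

new_style = {'name': 'Person', 'superClasses': ['Entity', 'V']}
old_style = {'name': 'Person', 'superClass': 'Entity'}
no_parent = {'name': 'V'}

assert get_superclasses_from_class_definition(new_style) == ['Entity', 'V']
assert get_superclasses_from_class_definition(old_style) == ['Entity']
assert get_superclasses_from_class_definition(no_parent) == []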
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaElement.freeze | def freeze(self):
"""Make the SchemaElement's connections immutable."""
self.in_connections = frozenset(self.in_connections)
self.out_connections = frozenset(self.out_connections) | python | def freeze(self):
"""Make the SchemaElement's connections immutable."""
self.in_connections = frozenset(self.in_connections)
self.out_connections = frozenset(self.out_connections) | [
"def",
"freeze",
"(",
"self",
")",
":",
"self",
".",
"in_connections",
"=",
"frozenset",
"(",
"self",
".",
"in_connections",
")",
"self",
".",
"out_connections",
"=",
"frozenset",
"(",
"self",
".",
"out_connections",
")"
] | Make the SchemaElement's connections immutable. | [
"Make",
"the",
"SchemaElement",
"s",
"connections",
"immutable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L180-L183 | train | 227,955 |
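The effect of the freeze is simply that of `frozenset`: later mutation attempts fail. A tiny illustration with a hypothetical edge-class name:

in_connections = {'Animal_ParentOf'}          # mutable while the schema is built
frozen_in_connections = frozenset(in_connections)

try:
    frozen_in_connections.add('Animal_OfSpecies')
except AttributeError:
    pass  # frozenset has no mutating methods, so frozen connections stay fixed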
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_default_property_values | def get_default_property_values(self, classname):
"""Return a dict with default values for all properties declared on this class."""
schema_element = self.get_element_by_class_name(classname)
result = {
property_name: property_descriptor.default
for property_name, property_descriptor in six.iteritems(schema_element.properties)
}
if schema_element.is_edge:
# Remove the source/destination properties for edges, if they exist.
result.pop(EDGE_SOURCE_PROPERTY_NAME, None)
result.pop(EDGE_DESTINATION_PROPERTY_NAME, None)
return result | python | def get_default_property_values(self, classname):
"""Return a dict with default values for all properties declared on this class."""
schema_element = self.get_element_by_class_name(classname)
result = {
property_name: property_descriptor.default
for property_name, property_descriptor in six.iteritems(schema_element.properties)
}
if schema_element.is_edge:
# Remove the source/destination properties for edges, if they exist.
result.pop(EDGE_SOURCE_PROPERTY_NAME, None)
result.pop(EDGE_DESTINATION_PROPERTY_NAME, None)
return result | [
"def",
"get_default_property_values",
"(",
"self",
",",
"classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name",
"(",
"classname",
")",
"result",
"=",
"{",
"property_name",
":",
"property_descriptor",
".",
"default",
"for",
"property_n... | Return a dict with default values for all properties declared on this class. | [
"Return",
"a",
"dict",
"with",
"default",
"values",
"for",
"all",
"properties",
"declared",
"on",
"this",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L297-L311 | train | 227,956 |
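The `dict.pop(key, None)` calls make the endpoint removal safe whether or not the key is present. A sketch follows; 'out' and 'in' are assumed stand-ins for the edge source/destination property constants:

result = {'out': 'Person', 'in': 'Animal', 'since': None}
result.pop('out', None)
result.pop('in', None)
result.pop('not_present', None)  # absent keys are ignored instead of raising KeyError
assert result == {'since': None}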
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._get_property_values_with_defaults | def _get_property_values_with_defaults(self, classname, property_values):
"""Return the property values for the class, with default values applied where needed."""
# To uphold OrientDB semantics, make a new dict with all property values set
# to their default values, which are None if no default was set.
# Then, overwrite its data with the supplied property values.
final_values = self.get_default_property_values(classname)
final_values.update(property_values)
return final_values | python | def _get_property_values_with_defaults(self, classname, property_values):
"""Return the property values for the class, with default values applied where needed."""
# To uphold OrientDB semantics, make a new dict with all property values set
# to their default values, which are None if no default was set.
# Then, overwrite its data with the supplied property values.
final_values = self.get_default_property_values(classname)
final_values.update(property_values)
return final_values | [
"def",
"_get_property_values_with_defaults",
"(",
"self",
",",
"classname",
",",
"property_values",
")",
":",
"# To uphold OrientDB semantics, make a new dict with all property values set",
"# to their default values, which are None if no default was set.",
"# Then, overwrite its data with t... | Return the property values for the class, with default values applied where needed. | [
"Return",
"the",
"property",
"values",
"for",
"the",
"class",
"with",
"default",
"values",
"applied",
"where",
"needed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L313-L320 | train | 227,957 |
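The defaults-then-overwrite merge reduces to two dict operations; an illustrative run with hypothetical values:

default_values = {'name': None, 'tags': []}  # as get_default_property_values might return
supplied_values = {'name': 'Alice'}

final_values = dict(default_values)
final_values.update(supplied_values)         # supplied values overwrite the defaults
assert final_values == {'name': 'Alice', 'tags': []}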
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_element_by_class_name_or_raise | def get_element_by_class_name_or_raise(self, class_name):
"""Return the SchemaElement for the specified class name, asserting that it exists."""
if class_name not in self._elements:
raise InvalidClassError(u'Class does not exist: {}'.format(class_name))
return self._elements[class_name] | python | def get_element_by_class_name_or_raise(self, class_name):
"""Return the SchemaElement for the specified class name, asserting that it exists."""
if class_name not in self._elements:
raise InvalidClassError(u'Class does not exist: {}'.format(class_name))
return self._elements[class_name] | [
"def",
"get_element_by_class_name_or_raise",
"(",
"self",
",",
"class_name",
")",
":",
"if",
"class_name",
"not",
"in",
"self",
".",
"_elements",
":",
"raise",
"InvalidClassError",
"(",
"u'Class does not exist: {}'",
".",
"format",
"(",
"class_name",
")",
")",
"re... | Return the SchemaElement for the specified class name, asserting that it exists. | [
"Return",
"the",
"SchemaElement",
"for",
"the",
"specified",
"class",
"name",
"asserting",
"that",
"it",
"exists",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L322-L327 | train | 227,958 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_vertex_schema_element_or_raise | def get_vertex_schema_element_or_raise(self, vertex_classname):
"""Return the schema element with the given name, asserting that it's of vertex type."""
schema_element = self.get_element_by_class_name_or_raise(vertex_classname)
if not schema_element.is_vertex:
raise InvalidClassError(u'Non-vertex class provided: {}'.format(vertex_classname))
return schema_element | python | def get_vertex_schema_element_or_raise(self, vertex_classname):
"""Return the schema element with the given name, asserting that it's of vertex type."""
schema_element = self.get_element_by_class_name_or_raise(vertex_classname)
if not schema_element.is_vertex:
raise InvalidClassError(u'Non-vertex class provided: {}'.format(vertex_classname))
return schema_element | [
"def",
"get_vertex_schema_element_or_raise",
"(",
"self",
",",
"vertex_classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name_or_raise",
"(",
"vertex_classname",
")",
"if",
"not",
"schema_element",
".",
"is_vertex",
":",
"raise",
"InvalidCl... | Return the schema element with the given name, asserting that it's of vertex type. | [
"Return",
"the",
"schema",
"element",
"with",
"the",
"given",
"name",
"asserting",
"that",
"it",
"s",
"of",
"vertex",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L329-L336 | train | 227,959 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_edge_schema_element_or_raise | def get_edge_schema_element_or_raise(self, edge_classname):
"""Return the schema element with the given name, asserting that it's of edge type."""
schema_element = self.get_element_by_class_name_or_raise(edge_classname)
if not schema_element.is_edge:
raise InvalidClassError(u'Non-edge class provided: {}'.format(edge_classname))
return schema_element | python | def get_edge_schema_element_or_raise(self, edge_classname):
"""Return the schema element with the given name, asserting that it's of edge type."""
schema_element = self.get_element_by_class_name_or_raise(edge_classname)
if not schema_element.is_edge:
raise InvalidClassError(u'Non-edge class provided: {}'.format(edge_classname))
return schema_element | [
"def",
"get_edge_schema_element_or_raise",
"(",
"self",
",",
"edge_classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name_or_raise",
"(",
"edge_classname",
")",
"if",
"not",
"schema_element",
".",
"is_edge",
":",
"raise",
"InvalidClassError... | Return the schema element with the given name, asserting that it's of edge type. | [
"Return",
"the",
"schema",
"element",
"with",
"the",
"given",
"name",
"asserting",
"that",
"it",
"s",
"of",
"edge",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L338-L345 | train | 227,960 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_is_non_abstract_vertex_type | def validate_is_non_abstract_vertex_type(self, vertex_classname):
"""Validate that a vertex classname corresponds to a non-abstract vertex class."""
element = self.get_vertex_schema_element_or_raise(vertex_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract'
.format(vertex_classname)) | python | def validate_is_non_abstract_vertex_type(self, vertex_classname):
"""Validate that a vertex classname corresponds to a non-abstract vertex class."""
element = self.get_vertex_schema_element_or_raise(vertex_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract'
.format(vertex_classname)) | [
"def",
"validate_is_non_abstract_vertex_type",
"(",
"self",
",",
"vertex_classname",
")",
":",
"element",
"=",
"self",
".",
"get_vertex_schema_element_or_raise",
"(",
"vertex_classname",
")",
"if",
"element",
".",
"abstract",
":",
"raise",
"InvalidClassError",
"(",
"u... | Validate that a vertex classname corresponds to a non-abstract vertex class. | [
"Validate",
"that",
"a",
"vertex",
"classname",
"corresponds",
"to",
"a",
"non",
"-",
"abstract",
"vertex",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L355-L361 | train | 227,961 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_is_non_abstract_edge_type | def validate_is_non_abstract_edge_type(self, edge_classname):
"""Validate that a edge classname corresponds to a non-abstract edge class."""
element = self.get_edge_schema_element_or_raise(edge_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract edge class, but {} is abstract'
.format(edge_classname)) | python | def validate_is_non_abstract_edge_type(self, edge_classname):
"""Validate that a edge classname corresponds to a non-abstract edge class."""
element = self.get_edge_schema_element_or_raise(edge_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract edge class, but {} is abstract'
.format(edge_classname)) | [
"def",
"validate_is_non_abstract_edge_type",
"(",
"self",
",",
"edge_classname",
")",
":",
"element",
"=",
"self",
".",
"get_edge_schema_element_or_raise",
"(",
"edge_classname",
")",
"if",
"element",
".",
"abstract",
":",
"raise",
"InvalidClassError",
"(",
"u'Expecte... | Validate that a edge classname corresponds to a non-abstract edge class. | [
"Validate",
"that",
"a",
"edge",
"classname",
"corresponds",
"to",
"a",
"non",
"-",
"abstract",
"edge",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L363-L369 | train | 227,962 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_properties_exist | def validate_properties_exist(self, classname, property_names):
"""Validate that the specified property names are indeed defined on the given class."""
schema_element = self.get_element_by_class_name(classname)
requested_properties = set(property_names)
available_properties = set(schema_element.properties.keys())
non_existent_properties = requested_properties - available_properties
if non_existent_properties:
raise InvalidPropertyError(
u'Class "{}" does not have definitions for properties "{}": '
u'{}'.format(classname, non_existent_properties, property_names)) | python | def validate_properties_exist(self, classname, property_names):
"""Validate that the specified property names are indeed defined on the given class."""
schema_element = self.get_element_by_class_name(classname)
requested_properties = set(property_names)
available_properties = set(schema_element.properties.keys())
non_existent_properties = requested_properties - available_properties
if non_existent_properties:
raise InvalidPropertyError(
u'Class "{}" does not have definitions for properties "{}": '
u'{}'.format(classname, non_existent_properties, property_names)) | [
"def",
"validate_properties_exist",
"(",
"self",
",",
"classname",
",",
"property_names",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name",
"(",
"classname",
")",
"requested_properties",
"=",
"set",
"(",
"property_names",
")",
"available_prope... | Validate that the specified property names are indeed defined on the given class. | [
"Validate",
"that",
"the",
"specified",
"property",
"names",
"are",
"indeed",
"defined",
"on",
"the",
"given",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L371-L381 | train | 227,963 |
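The heart of this validation is one set subtraction; a self-contained sketch with hypothetical property names:

requested_properties = {'name', 'alias', 'no_such_property'}
available_properties = {'name', 'alias', 'uuid'}

non_existent_properties = requested_properties - available_properties
assert non_existent_properties == {'no_such_property'}  # non-empty, so the check raises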
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._split_classes_by_kind | def _split_classes_by_kind(self, class_name_to_definition):
"""Assign each class to the vertex, edge or non-graph type sets based on its kind."""
for class_name in class_name_to_definition:
inheritance_set = self._inheritance_sets[class_name]
is_vertex = ORIENTDB_BASE_VERTEX_CLASS_NAME in inheritance_set
is_edge = ORIENTDB_BASE_EDGE_CLASS_NAME in inheritance_set
if is_vertex and is_edge:
raise AssertionError(u'Class {} appears to be both a vertex and an edge class: '
u'{}'.format(class_name, inheritance_set))
elif is_vertex:
self._vertex_class_names.add(class_name)
elif is_edge:
self._edge_class_names.add(class_name)
else:
self._non_graph_class_names.add(class_name)
# Freeze the classname sets so they cannot be modified again.
self._vertex_class_names = frozenset(self._vertex_class_names)
self._edge_class_names = frozenset(self._edge_class_names)
self._non_graph_class_names = frozenset(self._non_graph_class_names) | python | def _split_classes_by_kind(self, class_name_to_definition):
"""Assign each class to the vertex, edge or non-graph type sets based on its kind."""
for class_name in class_name_to_definition:
inheritance_set = self._inheritance_sets[class_name]
is_vertex = ORIENTDB_BASE_VERTEX_CLASS_NAME in inheritance_set
is_edge = ORIENTDB_BASE_EDGE_CLASS_NAME in inheritance_set
if is_vertex and is_edge:
raise AssertionError(u'Class {} appears to be both a vertex and an edge class: '
u'{}'.format(class_name, inheritance_set))
elif is_vertex:
self._vertex_class_names.add(class_name)
elif is_edge:
self._edge_class_names.add(class_name)
else:
self._non_graph_class_names.add(class_name)
# Freeze the classname sets so they cannot be modified again.
self._vertex_class_names = frozenset(self._vertex_class_names)
self._edge_class_names = frozenset(self._edge_class_names)
self._non_graph_class_names = frozenset(self._non_graph_class_names) | [
"def",
"_split_classes_by_kind",
"(",
"self",
",",
"class_name_to_definition",
")",
":",
"for",
"class_name",
"in",
"class_name_to_definition",
":",
"inheritance_set",
"=",
"self",
".",
"_inheritance_sets",
"[",
"class_name",
"]",
"is_vertex",
"=",
"ORIENTDB_BASE_VERTEX... | Assign each class to the vertex, edge or non-graph type sets based on its kind. | [
"Assign",
"each",
"class",
"to",
"the",
"vertex",
"edge",
"or",
"non",
"-",
"graph",
"type",
"sets",
"based",
"on",
"its",
"kind",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L440-L461 | train | 227,964 |
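A compact sketch of the same classification over hypothetical inheritance sets; 'V' and 'E' are OrientDB's base vertex and edge class names, and the class hierarchy below is invented for illustration:

ORIENTDB_BASE_VERTEX_CLASS_NAME = 'V'
ORIENTDB_BASE_EDGE_CLASS_NAME = 'E'

inheritance_sets = {
    'Person': {'Person', 'V'},              # descends from the base vertex class
    'Person_Knows': {'Person_Knows', 'E'},  # descends from the base edge class
    'Address': {'Address'},                 # neither: a non-graph class
}

vertex_names = {name for name, bases in inheritance_sets.items()
                if ORIENTDB_BASE_VERTEX_CLASS_NAME in bases}
edge_names = {name for name, bases in inheritance_sets.items()
              if ORIENTDB_BASE_EDGE_CLASS_NAME in bases}
non_graph_names = set(inheritance_sets) - vertex_names - edge_names

assert vertex_names == {'Person'}
assert edge_names == {'Person_Knows'}
assert non_graph_names == {'Address'}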
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._create_descriptor_from_property_definition | def _create_descriptor_from_property_definition(self, class_name, property_definition,
class_name_to_definition):
"""Return a PropertyDescriptor corresponding to the given OrientDB property definition."""
name = property_definition['name']
type_id = property_definition['type']
linked_class = property_definition.get('linkedClass', None)
linked_type = property_definition.get('linkedType', None)
qualifier = None
validate_supported_property_type_id(name, type_id)
if type_id == PROPERTY_TYPE_LINK_ID:
if class_name not in self._edge_class_names:
raise AssertionError(u'Found a property of type Link on a non-edge class: '
u'{} {}'.format(name, class_name))
if name not in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
raise AssertionError(u'Found a property of type Link with an unexpected name: '
u'{} {}'.format(name, class_name))
if linked_class is None:
raise AssertionError(u'Property "{}" is declared with type Link but has no '
u'linked class: {}'.format(name, property_definition))
if linked_class not in self._vertex_class_names:
is_linked_class_abstract = class_name_to_definition[linked_class]['abstract']
all_subclasses_are_vertices = True
for subclass in self._subclass_sets[linked_class]:
if subclass != linked_class and subclass not in self.vertex_class_names:
all_subclasses_are_vertices = False
break
if not (is_linked_class_abstract and all_subclasses_are_vertices):
raise AssertionError(u'Property "{}" is declared as a Link to class {}, but '
u'that class is neither a vertex nor is it an '
u'abstract class whose subclasses are all vertices!'
.format(name, linked_class))
qualifier = linked_class
elif type_id in COLLECTION_PROPERTY_TYPES:
if linked_class is not None and linked_type is not None:
raise AssertionError(u'Property "{}" unexpectedly has both a linked class and '
u'a linked type: {}'.format(name, property_definition))
elif linked_type is not None and linked_class is None:
# No linked class, must be a linked native OrientDB type.
validate_supported_property_type_id(name + ' inner type', linked_type)
qualifier = linked_type
elif linked_class is not None and linked_type is None:
# No linked type, must be a linked non-graph user-defined type.
if linked_class not in self._non_graph_class_names:
raise AssertionError(u'Property "{}" is declared as the inner type of '
u'an embedded collection, but is not a non-graph class: '
u'{}'.format(name, linked_class))
qualifier = linked_class
else:
raise AssertionError(u'Property "{}" is an embedded collection but has '
u'neither a linked class nor a linked type: '
u'{}'.format(name, property_definition))
default_value = None
default_value_string = property_definition.get('defaultValue', None)
if default_value_string is not None:
default_value = parse_default_property_value(name, type_id, default_value_string)
descriptor = PropertyDescriptor(type_id=type_id, qualifier=qualifier, default=default_value)
# Sanity-check the descriptor before returning it.
_validate_collections_have_default_values(class_name, name, descriptor)
return descriptor | python | def _create_descriptor_from_property_definition(self, class_name, property_definition,
class_name_to_definition):
"""Return a PropertyDescriptor corresponding to the given OrientDB property definition."""
name = property_definition['name']
type_id = property_definition['type']
linked_class = property_definition.get('linkedClass', None)
linked_type = property_definition.get('linkedType', None)
qualifier = None
validate_supported_property_type_id(name, type_id)
if type_id == PROPERTY_TYPE_LINK_ID:
if class_name not in self._edge_class_names:
raise AssertionError(u'Found a property of type Link on a non-edge class: '
u'{} {}'.format(name, class_name))
if name not in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
raise AssertionError(u'Found a property of type Link with an unexpected name: '
u'{} {}'.format(name, class_name))
if linked_class is None:
raise AssertionError(u'Property "{}" is declared with type Link but has no '
u'linked class: {}'.format(name, property_definition))
if linked_class not in self._vertex_class_names:
is_linked_class_abstract = class_name_to_definition[linked_class]['abstract']
all_subclasses_are_vertices = True
for subclass in self._subclass_sets[linked_class]:
if subclass != linked_class and subclass not in self.vertex_class_names:
all_subclasses_are_vertices = False
break
if not (is_linked_class_abstract and all_subclasses_are_vertices):
raise AssertionError(u'Property "{}" is declared as a Link to class {}, but '
u'that class is neither a vertex nor is it an '
u'abstract class whose subclasses are all vertices!'
.format(name, linked_class))
qualifier = linked_class
elif type_id in COLLECTION_PROPERTY_TYPES:
if linked_class is not None and linked_type is not None:
raise AssertionError(u'Property "{}" unexpectedly has both a linked class and '
u'a linked type: {}'.format(name, property_definition))
elif linked_type is not None and linked_class is None:
# No linked class, must be a linked native OrientDB type.
validate_supported_property_type_id(name + ' inner type', linked_type)
qualifier = linked_type
elif linked_class is not None and linked_type is None:
# No linked type, must be a linked non-graph user-defined type.
if linked_class not in self._non_graph_class_names:
raise AssertionError(u'Property "{}" is declared as the inner type of '
u'an embedded collection, but is not a non-graph class: '
u'{}'.format(name, linked_class))
qualifier = linked_class
else:
raise AssertionError(u'Property "{}" is an embedded collection but has '
u'neither a linked class nor a linked type: '
u'{}'.format(name, property_definition))
default_value = None
default_value_string = property_definition.get('defaultValue', None)
if default_value_string is not None:
default_value = parse_default_property_value(name, type_id, default_value_string)
descriptor = PropertyDescriptor(type_id=type_id, qualifier=qualifier, default=default_value)
# Sanity-check the descriptor before returning it.
_validate_collections_have_default_values(class_name, name, descriptor)
return descriptor | [
"def",
"_create_descriptor_from_property_definition",
"(",
"self",
",",
"class_name",
",",
"property_definition",
",",
"class_name_to_definition",
")",
":",
"name",
"=",
"property_definition",
"[",
"'name'",
"]",
"type_id",
"=",
"property_definition",
"[",
"'type'",
"]"... | Return a PropertyDescriptor corresponding to the given OrientDB property definition. | [
"Return",
"a",
"PropertyDescriptor",
"corresponding",
"to",
"the",
"given",
"OrientDB",
"property",
"definition",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L548-L616 | train | 227,965 |
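For orientation, here are hypothetical property-definition dicts of the three shapes this function distinguishes; the numeric type ids are assumed OrientDB values (7 = String, 10 = EmbeddedList, 13 = Link), not constants confirmed by the source:

link_endpoint = {'name': 'out', 'type': 13, 'linkedClass': 'Person'}
native_collection = {'name': 'tags', 'type': 10, 'linkedType': 7, 'defaultValue': '[]'}
embedded_collection = {'name': 'addresses', 'type': 10, 'linkedClass': 'Address',
                       'defaultValue': '[]'}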
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._link_vertex_and_edge_types | def _link_vertex_and_edge_types(self):
"""For each edge, link it to the vertex types it connects to each other."""
for edge_class_name in self._edge_class_names:
edge_element = self._elements[edge_class_name]
if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
if edge_element.abstract:
continue
else:
raise AssertionError(u'Found a non-abstract edge class with undefined '
u'endpoint types: {}'.format(edge_element))
from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
edge_schema_element = self._elements[edge_class_name]
# Link from_class_name with edge_class_name
for from_class in self._subclass_sets[from_class_name]:
from_schema_element = self._elements[from_class]
from_schema_element.out_connections.add(edge_class_name)
edge_schema_element.in_connections.add(from_class)
# Link edge_class_name with to_class_name
for to_class in self._subclass_sets[to_class_name]:
to_schema_element = self._elements[to_class]
edge_schema_element.out_connections.add(to_class)
to_schema_element.in_connections.add(edge_class_name) | python | def _link_vertex_and_edge_types(self):
"""For each edge, link it to the vertex types it connects to each other."""
for edge_class_name in self._edge_class_names:
edge_element = self._elements[edge_class_name]
if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
if edge_element.abstract:
continue
else:
raise AssertionError(u'Found a non-abstract edge class with undefined '
u'endpoint types: {}'.format(edge_element))
from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
edge_schema_element = self._elements[edge_class_name]
# Link from_class_name with edge_class_name
for from_class in self._subclass_sets[from_class_name]:
from_schema_element = self._elements[from_class]
from_schema_element.out_connections.add(edge_class_name)
edge_schema_element.in_connections.add(from_class)
# Link edge_class_name with to_class_name
for to_class in self._subclass_sets[to_class_name]:
to_schema_element = self._elements[to_class]
edge_schema_element.out_connections.add(to_class)
to_schema_element.in_connections.add(edge_class_name) | [
"def",
"_link_vertex_and_edge_types",
"(",
"self",
")",
":",
"for",
"edge_class_name",
"in",
"self",
".",
"_edge_class_names",
":",
"edge_element",
"=",
"self",
".",
"_elements",
"[",
"edge_class_name",
"]",
"if",
"(",
"EDGE_SOURCE_PROPERTY_NAME",
"not",
"in",
"ed... | For each edge, link it to the vertex types it connects to each other. | [
"For",
"each",
"edge",
"link",
"it",
"to",
"the",
"vertex",
"types",
"it",
"connects",
"to",
"each",
"other",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L618-L646 | train | 227,966 |
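A small sketch of the bidirectional linking over a hypothetical hierarchy, showing why every subclass of the source type gets connected to the edge class:

subclass_sets = {'Person': {'Person', 'Student'}}           # hypothetical hierarchy
out_connections = {name: set() for name in ('Person', 'Student', 'Person_Knows')}
in_connections = {name: set() for name in ('Person', 'Student', 'Person_Knows')}

for from_class in subclass_sets['Person']:                  # link every source subclass
    out_connections[from_class].add('Person_Knows')
    in_connections['Person_Knows'].add(from_class)

assert out_connections['Student'] == {'Person_Knows'}
assert in_connections['Person_Knows'] == {'Person', 'Student'}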
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _is_local_filter | def _is_local_filter(filter_block):
"""Return True if the Filter block references no non-local fields, and False otherwise."""
# We need the "result" value of this function to be mutated within the "visitor_fn".
# Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here:
# https://www.python.org/dev/peps/pep-3104/
# Instead, we use a dict to store the value we need mutated, since the "visitor_fn"
# can mutate state in the parent scope, but not rebind variables in it without "nonlocal".
# TODO(predrag): Revisit this if we drop support for Python 2.
result = {
'is_local_filter': True
}
filter_predicate = filter_block.predicate
def visitor_fn(expression):
"""Expression visitor function that looks for uses of non-local fields."""
non_local_expression_types = (ContextField, ContextFieldExistence)
if isinstance(expression, non_local_expression_types):
result['is_local_filter'] = False
# Don't change the expression.
return expression
filter_predicate.visit_and_update(visitor_fn)
return result['is_local_filter'] | python | def _is_local_filter(filter_block):
"""Return True if the Filter block references no non-local fields, and False otherwise."""
# We need the "result" value of this function to be mutated within the "visitor_fn".
# Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here:
# https://www.python.org/dev/peps/pep-3104/
# Instead, we use a dict to store the value we need mutated, since the "visitor_fn"
# can mutate state in the parent scope, but not rebind variables in it without "nonlocal".
# TODO(predrag): Revisit this if we drop support for Python 2.
result = {
'is_local_filter': True
}
filter_predicate = filter_block.predicate
def visitor_fn(expression):
"""Expression visitor function that looks for uses of non-local fields."""
non_local_expression_types = (ContextField, ContextFieldExistence)
if isinstance(expression, non_local_expression_types):
result['is_local_filter'] = False
# Don't change the expression.
return expression
filter_predicate.visit_and_update(visitor_fn)
return result['is_local_filter'] | [
"def",
"_is_local_filter",
"(",
"filter_block",
")",
":",
"# We need the \"result\" value of this function to be mutated within the \"visitor_fn\".",
"# Since we support both Python 2 and Python 3, we can't use the \"nonlocal\" keyword here:",
"# https://www.python.org/dev/peps/pep-3104/",
"# Inst... | Return True if the Filter block references no non-local fields, and False otherwise. | [
"Return",
"True",
"if",
"the",
"Filter",
"block",
"references",
"no",
"non",
"-",
"local",
"fields",
"and",
"False",
"otherwise",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L53-L78 | train | 227,967 |
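Under Python 3 alone, the mutable-dict trick described in the comments above could be replaced by `nonlocal`; both variants are sketched below, independent of the compiler's expression types:

def any_negative_py2_style(values):
    result = {'found': False}            # mutable container instead of rebinding

    def visitor_fn(value):
        if value < 0:
            result['found'] = True
        return value

    for value in values:
        visitor_fn(value)
    return result['found']

def any_negative_py3_style(values):
    found = False

    def visitor_fn(value):
        nonlocal found                   # Python 3 only: rebind the outer variable
        if value < 0:
            found = True
        return value

    for value in values:
        visitor_fn(value)
    return found

assert any_negative_py2_style([1, -2]) and any_negative_py3_style([1, -2])
assert not any_negative_py2_style([1, 2]) and not any_negative_py3_style([1, 2])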
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _calculate_type_bound_at_step | def _calculate_type_bound_at_step(match_step):
"""Return the GraphQL type bound at the given step, or None if no bound is given."""
current_type_bounds = []
if isinstance(match_step.root_block, QueryRoot):
# The QueryRoot start class is a type bound.
current_type_bounds.extend(match_step.root_block.start_class)
if match_step.coerce_type_block is not None:
# The CoerceType target class is also a type bound.
current_type_bounds.extend(match_step.coerce_type_block.target_class)
if current_type_bounds:
# A type bound exists. Assert that there is exactly one bound, defined in precisely one way.
return get_only_element_from_collection(current_type_bounds)
else:
# No type bound exists at this MATCH step.
return None | python | def _calculate_type_bound_at_step(match_step):
"""Return the GraphQL type bound at the given step, or None if no bound is given."""
current_type_bounds = []
if isinstance(match_step.root_block, QueryRoot):
# The QueryRoot start class is a type bound.
current_type_bounds.extend(match_step.root_block.start_class)
if match_step.coerce_type_block is not None:
# The CoerceType target class is also a type bound.
current_type_bounds.extend(match_step.coerce_type_block.target_class)
if current_type_bounds:
# A type bound exists. Assert that there is exactly one bound, defined in precisely one way.
return get_only_element_from_collection(current_type_bounds)
else:
# No type bound exists at this MATCH step.
return None | [
"def",
"_calculate_type_bound_at_step",
"(",
"match_step",
")",
":",
"current_type_bounds",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"match_step",
".",
"root_block",
",",
"QueryRoot",
")",
":",
"# The QueryRoot start class is a type bound.",
"current_type_bounds",
".",
"... | Return the GraphQL type bound at the given step, or None if no bound is given. | [
"Return",
"the",
"GraphQL",
"type",
"bound",
"at",
"the",
"given",
"step",
"or",
"None",
"if",
"no",
"bound",
"is",
"given",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L188-L205 | train | 227,968 |
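A sketch of the assumed contract of `get_only_element_from_collection`, the helper that enforces the "exactly one bound" assertion; this is an illustration of its presumed behavior, not the library's actual implementation:

def get_only_element_from_collection(collection):
    # Assumed contract: return the sole element, or fail loudly otherwise.
    if len(collection) != 1:
        raise AssertionError(u'Expected a single-element collection: {}'.format(collection))
    return collection[0]

assert get_only_element_from_collection(['Animal']) == 'Animal'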
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _assert_type_bounds_are_not_conflicting | def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound,
location, match_query):
"""Ensure that the two bounds either are an exact match, or one of them is None."""
if all((current_type_bound is not None,
previous_type_bound is not None,
current_type_bound != previous_type_bound)):
raise AssertionError(
u'Conflicting type bounds calculated at location {}: {} vs {} '
u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query)) | python | def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound,
location, match_query):
"""Ensure that the two bounds either are an exact match, or one of them is None."""
if all((current_type_bound is not None,
previous_type_bound is not None,
current_type_bound != previous_type_bound)):
raise AssertionError(
u'Conflicting type bounds calculated at location {}: {} vs {} '
u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query)) | [
"def",
"_assert_type_bounds_are_not_conflicting",
"(",
"current_type_bound",
",",
"previous_type_bound",
",",
"location",
",",
"match_query",
")",
":",
"if",
"all",
"(",
"(",
"current_type_bound",
"is",
"not",
"None",
",",
"previous_type_bound",
"is",
"not",
"None",
... | Ensure that the two bounds either are an exact match, or one of them is None. | [
"Ensure",
"that",
"the",
"two",
"bounds",
"either",
"are",
"an",
"exact",
"match",
"or",
"one",
"of",
"them",
"is",
"None",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L208-L216 | train | 227,969 |
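The conflict predicate is a single `all(...)` over three conditions; an isolated sketch of its truth behavior, with invented type names:

def bounds_conflict(current_type_bound, previous_type_bound):
    # Mirrors the all(...) condition above: both bounds set, and unequal.
    return all((current_type_bound is not None,
                previous_type_bound is not None,
                current_type_bound != previous_type_bound))

assert bounds_conflict('Animal', 'Person')      # two different bounds: conflict
assert not bounds_conflict('Animal', 'Animal')  # exact match: no conflict
assert not bounds_conflict('Animal', None)      # one bound missing: no conflict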
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _expose_only_preferred_locations | def _expose_only_preferred_locations(match_query, location_types, coerced_locations,
preferred_locations, eligible_locations):
"""Return a MATCH query where only preferred locations are valid as query start locations."""
preferred_location_types = dict()
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in preferred_locations:
# This location is preferred. We have to make sure that at least one occurrence
# of this location in the MATCH query has an associated "class:" clause,
# which would be generated by a type bound at the corresponding MATCH step.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = preferred_location_types.get(current_step_location, None)
if previous_type_bound is not None:
# The location is already valid. If so, make sure that this step either does
# not have any type bounds (e.g. via QueryRoot or CoerceType blocks),
# or has type bounds that match the previously-decided type bound.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
else:
# The location is not yet known to be valid. If it does not have
# a type bound in this MATCH step, add a type coercion to the type
# registered in "location_types".
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_step = match_step._replace(
coerce_type_block=CoerceType({current_type_bound}))
preferred_location_types[current_step_location] = current_type_bound
elif current_step_location in eligible_locations:
# This location is eligible, but not preferred. We have to make sure
# none of the MATCH steps with this location have type bounds, and therefore
# will not produce a corresponding "class:" clause in the resulting MATCH query.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is not None:
# There is a type bound here that we need to neutralize.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
if (current_step_location not in coerced_locations or
previous_type_bound is not None):
# The type bound here is already implied by the GraphQL query structure,
# or has already been applied at a previous occurrence of this location.
# We can simply delete the QueryRoot / CoerceType blocks that impart it.
if isinstance(match_step.root_block, QueryRoot):
new_root_block = None
else:
new_root_block = match_step.root_block
new_step = match_step._replace(
root_block=new_root_block, coerce_type_block=None)
else:
# The type bound here is not already implied by the GraphQL query structure.
# This should only be possible via a CoerceType block. Lower this CoerceType
# block into a Filter with INSTANCEOF to ensure the resulting query has the
# same semantics, while making the location invalid as a query start point.
if (isinstance(match_step.root_block, QueryRoot) or
match_step.coerce_type_block is None):
raise AssertionError(u'Unexpected MATCH step applying a type bound not '
u'already implied by the GraphQL query structure: '
u'{} {}'.format(match_step, match_query))
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_step = match_step._replace(
coerce_type_block=None, where_block=new_where_block)
else:
# There is no type bound that OrientDB can find defined at this location.
# No action is necessary.
pass
else:
# This location is neither preferred nor eligible.
# No action is necessary at this location.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def _expose_only_preferred_locations(match_query, location_types, coerced_locations,
preferred_locations, eligible_locations):
"""Return a MATCH query where only preferred locations are valid as query start locations."""
preferred_location_types = dict()
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in preferred_locations:
# This location is preferred. We have to make sure that at least one occurrence
# of this location in the MATCH query has an associated "class:" clause,
# which would be generated by a type bound at the corresponding MATCH step.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = preferred_location_types.get(current_step_location, None)
if previous_type_bound is not None:
# The location is already valid. If so, make sure that this step either does
# not have any type bounds (e.g. via QueryRoot or CoerceType blocks),
# or has type bounds that match the previously-decided type bound.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
else:
# The location is not yet known to be valid. If it does not have
# a type bound in this MATCH step, add a type coercion to the type
# registered in "location_types".
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_step = match_step._replace(
coerce_type_block=CoerceType({current_type_bound}))
preferred_location_types[current_step_location] = current_type_bound
elif current_step_location in eligible_locations:
# This location is eligible, but not preferred. We have to make sure
# none of the MATCH steps with this location have type bounds, and therefore
# will not produce a corresponding "class:" clause in the resulting MATCH query.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is not None:
# There is a type bound here that we need to neutralize.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
if (current_step_location not in coerced_locations or
previous_type_bound is not None):
# The type bound here is already implied by the GraphQL query structure,
# or has already been applied at a previous occurrence of this location.
# We can simply delete the QueryRoot / CoerceType blocks that impart it.
if isinstance(match_step.root_block, QueryRoot):
new_root_block = None
else:
new_root_block = match_step.root_block
new_step = match_step._replace(
root_block=new_root_block, coerce_type_block=None)
else:
# The type bound here is not already implied by the GraphQL query structure.
# This should only be possible via a CoerceType block. Lower this CoerceType
# block into a Filter with INSTANCEOF to ensure the resulting query has the
# same semantics, while making the location invalid as a query start point.
if (isinstance(match_step.root_block, QueryRoot) or
match_step.coerce_type_block is None):
raise AssertionError(u'Unexpected MATCH step applying a type bound not '
u'already implied by the GraphQL query structure: '
u'{} {}'.format(match_step, match_query))
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_step = match_step._replace(
coerce_type_block=None, where_block=new_where_block)
else:
# There is no type bound that OrientDB can find defined at this location.
# No action is necessary.
pass
else:
# This location is neither preferred nor eligible.
# No action is necessary at this location.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"_expose_only_preferred_locations",
"(",
"match_query",
",",
"location_types",
",",
"coerced_locations",
",",
"preferred_locations",
",",
"eligible_locations",
")",
":",
"preferred_location_types",
"=",
"dict",
"(",
")",
"eligible_location_types",
"=",
"dict",
"(",... | Return a MATCH query where only preferred locations are valid as query start locations. | [
"Return",
"a",
"MATCH",
"query",
"where",
"only",
"preferred",
"locations",
"are",
"valid",
"as",
"query",
"start",
"locations",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L219-L308 | train | 227,970 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _expose_all_eligible_locations | def _expose_all_eligible_locations(match_query, location_types, eligible_locations):
"""Return a MATCH query where all eligible locations are valid as query start locations."""
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in eligible_locations:
# This location is eligible. We need to make sure it has an associated type bound,
# so that it produces a "class:" clause that will make it a valid query start
# location. It either already has such a type bound, or we can use the type
# implied by the GraphQL query structure to add one.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_coerce_type_block = CoerceType({current_type_bound})
new_step = match_step._replace(coerce_type_block=new_coerce_type_block)
else:
# There is a type bound here. We simply ensure that the bound is not conflicting
# with any other type bound at a different MATCH step with the same location.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
else:
# This function may only be called if there are no preferred locations. Since this
# location cannot be preferred, and is not eligible, it must be ineligible.
# No action is necessary in this case.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def _expose_all_eligible_locations(match_query, location_types, eligible_locations):
"""Return a MATCH query where all eligible locations are valid as query start locations."""
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in eligible_locations:
# This location is eligible. We need to make sure it has an associated type bound,
# so that it produces a "class:" clause that will make it a valid query start
# location. It either already has such a type bound, or we can use the type
# implied by the GraphQL query structure to add one.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_coerce_type_block = CoerceType({current_type_bound})
new_step = match_step._replace(coerce_type_block=new_coerce_type_block)
else:
# There is a type bound here. We simply ensure that the bound is not conflicting
# with any other type bound at a different MATCH step with the same location.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
else:
# This function may only be called if there are no preferred locations. Since this
# location cannot be preferred, and is not eligible, it must be ineligible.
# No action is necessary in this case.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"_expose_all_eligible_locations",
"(",
"match_query",
",",
"location_types",
",",
"eligible_locations",
")",
":",
"eligible_location_types",
"=",
"dict",
"(",
")",
"new_match_traversals",
"=",
"[",
"]",
"for",
"current_traversal",
"in",
"match_query",
".",
"mat... | Return a MATCH query where all eligible locations are valid as query start locations. | [
"Return",
"a",
"MATCH",
"query",
"where",
"all",
"eligible",
"locations",
"are",
"valid",
"as",
"query",
"start",
"locations",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L311-L350 | train | 227,971 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | expose_ideal_query_execution_start_points | def expose_ideal_query_execution_start_points(compound_match_query, location_types,
coerced_locations):
"""Ensure that OrientDB only considers desirable query start points in query planning."""
new_queries = []
for match_query in compound_match_query.match_queries:
location_classification = _classify_query_locations(match_query)
preferred_locations, eligible_locations, _ = location_classification
if preferred_locations:
# Convert all eligible locations into non-eligible ones, by removing
# their "class:" clause. The "class:" clause is provided either by having
# a QueryRoot block or a CoerceType block in the MatchStep corresponding
# to the location. We remove it by converting the class check into
# an "INSTANCEOF" Filter block, which OrientDB is unable to optimize away.
new_query = _expose_only_preferred_locations(
match_query, location_types, coerced_locations,
preferred_locations, eligible_locations)
elif eligible_locations:
# Make sure that all eligible locations have a "class:" clause by adding
# a CoerceType block that is a no-op as guaranteed by the schema. This merely
# ensures that OrientDB is able to use each of these locations as a query start point,
# and will choose the one whose class is of lowest cardinality.
new_query = _expose_all_eligible_locations(
match_query, location_types, eligible_locations)
else:
raise AssertionError(u'This query has no preferred or eligible query start locations. '
u'This is almost certainly a bug: {}'.format(match_query))
new_queries.append(new_query)
return compound_match_query._replace(match_queries=new_queries) | python | def expose_ideal_query_execution_start_points(compound_match_query, location_types,
coerced_locations):
"""Ensure that OrientDB only considers desirable query start points in query planning."""
new_queries = []
for match_query in compound_match_query.match_queries:
location_classification = _classify_query_locations(match_query)
preferred_locations, eligible_locations, _ = location_classification
if preferred_locations:
# Convert all eligible locations into non-eligible ones, by removing
# their "class:" clause. The "class:" clause is provided either by having
# a QueryRoot block or a CoerceType block in the MatchStep corresponding
# to the location. We remove it by converting the class check into
# an "INSTANCEOF" Filter block, which OrientDB is unable to optimize away.
new_query = _expose_only_preferred_locations(
match_query, location_types, coerced_locations,
preferred_locations, eligible_locations)
elif eligible_locations:
# Make sure that all eligible locations have a "class:" clause by adding
# a CoerceType block that is a no-op as guaranteed by the schema. This merely
# ensures that OrientDB is able to use each of these locations as a query start point,
# and will choose the one whose class is of lowest cardinality.
new_query = _expose_all_eligible_locations(
match_query, location_types, eligible_locations)
else:
raise AssertionError(u'This query has no preferred or eligible query start locations. '
u'This is almost certainly a bug: {}'.format(match_query))
new_queries.append(new_query)
return compound_match_query._replace(match_queries=new_queries) | [
"def",
"expose_ideal_query_execution_start_points",
"(",
"compound_match_query",
",",
"location_types",
",",
"coerced_locations",
")",
":",
"new_queries",
"=",
"[",
"]",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"location_classification"... | Ensure that OrientDB only considers desirable query start points in query planning. | [
"Ensure",
"that",
"OrientDB",
"only",
"considers",
"desirable",
"query",
"start",
"points",
"in",
"query",
"planning",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L353-L384 | train | 227,972 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _expression_list_to_conjunction | def _expression_list_to_conjunction(expression_list):
"""Return an Expression that is the `&&` of all the expressions in the given list."""
if not isinstance(expression_list, list):
raise AssertionError(u'Expected list. Received {}: '
u'{}'.format(type(expression_list).__name__, expression_list))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list '
u'(function should never be called with empty list): '
u'{}'.format(expression_list))
elif len(expression_list) == 1:
return expression_list[0]
else:
remaining_conjunction = _expression_list_to_conjunction(expression_list[1:])
return BinaryComposition(u'&&', expression_list[0], remaining_conjunction) | python | def _expression_list_to_conjunction(expression_list):
"""Return an Expression that is the `&&` of all the expressions in the given list."""
if not isinstance(expression_list, list):
raise AssertionError(u'Expected list. Received {}: '
u'{}'.format(type(expression_list).__name__, expression_list))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list '
u'(function should never be called with empty list): '
u'{}'.format(expression_list))
elif len(expression_list) == 1:
return expression_list[0]
else:
remaining_conjunction = _expression_list_to_conjunction(expression_list[1:])
return BinaryComposition(u'&&', expression_list[0], remaining_conjunction) | [
"def",
"_expression_list_to_conjunction",
"(",
"expression_list",
")",
":",
"if",
"not",
"isinstance",
"(",
"expression_list",
",",
"list",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected list. Received {}: '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"expres... | Return an Expression that is the `&&` of all the expressions in the given list. | [
"Return",
"an",
"Expression",
"that",
"is",
"the",
"&&",
"of",
"all",
"the",
"expressions",
"in",
"the",
"given",
"list",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L9-L22 | train | 227,973 |
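A minimal usage sketch for the record above (illustrative only: _expression_list_to_conjunction is a module-private helper, the import path for the expression classes is assumed, and the field names and literal values are made up):

from graphql_compiler.compiler.expressions import BinaryComposition, LocalField, Literal  # assumed path
from graphql_compiler.compiler.ir_lowering_match.between_lowering import (
    _expression_list_to_conjunction)

# Three independent predicates within the same scope.
predicates = [
    BinaryComposition(u'>=', LocalField('age'), Literal(10)),
    BinaryComposition(u'<=', LocalField('age'), Literal(20)),
    BinaryComposition(u'=', LocalField('color'), Literal('blue')),
]

conjunction = _expression_list_to_conjunction(predicates)
# The conjunction nests right-to-left:
# BinaryComposition(u'&&', predicates[0],
#                   BinaryComposition(u'&&', predicates[1], predicates[2]))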
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _extract_conjuction_elements_from_expression | def _extract_conjuction_elements_from_expression(expression):
"""Return a generator for expressions that are connected by `&&`s in the given expression."""
if isinstance(expression, BinaryComposition) and expression.operator == u'&&':
for element in _extract_conjuction_elements_from_expression(expression.left):
yield element
for element in _extract_conjuction_elements_from_expression(expression.right):
yield element
else:
yield expression | python | def _extract_conjuction_elements_from_expression(expression):
"""Return a generator for expressions that are connected by `&&`s in the given expression."""
if isinstance(expression, BinaryComposition) and expression.operator == u'&&':
for element in _extract_conjuction_elements_from_expression(expression.left):
yield element
for element in _extract_conjuction_elements_from_expression(expression.right):
yield element
else:
yield expression | [
"def",
"_extract_conjuction_elements_from_expression",
"(",
"expression",
")",
":",
"if",
"isinstance",
"(",
"expression",
",",
"BinaryComposition",
")",
"and",
"expression",
".",
"operator",
"==",
"u'&&'",
":",
"for",
"element",
"in",
"_extract_conjuction_elements_from... | Return a generator for expressions that are connected by `&&`s in the given expression. | [
"Return",
"a",
"generator",
"for",
"expressions",
"that",
"are",
"connected",
"by",
"&&",
"s",
"in",
"the",
"given",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L25-L33 | train | 227,974 |
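A hedged sketch of the flattening behaviour in the record above (same assumed imports as the previous sketch, plus the private helper from this record); the generator yields only the leaf expressions, never the `&&` nodes that connect them:

from graphql_compiler.compiler.ir_lowering_match.between_lowering import (
    _extract_conjuction_elements_from_expression)

nested = BinaryComposition(
    u'&&',
    BinaryComposition(u'>=', LocalField('age'), Literal(10)),
    BinaryComposition(
        u'&&',
        BinaryComposition(u'<=', LocalField('age'), Literal(20)),
        BinaryComposition(u'=', LocalField('color'), Literal('blue'))))

leaves = list(_extract_conjuction_elements_from_expression(nested))
# leaves contains three Expression objects: age >= 10, age <= 20, and color = 'blue';
# the two '&&' BinaryComposition nodes are traversed but not yielded.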
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _construct_field_operator_expression_dict | def _construct_field_operator_expression_dict(expression_list):
"""Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators
"""
between_operators = (u'<=', u'>=')
inverse_operator = {u'>=': u'<=', u'<=': u'>='}
local_field_to_expressions = {}
remaining_expression_list = deque([])
for expression in expression_list:
if all((
isinstance(expression, BinaryComposition),
expression.operator in between_operators,
isinstance(expression.left, LocalField) or isinstance(expression.right, LocalField)
)):
if isinstance(expression.right, LocalField):
new_operator = inverse_operator[expression.operator]
new_expression = BinaryComposition(new_operator, expression.right, expression.left)
else:
new_expression = expression
field_name = new_expression.left.field_name
expressions_dict = local_field_to_expressions.setdefault(field_name, {})
expressions_dict.setdefault(new_expression.operator, []).append(new_expression)
else:
remaining_expression_list.append(expression)
return local_field_to_expressions, remaining_expression_list | python | def _construct_field_operator_expression_dict(expression_list):
"""Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators
"""
between_operators = (u'<=', u'>=')
inverse_operator = {u'>=': u'<=', u'<=': u'>='}
local_field_to_expressions = {}
remaining_expression_list = deque([])
for expression in expression_list:
if all((
isinstance(expression, BinaryComposition),
expression.operator in between_operators,
isinstance(expression.left, LocalField) or isinstance(expression.right, LocalField)
)):
if isinstance(expression.right, LocalField):
new_operator = inverse_operator[expression.operator]
new_expression = BinaryComposition(new_operator, expression.right, expression.left)
else:
new_expression = expression
field_name = new_expression.left.field_name
expressions_dict = local_field_to_expressions.setdefault(field_name, {})
expressions_dict.setdefault(new_expression.operator, []).append(new_expression)
else:
remaining_expression_list.append(expression)
return local_field_to_expressions, remaining_expression_list | [
"def",
"_construct_field_operator_expression_dict",
"(",
"expression_list",
")",
":",
"between_operators",
"=",
"(",
"u'<='",
",",
"u'>='",
")",
"inverse_operator",
"=",
"{",
"u'>='",
":",
"u'<='",
",",
"u'<='",
":",
"u'>='",
"}",
"local_field_to_expressions",
"=",
... | Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators | [
"Construct",
"a",
"mapping",
"from",
"local",
"fields",
"to",
"specified",
"operators",
"and",
"corresponding",
"expressions",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L36-L70 | train | 227,975 |
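A hedged sketch for the record above, showing how a comparison whose LocalField sits on the right-hand side is inverted before being grouped (imports as in the earlier between_lowering sketches; values are made up):

lower_bound = BinaryComposition(u'>=', LocalField('age'), Literal(10))
flipped_bound = BinaryComposition(u'<=', Literal(20), LocalField('age'))  # LocalField on the right
unrelated = BinaryComposition(u'=', LocalField('color'), Literal('blue'))

field_to_expressions, remaining = _construct_field_operator_expression_dict(
    [lower_bound, flipped_bound, unrelated])
# field_to_expressions['age'][u'>='] holds two entries: lower_bound itself, plus a new
# BinaryComposition(u'>=', LocalField('age'), Literal(20)) built by inverting flipped_bound.
# remaining is a deque containing only `unrelated`, because u'=' is not a between operator.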
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _lower_expressions_to_between | def _lower_expressions_to_between(base_expression):
"""Return a new expression, with any eligible comparisons lowered to `between` clauses."""
expression_list = list(_extract_conjuction_elements_from_expression(base_expression))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list {} from base_expression: '
u'{}'.format(expression_list, base_expression))
elif len(expression_list) == 1:
return base_expression
else:
between_operators = (u'<=', u'>=')
local_field_to_expressions, new_expression_list = _construct_field_operator_expression_dict(
expression_list)
lowering_occurred = False
for field_name in local_field_to_expressions:
expressions_dict = local_field_to_expressions[field_name]
if all(operator in expressions_dict and len(expressions_dict[operator]) == 1
for operator in between_operators):
field = LocalField(field_name)
lower_bound = expressions_dict[u'>='][0].right
upper_bound = expressions_dict[u'<='][0].right
new_expression_list.appendleft(BetweenClause(field, lower_bound, upper_bound))
lowering_occurred = True
else:
for expression in expressions_dict.values():
new_expression_list.extend(expression)
if lowering_occurred:
return _expression_list_to_conjunction(list(new_expression_list))
else:
return base_expression | python | def _lower_expressions_to_between(base_expression):
"""Return a new expression, with any eligible comparisons lowered to `between` clauses."""
expression_list = list(_extract_conjuction_elements_from_expression(base_expression))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list {} from base_expression: '
u'{}'.format(expression_list, base_expression))
elif len(expression_list) == 1:
return base_expression
else:
between_operators = (u'<=', u'>=')
local_field_to_expressions, new_expression_list = _construct_field_operator_expression_dict(
expression_list)
lowering_occurred = False
for field_name in local_field_to_expressions:
expressions_dict = local_field_to_expressions[field_name]
if all(operator in expressions_dict and len(expressions_dict[operator]) == 1
for operator in between_operators):
field = LocalField(field_name)
lower_bound = expressions_dict[u'>='][0].right
upper_bound = expressions_dict[u'<='][0].right
new_expression_list.appendleft(BetweenClause(field, lower_bound, upper_bound))
lowering_occurred = True
else:
for expression in expressions_dict.values():
new_expression_list.extend(expression)
if lowering_occurred:
return _expression_list_to_conjunction(list(new_expression_list))
else:
return base_expression | [
"def",
"_lower_expressions_to_between",
"(",
"base_expression",
")",
":",
"expression_list",
"=",
"list",
"(",
"_extract_conjuction_elements_from_expression",
"(",
"base_expression",
")",
")",
"if",
"len",
"(",
"expression_list",
")",
"==",
"0",
":",
"raise",
"Asserti... | Return a new expression, with any eligible comparisons lowered to `between` clauses. | [
"Return",
"a",
"new",
"expression",
"with",
"any",
"eligible",
"comparisons",
"lowered",
"to",
"between",
"clauses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L73-L103 | train | 227,976 |
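A hedged end-to-end sketch for the record above: one lower and one upper bound on the same local field collapse into a single BetweenClause, and the remaining filter is AND-ed back on. BetweenClause is imported from the between_lowering module's own namespace here, since that module clearly binds the name; the other imports follow the earlier sketches:

from graphql_compiler.compiler.ir_lowering_match.between_lowering import (
    BetweenClause, _lower_expressions_to_between)

base_predicate = BinaryComposition(
    u'&&',
    BinaryComposition(u'>=', LocalField('age'), Literal(10)),
    BinaryComposition(
        u'&&',
        BinaryComposition(u'<=', LocalField('age'), Literal(20)),
        BinaryComposition(u'=', LocalField('color'), Literal('blue'))))

lowered = _lower_expressions_to_between(base_predicate)
# lowered is roughly:
# BinaryComposition(u'&&',
#                   BetweenClause(LocalField('age'), Literal(10), Literal(20)),
#                   BinaryComposition(u'=', LocalField('color'), Literal('blue')))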
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | lower_comparisons_to_between | def lower_comparisons_to_between(match_query):
"""Return a new MatchQuery, with all eligible comparison filters lowered to between clauses."""
new_match_traversals = []
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
if step.where_block:
expression = step.where_block.predicate
new_where_block = Filter(_lower_expressions_to_between(expression))
new_traversal.append(step._replace(where_block=new_where_block))
else:
new_traversal.append(step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def lower_comparisons_to_between(match_query):
"""Return a new MatchQuery, with all eligible comparison filters lowered to between clauses."""
new_match_traversals = []
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
if step.where_block:
expression = step.where_block.predicate
new_where_block = Filter(_lower_expressions_to_between(expression))
new_traversal.append(step._replace(where_block=new_where_block))
else:
new_traversal.append(step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"lower_comparisons_to_between",
"(",
"match_query",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"for",
"current_match_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"new_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"current_match_travers... | Return a new MatchQuery, with all eligible comparison filters lowered to between clauses. | [
"Return",
"a",
"new",
"MatchQuery",
"with",
"all",
"eligible",
"comparison",
"filters",
"lowered",
"to",
"between",
"clauses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L106-L122 | train | 227,977 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/common.py | _ensure_arguments_are_provided | def _ensure_arguments_are_provided(expected_types, arguments):
"""Ensure that all arguments expected by the query were actually provided."""
# This function only checks that the arguments were specified,
# and does not check types. Type checking is done as part of the actual formatting step.
expected_arg_names = set(six.iterkeys(expected_types))
provided_arg_names = set(six.iterkeys(arguments))
if expected_arg_names != provided_arg_names:
missing_args = expected_arg_names - provided_arg_names
unexpected_args = provided_arg_names - expected_arg_names
raise GraphQLInvalidArgumentError(u'Missing or unexpected arguments found: '
u'missing {}, unexpected '
u'{}'.format(missing_args, unexpected_args)) | python | def _ensure_arguments_are_provided(expected_types, arguments):
"""Ensure that all arguments expected by the query were actually provided."""
# This function only checks that the arguments were specified,
# and does not check types. Type checking is done as part of the actual formatting step.
expected_arg_names = set(six.iterkeys(expected_types))
provided_arg_names = set(six.iterkeys(arguments))
if expected_arg_names != provided_arg_names:
missing_args = expected_arg_names - provided_arg_names
unexpected_args = provided_arg_names - expected_arg_names
raise GraphQLInvalidArgumentError(u'Missing or unexpected arguments found: '
u'missing {}, unexpected '
u'{}'.format(missing_args, unexpected_args)) | [
"def",
"_ensure_arguments_are_provided",
"(",
"expected_types",
",",
"arguments",
")",
":",
"# This function only checks that the arguments were specified,",
"# and does not check types. Type checking is done as part of the actual formatting step.",
"expected_arg_names",
"=",
"set",
"(",
... | Ensure that all arguments expected by the query were actually provided. | [
"Ensure",
"that",
"all",
"arguments",
"expected",
"by",
"the",
"query",
"were",
"actually",
"provided",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/common.py#L12-L24 | train | 227,978 |
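A hedged sketch for the record above; only the key sets matter to this check, so the expected-type values are left as None, and GraphQLInvalidArgumentError is assumed to be importable from graphql_compiler.exceptions:

from graphql_compiler.exceptions import GraphQLInvalidArgumentError  # assumed module path
from graphql_compiler.query_formatting.common import _ensure_arguments_are_provided

expected_types = {'min_age': None, 'wanted_color': None}  # values are ignored by this check

# Exactly the expected argument names: passes silently.
_ensure_arguments_are_provided(expected_types, {'min_age': 25, 'wanted_color': 'blue'})

try:
    # 'wanted_color' is missing and 'wanted_colour' is unexpected.
    _ensure_arguments_are_provided(expected_types, {'min_age': 25, 'wanted_colour': 'blue'})
except GraphQLInvalidArgumentError as error:
    print(error)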
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/common.py | insert_arguments_into_query | def insert_arguments_into_query(compilation_result, arguments):
"""Insert the arguments into the compiled GraphQL query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a query in the appropriate output language, with inserted argument data
"""
_ensure_arguments_are_provided(compilation_result.input_metadata, arguments)
if compilation_result.language == MATCH_LANGUAGE:
return insert_arguments_into_match_query(compilation_result, arguments)
elif compilation_result.language == GREMLIN_LANGUAGE:
return insert_arguments_into_gremlin_query(compilation_result, arguments)
elif compilation_result.language == SQL_LANGUAGE:
return insert_arguments_into_sql_query(compilation_result, arguments)
else:
raise AssertionError(u'Unrecognized language in compilation result: '
u'{}'.format(compilation_result)) | python | def insert_arguments_into_query(compilation_result, arguments):
"""Insert the arguments into the compiled GraphQL query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a query in the appropriate output language, with inserted argument data
"""
_ensure_arguments_are_provided(compilation_result.input_metadata, arguments)
if compilation_result.language == MATCH_LANGUAGE:
return insert_arguments_into_match_query(compilation_result, arguments)
elif compilation_result.language == GREMLIN_LANGUAGE:
return insert_arguments_into_gremlin_query(compilation_result, arguments)
elif compilation_result.language == SQL_LANGUAGE:
return insert_arguments_into_sql_query(compilation_result, arguments)
else:
raise AssertionError(u'Unrecognized language in compilation result: '
u'{}'.format(compilation_result)) | [
"def",
"insert_arguments_into_query",
"(",
"compilation_result",
",",
"arguments",
")",
":",
"_ensure_arguments_are_provided",
"(",
"compilation_result",
".",
"input_metadata",
",",
"arguments",
")",
"if",
"compilation_result",
".",
"language",
"==",
"MATCH_LANGUAGE",
":"... | Insert the arguments into the compiled GraphQL query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a query in the appropriate output language, with inserted argument data | [
"Insert",
"the",
"arguments",
"into",
"the",
"compiled",
"GraphQL",
"query",
"to",
"form",
"a",
"complete",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/common.py#L31-L51 | train | 227,979 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | QueryRoot.validate | def validate(self):
"""Ensure that the QueryRoot block is valid."""
if not (isinstance(self.start_class, set) and
all(isinstance(x, six.string_types) for x in self.start_class)):
raise TypeError(u'Expected set of string start_class, got: {} {}'.format(
type(self.start_class).__name__, self.start_class))
for cls in self.start_class:
validate_safe_string(cls) | python | def validate(self):
"""Ensure that the QueryRoot block is valid."""
if not (isinstance(self.start_class, set) and
all(isinstance(x, six.string_types) for x in self.start_class)):
raise TypeError(u'Expected set of string start_class, got: {} {}'.format(
type(self.start_class).__name__, self.start_class))
for cls in self.start_class:
validate_safe_string(cls) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"self",
".",
"start_class",
",",
"set",
")",
"and",
"all",
"(",
"isinstance",
"(",
"x",
",",
"six",
".",
"string_types",
")",
"for",
"x",
"in",
"self",
".",
"start_class",... | Ensure that the QueryRoot block is valid. | [
"Ensure",
"that",
"the",
"QueryRoot",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L34-L42 | train | 227,980 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | CoerceType.validate | def validate(self):
"""Ensure that the CoerceType block is valid."""
if not (isinstance(self.target_class, set) and
all(isinstance(x, six.string_types) for x in self.target_class)):
raise TypeError(u'Expected set of string target_class, got: {} {}'.format(
type(self.target_class).__name__, self.target_class))
for cls in self.target_class:
validate_safe_string(cls) | python | def validate(self):
"""Ensure that the CoerceType block is valid."""
if not (isinstance(self.target_class, set) and
all(isinstance(x, six.string_types) for x in self.target_class)):
raise TypeError(u'Expected set of string target_class, got: {} {}'.format(
type(self.target_class).__name__, self.target_class))
for cls in self.target_class:
validate_safe_string(cls) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"self",
".",
"target_class",
",",
"set",
")",
"and",
"all",
"(",
"isinstance",
"(",
"x",
",",
"six",
".",
"string_types",
")",
"for",
"x",
"in",
"self",
".",
"target_class... | Ensure that the CoerceType block is valid. | [
"Ensure",
"that",
"the",
"CoerceType",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L79-L87 | train | 227,981 |
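A small hedged sketch of the validation rule in the record above; the one-argument constructor usage mirrors the `CoerceType({current_type_bound})` call seen in the orientdb_query_execution records earlier, and the class name used here is made up:

from graphql_compiler.compiler.blocks import CoerceType

CoerceType({u'Animal'}).validate()  # passes: a set of string class names

try:
    # Not wrapped in a set; TypeError is raised at validation time
    # (or already at construction, if the constructor validates eagerly).
    CoerceType(u'Animal').validate()
except TypeError as error:
    print(error)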
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | ConstructResult.validate | def validate(self):
"""Ensure that the ConstructResult block is valid."""
if not isinstance(self.fields, dict):
raise TypeError(u'Expected dict fields, got: {} {}'.format(
type(self.fields).__name__, self.fields))
for key, value in six.iteritems(self.fields):
validate_safe_string(key)
if not isinstance(value, Expression):
raise TypeError(
u'Expected Expression values in the fields dict, got: '
u'{} -> {}'.format(key, value)) | python | def validate(self):
"""Ensure that the ConstructResult block is valid."""
if not isinstance(self.fields, dict):
raise TypeError(u'Expected dict fields, got: {} {}'.format(
type(self.fields).__name__, self.fields))
for key, value in six.iteritems(self.fields):
validate_safe_string(key)
if not isinstance(value, Expression):
raise TypeError(
u'Expected Expression values in the fields dict, got: '
u'{} -> {}'.format(key, value)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fields",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected dict fields, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"fields",
")",
".",
"_... | Ensure that the ConstructResult block is valid. | [
"Ensure",
"that",
"the",
"ConstructResult",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L120-L131 | train | 227,982 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Filter.validate | def validate(self):
"""Ensure that the Filter block is valid."""
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate)) | python | def validate(self):
"""Ensure that the Filter block is valid."""
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"predicate",
",",
"Expression",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Expression predicate, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"predicate... | Ensure that the Filter block is valid. | [
"Ensure",
"that",
"the",
"Filter",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L174-L178 | train | 227,983 |
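A hedged sketch for the record above; the one-argument constructor usage mirrors `Filter(_lower_expressions_to_between(expression))` from the between_lowering records, and the expression imports follow the earlier sketches:

from graphql_compiler.compiler.blocks import Filter
from graphql_compiler.compiler.expressions import BinaryComposition, LocalField, Literal  # assumed path

predicate = BinaryComposition(u'=', LocalField('color'), Literal('blue'))
Filter(predicate).validate()  # passes: the predicate is an Expression

try:
    Filter(u"color = 'blue'").validate()  # a plain string is not an Expression
except TypeError as error:
    print(error)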
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Backtrack.validate | def validate(self):
"""Ensure that the Backtrack block is valid."""
validate_marked_location(self.location)
if not isinstance(self.optional, bool):
raise TypeError(u'Expected bool optional, got: {} {}'.format(
type(self.optional).__name__, self.optional)) | python | def validate(self):
"""Ensure that the Backtrack block is valid."""
validate_marked_location(self.location)
if not isinstance(self.optional, bool):
raise TypeError(u'Expected bool optional, got: {} {}'.format(
type(self.optional).__name__, self.optional)) | [
"def",
"validate",
"(",
"self",
")",
":",
"validate_marked_location",
"(",
"self",
".",
"location",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"optional",
",",
"bool",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected bool optional, got: {} {}'",
".",
"f... | Ensure that the Backtrack block is valid. | [
"Ensure",
"that",
"the",
"Backtrack",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L395-L400 | train | 227,984 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Backtrack.to_gremlin | def to_gremlin(self):
"""Return a unicode object with the Gremlin representation of this BasicBlock."""
self.validate()
if self.optional:
operation = u'optional'
else:
operation = u'back'
mark_name, _ = self.location.get_location_name()
return u'{operation}({mark_name})'.format(
operation=operation,
mark_name=safe_quoted_string(mark_name)) | python | def to_gremlin(self):
"""Return a unicode object with the Gremlin representation of this BasicBlock."""
self.validate()
if self.optional:
operation = u'optional'
else:
operation = u'back'
mark_name, _ = self.location.get_location_name()
return u'{operation}({mark_name})'.format(
operation=operation,
mark_name=safe_quoted_string(mark_name)) | [
"def",
"to_gremlin",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"if",
"self",
".",
"optional",
":",
"operation",
"=",
"u'optional'",
"else",
":",
"operation",
"=",
"u'back'",
"mark_name",
",",
"_",
"=",
"self",
".",
"location",
".",
"get... | Return a unicode object with the Gremlin representation of this BasicBlock. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"Gremlin",
"representation",
"of",
"this",
"BasicBlock",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L402-L414 | train | 227,985 |
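A self-contained sketch of the string form produced by the record above, using a stand-in function instead of constructing real Backtrack and Location objects; the mark name 'animal___1' is made up, and in the real block it would come from location.get_location_name():

def sketch_backtrack_gremlin(mark_name, optional):
    # Mirrors Backtrack.to_gremlin(): optional backtracks emit optional(...), others emit back(...).
    operation = u'optional' if optional else u'back'
    return u"{operation}('{mark_name}')".format(operation=operation, mark_name=mark_name)

print(sketch_backtrack_gremlin(u'animal___1', optional=False))  # back('animal___1')
print(sketch_backtrack_gremlin(u'animal___1', optional=True))   # optional('animal___1')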
kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Fold.validate | def validate(self):
"""Ensure the Fold block is valid."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected a FoldScopeLocation for fold_scope_location, got: {} '
u'{}'.format(type(self.fold_scope_location), self.fold_scope_location)) | python | def validate(self):
"""Ensure the Fold block is valid."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected a FoldScopeLocation for fold_scope_location, got: {} '
u'{}'.format(type(self.fold_scope_location), self.fold_scope_location)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fold_scope_location",
",",
"FoldScopeLocation",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected a FoldScopeLocation for fold_scope_location, got: {} '",
"u'{}'",
".",
"format",
"(... | Ensure the Fold block is valid. | [
"Ensure",
"the",
"Fold",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L446-L450 | train | 227,986 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_ir | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR blocks into a form that can be represented by a SQL query.
Args:
ir_blocks: list of IR blocks to lower into SQL-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
tree representation of IR blocks for recursive traversal by SQL backend.
"""
_validate_all_blocks_supported(ir_blocks, query_metadata_table)
construct_result = _get_construct_result(ir_blocks)
query_path_to_location_info = _map_query_path_to_location_info(query_metadata_table)
query_path_to_output_fields = _map_query_path_to_outputs(
construct_result, query_path_to_location_info)
block_index_to_location = _map_block_index_to_location(ir_blocks)
# perform lowering steps
ir_blocks = lower_unary_transformations(ir_blocks)
ir_blocks = lower_unsupported_metafield_expressions(ir_blocks)
# iteratively construct SqlTree
query_path_to_node = {}
query_path_to_filters = {}
tree_root = None
for index, block in enumerate(ir_blocks):
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
location = block_index_to_location[index]
if isinstance(block, (blocks.QueryRoot,)):
query_path = location.query_path
if tree_root is not None:
raise AssertionError(
u'Encountered QueryRoot {} but tree root is already set to {} during '
u'construction of SQL query tree for IR blocks {} with query '
u'metadata table {}'.format(
block, tree_root, ir_blocks, query_metadata_table))
tree_root = SqlNode(block=block, query_path=query_path)
query_path_to_node[query_path] = tree_root
elif isinstance(block, blocks.Filter):
query_path_to_filters.setdefault(query_path, []).append(block)
else:
raise AssertionError(
u'Unsupported block {} unexpectedly passed validation for IR blocks '
u'{} with query metadata table {} .'.format(block, ir_blocks, query_metadata_table))
return SqlQueryTree(tree_root, query_path_to_location_info, query_path_to_output_fields,
query_path_to_filters, query_path_to_node) | python | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR blocks into a form that can be represented by a SQL query.
Args:
ir_blocks: list of IR blocks to lower into SQL-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
tree representation of IR blocks for recursive traversal by SQL backend.
"""
_validate_all_blocks_supported(ir_blocks, query_metadata_table)
construct_result = _get_construct_result(ir_blocks)
query_path_to_location_info = _map_query_path_to_location_info(query_metadata_table)
query_path_to_output_fields = _map_query_path_to_outputs(
construct_result, query_path_to_location_info)
block_index_to_location = _map_block_index_to_location(ir_blocks)
# perform lowering steps
ir_blocks = lower_unary_transformations(ir_blocks)
ir_blocks = lower_unsupported_metafield_expressions(ir_blocks)
# iteratively construct SqlTree
query_path_to_node = {}
query_path_to_filters = {}
tree_root = None
for index, block in enumerate(ir_blocks):
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
location = block_index_to_location[index]
if isinstance(block, (blocks.QueryRoot,)):
query_path = location.query_path
if tree_root is not None:
raise AssertionError(
u'Encountered QueryRoot {} but tree root is already set to {} during '
u'construction of SQL query tree for IR blocks {} with query '
u'metadata table {}'.format(
block, tree_root, ir_blocks, query_metadata_table))
tree_root = SqlNode(block=block, query_path=query_path)
query_path_to_node[query_path] = tree_root
elif isinstance(block, blocks.Filter):
query_path_to_filters.setdefault(query_path, []).append(block)
else:
raise AssertionError(
u'Unsupported block {} unexpectedly passed validation for IR blocks '
u'{} with query metadata table {} .'.format(block, ir_blocks, query_metadata_table))
return SqlQueryTree(tree_root, query_path_to_location_info, query_path_to_output_fields,
query_path_to_filters, query_path_to_node) | [
"def",
"lower_ir",
"(",
"ir_blocks",
",",
"query_metadata_table",
",",
"type_equivalence_hints",
"=",
"None",
")",
":",
"_validate_all_blocks_supported",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
"construct_result",
"=",
"_get_construct_result",
"(",
"ir_blocks"... | Lower the IR blocks into a form that can be represented by a SQL query.
Args:
ir_blocks: list of IR blocks to lower into SQL-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
tree representation of IR blocks for recursive traversal by SQL backend. | [
"Lower",
"the",
"IR",
"blocks",
"into",
"a",
"form",
"that",
"can",
"be",
"represented",
"by",
"a",
"SQL",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L17-L81 | train | 227,987 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _validate_all_blocks_supported | def _validate_all_blocks_supported(ir_blocks, query_metadata_table):
"""Validate that all IR blocks and ConstructResult fields passed to the backend are supported.
Args:
ir_blocks: List[BasicBlock], IR blocks to validate.
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Raises:
NotImplementedError, if any block or ConstructResult field is unsupported.
"""
if len(ir_blocks) < 3:
raise AssertionError(
u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '
u'query is expected to have at least a QueryRoot, GlobalOperationsStart, and '
u'ConstructResult block. The query metadata table is {}.'.format(query_metadata_table))
construct_result = _get_construct_result(ir_blocks)
unsupported_blocks = []
unsupported_fields = []
for block in ir_blocks[:-1]:
if isinstance(block, constants.SUPPORTED_BLOCK_TYPES):
continue
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
unsupported_blocks.append(block)
for field_name, field in six.iteritems(construct_result.fields):
if not isinstance(field, constants.SUPPORTED_OUTPUT_EXPRESSION_TYPES):
unsupported_fields.append((field_name, field))
elif field.location.field in constants.UNSUPPORTED_META_FIELDS:
unsupported_fields.append((field_name, field))
if len(unsupported_blocks) > 0 or len(unsupported_fields) > 0:
raise NotImplementedError(
u'Encountered unsupported blocks {} and unsupported fields {} during construction of '
u'SQL query tree for IR blocks {} with query metadata table {}.'.format(
unsupported_blocks, unsupported_fields, ir_blocks, query_metadata_table)) | python | def _validate_all_blocks_supported(ir_blocks, query_metadata_table):
"""Validate that all IR blocks and ConstructResult fields passed to the backend are supported.
Args:
ir_blocks: List[BasicBlock], IR blocks to validate.
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Raises:
NotImplementedError, if any block or ConstructResult field is unsupported.
"""
if len(ir_blocks) < 3:
raise AssertionError(
u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '
u'query is expected to have at least a QueryRoot, GlobalOperationsStart, and '
u'ConstructResult block. The query metadata table is {}.'.format(query_metadata_table))
construct_result = _get_construct_result(ir_blocks)
unsupported_blocks = []
unsupported_fields = []
for block in ir_blocks[:-1]:
if isinstance(block, constants.SUPPORTED_BLOCK_TYPES):
continue
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
unsupported_blocks.append(block)
for field_name, field in six.iteritems(construct_result.fields):
if not isinstance(field, constants.SUPPORTED_OUTPUT_EXPRESSION_TYPES):
unsupported_fields.append((field_name, field))
elif field.location.field in constants.UNSUPPORTED_META_FIELDS:
unsupported_fields.append((field_name, field))
if len(unsupported_blocks) > 0 or len(unsupported_fields) > 0:
raise NotImplementedError(
u'Encountered unsupported blocks {} and unsupported fields {} during construction of '
u'SQL query tree for IR blocks {} with query metadata table {}.'.format(
unsupported_blocks, unsupported_fields, ir_blocks, query_metadata_table)) | [
"def",
"_validate_all_blocks_supported",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"if",
"len",
"(",
"ir_blocks",
")",
"<",
"3",
":",
"raise",
"AssertionError",
"(",
"u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '",
"u'que... | Validate that all IR blocks and ConstructResult fields passed to the backend are supported.
Args:
ir_blocks: List[BasicBlock], IR blocks to validate.
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Raises:
NotImplementedError, if any block or ConstructResult field is unsupported. | [
"Validate",
"that",
"all",
"IR",
"blocks",
"and",
"ConstructResult",
"fields",
"passed",
"to",
"the",
"backend",
"are",
"supported",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L84-L121 | train | 227,988 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _get_construct_result | def _get_construct_result(ir_blocks):
"""Return the ConstructResult block from a list of IR blocks."""
last_block = ir_blocks[-1]
if not isinstance(last_block, blocks.ConstructResult):
raise AssertionError(
u'The last IR block {} for IR blocks {} was unexpectedly not '
u'a ConstructResult block.'.format(last_block, ir_blocks))
return last_block | python | def _get_construct_result(ir_blocks):
"""Return the ConstructResult block from a list of IR blocks."""
last_block = ir_blocks[-1]
if not isinstance(last_block, blocks.ConstructResult):
raise AssertionError(
u'The last IR block {} for IR blocks {} was unexpectedly not '
u'a ConstructResult block.'.format(last_block, ir_blocks))
return last_block | [
"def",
"_get_construct_result",
"(",
"ir_blocks",
")",
":",
"last_block",
"=",
"ir_blocks",
"[",
"-",
"1",
"]",
"if",
"not",
"isinstance",
"(",
"last_block",
",",
"blocks",
".",
"ConstructResult",
")",
":",
"raise",
"AssertionError",
"(",
"u'The last IR block {}... | Return the ConstructResult block from a list of IR blocks. | [
"Return",
"the",
"ConstructResult",
"block",
"from",
"a",
"list",
"of",
"IR",
"blocks",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L124-L131 | train | 227,989 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_query_path_to_location_info | def _map_query_path_to_location_info(query_metadata_table):
"""Create a map from each query path to a LocationInfo at that path.
Args:
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Returns:
Dict[Tuple[str], LocationInfo], dictionary mapping query path to LocationInfo at that path.
"""
query_path_to_location_info = {}
for location, location_info in query_metadata_table.registered_locations:
if not isinstance(location, Location):
continue
if location.query_path in query_path_to_location_info:
# make sure the stored location information equals the new location information
# for the fields the SQL backend requires.
equivalent_location_info = query_path_to_location_info[location.query_path]
if not _location_infos_equal(location_info, equivalent_location_info):
raise AssertionError(
u'Differing LocationInfos at query_path {} between {} and {}. Expected '
u'parent_location.query_path, optional_scopes_depth, recursive_scopes_depth '
u'and types to be equal for LocationInfos sharing the same query path.'.format(
location.query_path, location_info, equivalent_location_info))
query_path_to_location_info[location.query_path] = location_info
return query_path_to_location_info | python | def _map_query_path_to_location_info(query_metadata_table):
"""Create a map from each query path to a LocationInfo at that path.
Args:
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Returns:
Dict[Tuple[str], LocationInfo], dictionary mapping query path to LocationInfo at that path.
"""
query_path_to_location_info = {}
for location, location_info in query_metadata_table.registered_locations:
if not isinstance(location, Location):
continue
if location.query_path in query_path_to_location_info:
# make sure the stored location information equals the new location information
# for the fields the SQL backend requires.
equivalent_location_info = query_path_to_location_info[location.query_path]
if not _location_infos_equal(location_info, equivalent_location_info):
raise AssertionError(
u'Differing LocationInfos at query_path {} between {} and {}. Expected '
u'parent_location.query_path, optional_scopes_depth, recursive_scopes_depth '
u'and types to be equal for LocationInfos sharing the same query path.'.format(
location.query_path, location_info, equivalent_location_info))
query_path_to_location_info[location.query_path] = location_info
return query_path_to_location_info | [
"def",
"_map_query_path_to_location_info",
"(",
"query_metadata_table",
")",
":",
"query_path_to_location_info",
"=",
"{",
"}",
"for",
"location",
",",
"location_info",
"in",
"query_metadata_table",
".",
"registered_locations",
":",
"if",
"not",
"isinstance",
"(",
"loca... | Create a map from each query path to a LocationInfo at that path.
Args:
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Returns:
Dict[Tuple[str], LocationInfo], dictionary mapping query path to LocationInfo at that path. | [
"Create",
"a",
"map",
"from",
"each",
"query",
"path",
"to",
"a",
"LocationInfo",
"at",
"that",
"path",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L134-L161 | train | 227,990 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _location_infos_equal | def _location_infos_equal(left, right):
"""Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise.
LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth,
recursive scopes depth, types and parent query paths are equal.
Args:
left: LocationInfo, left location info object to compare.
right: LocationInfo, right location info object to compare.
Returns:
bool, True if LocationInfo objects equivalent, False otherwise.
"""
if not isinstance(left, LocationInfo) or not isinstance(right, LocationInfo):
raise AssertionError(
u'Unsupported LocationInfo comparison between types {} and {} '
u'with values {}, {}'.format(type(left), type(right), left, right))
optional_scopes_depth_equal = (left.optional_scopes_depth == right.optional_scopes_depth)
parent_query_paths_equal = (
(left.parent_location is None and right.parent_location is None) or
(left.parent_location.query_path == right.parent_location.query_path))
recursive_scopes_depths_equal = (left.recursive_scopes_depth == right.recursive_scopes_depth)
types_equal = left.type == right.type
return all([
optional_scopes_depth_equal,
parent_query_paths_equal,
recursive_scopes_depths_equal,
types_equal,
]) | python | def _location_infos_equal(left, right):
"""Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise.
LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth,
recursive scopes depth, types and parent query paths are equal.
Args:
left: LocationInfo, left location info object to compare.
right: LocationInfo, right location info object to compare.
Returns:
bool, True if LocationInfo objects equivalent, False otherwise.
"""
if not isinstance(left, LocationInfo) or not isinstance(right, LocationInfo):
raise AssertionError(
u'Unsupported LocationInfo comparison between types {} and {} '
u'with values {}, {}'.format(type(left), type(right), left, right))
optional_scopes_depth_equal = (left.optional_scopes_depth == right.optional_scopes_depth)
parent_query_paths_equal = (
(left.parent_location is None and right.parent_location is None) or
(left.parent_location.query_path == right.parent_location.query_path))
recursive_scopes_depths_equal = (left.recursive_scopes_depth == right.recursive_scopes_depth)
types_equal = left.type == right.type
return all([
optional_scopes_depth_equal,
parent_query_paths_equal,
recursive_scopes_depths_equal,
types_equal,
]) | [
"def",
"_location_infos_equal",
"(",
"left",
",",
"right",
")",
":",
"if",
"not",
"isinstance",
"(",
"left",
",",
"LocationInfo",
")",
"or",
"not",
"isinstance",
"(",
"right",
",",
"LocationInfo",
")",
":",
"raise",
"AssertionError",
"(",
"u'Unsupported Locati... | Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise.
LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth,
recursive scopes depth, types and parent query paths are equal.
Args:
left: LocationInfo, left location info object to compare.
right: LocationInfo, right location info object to compare.
Returns:
bool, True if LocationInfo objects equivalent, False otherwise. | [
"Return",
"True",
"if",
"LocationInfo",
"objects",
"are",
"equivalent",
"for",
"the",
"SQL",
"backend",
"False",
"otherwise",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L164-L196 | train | 227,991 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_query_path_to_outputs | def _map_query_path_to_outputs(construct_result, query_path_to_location_info):
"""Assign the output fields of a ConstructResult block to their respective query_path."""
query_path_to_output_fields = {}
for output_name, field in six.iteritems(construct_result.fields):
field_name = field.location.field
output_query_path = field.location.query_path
output_field_info = constants.SqlOutput(
field_name=field_name,
output_name=output_name,
graphql_type=query_path_to_location_info[output_query_path].type)
output_field_mapping = query_path_to_output_fields.setdefault(output_query_path, [])
output_field_mapping.append(output_field_info)
return query_path_to_output_fields | python | def _map_query_path_to_outputs(construct_result, query_path_to_location_info):
"""Assign the output fields of a ConstructResult block to their respective query_path."""
query_path_to_output_fields = {}
for output_name, field in six.iteritems(construct_result.fields):
field_name = field.location.field
output_query_path = field.location.query_path
output_field_info = constants.SqlOutput(
field_name=field_name,
output_name=output_name,
graphql_type=query_path_to_location_info[output_query_path].type)
output_field_mapping = query_path_to_output_fields.setdefault(output_query_path, [])
output_field_mapping.append(output_field_info)
return query_path_to_output_fields | [
"def",
"_map_query_path_to_outputs",
"(",
"construct_result",
",",
"query_path_to_location_info",
")",
":",
"query_path_to_output_fields",
"=",
"{",
"}",
"for",
"output_name",
",",
"field",
"in",
"six",
".",
"iteritems",
"(",
"construct_result",
".",
"fields",
")",
... | Assign the output fields of a ConstructResult block to their respective query_path. | [
"Assign",
"the",
"output",
"fields",
"of",
"a",
"ConstructResult",
"block",
"to",
"their",
"respective",
"query_path",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L199-L211 | train | 227,992 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_block_index_to_location | def _map_block_index_to_location(ir_blocks):
"""Associate each IR block with its corresponding location, by index."""
block_index_to_location = {}
# MarkLocation blocks occur after the blocks related to that location.
# The core approach here is to buffer blocks until their MarkLocation is encountered
# after which all buffered blocks can be associated with the encountered MarkLocation.location.
current_block_ixs = []
for num, ir_block in enumerate(ir_blocks):
if isinstance(ir_block, blocks.GlobalOperationsStart):
if len(current_block_ixs) > 0:
unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs]
raise AssertionError(
u'Unexpectedly encountered global operations before mapping blocks '
u'{} to their respective locations.'.format(unassociated_blocks))
break
current_block_ixs.append(num)
if isinstance(ir_block, blocks.MarkLocation):
for ix in current_block_ixs:
block_index_to_location[ix] = ir_block.location
current_block_ixs = []
return block_index_to_location | python | def _map_block_index_to_location(ir_blocks):
"""Associate each IR block with its corresponding location, by index."""
block_index_to_location = {}
# MarkLocation blocks occur after the blocks related to that location.
# The core approach here is to buffer blocks until their MarkLocation is encountered
# after which all buffered blocks can be associated with the encountered MarkLocation.location.
current_block_ixs = []
for num, ir_block in enumerate(ir_blocks):
if isinstance(ir_block, blocks.GlobalOperationsStart):
if len(current_block_ixs) > 0:
unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs]
raise AssertionError(
u'Unexpectedly encountered global operations before mapping blocks '
u'{} to their respective locations.'.format(unassociated_blocks))
break
current_block_ixs.append(num)
if isinstance(ir_block, blocks.MarkLocation):
for ix in current_block_ixs:
block_index_to_location[ix] = ir_block.location
current_block_ixs = []
return block_index_to_location | [
"def",
"_map_block_index_to_location",
"(",
"ir_blocks",
")",
":",
"block_index_to_location",
"=",
"{",
"}",
"# MarkLocation blocks occur after the blocks related to that location.",
"# The core approach here is to buffer blocks until their MarkLocation is encountered",
"# after which all bu... | Associate each IR block with its corresponding location, by index. | [
"Associate",
"each",
"IR",
"block",
"with",
"its",
"corresponding",
"location",
"by",
"index",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L214-L234 | train | 227,993 |
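The buffer-until-marker pattern in isolation, with plain strings standing in for IR blocks (the real code dispatches on block types rather than string prefixes):

# Buffer indices until a marker block appears, then bind every buffered
# index to that marker's location, mirroring the MarkLocation handling above.
stream = ['Filter', 'Traverse', 'Mark:a', 'Backtrack', 'Mark:b', 'GlobalOperationsStart']
block_index_to_location = {}
buffered_indices = []
for ix, block in enumerate(stream):
    if block == 'GlobalOperationsStart':
        break
    buffered_indices.append(ix)
    if block.startswith('Mark:'):
        location = block.split(':', 1)[1]
        for buffered_ix in buffered_indices:
            block_index_to_location[buffered_ix] = location
        buffered_indices = []

assert block_index_to_location == {0: 'a', 1: 'a', 2: 'a', 3: 'b', 4: 'b'}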
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_unary_transformations | def lower_unary_transformations(ir_blocks):
"""Raise exception if any unary transformation block encountered."""
def visitor_fn(expression):
"""Raise error if current expression is a UnaryTransformation."""
if not isinstance(expression, expressions.UnaryTransformation):
return expression
raise NotImplementedError(
u'UnaryTransformation expression "{}" encountered with IR blocks {} is unsupported by '
u'the SQL backend.'.format(expression, ir_blocks)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | python | def lower_unary_transformations(ir_blocks):
"""Raise exception if any unary transformation block encountered."""
def visitor_fn(expression):
"""Raise error if current expression is a UnaryTransformation."""
if not isinstance(expression, expressions.UnaryTransformation):
return expression
raise NotImplementedError(
u'UnaryTransformation expression "{}" encountered with IR blocks {} is unsupported by '
u'the SQL backend.'.format(expression, ir_blocks)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | [
"def",
"lower_unary_transformations",
"(",
"ir_blocks",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Raise error if current expression is a UnaryTransformation.\"\"\"",
"if",
"not",
"isinstance",
"(",
"expression",
",",
"expressions",
".",
"UnaryTransf... | Raise exception if any unary transformation block encountered. | [
"Raise",
"exception",
"if",
"any",
"unary",
"transformation",
"block",
"encountered",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L237-L252 | train | 227,994 |
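The visit-and-reject idiom stripped to its core, with hypothetical stand-in classes in place of the compiler's expression types:

# A visitor either returns a supported expression unchanged or raises
# eagerly on an unsupported one, exactly like visitor_fn above.
class UnaryTransformation(object):
    """Stand-in for expressions.UnaryTransformation."""

class LocalField(object):
    """Stand-in for any supported expression type."""

def visitor_fn(expression):
    if not isinstance(expression, UnaryTransformation):
        return expression
    raise NotImplementedError(u'UnaryTransformation is unsupported by this backend.')

assert isinstance(visitor_fn(LocalField()), LocalField)
try:
    visitor_fn(UnaryTransformation())
except NotImplementedError:
    pass  # expected: the unsupported expression is rejected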
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_unsupported_metafield_expressions | def lower_unsupported_metafield_expressions(ir_blocks):
"""Raise exception if an unsupported metafield is encountered in any LocalField expression."""
def visitor_fn(expression):
"""Visitor function raising exception for any unsupported metafield."""
if not isinstance(expression, expressions.LocalField):
return expression
if expression.field_name not in constants.UNSUPPORTED_META_FIELDS:
return expression
raise NotImplementedError(
u'Encountered unsupported metafield {} in LocalField {} during construction of '
u'SQL query tree for IR blocks {}.'.format(
constants.UNSUPPORTED_META_FIELDS[expression.field_name], expression, ir_blocks))
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | python | def lower_unsupported_metafield_expressions(ir_blocks):
"""Raise exception if an unsupported metafield is encountered in any LocalField expression."""
def visitor_fn(expression):
"""Visitor function raising exception for any unsupported metafield."""
if not isinstance(expression, expressions.LocalField):
return expression
if expression.field_name not in constants.UNSUPPORTED_META_FIELDS:
return expression
raise NotImplementedError(
u'Encountered unsupported metafield {} in LocalField {} during construction of '
u'SQL query tree for IR blocks {}.'.format(
constants.UNSUPPORTED_META_FIELDS[expression.field_name], expression, ir_blocks))
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | [
"def",
"lower_unsupported_metafield_expressions",
"(",
"ir_blocks",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Visitor function raising exception for any unsupported metafield.\"\"\"",
"if",
"not",
"isinstance",
"(",
"expression",
",",
"expressions",
"... | Raise exception if an unsupported metafield is encountered in any LocalField expression. | [
"Raise",
"exception",
"if",
"an",
"unsupported",
"metafield",
"is",
"encountered",
"in",
"any",
"LocalField",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L255-L272 | train | 227,995 |
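The same visitor shape with a deny-list twist; the metafield name and description below are hypothetical, since the real mapping lives in constants.UNSUPPORTED_META_FIELDS:

# Two cheap early returns guard the raise: only a field whose name is on
# the deny list falls through to the error.
UNSUPPORTED_META_FIELDS = {'@class': 'type introspection metafield'}  # hypothetical contents

def check_metafield(field_name):
    if field_name not in UNSUPPORTED_META_FIELDS:
        return field_name
    raise NotImplementedError(
        u'Encountered unsupported metafield {} ({}).'.format(
            field_name, UNSUPPORTED_META_FIELDS[field_name]))

assert check_metafield('name') == 'name'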
kensho-technologies/graphql-compiler | graphql_compiler/__init__.py | get_graphql_schema_from_orientdb_schema_data | def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None,
hidden_classes=None):
"""Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data.
Each dict has the following string fields:
- name: string, the name of the class.
- superClasses (optional): list of strings, the name of the class's
superclasses.
- superClass (optional): string, the name of the class's superclass. May be
used instead of superClasses if there is only one
superClass. Used for backwards compatibility with
OrientDB.
- customFields (optional): dict, string -> string, data defined on the class
instead of instances of the class.
- abstract: bool, true if the class is abstract.
- properties: list of dicts, describing the class's properties.
Each property dictionary has the following string fields:
- name: string, the name of the property.
- type: int, builtin OrientDB type ID of the property.
See schema_properties.py for the mapping.
- linkedType (optional): int, if the property is a
collection of builtin OrientDB
objects, then it indicates their
type ID.
- linkedClass (optional): string, if the property is a
collection of class instances,
then it indicates the name of
the class. If class is an edge
class, and the field name is
either 'in' or 'out', then it
describes the name of an
endpoint of the edge.
- defaultValue: string, the textual representation of the
default value for the property, as
returned by OrientDB's schema
introspection code, e.g., '{}' for
the embedded set type. Note that if the
property is a collection type, it must
have a default value.
class_to_field_type_overrides: optional dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: optional set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
"""
if class_to_field_type_overrides is None:
class_to_field_type_overrides = dict()
if hidden_classes is None:
hidden_classes = set()
schema_graph = SchemaGraph(schema_data)
return get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
hidden_classes) | python | def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None,
hidden_classes=None):
"""Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data.
Each dict has the following string fields:
- name: string, the name of the class.
- superClasses (optional): list of strings, the name of the class's
superclasses.
- superClass (optional): string, the name of the class's superclass. May be
used instead of superClasses if there is only one
superClass. Used for backwards compatibility with
OrientDB.
- customFields (optional): dict, string -> string, data defined on the class
instead of instances of the class.
- abstract: bool, true if the class is abstract.
- properties: list of dicts, describing the class's properties.
Each property dictionary has the following string fields:
- name: string, the name of the property.
- type: int, builtin OrientDB type ID of the property.
See schema_properties.py for the mapping.
- linkedType (optional): int, if the property is a
collection of builtin OrientDB
objects, then it indicates their
type ID.
- linkedClass (optional): string, if the property is a
collection of class instances,
then it indicates the name of
the class. If class is an edge
class, and the field name is
either 'in' or 'out', then it
describes the name of an
endpoint of the edge.
- defaultValue: string, the textual representation of the
default value for the property, as
returned by OrientDB's schema
introspection code, e.g., '{}' for
the embedded set type. Note that if the
property is a collection type, it must
have a default value.
class_to_field_type_overrides: optional dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: optional set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
"""
if class_to_field_type_overrides is None:
class_to_field_type_overrides = dict()
if hidden_classes is None:
hidden_classes = set()
schema_graph = SchemaGraph(schema_data)
return get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
hidden_classes) | [
"def",
"get_graphql_schema_from_orientdb_schema_data",
"(",
"schema_data",
",",
"class_to_field_type_overrides",
"=",
"None",
",",
"hidden_classes",
"=",
"None",
")",
":",
"if",
"class_to_field_type_overrides",
"is",
"None",
":",
"class_to_field_type_overrides",
"=",
"dict"... | Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data.
Each dict has the following string fields:
- name: string, the name of the class.
- superClasses (optional): list of strings, the name of the class's
superclasses.
- superClass (optional): string, the name of the class's superclass. May be
used instead of superClasses if there is only one
superClass. Used for backwards compatibility with
OrientDB.
- customFields (optional): dict, string -> string, data defined on the class
instead of instances of the class.
- abstract: bool, true if the class is abstract.
- properties: list of dicts, describing the class's properties.
Each property dictionary has the following string fields:
- name: string, the name of the property.
- type: int, builtin OrientDB type ID of the property.
See schema_properties.py for the mapping.
- linkedType (optional): int, if the property is a
collection of builtin OrientDB
objects, then it indicates their
type ID.
- linkedClass (optional): string, if the property is a
collection of class instances,
then it indicates the name of
the class. If class is an edge
class, and the field name is
either 'in' or 'out', then it
describes the name of an
endpoint of the edge.
- defaultValue: string, the textual representation of the
default value for the property, as
returned by OrientDB's schema
introspection code, e.g., '{}' for
the embedded set type. Note that if the
property is a collection type, it must
have a default value.
class_to_field_type_overrides: optional dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: optional set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}). | [
"Construct",
"a",
"GraphQL",
"schema",
"from",
"an",
"OrientDB",
"schema",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/__init__.py#L139-L199 | train | 227,996 |
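A minimal schema_data payload in the documented shape; the class and property are illustrative, 7 is OrientDB's numeric ID for the string type, and a real schema would likely also need the built-in base classes (e.g. V and E), so the call is left as a commented sketch:

# One concrete vertex class with a single string property, using exactly
# the field names the docstring above spells out.
schema_data = [
    {
        'name': 'Animal',
        'abstract': False,
        'properties': [
            {'name': 'name', 'type': 7},  # 7 == OrientDB STRING
        ],
    },
]
# schema, type_equivalence_hints = get_graphql_schema_from_orientdb_schema_data(schema_data)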
slackapi/python-slack-events-api | slackeventsapi/__init__.py | SlackEventAdapter.start | def start(self, host='127.0.0.1', port=None, debug=False, **kwargs):
"""
Start the built in webserver, bound to the host and port you'd like.
Default host is `127.0.0.1` and port 8080.
    :param host: The host you want to bind the built in webserver to

:param port: The port number you want the webserver to run on
:param debug: Set to `True` to enable debug level logging
:param kwargs: Additional arguments you'd like to pass to Flask
"""
self.server.run(host=host, port=port, debug=debug, **kwargs) | python | def start(self, host='127.0.0.1', port=None, debug=False, **kwargs):
"""
Start the built in webserver, bound to the host and port you'd like.
Default host is `127.0.0.1` and port 8080.
    :param host: The host you want to bind the built in webserver to
:param port: The port number you want the webserver to run on
:param debug: Set to `True` to enable debug level logging
:param kwargs: Additional arguments you'd like to pass to Flask
"""
self.server.run(host=host, port=port, debug=debug, **kwargs) | [
"def",
"start",
"(",
"self",
",",
"host",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"None",
",",
"debug",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"server",
".",
"run",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"... | Start the built in webserver, bound to the host and port you'd like.
Default host is `127.0.0.1` and port 8080.
    :param host: The host you want to bind the built in webserver to
:param port: The port number you want the webserver to run on
:param debug: Set to `True` to enable debug level logging
:param kwargs: Additional arguments you'd like to pass to Flask | [
"Start",
"the",
"built",
"in",
"webserver",
"bound",
"to",
"the",
"host",
"and",
"port",
"you",
"d",
"like",
".",
"Default",
"host",
"is",
"127",
".",
"0",
".",
"0",
".",
"1",
"and",
"port",
"8080",
"."
] | 1254d83181eb939f124a0e4746dafea7e14047c1 | https://github.com/slackapi/python-slack-events-api/blob/1254d83181eb939f124a0e4746dafea7e14047c1/slackeventsapi/__init__.py#L13-L23 | train | 227,997 |
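A usage sketch assuming the constructor and decorator shown in the library's README; the signing secret, endpoint, and port are placeholders:

from slackeventsapi import SlackEventAdapter

# Placeholder signing secret and mount path -- substitute real values.
slack_events_adapter = SlackEventAdapter('my-signing-secret', '/slack/events')

@slack_events_adapter.on('message')
def handle_message(event_data):
    # Echo incoming message text to stdout.
    print(event_data['event'].get('text'))

# Bind the bundled Flask server to all interfaces on port 3000.
slack_events_adapter.start(host='0.0.0.0', port=3000, debug=True)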
apragacz/django-rest-registration | rest_registration/api/views/login.py | login | def login(request):
'''
Logs in the user via given login and password.
'''
serializer_class = registration_settings.LOGIN_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.get_authenticated_user()
if not user:
raise BadRequest('Login or password invalid.')
extra_data = perform_login(request, user)
return get_ok_response('Login successful', extra_data=extra_data) | python | def login(request):
'''
Logs in the user via given login and password.
'''
serializer_class = registration_settings.LOGIN_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.get_authenticated_user()
if not user:
raise BadRequest('Login or password invalid.')
extra_data = perform_login(request, user)
return get_ok_response('Login successful', extra_data=extra_data) | [
"def",
"login",
"(",
"request",
")",
":",
"serializer_class",
"=",
"registration_settings",
".",
"LOGIN_SERIALIZER_CLASS",
"serializer",
"=",
"serializer_class",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
... | Logs in the user via given login and password. | [
"Logs",
"in",
"the",
"user",
"via",
"given",
"login",
"and",
"password",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/login.py#L25-L39 | train | 227,998 |
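A hypothetical client-side call; the URL prefix depends entirely on how the hosting project mounts this view in its urlconf:

import requests

# POST credentials; on success the body carries 'Login successful' plus any
# extra data (e.g. a token) produced by perform_login.
response = requests.post(
    'https://example.com/api/accounts/login/',  # illustrative mount point
    json={'login': 'alice', 'password': 'correct horse battery staple'},
)
print(response.status_code, response.json())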
apragacz/django-rest-registration | rest_registration/api/views/login.py | logout | def logout(request):
'''
    Logs out the user. Returns an error if the user is not
authenticated.
'''
user = request.user
serializer = LogoutSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
if should_authenticate_session():
auth.logout(request)
if should_retrieve_token() and data['revoke_token']:
try:
user.auth_token.delete()
except Token.DoesNotExist:
raise BadRequest('Cannot remove non-existent token')
return get_ok_response('Logout successful') | python | def logout(request):
'''
    Logs out the user. Returns an error if the user is not
authenticated.
'''
user = request.user
serializer = LogoutSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
if should_authenticate_session():
auth.logout(request)
if should_retrieve_token() and data['revoke_token']:
try:
user.auth_token.delete()
except Token.DoesNotExist:
raise BadRequest('Cannot remove non-existent token')
return get_ok_response('Logout successful') | [
"def",
"logout",
"(",
"request",
")",
":",
"user",
"=",
"request",
".",
"user",
"serializer",
"=",
"LogoutSerializer",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"data",
"=",
"s... | Logs out the user. returns an error if the user is not
authenticated. | [
"Logs",
"out",
"the",
"user",
".",
"returns",
"an",
"error",
"if",
"the",
"user",
"is",
"not",
"authenticated",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/login.py#L49-L67 | train | 227,999 |
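And the matching logout call; the endpoint path is again illustrative, the token value is a placeholder, and revoke_token mirrors the LogoutSerializer field the view reads:

import requests

# Authenticate with a DRF token and ask the server to revoke it as well.
response = requests.post(
    'https://example.com/api/accounts/logout/',  # illustrative mount point
    json={'revoke_token': True},
    headers={'Authorization': 'Token 0123456789abcdef'},  # placeholder token
)
print(response.status_code, response.json())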