repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
fastavro/fastavro | fastavro/_validation_py.py | validate_int | def validate_int(datum, **kwargs):
"""
Check that the data value is a non floating
point number with size less that Int32.
Also support for logicalType timestamp validation with datetime.
Int32 = -2147483648<=datum<=2147483647
conditional python types
(int, long, numbers.Integral,
datetime.time, datetime.datetime, datetime.date)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs
"""
return (
(isinstance(datum, (int, long, numbers.Integral))
and INT_MIN_VALUE <= datum <= INT_MAX_VALUE
and not isinstance(datum, bool))
or isinstance(
datum, (datetime.time, datetime.datetime, datetime.date)
)
) | python | def validate_int(datum, **kwargs):
"""
Check that the data value is a non floating
point number with size less that Int32.
Also support for logicalType timestamp validation with datetime.
Int32 = -2147483648<=datum<=2147483647
conditional python types
(int, long, numbers.Integral,
datetime.time, datetime.datetime, datetime.date)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs
"""
return (
(isinstance(datum, (int, long, numbers.Integral))
and INT_MIN_VALUE <= datum <= INT_MAX_VALUE
and not isinstance(datum, bool))
or isinstance(
datum, (datetime.time, datetime.datetime, datetime.date)
)
) | [
"def",
"validate_int",
"(",
"datum",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"(",
"isinstance",
"(",
"datum",
",",
"(",
"int",
",",
"long",
",",
"numbers",
".",
"Integral",
")",
")",
"and",
"INT_MIN_VALUE",
"<=",
"datum",
"<=",
"INT_MAX_VALUE... | Check that the data value is a non floating
point number with size less that Int32.
Also support for logicalType timestamp validation with datetime.
Int32 = -2147483648<=datum<=2147483647
conditional python types
(int, long, numbers.Integral,
datetime.time, datetime.datetime, datetime.date)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs | [
"Check",
"that",
"the",
"data",
"value",
"is",
"a",
"non",
"floating",
"point",
"number",
"with",
"size",
"less",
"that",
"Int32",
".",
"Also",
"support",
"for",
"logicalType",
"timestamp",
"validation",
"with",
"datetime",
"."
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_validation_py.py#L79-L105 | train | 227,700 |
fastavro/fastavro | fastavro/_validation_py.py | validate_float | def validate_float(datum, **kwargs):
"""
Check that the data value is a floating
point number or double precision.
conditional python types
(int, long, float, numbers.Real)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs
"""
return (
isinstance(datum, (int, long, float, numbers.Real))
and not isinstance(datum, bool)
) | python | def validate_float(datum, **kwargs):
"""
Check that the data value is a floating
point number or double precision.
conditional python types
(int, long, float, numbers.Real)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs
"""
return (
isinstance(datum, (int, long, float, numbers.Real))
and not isinstance(datum, bool)
) | [
"def",
"validate_float",
"(",
"datum",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"isinstance",
"(",
"datum",
",",
"(",
"int",
",",
"long",
",",
"float",
",",
"numbers",
".",
"Real",
")",
")",
"and",
"not",
"isinstance",
"(",
"datum",
",",
"... | Check that the data value is a floating
point number or double precision.
conditional python types
(int, long, float, numbers.Real)
Parameters
----------
datum: Any
Data being validated
kwargs: Any
Unused kwargs | [
"Check",
"that",
"the",
"data",
"value",
"is",
"a",
"floating",
"point",
"number",
"or",
"double",
"precision",
"."
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_validation_py.py#L137-L155 | train | 227,701 |
fastavro/fastavro | fastavro/_validation_py.py | validate_record | def validate_record(datum, schema, parent_ns=None, raise_errors=True):
"""
Check that the data is a Mapping type with all schema defined fields
validated as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data
"""
_, namespace = schema_name(schema, parent_ns)
return (
isinstance(datum, Mapping) and
all(validate(datum=datum.get(f['name'], f.get('default', no_value)),
schema=f['type'],
field='{}.{}'.format(namespace, f['name']),
raise_errors=raise_errors)
for f in schema['fields']
)
) | python | def validate_record(datum, schema, parent_ns=None, raise_errors=True):
"""
Check that the data is a Mapping type with all schema defined fields
validated as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data
"""
_, namespace = schema_name(schema, parent_ns)
return (
isinstance(datum, Mapping) and
all(validate(datum=datum.get(f['name'], f.get('default', no_value)),
schema=f['type'],
field='{}.{}'.format(namespace, f['name']),
raise_errors=raise_errors)
for f in schema['fields']
)
) | [
"def",
"validate_record",
"(",
"datum",
",",
"schema",
",",
"parent_ns",
"=",
"None",
",",
"raise_errors",
"=",
"True",
")",
":",
"_",
",",
"namespace",
"=",
"schema_name",
"(",
"schema",
",",
"parent_ns",
")",
"return",
"(",
"isinstance",
"(",
"datum",
... | Check that the data is a Mapping type with all schema defined fields
validated as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data | [
"Check",
"that",
"the",
"data",
"is",
"a",
"Mapping",
"type",
"with",
"all",
"schema",
"defined",
"fields",
"validated",
"as",
"True",
"."
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_validation_py.py#L245-L270 | train | 227,702 |
fastavro/fastavro | fastavro/_validation_py.py | validate_union | def validate_union(datum, schema, parent_ns=None, raise_errors=True):
"""
Check that the data is a list type with possible options to
validate as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data
"""
if isinstance(datum, tuple):
(name, datum) = datum
for candidate in schema:
if extract_record_type(candidate) == 'record':
if name == candidate["name"]:
return validate(datum, schema=candidate,
field=parent_ns,
raise_errors=raise_errors)
else:
return False
errors = []
for s in schema:
try:
ret = validate(datum, schema=s,
field=parent_ns,
raise_errors=raise_errors)
if ret:
# We exit on the first passing type in Unions
return True
except ValidationError as e:
errors.extend(e.errors)
if raise_errors:
raise ValidationError(*errors)
return False | python | def validate_union(datum, schema, parent_ns=None, raise_errors=True):
"""
Check that the data is a list type with possible options to
validate as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data
"""
if isinstance(datum, tuple):
(name, datum) = datum
for candidate in schema:
if extract_record_type(candidate) == 'record':
if name == candidate["name"]:
return validate(datum, schema=candidate,
field=parent_ns,
raise_errors=raise_errors)
else:
return False
errors = []
for s in schema:
try:
ret = validate(datum, schema=s,
field=parent_ns,
raise_errors=raise_errors)
if ret:
# We exit on the first passing type in Unions
return True
except ValidationError as e:
errors.extend(e.errors)
if raise_errors:
raise ValidationError(*errors)
return False | [
"def",
"validate_union",
"(",
"datum",
",",
"schema",
",",
"parent_ns",
"=",
"None",
",",
"raise_errors",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"datum",
",",
"tuple",
")",
":",
"(",
"name",
",",
"datum",
")",
"=",
"datum",
"for",
"candidate",... | Check that the data is a list type with possible options to
validate as True.
Parameters
----------
datum: Any
Data being validated
schema: dict
Schema
parent_ns: str
parent namespace
raise_errors: bool
If true, raises ValidationError on invalid data | [
"Check",
"that",
"the",
"data",
"is",
"a",
"list",
"type",
"with",
"possible",
"options",
"to",
"validate",
"as",
"True",
"."
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_validation_py.py#L273-L313 | train | 227,703 |
fastavro/fastavro | fastavro/_validation_py.py | validate_many | def validate_many(records, schema, raise_errors=True):
"""
Validate a list of data!
Parameters
----------
records: iterable
List of records to validate
schema: dict
Schema
raise_errors: bool, optional
If true, errors are raised for invalid data. If false, a simple
True (valid) or False (invalid) result is returned
Example::
from fastavro.validation import validate_many
schema = {...}
records = [{...}, {...}, ...]
validate_many(records, schema)
"""
errors = []
results = []
for record in records:
try:
results.append(validate(record, schema, raise_errors=raise_errors))
except ValidationError as e:
errors.extend(e.errors)
if raise_errors and errors:
raise ValidationError(*errors)
return all(results) | python | def validate_many(records, schema, raise_errors=True):
"""
Validate a list of data!
Parameters
----------
records: iterable
List of records to validate
schema: dict
Schema
raise_errors: bool, optional
If true, errors are raised for invalid data. If false, a simple
True (valid) or False (invalid) result is returned
Example::
from fastavro.validation import validate_many
schema = {...}
records = [{...}, {...}, ...]
validate_many(records, schema)
"""
errors = []
results = []
for record in records:
try:
results.append(validate(record, schema, raise_errors=raise_errors))
except ValidationError as e:
errors.extend(e.errors)
if raise_errors and errors:
raise ValidationError(*errors)
return all(results) | [
"def",
"validate_many",
"(",
"records",
",",
"schema",
",",
"raise_errors",
"=",
"True",
")",
":",
"errors",
"=",
"[",
"]",
"results",
"=",
"[",
"]",
"for",
"record",
"in",
"records",
":",
"try",
":",
"results",
".",
"append",
"(",
"validate",
"(",
"... | Validate a list of data!
Parameters
----------
records: iterable
List of records to validate
schema: dict
Schema
raise_errors: bool, optional
If true, errors are raised for invalid data. If false, a simple
True (valid) or False (invalid) result is returned
Example::
from fastavro.validation import validate_many
schema = {...}
records = [{...}, {...}, ...]
validate_many(records, schema) | [
"Validate",
"a",
"list",
"of",
"data!"
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_validation_py.py#L383-L414 | train | 227,704 |
fastavro/fastavro | fastavro/_schema_py.py | parse_schema | def parse_schema(schema, _write_hint=True, _force=False):
"""Returns a parsed avro schema
It is not necessary to call parse_schema but doing so and saving the parsed
schema for use later will make future operations faster as the schema will
not need to be reparsed.
Parameters
----------
schema: dict
Input schema
_write_hint: bool
Internal API argument specifying whether or not the __fastavro_parsed
marker should be added to the schema
_force: bool
Internal API argument. If True, the schema will always be parsed even
if it has been parsed and has the __fastavro_parsed marker
Example::
from fastavro import parse_schema
from fastavro import writer
parsed_schema = parse_schema(original_schema)
with open('weather.avro', 'wb') as out:
writer(out, parsed_schema, records)
"""
if _force:
return _parse_schema(schema, "", _write_hint)
elif isinstance(schema, dict) and "__fastavro_parsed" in schema:
return schema
else:
return _parse_schema(schema, "", _write_hint) | python | def parse_schema(schema, _write_hint=True, _force=False):
"""Returns a parsed avro schema
It is not necessary to call parse_schema but doing so and saving the parsed
schema for use later will make future operations faster as the schema will
not need to be reparsed.
Parameters
----------
schema: dict
Input schema
_write_hint: bool
Internal API argument specifying whether or not the __fastavro_parsed
marker should be added to the schema
_force: bool
Internal API argument. If True, the schema will always be parsed even
if it has been parsed and has the __fastavro_parsed marker
Example::
from fastavro import parse_schema
from fastavro import writer
parsed_schema = parse_schema(original_schema)
with open('weather.avro', 'wb') as out:
writer(out, parsed_schema, records)
"""
if _force:
return _parse_schema(schema, "", _write_hint)
elif isinstance(schema, dict) and "__fastavro_parsed" in schema:
return schema
else:
return _parse_schema(schema, "", _write_hint) | [
"def",
"parse_schema",
"(",
"schema",
",",
"_write_hint",
"=",
"True",
",",
"_force",
"=",
"False",
")",
":",
"if",
"_force",
":",
"return",
"_parse_schema",
"(",
"schema",
",",
"\"\"",
",",
"_write_hint",
")",
"elif",
"isinstance",
"(",
"schema",
",",
"... | Returns a parsed avro schema
It is not necessary to call parse_schema but doing so and saving the parsed
schema for use later will make future operations faster as the schema will
not need to be reparsed.
Parameters
----------
schema: dict
Input schema
_write_hint: bool
Internal API argument specifying whether or not the __fastavro_parsed
marker should be added to the schema
_force: bool
Internal API argument. If True, the schema will always be parsed even
if it has been parsed and has the __fastavro_parsed marker
Example::
from fastavro import parse_schema
from fastavro import writer
parsed_schema = parse_schema(original_schema)
with open('weather.avro', 'wb') as out:
writer(out, parsed_schema, records) | [
"Returns",
"a",
"parsed",
"avro",
"schema"
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_schema_py.py#L53-L86 | train | 227,705 |
fastavro/fastavro | fastavro/_schema_py.py | load_schema | def load_schema(schema_path):
'''
Returns a schema loaded from the file at `schema_path`.
Will recursively load referenced schemas assuming they can be found in
files in the same directory and named with the convention
`<type_name>.avsc`.
'''
with open(schema_path) as fd:
schema = json.load(fd)
schema_dir, schema_file = path.split(schema_path)
return _load_schema(schema, schema_dir) | python | def load_schema(schema_path):
'''
Returns a schema loaded from the file at `schema_path`.
Will recursively load referenced schemas assuming they can be found in
files in the same directory and named with the convention
`<type_name>.avsc`.
'''
with open(schema_path) as fd:
schema = json.load(fd)
schema_dir, schema_file = path.split(schema_path)
return _load_schema(schema, schema_dir) | [
"def",
"load_schema",
"(",
"schema_path",
")",
":",
"with",
"open",
"(",
"schema_path",
")",
"as",
"fd",
":",
"schema",
"=",
"json",
".",
"load",
"(",
"fd",
")",
"schema_dir",
",",
"schema_file",
"=",
"path",
".",
"split",
"(",
"schema_path",
")",
"ret... | Returns a schema loaded from the file at `schema_path`.
Will recursively load referenced schemas assuming they can be found in
files in the same directory and named with the convention
`<type_name>.avsc`. | [
"Returns",
"a",
"schema",
"loaded",
"from",
"the",
"file",
"at",
"schema_path",
"."
] | bafe826293e19eb93e77bbb0f6adfa059c7884b2 | https://github.com/fastavro/fastavro/blob/bafe826293e19eb93e77bbb0f6adfa059c7884b2/fastavro/_schema_py.py#L212-L223 | train | 227,706 |
alejandroautalan/pygubu | pygubu/widgets/simpletooltip.py | ToolTip.showtip | def showtip(self, text):
"Display text in tooltip window"
self.text = text
if self.tipwindow or not self.text:
return
x, y, cx, cy = self.widget.bbox("insert")
x = x + self.widget.winfo_rootx() + 27
y = y + cy + self.widget.winfo_rooty() +27
self.tipwindow = tw = tk.Toplevel(self.widget)
tw.wm_overrideredirect(1)
tw.wm_geometry("+%d+%d" % (x, y))
try:
# For Mac OS
tw.tk.call("::tk::unsupported::MacWindowStyle",
"style", tw._w,
"help", "noActivates")
except tk.TclError:
pass
label = tk.Label(tw, text=self.text, justify=tk.LEFT,
background="#ffffe0", foreground="black",
relief=tk.SOLID, borderwidth=1,
font=("tahoma", "8", "normal"))
label.pack(ipadx=1) | python | def showtip(self, text):
"Display text in tooltip window"
self.text = text
if self.tipwindow or not self.text:
return
x, y, cx, cy = self.widget.bbox("insert")
x = x + self.widget.winfo_rootx() + 27
y = y + cy + self.widget.winfo_rooty() +27
self.tipwindow = tw = tk.Toplevel(self.widget)
tw.wm_overrideredirect(1)
tw.wm_geometry("+%d+%d" % (x, y))
try:
# For Mac OS
tw.tk.call("::tk::unsupported::MacWindowStyle",
"style", tw._w,
"help", "noActivates")
except tk.TclError:
pass
label = tk.Label(tw, text=self.text, justify=tk.LEFT,
background="#ffffe0", foreground="black",
relief=tk.SOLID, borderwidth=1,
font=("tahoma", "8", "normal"))
label.pack(ipadx=1) | [
"def",
"showtip",
"(",
"self",
",",
"text",
")",
":",
"self",
".",
"text",
"=",
"text",
"if",
"self",
".",
"tipwindow",
"or",
"not",
"self",
".",
"text",
":",
"return",
"x",
",",
"y",
",",
"cx",
",",
"cy",
"=",
"self",
".",
"widget",
".",
"bbox... | Display text in tooltip window | [
"Display",
"text",
"in",
"tooltip",
"window"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/simpletooltip.py#L20-L42 | train | 227,707 |
alejandroautalan/pygubu | pygubu/__init__.py | TkApplication.run | def run(self):
"""Ejecute the main loop."""
self.toplevel.protocol("WM_DELETE_WINDOW", self.__on_window_close)
self.toplevel.mainloop() | python | def run(self):
"""Ejecute the main loop."""
self.toplevel.protocol("WM_DELETE_WINDOW", self.__on_window_close)
self.toplevel.mainloop() | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"toplevel",
".",
"protocol",
"(",
"\"WM_DELETE_WINDOW\"",
",",
"self",
".",
"__on_window_close",
")",
"self",
".",
"toplevel",
".",
"mainloop",
"(",
")"
] | Ejecute the main loop. | [
"Ejecute",
"the",
"main",
"loop",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/__init__.py#L41-L45 | train | 227,708 |
alejandroautalan/pygubu | examples/py2exe/myapp.py | MyApplication.create_regpoly | def create_regpoly(self, x0, y0, x1, y1, sides=0, start=90, extent=360, **kw):
"""Create a regular polygon"""
coords = self.__regpoly_coords(x0, y0, x1, y1, sides, start, extent)
return self.canvas.create_polygon(*coords, **kw) | python | def create_regpoly(self, x0, y0, x1, y1, sides=0, start=90, extent=360, **kw):
"""Create a regular polygon"""
coords = self.__regpoly_coords(x0, y0, x1, y1, sides, start, extent)
return self.canvas.create_polygon(*coords, **kw) | [
"def",
"create_regpoly",
"(",
"self",
",",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
",",
"sides",
"=",
"0",
",",
"start",
"=",
"90",
",",
"extent",
"=",
"360",
",",
"*",
"*",
"kw",
")",
":",
"coords",
"=",
"self",
".",
"__regpoly_coords",
"(",
"x... | Create a regular polygon | [
"Create",
"a",
"regular",
"polygon"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/examples/py2exe/myapp.py#L131-L134 | train | 227,709 |
alejandroautalan/pygubu | examples/py2exe/myapp.py | MyApplication.__regpoly_coords | def __regpoly_coords(self, x0, y0, x1, y1, sides, start, extent):
"""Create the coordinates of the regular polygon specified"""
coords = []
if extent == 0:
return coords
xm = (x0 + x1) / 2.
ym = (y0 + y1) / 2.
rx = xm - x0
ry = ym - y0
n = sides
if n == 0: # 0 sides => circle
n = round((rx + ry) * .5)
if n < 2:
n = 4
# Extent can be negative
dirv = 1 if extent > 0 else -1
if abs(extent) > 360:
extent = dirv * abs(extent) % 360
step = dirv * 360 / n
numsteps = 1 + extent / float(step)
numsteps_int = int(numsteps)
i = 0
while i < numsteps_int:
rad = (start - i * step) * DEG2RAD
x = rx * math.cos(rad)
y = ry * math.sin(rad)
coords.append((xm+x, ym-y))
i += 1
# Figure out where last segment should end
if numsteps != numsteps_int:
# Vecter V1 is last drawn vertext (x,y) from above
# Vector V2 is the edge of the polygon
rad2 = (start - numsteps_int * step) * DEG2RAD
x2 = rx * math.cos(rad2) - x
y2 = ry * math.sin(rad2) - y
# Vector V3 is unit vector in direction we end at
rad3 = (start - extent) * DEG2RAD
x3 = math.cos(rad3)
y3 = math.sin(rad3)
# Find where V3 crosses V1+V2 => find j s.t. V1 + kV2 = jV3
j = (x*y2 - x2*y) / (x3*y2 - x2*y3)
coords.append((xm + j * x3, ym - j * y3))
return coords | python | def __regpoly_coords(self, x0, y0, x1, y1, sides, start, extent):
"""Create the coordinates of the regular polygon specified"""
coords = []
if extent == 0:
return coords
xm = (x0 + x1) / 2.
ym = (y0 + y1) / 2.
rx = xm - x0
ry = ym - y0
n = sides
if n == 0: # 0 sides => circle
n = round((rx + ry) * .5)
if n < 2:
n = 4
# Extent can be negative
dirv = 1 if extent > 0 else -1
if abs(extent) > 360:
extent = dirv * abs(extent) % 360
step = dirv * 360 / n
numsteps = 1 + extent / float(step)
numsteps_int = int(numsteps)
i = 0
while i < numsteps_int:
rad = (start - i * step) * DEG2RAD
x = rx * math.cos(rad)
y = ry * math.sin(rad)
coords.append((xm+x, ym-y))
i += 1
# Figure out where last segment should end
if numsteps != numsteps_int:
# Vecter V1 is last drawn vertext (x,y) from above
# Vector V2 is the edge of the polygon
rad2 = (start - numsteps_int * step) * DEG2RAD
x2 = rx * math.cos(rad2) - x
y2 = ry * math.sin(rad2) - y
# Vector V3 is unit vector in direction we end at
rad3 = (start - extent) * DEG2RAD
x3 = math.cos(rad3)
y3 = math.sin(rad3)
# Find where V3 crosses V1+V2 => find j s.t. V1 + kV2 = jV3
j = (x*y2 - x2*y) / (x3*y2 - x2*y3)
coords.append((xm + j * x3, ym - j * y3))
return coords | [
"def",
"__regpoly_coords",
"(",
"self",
",",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
",",
"sides",
",",
"start",
",",
"extent",
")",
":",
"coords",
"=",
"[",
"]",
"if",
"extent",
"==",
"0",
":",
"return",
"coords",
"xm",
"=",
"(",
"x0",
"+",
"x1... | Create the coordinates of the regular polygon specified | [
"Create",
"the",
"coordinates",
"of",
"the",
"regular",
"polygon",
"specified"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/examples/py2exe/myapp.py#L136-L189 | train | 227,710 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.get_image | def get_image(self, path):
"""Return tk image corresponding to name which is taken form path."""
image = ''
name = os.path.basename(path)
if not StockImage.is_registered(name):
ipath = self.__find_image(path)
if ipath is not None:
StockImage.register(name, ipath)
else:
msg = "Image '{0}' not found in resource paths.".format(name)
logger.warning(msg)
try:
image = StockImage.get(name)
except StockImageException:
# TODO: notify something here.
pass
return image | python | def get_image(self, path):
"""Return tk image corresponding to name which is taken form path."""
image = ''
name = os.path.basename(path)
if not StockImage.is_registered(name):
ipath = self.__find_image(path)
if ipath is not None:
StockImage.register(name, ipath)
else:
msg = "Image '{0}' not found in resource paths.".format(name)
logger.warning(msg)
try:
image = StockImage.get(name)
except StockImageException:
# TODO: notify something here.
pass
return image | [
"def",
"get_image",
"(",
"self",
",",
"path",
")",
":",
"image",
"=",
"''",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"path",
")",
"if",
"not",
"StockImage",
".",
"is_registered",
"(",
"name",
")",
":",
"ipath",
"=",
"self",
".",
"__fin... | Return tk image corresponding to name which is taken form path. | [
"Return",
"tk",
"image",
"corresponding",
"to",
"name",
"which",
"is",
"taken",
"form",
"path",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L195-L211 | train | 227,711 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.import_variables | def import_variables(self, container, varnames=None):
"""Helper method to avoid call get_variable for every variable."""
if varnames is None:
for keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword])
else:
for keyword in varnames:
if keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword]) | python | def import_variables(self, container, varnames=None):
"""Helper method to avoid call get_variable for every variable."""
if varnames is None:
for keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword])
else:
for keyword in varnames:
if keyword in self.tkvariables:
setattr(container, keyword, self.tkvariables[keyword]) | [
"def",
"import_variables",
"(",
"self",
",",
"container",
",",
"varnames",
"=",
"None",
")",
":",
"if",
"varnames",
"is",
"None",
":",
"for",
"keyword",
"in",
"self",
".",
"tkvariables",
":",
"setattr",
"(",
"container",
",",
"keyword",
",",
"self",
".",... | Helper method to avoid call get_variable for every variable. | [
"Helper",
"method",
"to",
"avoid",
"call",
"get_variable",
"for",
"every",
"variable",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L228-L236 | train | 227,712 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.create_variable | def create_variable(self, varname, vtype=None):
"""Create a tk variable.
If the variable was created previously return that instance.
"""
var_types = ('string', 'int', 'boolean', 'double')
vname = varname
var = None
type_from_name = 'string' # default type
if ':' in varname:
type_from_name, vname = varname.split(':')
# Fix incorrect order bug #33
if type_from_name not in (var_types):
# Swap order
type_from_name, vname = vname, type_from_name
if type_from_name not in (var_types):
raise Exception('Undefined variable type in "{0}"'.format(varname))
if vname in self.tkvariables:
var = self.tkvariables[vname]
else:
if vtype is None:
# get type from name
if type_from_name == 'int':
var = tkinter.IntVar()
elif type_from_name == 'boolean':
var = tkinter.BooleanVar()
elif type_from_name == 'double':
var = tkinter.DoubleVar()
else:
var = tkinter.StringVar()
else:
var = vtype()
self.tkvariables[vname] = var
return var | python | def create_variable(self, varname, vtype=None):
"""Create a tk variable.
If the variable was created previously return that instance.
"""
var_types = ('string', 'int', 'boolean', 'double')
vname = varname
var = None
type_from_name = 'string' # default type
if ':' in varname:
type_from_name, vname = varname.split(':')
# Fix incorrect order bug #33
if type_from_name not in (var_types):
# Swap order
type_from_name, vname = vname, type_from_name
if type_from_name not in (var_types):
raise Exception('Undefined variable type in "{0}"'.format(varname))
if vname in self.tkvariables:
var = self.tkvariables[vname]
else:
if vtype is None:
# get type from name
if type_from_name == 'int':
var = tkinter.IntVar()
elif type_from_name == 'boolean':
var = tkinter.BooleanVar()
elif type_from_name == 'double':
var = tkinter.DoubleVar()
else:
var = tkinter.StringVar()
else:
var = vtype()
self.tkvariables[vname] = var
return var | [
"def",
"create_variable",
"(",
"self",
",",
"varname",
",",
"vtype",
"=",
"None",
")",
":",
"var_types",
"=",
"(",
"'string'",
",",
"'int'",
",",
"'boolean'",
",",
"'double'",
")",
"vname",
"=",
"varname",
"var",
"=",
"None",
"type_from_name",
"=",
"'str... | Create a tk variable.
If the variable was created previously return that instance. | [
"Create",
"a",
"tk",
"variable",
".",
"If",
"the",
"variable",
"was",
"created",
"previously",
"return",
"that",
"instance",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L238-L273 | train | 227,713 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.add_from_file | def add_from_file(self, fpath):
"""Load ui definition from file."""
if self.tree is None:
base, name = os.path.split(fpath)
self.add_resource_path(base)
self.tree = tree = ET.parse(fpath)
self.root = tree.getroot()
self.objects = {}
else:
# TODO: append to current tree
pass | python | def add_from_file(self, fpath):
"""Load ui definition from file."""
if self.tree is None:
base, name = os.path.split(fpath)
self.add_resource_path(base)
self.tree = tree = ET.parse(fpath)
self.root = tree.getroot()
self.objects = {}
else:
# TODO: append to current tree
pass | [
"def",
"add_from_file",
"(",
"self",
",",
"fpath",
")",
":",
"if",
"self",
".",
"tree",
"is",
"None",
":",
"base",
",",
"name",
"=",
"os",
".",
"path",
".",
"split",
"(",
"fpath",
")",
"self",
".",
"add_resource_path",
"(",
"base",
")",
"self",
"."... | Load ui definition from file. | [
"Load",
"ui",
"definition",
"from",
"file",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L275-L285 | train | 227,714 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.add_from_string | def add_from_string(self, strdata):
"""Load ui definition from string."""
if self.tree is None:
self.tree = tree = ET.ElementTree(ET.fromstring(strdata))
self.root = tree.getroot()
self.objects = {}
else:
# TODO: append to current tree
pass | python | def add_from_string(self, strdata):
"""Load ui definition from string."""
if self.tree is None:
self.tree = tree = ET.ElementTree(ET.fromstring(strdata))
self.root = tree.getroot()
self.objects = {}
else:
# TODO: append to current tree
pass | [
"def",
"add_from_string",
"(",
"self",
",",
"strdata",
")",
":",
"if",
"self",
".",
"tree",
"is",
"None",
":",
"self",
".",
"tree",
"=",
"tree",
"=",
"ET",
".",
"ElementTree",
"(",
"ET",
".",
"fromstring",
"(",
"strdata",
")",
")",
"self",
".",
"ro... | Load ui definition from string. | [
"Load",
"ui",
"definition",
"from",
"string",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L287-L295 | train | 227,715 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.add_from_xmlnode | def add_from_xmlnode(self, element):
"""Load ui definition from xml.etree.Element node."""
if self.tree is None:
root = ET.Element('interface')
root.append(element)
self.tree = tree = ET.ElementTree(root)
self.root = tree.getroot()
self.objects = {}
# ET.dump(tree)
else:
# TODO: append to current tree
pass | python | def add_from_xmlnode(self, element):
"""Load ui definition from xml.etree.Element node."""
if self.tree is None:
root = ET.Element('interface')
root.append(element)
self.tree = tree = ET.ElementTree(root)
self.root = tree.getroot()
self.objects = {}
# ET.dump(tree)
else:
# TODO: append to current tree
pass | [
"def",
"add_from_xmlnode",
"(",
"self",
",",
"element",
")",
":",
"if",
"self",
".",
"tree",
"is",
"None",
":",
"root",
"=",
"ET",
".",
"Element",
"(",
"'interface'",
")",
"root",
".",
"append",
"(",
"element",
")",
"self",
".",
"tree",
"=",
"tree",
... | Load ui definition from xml.etree.Element node. | [
"Load",
"ui",
"definition",
"from",
"xml",
".",
"etree",
".",
"Element",
"node",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L297-L308 | train | 227,716 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.get_object | def get_object(self, name, master=None):
"""Find and create the widget named name.
Use master as parent. If widget was already created, return
that instance."""
widget = None
if name in self.objects:
widget = self.objects[name].widget
else:
xpath = ".//object[@id='{0}']".format(name)
node = self.tree.find(xpath)
if node is not None:
root = BuilderObject(self, dict())
root.widget = master
bobject = self._realize(root, node)
widget = bobject.widget
if widget is None:
msg = 'Widget "{0}" not defined.'.format(name)
raise Exception(msg)
return widget | python | def get_object(self, name, master=None):
"""Find and create the widget named name.
Use master as parent. If widget was already created, return
that instance."""
widget = None
if name in self.objects:
widget = self.objects[name].widget
else:
xpath = ".//object[@id='{0}']".format(name)
node = self.tree.find(xpath)
if node is not None:
root = BuilderObject(self, dict())
root.widget = master
bobject = self._realize(root, node)
widget = bobject.widget
if widget is None:
msg = 'Widget "{0}" not defined.'.format(name)
raise Exception(msg)
return widget | [
"def",
"get_object",
"(",
"self",
",",
"name",
",",
"master",
"=",
"None",
")",
":",
"widget",
"=",
"None",
"if",
"name",
"in",
"self",
".",
"objects",
":",
"widget",
"=",
"self",
".",
"objects",
"[",
"name",
"]",
".",
"widget",
"else",
":",
"xpath... | Find and create the widget named name.
Use master as parent. If widget was already created, return
that instance. | [
"Find",
"and",
"create",
"the",
"widget",
"named",
"name",
".",
"Use",
"master",
"as",
"parent",
".",
"If",
"widget",
"was",
"already",
"created",
"return",
"that",
"instance",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L310-L328 | train | 227,717 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder._realize | def _realize(self, master, element):
"""Builds a widget from xml element using master as parent."""
data = data_xmlnode_to_dict(element, self.translator)
cname = data['class']
uniqueid = data['id']
if cname not in CLASS_MAP:
self._import_class(cname)
if cname in CLASS_MAP:
self._pre_process_data(data)
parent = CLASS_MAP[cname].builder.factory(self, data)
widget = parent.realize(master)
self.objects[uniqueid] = parent
xpath = "./child"
children = element.findall(xpath)
for child in children:
child_xml = child.find('./object')
child = self._realize(parent, child_xml)
parent.add_child(child)
parent.configure()
parent.layout()
return parent
else:
raise Exception('Class "{0}" not mapped'.format(cname)) | python | def _realize(self, master, element):
"""Builds a widget from xml element using master as parent."""
data = data_xmlnode_to_dict(element, self.translator)
cname = data['class']
uniqueid = data['id']
if cname not in CLASS_MAP:
self._import_class(cname)
if cname in CLASS_MAP:
self._pre_process_data(data)
parent = CLASS_MAP[cname].builder.factory(self, data)
widget = parent.realize(master)
self.objects[uniqueid] = parent
xpath = "./child"
children = element.findall(xpath)
for child in children:
child_xml = child.find('./object')
child = self._realize(parent, child_xml)
parent.add_child(child)
parent.configure()
parent.layout()
return parent
else:
raise Exception('Class "{0}" not mapped'.format(cname)) | [
"def",
"_realize",
"(",
"self",
",",
"master",
",",
"element",
")",
":",
"data",
"=",
"data_xmlnode_to_dict",
"(",
"element",
",",
"self",
".",
"translator",
")",
"cname",
"=",
"data",
"[",
"'class'",
"]",
"uniqueid",
"=",
"data",
"[",
"'id'",
"]",
"if... | Builds a widget from xml element using master as parent. | [
"Builds",
"a",
"widget",
"from",
"xml",
"element",
"using",
"master",
"as",
"parent",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L349-L377 | train | 227,718 |
alejandroautalan/pygubu | pygubu/builder/__init__.py | Builder.connect_callbacks | def connect_callbacks(self, callbacks_bag):
"""Connect callbacks specified in callbacks_bag with callbacks
defined in the ui definition.
Return a list with the name of the callbacks not connected.
"""
notconnected = []
for wname, builderobj in self.objects.items():
missing = builderobj.connect_commands(callbacks_bag)
if missing is not None:
notconnected.extend(missing)
missing = builderobj.connect_bindings(callbacks_bag)
if missing is not None:
notconnected.extend(missing)
if notconnected:
notconnected = list(set(notconnected))
msg = 'Missing callbacks for commands: {}'.format(notconnected)
logger.warning(msg)
return notconnected
else:
return None | python | def connect_callbacks(self, callbacks_bag):
"""Connect callbacks specified in callbacks_bag with callbacks
defined in the ui definition.
Return a list with the name of the callbacks not connected.
"""
notconnected = []
for wname, builderobj in self.objects.items():
missing = builderobj.connect_commands(callbacks_bag)
if missing is not None:
notconnected.extend(missing)
missing = builderobj.connect_bindings(callbacks_bag)
if missing is not None:
notconnected.extend(missing)
if notconnected:
notconnected = list(set(notconnected))
msg = 'Missing callbacks for commands: {}'.format(notconnected)
logger.warning(msg)
return notconnected
else:
return None | [
"def",
"connect_callbacks",
"(",
"self",
",",
"callbacks_bag",
")",
":",
"notconnected",
"=",
"[",
"]",
"for",
"wname",
",",
"builderobj",
"in",
"self",
".",
"objects",
".",
"items",
"(",
")",
":",
"missing",
"=",
"builderobj",
".",
"connect_commands",
"("... | Connect callbacks specified in callbacks_bag with callbacks
defined in the ui definition.
Return a list with the name of the callbacks not connected. | [
"Connect",
"callbacks",
"specified",
"in",
"callbacks_bag",
"with",
"callbacks",
"defined",
"in",
"the",
"ui",
"definition",
".",
"Return",
"a",
"list",
"with",
"the",
"name",
"of",
"the",
"callbacks",
"not",
"connected",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/__init__.py#L388-L407 | train | 227,719 |
alejandroautalan/pygubu | pygubudesigner/util/selecttool.py | SelectTool._start_selecting | def _start_selecting(self, event):
"""Comienza con el proceso de seleccion."""
self._selecting = True
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
self._sstart = (x, y)
if not self._sobject:
self._sobject = canvas.create_rectangle(
self._sstart[0], self._sstart[1], x, y,
dash=(3,5), outline='#0000ff'
)
canvas.itemconfigure(self._sobject, state=tk.NORMAL) | python | def _start_selecting(self, event):
"""Comienza con el proceso de seleccion."""
self._selecting = True
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
self._sstart = (x, y)
if not self._sobject:
self._sobject = canvas.create_rectangle(
self._sstart[0], self._sstart[1], x, y,
dash=(3,5), outline='#0000ff'
)
canvas.itemconfigure(self._sobject, state=tk.NORMAL) | [
"def",
"_start_selecting",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"_selecting",
"=",
"True",
"canvas",
"=",
"self",
".",
"_canvas",
"x",
"=",
"canvas",
".",
"canvasx",
"(",
"event",
".",
"x",
")",
"y",
"=",
"canvas",
".",
"canvasy",
"(",
... | Comienza con el proceso de seleccion. | [
"Comienza",
"con",
"el",
"proceso",
"de",
"seleccion",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/util/selecttool.py#L51-L63 | train | 227,720 |
alejandroautalan/pygubu | pygubudesigner/util/selecttool.py | SelectTool._keep_selecting | def _keep_selecting(self, event):
"""Continua con el proceso de seleccion.
Crea o redimensiona el cuadro de seleccion de acuerdo con
la posicion del raton."""
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
canvas.coords(self._sobject,
self._sstart[0], self._sstart[1], x, y) | python | def _keep_selecting(self, event):
"""Continua con el proceso de seleccion.
Crea o redimensiona el cuadro de seleccion de acuerdo con
la posicion del raton."""
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
canvas.coords(self._sobject,
self._sstart[0], self._sstart[1], x, y) | [
"def",
"_keep_selecting",
"(",
"self",
",",
"event",
")",
":",
"canvas",
"=",
"self",
".",
"_canvas",
"x",
"=",
"canvas",
".",
"canvasx",
"(",
"event",
".",
"x",
")",
"y",
"=",
"canvas",
".",
"canvasy",
"(",
"event",
".",
"y",
")",
"canvas",
".",
... | Continua con el proceso de seleccion.
Crea o redimensiona el cuadro de seleccion de acuerdo con
la posicion del raton. | [
"Continua",
"con",
"el",
"proceso",
"de",
"seleccion",
".",
"Crea",
"o",
"redimensiona",
"el",
"cuadro",
"de",
"seleccion",
"de",
"acuerdo",
"con",
"la",
"posicion",
"del",
"raton",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/util/selecttool.py#L65-L73 | train | 227,721 |
alejandroautalan/pygubu | pygubudesigner/util/selecttool.py | SelectTool._finish_selecting | def _finish_selecting(self, event):
"""Finaliza la seleccion.
Marca como seleccionados todos los objetos que se encuentran
dentro del recuadro de seleccion."""
self._selecting = False
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
canvas.coords(self._sobject, -1, -1, -1, -1)
canvas.itemconfigure(self._sobject, state=tk.HIDDEN)
sel_region = self._sstart[0], self._sstart[1], x, y
canvas.region_selected = sel_region
canvas.event_generate('<<RegionSelected>>') | python | def _finish_selecting(self, event):
"""Finaliza la seleccion.
Marca como seleccionados todos los objetos que se encuentran
dentro del recuadro de seleccion."""
self._selecting = False
canvas = self._canvas
x = canvas.canvasx(event.x)
y = canvas.canvasy(event.y)
canvas.coords(self._sobject, -1, -1, -1, -1)
canvas.itemconfigure(self._sobject, state=tk.HIDDEN)
sel_region = self._sstart[0], self._sstart[1], x, y
canvas.region_selected = sel_region
canvas.event_generate('<<RegionSelected>>') | [
"def",
"_finish_selecting",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"_selecting",
"=",
"False",
"canvas",
"=",
"self",
".",
"_canvas",
"x",
"=",
"canvas",
".",
"canvasx",
"(",
"event",
".",
"x",
")",
"y",
"=",
"canvas",
".",
"canvasy",
"(",... | Finaliza la seleccion.
Marca como seleccionados todos los objetos que se encuentran
dentro del recuadro de seleccion. | [
"Finaliza",
"la",
"seleccion",
".",
"Marca",
"como",
"seleccionados",
"todos",
"los",
"objetos",
"que",
"se",
"encuentran",
"dentro",
"del",
"recuadro",
"de",
"seleccion",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/util/selecttool.py#L75-L89 | train | 227,722 |
alejandroautalan/pygubu | pygubu/widgets/calendarframe.py | matrix_coords | def matrix_coords(rows, cols, rowh, colw, ox=0, oy=0):
"Generate coords for a matrix of rects"
for i, f, c in rowmajor(rows, cols):
x = ox + c * colw
y = oy + f * rowh
x1 = x + colw
y1 = y + rowh
yield (i, x, y, x1, y1) | python | def matrix_coords(rows, cols, rowh, colw, ox=0, oy=0):
"Generate coords for a matrix of rects"
for i, f, c in rowmajor(rows, cols):
x = ox + c * colw
y = oy + f * rowh
x1 = x + colw
y1 = y + rowh
yield (i, x, y, x1, y1) | [
"def",
"matrix_coords",
"(",
"rows",
",",
"cols",
",",
"rowh",
",",
"colw",
",",
"ox",
"=",
"0",
",",
"oy",
"=",
"0",
")",
":",
"for",
"i",
",",
"f",
",",
"c",
"in",
"rowmajor",
"(",
"rows",
",",
"cols",
")",
":",
"x",
"=",
"ox",
"+",
"c",
... | Generate coords for a matrix of rects | [
"Generate",
"coords",
"for",
"a",
"matrix",
"of",
"rects"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/calendarframe.py#L40-L47 | train | 227,723 |
alejandroautalan/pygubu | pygubudesigner/util/__init__.py | ArrayVar.get | def get(self):
'''Return a dictionary that represents the Tcl array'''
value = {}
for (elementname, elementvar) in self._elementvars.items():
value[elementname] = elementvar.get()
return value | python | def get(self):
'''Return a dictionary that represents the Tcl array'''
value = {}
for (elementname, elementvar) in self._elementvars.items():
value[elementname] = elementvar.get()
return value | [
"def",
"get",
"(",
"self",
")",
":",
"value",
"=",
"{",
"}",
"for",
"(",
"elementname",
",",
"elementvar",
")",
"in",
"self",
".",
"_elementvars",
".",
"items",
"(",
")",
":",
"value",
"[",
"elementname",
"]",
"=",
"elementvar",
".",
"get",
"(",
")... | Return a dictionary that represents the Tcl array | [
"Return",
"a",
"dictionary",
"that",
"represents",
"the",
"Tcl",
"array"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/util/__init__.py#L96-L101 | train | 227,724 |
alejandroautalan/pygubu | pygubu/widgets/editabletreeview.py | EditableTreeview.yview | def yview(self, *args):
"""Update inplace widgets position when doing vertical scroll"""
self.after_idle(self.__updateWnds)
ttk.Treeview.yview(self, *args) | python | def yview(self, *args):
"""Update inplace widgets position when doing vertical scroll"""
self.after_idle(self.__updateWnds)
ttk.Treeview.yview(self, *args) | [
"def",
"yview",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"after_idle",
"(",
"self",
".",
"__updateWnds",
")",
"ttk",
".",
"Treeview",
".",
"yview",
"(",
"self",
",",
"*",
"args",
")"
] | Update inplace widgets position when doing vertical scroll | [
"Update",
"inplace",
"widgets",
"position",
"when",
"doing",
"vertical",
"scroll"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/editabletreeview.py#L106-L109 | train | 227,725 |
alejandroautalan/pygubu | pygubu/widgets/editabletreeview.py | EditableTreeview.xview | def xview(self, *args):
"""Update inplace widgets position when doing horizontal scroll"""
self.after_idle(self.__updateWnds)
ttk.Treeview.xview(self, *args) | python | def xview(self, *args):
"""Update inplace widgets position when doing horizontal scroll"""
self.after_idle(self.__updateWnds)
ttk.Treeview.xview(self, *args) | [
"def",
"xview",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"after_idle",
"(",
"self",
".",
"__updateWnds",
")",
"ttk",
".",
"Treeview",
".",
"xview",
"(",
"self",
",",
"*",
"args",
")"
] | Update inplace widgets position when doing horizontal scroll | [
"Update",
"inplace",
"widgets",
"position",
"when",
"doing",
"horizontal",
"scroll"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/editabletreeview.py#L119-L122 | train | 227,726 |
alejandroautalan/pygubu | pygubu/widgets/editabletreeview.py | EditableTreeview.__check_focus | def __check_focus(self, event):
"""Checks if the focus has changed"""
#print('Event:', event.type, event.x, event.y)
changed = False
if not self._curfocus:
changed = True
elif self._curfocus != self.focus():
self.__clear_inplace_widgets()
changed = True
newfocus = self.focus()
if changed:
if newfocus:
#print('Focus changed to:', newfocus)
self._curfocus= newfocus
self.__focus(newfocus)
self.__updateWnds() | python | def __check_focus(self, event):
"""Checks if the focus has changed"""
#print('Event:', event.type, event.x, event.y)
changed = False
if not self._curfocus:
changed = True
elif self._curfocus != self.focus():
self.__clear_inplace_widgets()
changed = True
newfocus = self.focus()
if changed:
if newfocus:
#print('Focus changed to:', newfocus)
self._curfocus= newfocus
self.__focus(newfocus)
self.__updateWnds() | [
"def",
"__check_focus",
"(",
"self",
",",
"event",
")",
":",
"#print('Event:', event.type, event.x, event.y)",
"changed",
"=",
"False",
"if",
"not",
"self",
".",
"_curfocus",
":",
"changed",
"=",
"True",
"elif",
"self",
".",
"_curfocus",
"!=",
"self",
".",
"fo... | Checks if the focus has changed | [
"Checks",
"if",
"the",
"focus",
"has",
"changed"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/editabletreeview.py#L132-L147 | train | 227,727 |
alejandroautalan/pygubu | pygubu/widgets/editabletreeview.py | EditableTreeview.__focus | def __focus(self, item):
"""Called when focus item has changed"""
cols = self.__get_display_columns()
for col in cols:
self.__event_info =(col,item)
self.event_generate('<<TreeviewInplaceEdit>>')
if col in self._inplace_widgets:
w = self._inplace_widgets[col]
w.bind('<Key-Tab>',
lambda e: w.tk_focusNext().focus_set())
w.bind('<Shift-Key-Tab>',
lambda e: w.tk_focusPrev().focus_set()) | python | def __focus(self, item):
"""Called when focus item has changed"""
cols = self.__get_display_columns()
for col in cols:
self.__event_info =(col,item)
self.event_generate('<<TreeviewInplaceEdit>>')
if col in self._inplace_widgets:
w = self._inplace_widgets[col]
w.bind('<Key-Tab>',
lambda e: w.tk_focusNext().focus_set())
w.bind('<Shift-Key-Tab>',
lambda e: w.tk_focusPrev().focus_set()) | [
"def",
"__focus",
"(",
"self",
",",
"item",
")",
":",
"cols",
"=",
"self",
".",
"__get_display_columns",
"(",
")",
"for",
"col",
"in",
"cols",
":",
"self",
".",
"__event_info",
"=",
"(",
"col",
",",
"item",
")",
"self",
".",
"event_generate",
"(",
"'... | Called when focus item has changed | [
"Called",
"when",
"focus",
"item",
"has",
"changed"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/editabletreeview.py#L149-L160 | train | 227,728 |
alejandroautalan/pygubu | pygubu/widgets/editabletreeview.py | EditableTreeview.__clear_inplace_widgets | def __clear_inplace_widgets(self):
"""Remove all inplace edit widgets."""
cols = self.__get_display_columns()
#print('Clear:', cols)
for c in cols:
if c in self._inplace_widgets:
widget = self._inplace_widgets[c]
widget.place_forget()
self._inplace_widgets_show.pop(c, None) | python | def __clear_inplace_widgets(self):
"""Remove all inplace edit widgets."""
cols = self.__get_display_columns()
#print('Clear:', cols)
for c in cols:
if c in self._inplace_widgets:
widget = self._inplace_widgets[c]
widget.place_forget()
self._inplace_widgets_show.pop(c, None) | [
"def",
"__clear_inplace_widgets",
"(",
"self",
")",
":",
"cols",
"=",
"self",
".",
"__get_display_columns",
"(",
")",
"#print('Clear:', cols)",
"for",
"c",
"in",
"cols",
":",
"if",
"c",
"in",
"self",
".",
"_inplace_widgets",
":",
"widget",
"=",
"self",
".",
... | Remove all inplace edit widgets. | [
"Remove",
"all",
"inplace",
"edit",
"widgets",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/widgets/editabletreeview.py#L179-L187 | train | 227,729 |
alejandroautalan/pygubu | setup.py | CustomInstall.run | def run(self):
"""Run parent install, and then save the install dir in the script."""
install.run(self)
#
# Remove old pygubu.py from scripts path if exists
spath = os.path.join(self.install_scripts, 'pygubu')
for ext in ('.py', '.pyw'):
filename = spath + ext
if os.path.exists(filename):
os.remove(filename)
#
# Remove old pygubu-designer.bat
if platform.system() == 'Windows':
spath = os.path.join(self.install_scripts, 'pygubu-designer.bat')
if os.path.exists(spath):
os.remove(spath) | python | def run(self):
"""Run parent install, and then save the install dir in the script."""
install.run(self)
#
# Remove old pygubu.py from scripts path if exists
spath = os.path.join(self.install_scripts, 'pygubu')
for ext in ('.py', '.pyw'):
filename = spath + ext
if os.path.exists(filename):
os.remove(filename)
#
# Remove old pygubu-designer.bat
if platform.system() == 'Windows':
spath = os.path.join(self.install_scripts, 'pygubu-designer.bat')
if os.path.exists(spath):
os.remove(spath) | [
"def",
"run",
"(",
"self",
")",
":",
"install",
".",
"run",
"(",
"self",
")",
"#",
"# Remove old pygubu.py from scripts path if exists",
"spath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"install_scripts",
",",
"'pygubu'",
")",
"for",
"ext",
... | Run parent install, and then save the install dir in the script. | [
"Run",
"parent",
"install",
"and",
"then",
"save",
"the",
"install",
"dir",
"in",
"the",
"script",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/setup.py#L30-L46 | train | 227,730 |
alejandroautalan/pygubu | pygubudesigner/propertieseditor.py | PropertiesEditor.hide_all | def hide_all(self):
"""Hide all properties from property editor."""
self.current = None
for _v, (label, widget) in self._propbag.items():
label.grid_remove()
widget.grid_remove() | python | def hide_all(self):
"""Hide all properties from property editor."""
self.current = None
for _v, (label, widget) in self._propbag.items():
label.grid_remove()
widget.grid_remove() | [
"def",
"hide_all",
"(",
"self",
")",
":",
"self",
".",
"current",
"=",
"None",
"for",
"_v",
",",
"(",
"label",
",",
"widget",
")",
"in",
"self",
".",
"_propbag",
".",
"items",
"(",
")",
":",
"label",
".",
"grid_remove",
"(",
")",
"widget",
".",
"... | Hide all properties from property editor. | [
"Hide",
"all",
"properties",
"from",
"property",
"editor",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/propertieseditor.py#L150-L156 | train | 227,731 |
alejandroautalan/pygubu | pygubu/builder/builderobject.py | BuilderObject._get_init_args | def _get_init_args(self):
"""Creates dict with properties marked as readonly"""
args = {}
for rop in self.ro_properties:
if rop in self.properties:
args[rop] = self.properties[rop]
return args | python | def _get_init_args(self):
"""Creates dict with properties marked as readonly"""
args = {}
for rop in self.ro_properties:
if rop in self.properties:
args[rop] = self.properties[rop]
return args | [
"def",
"_get_init_args",
"(",
"self",
")",
":",
"args",
"=",
"{",
"}",
"for",
"rop",
"in",
"self",
".",
"ro_properties",
":",
"if",
"rop",
"in",
"self",
".",
"properties",
":",
"args",
"[",
"rop",
"]",
"=",
"self",
".",
"properties",
"[",
"rop",
"]... | Creates dict with properties marked as readonly | [
"Creates",
"dict",
"with",
"properties",
"marked",
"as",
"readonly"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/builder/builderobject.py#L86-L93 | train | 227,732 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | OnCanvasMenuPreview._calculate_menu_wh | def _calculate_menu_wh(self):
""" Calculate menu widht and height."""
w = iw = 50
h = ih = 0
# menu.index returns None if there are no choices
index = self._menu.index(tk.END)
index = index if index is not None else 0
count = index + 1
# First calculate using the font paramters of root menu:
font = self._menu.cget('font')
font = self._get_font(font)
for i in range(0, count):
mtype = self._menu.type(i)
if mtype == 'tearoff':
continue
label = 'default'
ifont = 'TkMenuFont'
if mtype != 'separator':
label = self._menu.entrycget(i, 'label')
ifont = self._menu.entrycget(i, 'font')
wpx = font.measure(label)
hpx = font.metrics('linespace')
w += wpx
if hpx > h:
h = hpx * 2
# Calculate using font configured for each subitem
ifont = self._get_font(ifont)
wpx = ifont.measure(label)
hpx = ifont.metrics('linespace')
iw += wpx
if hpx > ih:
ih = hpx * 2
# Then compare 2 sizes and use the greatest
w = max(w, iw, 100)
h = max(h, ih, 25)
self._cwidth = w + int(w * 0.25)
self._cheight = h + int(h * 0.25) | python | def _calculate_menu_wh(self):
""" Calculate menu widht and height."""
w = iw = 50
h = ih = 0
# menu.index returns None if there are no choices
index = self._menu.index(tk.END)
index = index if index is not None else 0
count = index + 1
# First calculate using the font paramters of root menu:
font = self._menu.cget('font')
font = self._get_font(font)
for i in range(0, count):
mtype = self._menu.type(i)
if mtype == 'tearoff':
continue
label = 'default'
ifont = 'TkMenuFont'
if mtype != 'separator':
label = self._menu.entrycget(i, 'label')
ifont = self._menu.entrycget(i, 'font')
wpx = font.measure(label)
hpx = font.metrics('linespace')
w += wpx
if hpx > h:
h = hpx * 2
# Calculate using font configured for each subitem
ifont = self._get_font(ifont)
wpx = ifont.measure(label)
hpx = ifont.metrics('linespace')
iw += wpx
if hpx > ih:
ih = hpx * 2
# Then compare 2 sizes and use the greatest
w = max(w, iw, 100)
h = max(h, ih, 25)
self._cwidth = w + int(w * 0.25)
self._cheight = h + int(h * 0.25) | [
"def",
"_calculate_menu_wh",
"(",
"self",
")",
":",
"w",
"=",
"iw",
"=",
"50",
"h",
"=",
"ih",
"=",
"0",
"# menu.index returns None if there are no choices",
"index",
"=",
"self",
".",
"_menu",
".",
"index",
"(",
"tk",
".",
"END",
")",
"index",
"=",
"ind... | Calculate menu widht and height. | [
"Calculate",
"menu",
"widht",
"and",
"height",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L283-L320 | train | 227,733 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | PreviewHelper._over_resizer | def _over_resizer(self, x, y):
"Returns True if mouse is over a resizer"
over_resizer = False
c = self.canvas
ids = c.find_overlapping(x, y, x, y)
if ids:
o = ids[0]
tags = c.gettags(o)
if 'resizer' in tags:
over_resizer = True
return over_resizer | python | def _over_resizer(self, x, y):
"Returns True if mouse is over a resizer"
over_resizer = False
c = self.canvas
ids = c.find_overlapping(x, y, x, y)
if ids:
o = ids[0]
tags = c.gettags(o)
if 'resizer' in tags:
over_resizer = True
return over_resizer | [
"def",
"_over_resizer",
"(",
"self",
",",
"x",
",",
"y",
")",
":",
"over_resizer",
"=",
"False",
"c",
"=",
"self",
".",
"canvas",
"ids",
"=",
"c",
".",
"find_overlapping",
"(",
"x",
",",
"y",
",",
"x",
",",
"y",
")",
"if",
"ids",
":",
"o",
"=",... | Returns True if mouse is over a resizer | [
"Returns",
"True",
"if",
"mouse",
"is",
"over",
"a",
"resizer"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L453-L464 | train | 227,734 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | PreviewHelper.resize_preview | def resize_preview(self, dw, dh):
"Resizes preview that is currently dragged"
# identify preview
if self._objects_moving:
id_ = self._objects_moving[0]
tags = self.canvas.gettags(id_)
for tag in tags:
if tag.startswith('preview_'):
_, ident = tag.split('preview_')
preview = self.previews[ident]
preview.resize_by(dw, dh)
self.move_previews()
break
self._update_cregion() | python | def resize_preview(self, dw, dh):
"Resizes preview that is currently dragged"
# identify preview
if self._objects_moving:
id_ = self._objects_moving[0]
tags = self.canvas.gettags(id_)
for tag in tags:
if tag.startswith('preview_'):
_, ident = tag.split('preview_')
preview = self.previews[ident]
preview.resize_by(dw, dh)
self.move_previews()
break
self._update_cregion() | [
"def",
"resize_preview",
"(",
"self",
",",
"dw",
",",
"dh",
")",
":",
"# identify preview",
"if",
"self",
".",
"_objects_moving",
":",
"id_",
"=",
"self",
".",
"_objects_moving",
"[",
"0",
"]",
"tags",
"=",
"self",
".",
"canvas",
".",
"gettags",
"(",
"... | Resizes preview that is currently dragged | [
"Resizes",
"preview",
"that",
"is",
"currently",
"dragged"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L466-L480 | train | 227,735 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | PreviewHelper.move_previews | def move_previews(self):
"Move previews after a resize event"
# calculate new positions
min_y = self._calc_preview_ypos()
for idx, (key, p) in enumerate(self.previews.items()):
new_dy = min_y[idx] - p.y
self.previews[key].move_by(0, new_dy)
self._update_cregion()
self.show_selected(self._sel_id, self._sel_widget) | python | def move_previews(self):
"Move previews after a resize event"
# calculate new positions
min_y = self._calc_preview_ypos()
for idx, (key, p) in enumerate(self.previews.items()):
new_dy = min_y[idx] - p.y
self.previews[key].move_by(0, new_dy)
self._update_cregion()
self.show_selected(self._sel_id, self._sel_widget) | [
"def",
"move_previews",
"(",
"self",
")",
":",
"# calculate new positions",
"min_y",
"=",
"self",
".",
"_calc_preview_ypos",
"(",
")",
"for",
"idx",
",",
"(",
"key",
",",
"p",
")",
"in",
"enumerate",
"(",
"self",
".",
"previews",
".",
"items",
"(",
")",
... | Move previews after a resize event | [
"Move",
"previews",
"after",
"a",
"resize",
"event"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L490-L499 | train | 227,736 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | PreviewHelper._calc_preview_ypos | def _calc_preview_ypos(self):
"Calculates the previews positions on canvas"
y = 10
min_y = [y]
for k, p in self.previews.items():
y += p.height() + self.padding
min_y.append(y)
return min_y | python | def _calc_preview_ypos(self):
"Calculates the previews positions on canvas"
y = 10
min_y = [y]
for k, p in self.previews.items():
y += p.height() + self.padding
min_y.append(y)
return min_y | [
"def",
"_calc_preview_ypos",
"(",
"self",
")",
":",
"y",
"=",
"10",
"min_y",
"=",
"[",
"y",
"]",
"for",
"k",
",",
"p",
"in",
"self",
".",
"previews",
".",
"items",
"(",
")",
":",
"y",
"+=",
"p",
".",
"height",
"(",
")",
"+",
"self",
".",
"pad... | Calculates the previews positions on canvas | [
"Calculates",
"the",
"previews",
"positions",
"on",
"canvas"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L501-L509 | train | 227,737 |
alejandroautalan/pygubu | pygubudesigner/previewer.py | PreviewHelper._get_slot | def _get_slot(self):
"Returns the next coordinates for a preview"
x = y = 10
for k, p in self.previews.items():
y += p.height() + self.padding
return x, y | python | def _get_slot(self):
"Returns the next coordinates for a preview"
x = y = 10
for k, p in self.previews.items():
y += p.height() + self.padding
return x, y | [
"def",
"_get_slot",
"(",
"self",
")",
":",
"x",
"=",
"y",
"=",
"10",
"for",
"k",
",",
"p",
"in",
"self",
".",
"previews",
".",
"items",
"(",
")",
":",
"y",
"+=",
"p",
".",
"height",
"(",
")",
"+",
"self",
".",
"padding",
"return",
"x",
",",
... | Returns the next coordinates for a preview | [
"Returns",
"the",
"next",
"coordinates",
"for",
"a",
"preview"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L511-L517 | train | 227,738 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage.clear_cache | def clear_cache(cls):
"""Call this before closing tk root"""
#Prevent tkinter errors on python 2 ??
for key in cls._cached:
cls._cached[key] = None
cls._cached = {} | python | def clear_cache(cls):
"""Call this before closing tk root"""
#Prevent tkinter errors on python 2 ??
for key in cls._cached:
cls._cached[key] = None
cls._cached = {} | [
"def",
"clear_cache",
"(",
"cls",
")",
":",
"#Prevent tkinter errors on python 2 ??",
"for",
"key",
"in",
"cls",
".",
"_cached",
":",
"cls",
".",
"_cached",
"[",
"key",
"]",
"=",
"None",
"cls",
".",
"_cached",
"=",
"{",
"}"
] | Call this before closing tk root | [
"Call",
"this",
"before",
"closing",
"tk",
"root"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L54-L59 | train | 227,739 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage.register | def register(cls, key, filename):
"""Register a image file using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'custom', 'filename': filename}
logger.info('%s registered as %s' % (filename, key)) | python | def register(cls, key, filename):
"""Register a image file using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'custom', 'filename': filename}
logger.info('%s registered as %s' % (filename, key)) | [
"def",
"register",
"(",
"cls",
",",
"key",
",",
"filename",
")",
":",
"if",
"key",
"in",
"cls",
".",
"_stock",
":",
"logger",
".",
"info",
"(",
"'Warning, replacing resource '",
"+",
"str",
"(",
"key",
")",
")",
"cls",
".",
"_stock",
"[",
"key",
"]",... | Register a image file using key | [
"Register",
"a",
"image",
"file",
"using",
"key"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L62-L68 | train | 227,740 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage.register_from_data | def register_from_data(cls, key, format, data):
"""Register a image data using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'data', 'data': data, 'format': format }
logger.info('%s registered as %s' % ('data', key)) | python | def register_from_data(cls, key, format, data):
"""Register a image data using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'data', 'data': data, 'format': format }
logger.info('%s registered as %s' % ('data', key)) | [
"def",
"register_from_data",
"(",
"cls",
",",
"key",
",",
"format",
",",
"data",
")",
":",
"if",
"key",
"in",
"cls",
".",
"_stock",
":",
"logger",
".",
"info",
"(",
"'Warning, replacing resource '",
"+",
"str",
"(",
"key",
")",
")",
"cls",
".",
"_stock... | Register a image data using key | [
"Register",
"a",
"image",
"data",
"using",
"key"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L71-L77 | train | 227,741 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage.register_created | def register_created(cls, key, image):
"""Register an already created image using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'created', 'image': image}
logger.info('%s registered as %s' % ('data', key)) | python | def register_created(cls, key, image):
"""Register an already created image using key"""
if key in cls._stock:
logger.info('Warning, replacing resource ' + str(key))
cls._stock[key] = {'type': 'created', 'image': image}
logger.info('%s registered as %s' % ('data', key)) | [
"def",
"register_created",
"(",
"cls",
",",
"key",
",",
"image",
")",
":",
"if",
"key",
"in",
"cls",
".",
"_stock",
":",
"logger",
".",
"info",
"(",
"'Warning, replacing resource '",
"+",
"str",
"(",
"key",
")",
")",
"cls",
".",
"_stock",
"[",
"key",
... | Register an already created image using key | [
"Register",
"an",
"already",
"created",
"image",
"using",
"key"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L80-L86 | train | 227,742 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage._load_image | def _load_image(cls, rkey):
"""Load image from file or return the cached instance."""
v = cls._stock[rkey]
img = None
itype = v['type']
if itype in ('stock', 'data'):
img = tk.PhotoImage(format=v['format'], data=v['data'])
elif itype == 'created':
img = v['image']
else:
img = tk.PhotoImage(file=v['filename'])
cls._cached[rkey] = img
logger.info('Loaded resource %s.' % rkey)
return img | python | def _load_image(cls, rkey):
"""Load image from file or return the cached instance."""
v = cls._stock[rkey]
img = None
itype = v['type']
if itype in ('stock', 'data'):
img = tk.PhotoImage(format=v['format'], data=v['data'])
elif itype == 'created':
img = v['image']
else:
img = tk.PhotoImage(file=v['filename'])
cls._cached[rkey] = img
logger.info('Loaded resource %s.' % rkey)
return img | [
"def",
"_load_image",
"(",
"cls",
",",
"rkey",
")",
":",
"v",
"=",
"cls",
".",
"_stock",
"[",
"rkey",
"]",
"img",
"=",
"None",
"itype",
"=",
"v",
"[",
"'type'",
"]",
"if",
"itype",
"in",
"(",
"'stock'",
",",
"'data'",
")",
":",
"img",
"=",
"tk"... | Load image from file or return the cached instance. | [
"Load",
"image",
"from",
"file",
"or",
"return",
"the",
"cached",
"instance",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L107-L121 | train | 227,743 |
alejandroautalan/pygubu | pygubu/stockimage.py | StockImage.get | def get(cls, rkey):
"""Get image previously registered with key rkey.
If key not exist, raise StockImageException
"""
if rkey in cls._cached:
logger.info('Resource %s is in cache.' % rkey)
return cls._cached[rkey]
if rkey in cls._stock:
img = cls._load_image(rkey)
return img
else:
raise StockImageException('StockImage: %s not registered.' % rkey) | python | def get(cls, rkey):
"""Get image previously registered with key rkey.
If key not exist, raise StockImageException
"""
if rkey in cls._cached:
logger.info('Resource %s is in cache.' % rkey)
return cls._cached[rkey]
if rkey in cls._stock:
img = cls._load_image(rkey)
return img
else:
raise StockImageException('StockImage: %s not registered.' % rkey) | [
"def",
"get",
"(",
"cls",
",",
"rkey",
")",
":",
"if",
"rkey",
"in",
"cls",
".",
"_cached",
":",
"logger",
".",
"info",
"(",
"'Resource %s is in cache.'",
"%",
"rkey",
")",
"return",
"cls",
".",
"_cached",
"[",
"rkey",
"]",
"if",
"rkey",
"in",
"cls",... | Get image previously registered with key rkey.
If key not exist, raise StockImageException | [
"Get",
"image",
"previously",
"registered",
"with",
"key",
"rkey",
".",
"If",
"key",
"not",
"exist",
"raise",
"StockImageException"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L124-L136 | train | 227,744 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.config_treeview | def config_treeview(self):
"""Sets treeview columns and other params"""
tree = self.treeview
tree.bind('<Double-1>', self.on_treeview_double_click)
tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+') | python | def config_treeview(self):
"""Sets treeview columns and other params"""
tree = self.treeview
tree.bind('<Double-1>', self.on_treeview_double_click)
tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+') | [
"def",
"config_treeview",
"(",
"self",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"tree",
".",
"bind",
"(",
"'<Double-1>'",
",",
"self",
".",
"on_treeview_double_click",
")",
"tree",
".",
"bind",
"(",
"'<<TreeviewSelect>>'",
",",
"self",
".",
"on_treevi... | Sets treeview columns and other params | [
"Sets",
"treeview",
"columns",
"and",
"other",
"params"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L79-L83 | train | 227,745 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.get_toplevel_parent | def get_toplevel_parent(self, treeitem):
"""Returns the top level parent for treeitem."""
tv = self.treeview
toplevel_items = tv.get_children()
item = treeitem
while not (item in toplevel_items):
item = tv.parent(item)
return item | python | def get_toplevel_parent(self, treeitem):
"""Returns the top level parent for treeitem."""
tv = self.treeview
toplevel_items = tv.get_children()
item = treeitem
while not (item in toplevel_items):
item = tv.parent(item)
return item | [
"def",
"get_toplevel_parent",
"(",
"self",
",",
"treeitem",
")",
":",
"tv",
"=",
"self",
".",
"treeview",
"toplevel_items",
"=",
"tv",
".",
"get_children",
"(",
")",
"item",
"=",
"treeitem",
"while",
"not",
"(",
"item",
"in",
"toplevel_items",
")",
":",
... | Returns the top level parent for treeitem. | [
"Returns",
"the",
"top",
"level",
"parent",
"for",
"treeitem",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L85-L94 | train | 227,746 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.draw_widget | def draw_widget(self, item):
"""Create a preview of the selected treeview item"""
if item:
self.filter_remove(remember=True)
selected_id = self.treedata[item]['id']
item = self.get_toplevel_parent(item)
widget_id = self.treedata[item]['id']
wclass = self.treedata[item]['class']
xmlnode = self.tree_node_to_xml('', item)
self.previewer.draw(item, widget_id, xmlnode, wclass)
self.previewer.show_selected(item, selected_id)
self.filter_restore() | python | def draw_widget(self, item):
"""Create a preview of the selected treeview item"""
if item:
self.filter_remove(remember=True)
selected_id = self.treedata[item]['id']
item = self.get_toplevel_parent(item)
widget_id = self.treedata[item]['id']
wclass = self.treedata[item]['class']
xmlnode = self.tree_node_to_xml('', item)
self.previewer.draw(item, widget_id, xmlnode, wclass)
self.previewer.show_selected(item, selected_id)
self.filter_restore() | [
"def",
"draw_widget",
"(",
"self",
",",
"item",
")",
":",
"if",
"item",
":",
"self",
".",
"filter_remove",
"(",
"remember",
"=",
"True",
")",
"selected_id",
"=",
"self",
".",
"treedata",
"[",
"item",
"]",
"[",
"'id'",
"]",
"item",
"=",
"self",
".",
... | Create a preview of the selected treeview item | [
"Create",
"a",
"preview",
"of",
"the",
"selected",
"treeview",
"item"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L96-L107 | train | 227,747 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.on_treeview_delete_selection | def on_treeview_delete_selection(self, event=None):
"""Removes selected items from treeview"""
tv = self.treeview
selection = tv.selection()
# Need to remove filter
self.filter_remove(remember=True)
toplevel_items = tv.get_children()
parents_to_redraw = set()
for item in selection:
try:
parent = ''
if item not in toplevel_items:
parent = self.get_toplevel_parent(item)
else:
self.previewer.delete(item)
del self.treedata[item]
tv.delete(item)
self.app.set_changed()
if parent:
self._update_max_grid_rc(parent)
parents_to_redraw.add(parent)
self.widget_editor.hide_all()
except tk.TclError:
# Selection of parent and child items ??
# TODO: notify something here
pass
# redraw widgets
for item in parents_to_redraw:
self.draw_widget(item)
# restore filter
self.filter_restore() | python | def on_treeview_delete_selection(self, event=None):
"""Removes selected items from treeview"""
tv = self.treeview
selection = tv.selection()
# Need to remove filter
self.filter_remove(remember=True)
toplevel_items = tv.get_children()
parents_to_redraw = set()
for item in selection:
try:
parent = ''
if item not in toplevel_items:
parent = self.get_toplevel_parent(item)
else:
self.previewer.delete(item)
del self.treedata[item]
tv.delete(item)
self.app.set_changed()
if parent:
self._update_max_grid_rc(parent)
parents_to_redraw.add(parent)
self.widget_editor.hide_all()
except tk.TclError:
# Selection of parent and child items ??
# TODO: notify something here
pass
# redraw widgets
for item in parents_to_redraw:
self.draw_widget(item)
# restore filter
self.filter_restore() | [
"def",
"on_treeview_delete_selection",
"(",
"self",
",",
"event",
"=",
"None",
")",
":",
"tv",
"=",
"self",
".",
"treeview",
"selection",
"=",
"tv",
".",
"selection",
"(",
")",
"# Need to remove filter",
"self",
".",
"filter_remove",
"(",
"remember",
"=",
"T... | Removes selected items from treeview | [
"Removes",
"selected",
"items",
"from",
"treeview"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L134-L167 | train | 227,748 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.tree_to_xml | def tree_to_xml(self):
"""Traverses treeview and generates a ElementTree object"""
# Need to remove filter or hidden items will not be saved.
self.filter_remove(remember=True)
tree = self.treeview
root = ET.Element('interface')
items = tree.get_children()
for item in items:
node = self.tree_node_to_xml('', item)
root.append(node)
# restore filter
self.filter_restore()
return ET.ElementTree(root) | python | def tree_to_xml(self):
"""Traverses treeview and generates a ElementTree object"""
# Need to remove filter or hidden items will not be saved.
self.filter_remove(remember=True)
tree = self.treeview
root = ET.Element('interface')
items = tree.get_children()
for item in items:
node = self.tree_node_to_xml('', item)
root.append(node)
# restore filter
self.filter_restore()
return ET.ElementTree(root) | [
"def",
"tree_to_xml",
"(",
"self",
")",
":",
"# Need to remove filter or hidden items will not be saved.",
"self",
".",
"filter_remove",
"(",
"remember",
"=",
"True",
")",
"tree",
"=",
"self",
".",
"treeview",
"root",
"=",
"ET",
".",
"Element",
"(",
"'interface'",... | Traverses treeview and generates a ElementTree object | [
"Traverses",
"treeview",
"and",
"generates",
"a",
"ElementTree",
"object"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L169-L185 | train | 227,749 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.tree_node_to_xml | def tree_node_to_xml(self, parent, item):
"""Converts a treeview item and children to xml nodes"""
tree = self.treeview
data = self.treedata[item]
node = data.to_xml_node()
children = tree.get_children(item)
for child in children:
cnode = ET.Element('child')
cwidget = self.tree_node_to_xml(item, child)
cnode.append(cwidget)
node.append(cnode)
return node | python | def tree_node_to_xml(self, parent, item):
"""Converts a treeview item and children to xml nodes"""
tree = self.treeview
data = self.treedata[item]
node = data.to_xml_node()
children = tree.get_children(item)
for child in children:
cnode = ET.Element('child')
cwidget = self.tree_node_to_xml(item, child)
cnode.append(cwidget)
node.append(cnode)
return node | [
"def",
"tree_node_to_xml",
"(",
"self",
",",
"parent",
",",
"item",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"data",
"=",
"self",
".",
"treedata",
"[",
"item",
"]",
"node",
"=",
"data",
".",
"to_xml_node",
"(",
")",
"children",
"=",
"tree",
".... | Converts a treeview item and children to xml nodes | [
"Converts",
"a",
"treeview",
"item",
"and",
"children",
"to",
"xml",
"nodes"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L187-L201 | train | 227,750 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor._insert_item | def _insert_item(self, root, data, from_file=False):
"""Insert a item on the treeview and fills columns from data"""
tree = self.treeview
treelabel = data.get_id()
row = col = ''
if root != '' and 'layout' in data:
row = data.get_layout_property('row')
col = data.get_layout_property('column')
# fix row position when using copy and paste
# If collision, increase by 1
row_count = self.get_max_row(root)
if not from_file and (row_count > int(row) and int(col) == 0):
row = str(row_count + 1)
data.set_layout_property('row', row)
image = ''
try:
image = StockImage.get('16x16-tk.default')
except StockImageException:
# TODO: notify something here
pass
try:
image = StockImage.get('16x16-{0}'.format(data.get_class()))
except StockImageException:
# TODO: notify something here
pass
values = (data.get_class(), row, col)
item = tree.insert(root, 'end', text=treelabel, values=values,
image=image)
data.attach(self)
self.treedata[item] = data
# Update grid r/c data
self._update_max_grid_rc(root, from_file=True)
self.app.set_changed()
return item | python | def _insert_item(self, root, data, from_file=False):
"""Insert a item on the treeview and fills columns from data"""
tree = self.treeview
treelabel = data.get_id()
row = col = ''
if root != '' and 'layout' in data:
row = data.get_layout_property('row')
col = data.get_layout_property('column')
# fix row position when using copy and paste
# If collision, increase by 1
row_count = self.get_max_row(root)
if not from_file and (row_count > int(row) and int(col) == 0):
row = str(row_count + 1)
data.set_layout_property('row', row)
image = ''
try:
image = StockImage.get('16x16-tk.default')
except StockImageException:
# TODO: notify something here
pass
try:
image = StockImage.get('16x16-{0}'.format(data.get_class()))
except StockImageException:
# TODO: notify something here
pass
values = (data.get_class(), row, col)
item = tree.insert(root, 'end', text=treelabel, values=values,
image=image)
data.attach(self)
self.treedata[item] = data
# Update grid r/c data
self._update_max_grid_rc(root, from_file=True)
self.app.set_changed()
return item | [
"def",
"_insert_item",
"(",
"self",
",",
"root",
",",
"data",
",",
"from_file",
"=",
"False",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"treelabel",
"=",
"data",
".",
"get_id",
"(",
")",
"row",
"=",
"col",
"=",
"''",
"if",
"root",
"!=",
"''",... | Insert a item on the treeview and fills columns from data | [
"Insert",
"a",
"item",
"on",
"the",
"treeview",
"and",
"fills",
"columns",
"from",
"data"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L203-L243 | train | 227,751 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.copy_to_clipboard | def copy_to_clipboard(self):
"""
Copies selected items to clipboard.
"""
tree = self.treeview
# get the selected item:
selection = tree.selection()
if selection:
self.filter_remove(remember=True)
root = ET.Element('selection')
for item in selection:
node = self.tree_node_to_xml('', item)
root.append(node)
# python2 issue
try:
text = ET.tostring(root, encoding='unicode')
except LookupError:
text = ET.tostring(root, encoding='UTF-8')
tree.clipboard_clear()
tree.clipboard_append(text)
self.filter_restore() | python | def copy_to_clipboard(self):
"""
Copies selected items to clipboard.
"""
tree = self.treeview
# get the selected item:
selection = tree.selection()
if selection:
self.filter_remove(remember=True)
root = ET.Element('selection')
for item in selection:
node = self.tree_node_to_xml('', item)
root.append(node)
# python2 issue
try:
text = ET.tostring(root, encoding='unicode')
except LookupError:
text = ET.tostring(root, encoding='UTF-8')
tree.clipboard_clear()
tree.clipboard_append(text)
self.filter_restore() | [
"def",
"copy_to_clipboard",
"(",
"self",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"# get the selected item:",
"selection",
"=",
"tree",
".",
"selection",
"(",
")",
"if",
"selection",
":",
"self",
".",
"filter_remove",
"(",
"remember",
"=",
"True",
")"... | Copies selected items to clipboard. | [
"Copies",
"selected",
"items",
"to",
"clipboard",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L255-L275 | train | 227,752 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.add_widget | def add_widget(self, wclass):
"""Adds a new item to the treeview."""
tree = self.treeview
# get the selected item:
selected_item = ''
tsel = tree.selection()
if tsel:
selected_item = tsel[0]
# Need to remove filter if set
self.filter_remove()
root = selected_item
# check if the widget can be added at selected point
if not self._validate_add(root, wclass, False):
# if not try to add at item parent level
parent = tree.parent(root)
if parent != root:
if self._validate_add(parent, wclass):
root = parent
else:
return
else:
return
# root item should be set at this point
# setup properties
widget_id = self.get_unique_id(wclass)
data = WidgetDescr(wclass, widget_id)
# setup default values for properties
for pname in builder.CLASS_MAP[wclass].builder.properties:
pdescription = {}
if pname in properties.WIDGET_PROPERTIES:
pdescription = properties.WIDGET_PROPERTIES[pname]
if wclass in pdescription:
pdescription = dict(pdescription, **pdescription[wclass])
default_value = str(pdescription.get('default', ''))
data.set_property(pname, default_value)
# default text for widgets with text prop:
if pname in ('text', 'label'):
data.set_property(pname, widget_id)
#
# default grid properties
#
# is_container = builder.CLASS_MAP[wclass].builder.container
for prop_name in properties.GRID_PROPERTIES:
pdescription = properties.LAYOUT_OPTIONS[prop_name]
if wclass in pdescription:
pdescription = dict(pdescription, **pdescription[wclass])
default_value = str(pdescription.get('default', ''))
data.set_layout_property(prop_name, default_value)
rownum = '0'
if root:
rownum = str(self.get_max_row(root)+1)
data.set_layout_property('row', rownum)
data.set_layout_property('column', '0')
item = self._insert_item(root, data)
# Do redraw
self.draw_widget(item)
# Select and show the item created
tree.after_idle(lambda: tree.selection_set(item))
tree.after_idle(lambda: tree.focus(item))
tree.after_idle(lambda: tree.see(item)) | python | def add_widget(self, wclass):
"""Adds a new item to the treeview."""
tree = self.treeview
# get the selected item:
selected_item = ''
tsel = tree.selection()
if tsel:
selected_item = tsel[0]
# Need to remove filter if set
self.filter_remove()
root = selected_item
# check if the widget can be added at selected point
if not self._validate_add(root, wclass, False):
# if not try to add at item parent level
parent = tree.parent(root)
if parent != root:
if self._validate_add(parent, wclass):
root = parent
else:
return
else:
return
# root item should be set at this point
# setup properties
widget_id = self.get_unique_id(wclass)
data = WidgetDescr(wclass, widget_id)
# setup default values for properties
for pname in builder.CLASS_MAP[wclass].builder.properties:
pdescription = {}
if pname in properties.WIDGET_PROPERTIES:
pdescription = properties.WIDGET_PROPERTIES[pname]
if wclass in pdescription:
pdescription = dict(pdescription, **pdescription[wclass])
default_value = str(pdescription.get('default', ''))
data.set_property(pname, default_value)
# default text for widgets with text prop:
if pname in ('text', 'label'):
data.set_property(pname, widget_id)
#
# default grid properties
#
# is_container = builder.CLASS_MAP[wclass].builder.container
for prop_name in properties.GRID_PROPERTIES:
pdescription = properties.LAYOUT_OPTIONS[prop_name]
if wclass in pdescription:
pdescription = dict(pdescription, **pdescription[wclass])
default_value = str(pdescription.get('default', ''))
data.set_layout_property(prop_name, default_value)
rownum = '0'
if root:
rownum = str(self.get_max_row(root)+1)
data.set_layout_property('row', rownum)
data.set_layout_property('column', '0')
item = self._insert_item(root, data)
# Do redraw
self.draw_widget(item)
# Select and show the item created
tree.after_idle(lambda: tree.selection_set(item))
tree.after_idle(lambda: tree.focus(item))
tree.after_idle(lambda: tree.see(item)) | [
"def",
"add_widget",
"(",
"self",
",",
"wclass",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"# get the selected item:",
"selected_item",
"=",
"''",
"tsel",
"=",
"tree",
".",
"selection",
"(",
")",
"if",
"tsel",
":",
"selected_item",
"=",
"tsel",
"[",... | Adds a new item to the treeview. | [
"Adds",
"a",
"new",
"item",
"to",
"the",
"treeview",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L422-L492 | train | 227,753 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.load_file | def load_file(self, filename):
"""Load file into treeview"""
self.counter.clear()
# python2 issues
try:
etree = ET.parse(filename)
except ET.ParseError:
parser = ET.XMLParser(encoding='UTF-8')
etree = ET.parse(filename, parser)
eroot = etree.getroot()
self.remove_all()
self.previewer.remove_all()
self.widget_editor.hide_all()
self.previewer.resource_paths.append(os.path.dirname(filename))
for element in eroot:
self.populate_tree('', eroot, element,from_file=True)
children = self.treeview.get_children('')
for child in children:
self.draw_widget(child)
self.previewer.show_selected(None, None) | python | def load_file(self, filename):
"""Load file into treeview"""
self.counter.clear()
# python2 issues
try:
etree = ET.parse(filename)
except ET.ParseError:
parser = ET.XMLParser(encoding='UTF-8')
etree = ET.parse(filename, parser)
eroot = etree.getroot()
self.remove_all()
self.previewer.remove_all()
self.widget_editor.hide_all()
self.previewer.resource_paths.append(os.path.dirname(filename))
for element in eroot:
self.populate_tree('', eroot, element,from_file=True)
children = self.treeview.get_children('')
for child in children:
self.draw_widget(child)
self.previewer.show_selected(None, None) | [
"def",
"load_file",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"counter",
".",
"clear",
"(",
")",
"# python2 issues",
"try",
":",
"etree",
"=",
"ET",
".",
"parse",
"(",
"filename",
")",
"except",
"ET",
".",
"ParseError",
":",
"parser",
"=",
... | Load file into treeview | [
"Load",
"file",
"into",
"treeview"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L501-L523 | train | 227,754 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.populate_tree | def populate_tree(self, master, parent, element,from_file=False):
"""Reads xml nodes and populates tree item"""
data = WidgetDescr(None, None)
data.from_xml_node(element)
cname = data.get_class()
uniqueid = self.get_unique_id(cname, data.get_id())
data.set_property('id', uniqueid)
if cname in builder.CLASS_MAP:
pwidget = self._insert_item(master, data,from_file=from_file)
xpath = "./child"
children = element.findall(xpath)
for child in children:
child_object = child.find('./object')
cwidget = self.populate_tree(pwidget, child, child_object,from_file=from_file)
return pwidget
else:
raise Exception('Class "{0}" not mapped'.format(cname)) | python | def populate_tree(self, master, parent, element,from_file=False):
"""Reads xml nodes and populates tree item"""
data = WidgetDescr(None, None)
data.from_xml_node(element)
cname = data.get_class()
uniqueid = self.get_unique_id(cname, data.get_id())
data.set_property('id', uniqueid)
if cname in builder.CLASS_MAP:
pwidget = self._insert_item(master, data,from_file=from_file)
xpath = "./child"
children = element.findall(xpath)
for child in children:
child_object = child.find('./object')
cwidget = self.populate_tree(pwidget, child, child_object,from_file=from_file)
return pwidget
else:
raise Exception('Class "{0}" not mapped'.format(cname)) | [
"def",
"populate_tree",
"(",
"self",
",",
"master",
",",
"parent",
",",
"element",
",",
"from_file",
"=",
"False",
")",
":",
"data",
"=",
"WidgetDescr",
"(",
"None",
",",
"None",
")",
"data",
".",
"from_xml_node",
"(",
"element",
")",
"cname",
"=",
"da... | Reads xml nodes and populates tree item | [
"Reads",
"xml",
"nodes",
"and",
"populates",
"tree",
"item"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L525-L544 | train | 227,755 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor.update_event | def update_event(self, hint, obj):
"""Updates tree colums when itemdata is changed."""
tree = self.treeview
data = obj
item = self.get_item_by_data(obj)
if item:
if data.get_id() != tree.item(item, 'text'):
tree.item(item, text=data.get_id())
# if tree.parent(item) != '' and 'layout' in data:
if tree.parent(item) != '':
row = data.get_layout_property('row')
col = data.get_layout_property('column')
values = tree.item(item, 'values')
if (row != values[1] or col != values[2]):
values = (data.get_class(), row, col)
tree.item(item, values=values)
self.draw_widget(item)
self.app.set_changed() | python | def update_event(self, hint, obj):
"""Updates tree colums when itemdata is changed."""
tree = self.treeview
data = obj
item = self.get_item_by_data(obj)
if item:
if data.get_id() != tree.item(item, 'text'):
tree.item(item, text=data.get_id())
# if tree.parent(item) != '' and 'layout' in data:
if tree.parent(item) != '':
row = data.get_layout_property('row')
col = data.get_layout_property('column')
values = tree.item(item, 'values')
if (row != values[1] or col != values[2]):
values = (data.get_class(), row, col)
tree.item(item, values=values)
self.draw_widget(item)
self.app.set_changed() | [
"def",
"update_event",
"(",
"self",
",",
"hint",
",",
"obj",
")",
":",
"tree",
"=",
"self",
".",
"treeview",
"data",
"=",
"obj",
"item",
"=",
"self",
".",
"get_item_by_data",
"(",
"obj",
")",
"if",
"item",
":",
"if",
"data",
".",
"get_id",
"(",
")"... | Updates tree colums when itemdata is changed. | [
"Updates",
"tree",
"colums",
"when",
"itemdata",
"is",
"changed",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L586-L604 | train | 227,756 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor._reatach | def _reatach(self):
"""Reinsert the hidden items."""
for item, p, idx in self._detached:
# The item may have been deleted.
if self.treeview.exists(item) and self.treeview.exists(p):
self.treeview.move(item, p, idx)
self._detached = [] | python | def _reatach(self):
"""Reinsert the hidden items."""
for item, p, idx in self._detached:
# The item may have been deleted.
if self.treeview.exists(item) and self.treeview.exists(p):
self.treeview.move(item, p, idx)
self._detached = [] | [
"def",
"_reatach",
"(",
"self",
")",
":",
"for",
"item",
",",
"p",
",",
"idx",
"in",
"self",
".",
"_detached",
":",
"# The item may have been deleted.",
"if",
"self",
".",
"treeview",
".",
"exists",
"(",
"item",
")",
"and",
"self",
".",
"treeview",
".",
... | Reinsert the hidden items. | [
"Reinsert",
"the",
"hidden",
"items",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L741-L747 | train | 227,757 |
alejandroautalan/pygubu | pygubudesigner/uitreeeditor.py | WidgetsTreeEditor._detach | def _detach(self, item):
"""Hide items from treeview that do not match the search string."""
to_detach = []
children_det = []
children_match = False
match_found = False
value = self.filtervar.get()
txt = self.treeview.item(item, 'text').lower()
if value in txt:
match_found = True
else:
class_txt = self.treedata[item].get_class().lower()
if value in class_txt:
match_found = True
parent = self.treeview.parent(item)
idx = self.treeview.index(item)
children = self.treeview.get_children(item)
if children:
for child in children:
match, detach = self._detach(child)
children_match = children_match | match
if detach:
children_det.extend(detach)
if match_found:
if children_det:
to_detach.extend(children_det)
else:
if children_match:
if children_det:
to_detach.extend(children_det)
else:
to_detach.append((item, parent, idx))
match_found = match_found | children_match
return match_found, to_detach | python | def _detach(self, item):
"""Hide items from treeview that do not match the search string."""
to_detach = []
children_det = []
children_match = False
match_found = False
value = self.filtervar.get()
txt = self.treeview.item(item, 'text').lower()
if value in txt:
match_found = True
else:
class_txt = self.treedata[item].get_class().lower()
if value in class_txt:
match_found = True
parent = self.treeview.parent(item)
idx = self.treeview.index(item)
children = self.treeview.get_children(item)
if children:
for child in children:
match, detach = self._detach(child)
children_match = children_match | match
if detach:
children_det.extend(detach)
if match_found:
if children_det:
to_detach.extend(children_det)
else:
if children_match:
if children_det:
to_detach.extend(children_det)
else:
to_detach.append((item, parent, idx))
match_found = match_found | children_match
return match_found, to_detach | [
"def",
"_detach",
"(",
"self",
",",
"item",
")",
":",
"to_detach",
"=",
"[",
"]",
"children_det",
"=",
"[",
"]",
"children_match",
"=",
"False",
"match_found",
"=",
"False",
"value",
"=",
"self",
".",
"filtervar",
".",
"get",
"(",
")",
"txt",
"=",
"s... | Hide items from treeview that do not match the search string. | [
"Hide",
"items",
"from",
"treeview",
"that",
"do",
"not",
"match",
"the",
"search",
"string",
"."
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L749-L785 | train | 227,758 |
alejandroautalan/pygubu | pygubudesigner/main.py | PygubuUI.load_file | def load_file(self, filename):
"""Load xml into treeview"""
self.tree_editor.load_file(filename)
self.project_name.configure(text=filename)
self.currentfile = filename
self.is_changed = False | python | def load_file(self, filename):
"""Load xml into treeview"""
self.tree_editor.load_file(filename)
self.project_name.configure(text=filename)
self.currentfile = filename
self.is_changed = False | [
"def",
"load_file",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"tree_editor",
".",
"load_file",
"(",
"filename",
")",
"self",
".",
"project_name",
".",
"configure",
"(",
"text",
"=",
"filename",
")",
"self",
".",
"currentfile",
"=",
"filename",
... | Load xml into treeview | [
"Load",
"xml",
"into",
"treeview"
] | 41c8fb37ef973736ec5d68cbe1cd4ecb78712e40 | https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/main.py#L514-L520 | train | 227,759 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/__init__.py | lower_ir | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR into an IR form that can be represented in Gremlin queries.
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
list of IR blocks suitable for outputting as Gremlin
"""
sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)
ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
if type_equivalence_hints:
ir_blocks = lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints)
ir_blocks = lower_coerce_type_blocks(ir_blocks)
ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
ir_blocks = lower_folded_outputs(ir_blocks)
return ir_blocks | python | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR into an IR form that can be represented in Gremlin queries.
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
list of IR blocks suitable for outputting as Gremlin
"""
sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)
ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
if type_equivalence_hints:
ir_blocks = lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints)
ir_blocks = lower_coerce_type_blocks(ir_blocks)
ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
ir_blocks = lower_folded_outputs(ir_blocks)
return ir_blocks | [
"def",
"lower_ir",
"(",
"ir_blocks",
",",
"query_metadata_table",
",",
"type_equivalence_hints",
"=",
"None",
")",
":",
"sanity_check_ir_blocks_from_frontend",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
"ir_blocks",
"=",
"lower_context_field_existence",
"(",
"ir_... | Lower the IR into an IR form that can be represented in Gremlin queries.
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
list of IR blocks suitable for outputting as Gremlin | [
"Lower",
"the",
"IR",
"into",
"an",
"IR",
"form",
"that",
"can",
"be",
"represented",
"in",
"Gremlin",
"queries",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/__init__.py#L13-L53 | train | 227,760 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | lower_coerce_type_block_type_data | def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
"""Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion."""
allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
allowed_value_type_spec = GraphQLUnionType
# Validate that the type_equivalence_hints parameter has correct types.
for key, value in six.iteritems(type_equivalence_hints):
if (not isinstance(key, allowed_key_type_spec) or
not isinstance(value, allowed_value_type_spec)):
msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
u'was unexpected, expected a hint in the form '
u'GraphQLInterfaceType -> GraphQLUnionType or '
u'GraphQLObjectType -> GraphQLUnionType'.format(key.name, str(type(key)),
value.name, str(type(value))))
raise GraphQLCompilationError(msg)
# CoerceType blocks only know the name of the type to which they coerce,
# and not its corresponding GraphQL type object. Convert the type equivalence hints into
# a dict of type name -> set of names of equivalent types, which can be used more readily.
equivalent_type_names = {
key.name: {x.name for x in value.types}
for key, value in six.iteritems(type_equivalence_hints)
}
new_ir_blocks = []
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
target_class = get_only_element_from_collection(block.target_class)
if target_class in equivalent_type_names:
new_block = CoerceType(equivalent_type_names[target_class])
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
"""Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion."""
allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
allowed_value_type_spec = GraphQLUnionType
# Validate that the type_equivalence_hints parameter has correct types.
for key, value in six.iteritems(type_equivalence_hints):
if (not isinstance(key, allowed_key_type_spec) or
not isinstance(value, allowed_value_type_spec)):
msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
u'was unexpected, expected a hint in the form '
u'GraphQLInterfaceType -> GraphQLUnionType or '
u'GraphQLObjectType -> GraphQLUnionType'.format(key.name, str(type(key)),
value.name, str(type(value))))
raise GraphQLCompilationError(msg)
# CoerceType blocks only know the name of the type to which they coerce,
# and not its corresponding GraphQL type object. Convert the type equivalence hints into
# a dict of type name -> set of names of equivalent types, which can be used more readily.
equivalent_type_names = {
key.name: {x.name for x in value.types}
for key, value in six.iteritems(type_equivalence_hints)
}
new_ir_blocks = []
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
target_class = get_only_element_from_collection(block.target_class)
if target_class in equivalent_type_names:
new_block = CoerceType(equivalent_type_names[target_class])
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"lower_coerce_type_block_type_data",
"(",
"ir_blocks",
",",
"type_equivalence_hints",
")",
":",
"allowed_key_type_spec",
"=",
"(",
"GraphQLInterfaceType",
",",
"GraphQLObjectType",
")",
"allowed_value_type_spec",
"=",
"GraphQLUnionType",
"# Validate that the type_equivalen... | Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion. | [
"Rewrite",
"CoerceType",
"blocks",
"to",
"explicitly",
"state",
"which",
"types",
"are",
"allowed",
"in",
"the",
"coercion",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L31-L65 | train | 227,761 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | lower_coerce_type_blocks | def lower_coerce_type_blocks(ir_blocks):
"""Lower CoerceType blocks into Filter blocks with a type-check predicate."""
new_ir_blocks = []
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
predicate = BinaryComposition(
u'contains', Literal(list(block.target_class)), LocalField('@class'))
new_block = Filter(predicate)
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def lower_coerce_type_blocks(ir_blocks):
"""Lower CoerceType blocks into Filter blocks with a type-check predicate."""
new_ir_blocks = []
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
predicate = BinaryComposition(
u'contains', Literal(list(block.target_class)), LocalField('@class'))
new_block = Filter(predicate)
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"lower_coerce_type_blocks",
"(",
"ir_blocks",
")",
":",
"new_ir_blocks",
"=",
"[",
"]",
"for",
"block",
"in",
"ir_blocks",
":",
"new_block",
"=",
"block",
"if",
"isinstance",
"(",
"block",
",",
"CoerceType",
")",
":",
"predicate",
"=",
"BinaryCompositio... | Lower CoerceType blocks into Filter blocks with a type-check predicate. | [
"Lower",
"CoerceType",
"blocks",
"into",
"Filter",
"blocks",
"with",
"a",
"type",
"-",
"check",
"predicate",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L68-L81 | train | 227,762 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | rewrite_filters_in_optional_blocks | def rewrite_filters_in_optional_blocks(ir_blocks):
"""In optional contexts, add a check for null that allows non-existent optional data through.
Optional traversals in Gremlin represent missing optional data by setting the current vertex
to null until the exit from the optional scope. Therefore, filtering and type coercions
(which should have been lowered into filters by this point) must check for null before
applying their filtering predicates. Since missing optional data isn't filtered,
the new filtering predicate should be "(it == null) || existing_predicate".
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
Returns:
new list of IR blocks with this lowering step applied
"""
new_ir_blocks = []
optional_context_depth = 0
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
raise AssertionError(u'Found a CoerceType block after all such blocks should have been '
u'lowered to Filter blocks: {}'.format(ir_blocks))
elif isinstance(block, Traverse) and block.optional:
optional_context_depth += 1
elif isinstance(block, Backtrack) and block.optional:
optional_context_depth -= 1
if optional_context_depth < 0:
raise AssertionError(u'Reached negative optional context depth for blocks: '
u'{}'.format(ir_blocks))
elif isinstance(block, Filter) and optional_context_depth > 0:
null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral)
new_block = Filter(BinaryComposition(u'||', null_check, block.predicate))
else:
pass
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def rewrite_filters_in_optional_blocks(ir_blocks):
"""In optional contexts, add a check for null that allows non-existent optional data through.
Optional traversals in Gremlin represent missing optional data by setting the current vertex
to null until the exit from the optional scope. Therefore, filtering and type coercions
(which should have been lowered into filters by this point) must check for null before
applying their filtering predicates. Since missing optional data isn't filtered,
the new filtering predicate should be "(it == null) || existing_predicate".
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
Returns:
new list of IR blocks with this lowering step applied
"""
new_ir_blocks = []
optional_context_depth = 0
for block in ir_blocks:
new_block = block
if isinstance(block, CoerceType):
raise AssertionError(u'Found a CoerceType block after all such blocks should have been '
u'lowered to Filter blocks: {}'.format(ir_blocks))
elif isinstance(block, Traverse) and block.optional:
optional_context_depth += 1
elif isinstance(block, Backtrack) and block.optional:
optional_context_depth -= 1
if optional_context_depth < 0:
raise AssertionError(u'Reached negative optional context depth for blocks: '
u'{}'.format(ir_blocks))
elif isinstance(block, Filter) and optional_context_depth > 0:
null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral)
new_block = Filter(BinaryComposition(u'||', null_check, block.predicate))
else:
pass
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"rewrite_filters_in_optional_blocks",
"(",
"ir_blocks",
")",
":",
"new_ir_blocks",
"=",
"[",
"]",
"optional_context_depth",
"=",
"0",
"for",
"block",
"in",
"ir_blocks",
":",
"new_block",
"=",
"block",
"if",
"isinstance",
"(",
"block",
",",
"CoerceType",
"... | In optional contexts, add a check for null that allows non-existent optional data through.
Optional traversals in Gremlin represent missing optional data by setting the current vertex
to null until the exit from the optional scope. Therefore, filtering and type coercions
(which should have been lowered into filters by this point) must check for null before
applying their filtering predicates. Since missing optional data isn't filtered,
the new filtering predicate should be "(it == null) || existing_predicate".
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
Returns:
new list of IR blocks with this lowering step applied | [
"In",
"optional",
"contexts",
"add",
"a",
"check",
"for",
"null",
"that",
"allows",
"non",
"-",
"existent",
"optional",
"data",
"through",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L84-L122 | train | 227,763 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | lower_folded_outputs | def lower_folded_outputs(ir_blocks):
"""Lower standard folded output fields into GremlinFoldedContextField objects."""
folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
if not remaining_ir_blocks:
raise AssertionError(u'Expected at least one non-folded block to remain: {} {} '
u'{}'.format(folds, remaining_ir_blocks, ir_blocks))
output_block = remaining_ir_blocks[-1]
if not isinstance(output_block, ConstructResult):
raise AssertionError(u'Expected the last non-folded block to be ConstructResult, '
u'but instead was: {} {} '
u'{}'.format(type(output_block), output_block, ir_blocks))
# Turn folded Filter blocks into GremlinFoldedFilter blocks.
converted_folds = {
base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks)
for base_fold_location, folded_ir_blocks in six.iteritems(folds)
}
new_output_fields = dict()
for output_name, output_expression in six.iteritems(output_block.fields):
new_output_expression = output_expression
# Turn FoldedContextField expressions into GremlinFoldedContextField ones.
if isinstance(output_expression, FoldedContextField):
# Get the matching folded IR blocks and put them in the new context field.
base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0]
folded_ir_blocks = converted_folds[base_fold_location_name]
new_output_expression = GremlinFoldedContextField(
output_expression.fold_scope_location, folded_ir_blocks,
output_expression.field_type)
new_output_fields[output_name] = new_output_expression
new_ir_blocks = remaining_ir_blocks[:-1]
new_ir_blocks.append(ConstructResult(new_output_fields))
return new_ir_blocks | python | def lower_folded_outputs(ir_blocks):
"""Lower standard folded output fields into GremlinFoldedContextField objects."""
folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
if not remaining_ir_blocks:
raise AssertionError(u'Expected at least one non-folded block to remain: {} {} '
u'{}'.format(folds, remaining_ir_blocks, ir_blocks))
output_block = remaining_ir_blocks[-1]
if not isinstance(output_block, ConstructResult):
raise AssertionError(u'Expected the last non-folded block to be ConstructResult, '
u'but instead was: {} {} '
u'{}'.format(type(output_block), output_block, ir_blocks))
# Turn folded Filter blocks into GremlinFoldedFilter blocks.
converted_folds = {
base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks)
for base_fold_location, folded_ir_blocks in six.iteritems(folds)
}
new_output_fields = dict()
for output_name, output_expression in six.iteritems(output_block.fields):
new_output_expression = output_expression
# Turn FoldedContextField expressions into GremlinFoldedContextField ones.
if isinstance(output_expression, FoldedContextField):
# Get the matching folded IR blocks and put them in the new context field.
base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0]
folded_ir_blocks = converted_folds[base_fold_location_name]
new_output_expression = GremlinFoldedContextField(
output_expression.fold_scope_location, folded_ir_blocks,
output_expression.field_type)
new_output_fields[output_name] = new_output_expression
new_ir_blocks = remaining_ir_blocks[:-1]
new_ir_blocks.append(ConstructResult(new_output_fields))
return new_ir_blocks | [
"def",
"lower_folded_outputs",
"(",
"ir_blocks",
")",
":",
"folds",
",",
"remaining_ir_blocks",
"=",
"extract_folds_from_ir_blocks",
"(",
"ir_blocks",
")",
"if",
"not",
"remaining_ir_blocks",
":",
"raise",
"AssertionError",
"(",
"u'Expected at least one non-folded block to ... | Lower standard folded output fields into GremlinFoldedContextField objects. | [
"Lower",
"standard",
"folded",
"output",
"fields",
"into",
"GremlinFoldedContextField",
"objects",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L319-L355 | train | 227,764 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | GremlinFoldedContextField.validate | def validate(self):
"""Validate that the GremlinFoldedContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack)
for block in self.folded_ir_blocks:
if not isinstance(block, allowed_block_types):
raise AssertionError(
u'Found invalid block of type {} in folded_ir_blocks: {} '
u'Allowed types are {}.'
.format(type(block), self.folded_ir_blocks, allowed_block_types))
if not isinstance(self.field_type, GraphQLList):
raise ValueError(u'Invalid value of "field_type", expected a list type but got: '
u'{}'.format(self.field_type))
inner_type = strip_non_null_from_type(self.field_type.of_type)
if isinstance(inner_type, GraphQLList):
raise GraphQLCompilationError(
u'Outputting list-valued fields in a @fold context is currently '
u'not supported: {} {}'.format(self.fold_scope_location, self.field_type.of_type)) | python | def validate(self):
"""Validate that the GremlinFoldedContextField is correctly representable."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
type(self.fold_scope_location), self.fold_scope_location))
allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack)
for block in self.folded_ir_blocks:
if not isinstance(block, allowed_block_types):
raise AssertionError(
u'Found invalid block of type {} in folded_ir_blocks: {} '
u'Allowed types are {}.'
.format(type(block), self.folded_ir_blocks, allowed_block_types))
if not isinstance(self.field_type, GraphQLList):
raise ValueError(u'Invalid value of "field_type", expected a list type but got: '
u'{}'.format(self.field_type))
inner_type = strip_non_null_from_type(self.field_type.of_type)
if isinstance(inner_type, GraphQLList):
raise GraphQLCompilationError(
u'Outputting list-valued fields in a @fold context is currently '
u'not supported: {} {}'.format(self.fold_scope_location, self.field_type.of_type)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fold_scope_location",
",",
"FoldScopeLocation",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected FoldScopeLocation fold_scope_location, got: {} {}'",
".",
"format",
"(",
"type",
... | Validate that the GremlinFoldedContextField is correctly representable. | [
"Validate",
"that",
"the",
"GremlinFoldedContextField",
"is",
"correctly",
"representable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L137-L159 | train | 227,765 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py | GremlinFoldedTraverse.from_traverse | def from_traverse(cls, traverse_block):
"""Create a GremlinFoldedTraverse block as a copy of the given Traverse block."""
if isinstance(traverse_block, Traverse):
return cls(traverse_block.direction, traverse_block.edge_name)
else:
raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse '
u'with block of type {}'.format(type(traverse_block))) | python | def from_traverse(cls, traverse_block):
"""Create a GremlinFoldedTraverse block as a copy of the given Traverse block."""
if isinstance(traverse_block, Traverse):
return cls(traverse_block.direction, traverse_block.edge_name)
else:
raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse '
u'with block of type {}'.format(type(traverse_block))) | [
"def",
"from_traverse",
"(",
"cls",
",",
"traverse_block",
")",
":",
"if",
"isinstance",
"(",
"traverse_block",
",",
"Traverse",
")",
":",
"return",
"cls",
"(",
"traverse_block",
".",
"direction",
",",
"traverse_block",
".",
"edge_name",
")",
"else",
":",
"r... | Create a GremlinFoldedTraverse block as a copy of the given Traverse block. | [
"Create",
"a",
"GremlinFoldedTraverse",
"block",
"as",
"a",
"copy",
"of",
"the",
"given",
"Traverse",
"block",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L258-L264 | train | 227,766 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/graphql_schema.py | _get_referenced_type_equivalences | def _get_referenced_type_equivalences(graphql_types, type_equivalence_hints):
"""Filter union types with no edges from the type equivalence hints dict."""
referenced_types = set()
for graphql_type in graphql_types.values():
if isinstance(graphql_type, (GraphQLObjectType, GraphQLInterfaceType)):
for _, field in graphql_type.fields.items():
if isinstance(field.type, GraphQLList):
referenced_types.add(field.type.of_type.name)
return {
original: union
for original, union in type_equivalence_hints.items()
if union.name in referenced_types
} | python | def _get_referenced_type_equivalences(graphql_types, type_equivalence_hints):
"""Filter union types with no edges from the type equivalence hints dict."""
referenced_types = set()
for graphql_type in graphql_types.values():
if isinstance(graphql_type, (GraphQLObjectType, GraphQLInterfaceType)):
for _, field in graphql_type.fields.items():
if isinstance(field.type, GraphQLList):
referenced_types.add(field.type.of_type.name)
return {
original: union
for original, union in type_equivalence_hints.items()
if union.name in referenced_types
} | [
"def",
"_get_referenced_type_equivalences",
"(",
"graphql_types",
",",
"type_equivalence_hints",
")",
":",
"referenced_types",
"=",
"set",
"(",
")",
"for",
"graphql_type",
"in",
"graphql_types",
".",
"values",
"(",
")",
":",
"if",
"isinstance",
"(",
"graphql_type",
... | Filter union types with no edges from the type equivalence hints dict. | [
"Filter",
"union",
"types",
"with",
"no",
"edges",
"from",
"the",
"type",
"equivalence",
"hints",
"dict",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L24-L36 | train | 227,767 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/graphql_schema.py | _get_inherited_field_types | def _get_inherited_field_types(class_to_field_type_overrides, schema_graph):
"""Return a dictionary describing the field type overrides in subclasses."""
inherited_field_type_overrides = dict()
for superclass_name, field_type_overrides in class_to_field_type_overrides.items():
for subclass_name in schema_graph.get_subclass_set(superclass_name):
inherited_field_type_overrides.setdefault(subclass_name, dict())
inherited_field_type_overrides[subclass_name].update(field_type_overrides)
return inherited_field_type_overrides | python | def _get_inherited_field_types(class_to_field_type_overrides, schema_graph):
"""Return a dictionary describing the field type overrides in subclasses."""
inherited_field_type_overrides = dict()
for superclass_name, field_type_overrides in class_to_field_type_overrides.items():
for subclass_name in schema_graph.get_subclass_set(superclass_name):
inherited_field_type_overrides.setdefault(subclass_name, dict())
inherited_field_type_overrides[subclass_name].update(field_type_overrides)
return inherited_field_type_overrides | [
"def",
"_get_inherited_field_types",
"(",
"class_to_field_type_overrides",
",",
"schema_graph",
")",
":",
"inherited_field_type_overrides",
"=",
"dict",
"(",
")",
"for",
"superclass_name",
",",
"field_type_overrides",
"in",
"class_to_field_type_overrides",
".",
"items",
"("... | Return a dictionary describing the field type overrides in subclasses. | [
"Return",
"a",
"dictionary",
"describing",
"the",
"field",
"type",
"overrides",
"in",
"subclasses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L39-L46 | train | 227,768 |
def _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides,
                                                               schema_graph):
    """Assert that the fields we want to override are not defined in superclasses."""
    for class_name, field_type_overrides in six.iteritems(class_to_field_type_overrides):
        for superclass_name in schema_graph.get_inheritance_set(class_name):
            # The inheritance set includes the class itself; only true superclasses matter.
            if superclass_name == class_name:
                continue
            superclass = schema_graph.get_element_by_class_name(superclass_name)
            for field_name in field_type_overrides:
                if field_name not in superclass.properties:
                    continue
                # Overriding is only allowed where the field is first defined.
                raise AssertionError(
                    u'Attempting to override field "{}" from class "{}", but the field is '
                    u'defined in superclass "{}"'
                    .format(field_name, class_name, superclass_name))
def _property_descriptor_to_graphql_type(property_obj):
    """Return the best GraphQL type representation for an OrientDB property descriptor."""
    # Primitive OrientDB property types map directly onto GraphQL scalars.
    scalar_types = {
        PROPERTY_TYPE_BOOLEAN_ID: GraphQLBoolean,
        PROPERTY_TYPE_DATE_ID: GraphQLDate,
        PROPERTY_TYPE_DATETIME_ID: GraphQLDateTime,
        PROPERTY_TYPE_DECIMAL_ID: GraphQLDecimal,
        PROPERTY_TYPE_DOUBLE_ID: GraphQLFloat,
        PROPERTY_TYPE_FLOAT_ID: GraphQLFloat,
        PROPERTY_TYPE_INTEGER_ID: GraphQLInt,
        PROPERTY_TYPE_STRING_ID: GraphQLString,
    }
    # Embedded collections map onto GraphQL lists, provided their elements are primitives.
    collection_types = {
        PROPERTY_TYPE_EMBEDDED_SET_ID: GraphQLList,
        PROPERTY_TYPE_EMBEDDED_LIST_ID: GraphQLList,
    }

    type_id = property_obj.type_id
    if type_id in scalar_types:
        return scalar_types[type_id]

    if type_id in collection_types:
        element_type_id = property_obj.qualifier
        # There are properties that are embedded collections of non-primitive types,
        # for example, ProxyEventSet.scalar_parameters.
        # The GraphQL compiler does not currently support these.
        if element_type_id in scalar_types:
            return collection_types[type_id](scalar_types[element_type_id])

    # We weren't able to represent this property in GraphQL, so we'll hide it instead.
    return None
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/graphql_schema.py | _get_union_type_name | def _get_union_type_name(type_names_to_union):
"""Construct a unique union type name based on the type names being unioned."""
if not type_names_to_union:
raise AssertionError(u'Expected a non-empty list of type names to union, received: '
u'{}'.format(type_names_to_union))
return u'Union__' + u'__'.join(sorted(type_names_to_union)) | python | def _get_union_type_name(type_names_to_union):
"""Construct a unique union type name based on the type names being unioned."""
if not type_names_to_union:
raise AssertionError(u'Expected a non-empty list of type names to union, received: '
u'{}'.format(type_names_to_union))
return u'Union__' + u'__'.join(sorted(type_names_to_union)) | [
"def",
"_get_union_type_name",
"(",
"type_names_to_union",
")",
":",
"if",
"not",
"type_names_to_union",
":",
"raise",
"AssertionError",
"(",
"u'Expected a non-empty list of type names to union, received: '",
"u'{}'",
".",
"format",
"(",
"type_names_to_union",
")",
")",
"re... | Construct a unique union type name based on the type names being unioned. | [
"Construct",
"a",
"unique",
"union",
"type",
"name",
"based",
"on",
"the",
"type",
"names",
"being",
"unioned",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L99-L104 | train | 227,771 |
def _get_fields_for_class(schema_graph, graphql_types, field_type_overrides, hidden_classes,
                          cls_name):
    """Return a dict from field name to GraphQL field type, for the specified graph class."""
    schema_element = schema_graph.get_element_by_class_name(cls_name)

    # Leaf GraphQL fields: one per class property that has a GraphQL representation;
    # properties that cannot be represented in GraphQL are silently hidden.
    result = {}
    for property_name, property_obj in six.iteritems(schema_element.properties):
        graphql_representation = _property_descriptor_to_graphql_type(property_obj)
        if graphql_representation is not None:
            result[property_name] = graphql_representation

    # Edge GraphQL fields: one per edge class touching this vertex class, named with
    # an "out_"/"in_" prefix, paired with the class name at the far end of the edge.
    def _edge_descriptors(edge_names, prefix, endpoint_property_name):
        """Yield (field name, endpoint class name) pairs for the given edge classes."""
        for edge_name in edge_names:
            edge_properties = schema_graph.get_element_by_class_name(edge_name).properties
            yield (prefix + edge_name, edge_properties[endpoint_property_name].qualifier)

    all_edge_descriptors = chain(
        _edge_descriptors(schema_element.out_connections, 'out_',
                          EDGE_DESTINATION_PROPERTY_NAME),
        _edge_descriptors(schema_element.in_connections, 'in_',
                          EDGE_SOURCE_PROPERTY_NAME))

    for field_name, to_type_name in all_edge_descriptors:
        endpoint_type_name = None
        subclasses = schema_graph.get_subclass_set(to_type_name)
        to_type_is_abstract = schema_graph.get_element_by_class_name(to_type_name).abstract
        if not to_type_is_abstract and len(subclasses) > 1:
            # If the edge endpoint type has no subclasses, it can't be coerced into any other
            # type. If it is abstract (an interface type), a GraphQL fragment can already
            # coerce it to the proper type. However, a non-abstract endpoint type with
            # subclasses must be represented as a union of those subclasses, because GraphQL
            # fragments cannot be applied on concrete types, and GraphQL does not support
            # inheritance of concrete types.
            visible_subclasses = [
                subclass_name
                for subclass_name in subclasses
                if subclass_name not in hidden_classes
            ]
            if visible_subclasses:
                endpoint_type_name = _get_union_type_name(visible_subclasses)
        elif to_type_name not in hidden_classes:
            endpoint_type_name = to_type_name

        if endpoint_type_name is not None:
            # The edge endpoint type turned out to be representable, so the edge is exposed
            # as a field of GraphQL type List(endpoint type).
            result[field_name] = GraphQLList(graphql_types[endpoint_type_name])

    # Apply the requested field type overrides last, so they take precedence.
    for field_name, field_type in six.iteritems(field_type_overrides):
        if field_name not in result:
            raise AssertionError(u'Attempting to override field "{}" from class "{}", but the '
                                 u'class does not contain said field'.format(field_name, cls_name))
        result[field_name] = field_type

    return result
def _create_field_specification(schema_graph, graphql_types, field_type_overrides,
                                hidden_classes, cls_name):
    """Return a function that specifies the fields present on the given type."""
    def field_maker_func():
        """Create and return the fields for the given GraphQL type."""
        class_fields = _get_fields_for_class(
            schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name)
        # Start from the compiler's meta fields, then add the class's own fields in
        # deterministic (sorted-by-name) order.
        result = EXTENDED_META_FIELD_DEFINITIONS.copy()
        result.update(OrderedDict(
            (field_name, GraphQLField(field_type))
            for field_name, field_type in sorted(six.iteritems(class_fields),
                                                 key=lambda x: x[0])
        ))
        return result
    return field_maker_func
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/graphql_schema.py | _create_interface_specification | def _create_interface_specification(schema_graph, graphql_types, hidden_classes, cls_name):
"""Return a function that specifies the interfaces implemented by the given type."""
def interface_spec():
"""Return a list of GraphQL interface types implemented by the type named 'cls_name'."""
abstract_inheritance_set = (
superclass_name
for superclass_name in sorted(list(schema_graph.get_inheritance_set(cls_name)))
if (superclass_name not in hidden_classes and
schema_graph.get_element_by_class_name(superclass_name).abstract)
)
return [
graphql_types[x]
for x in abstract_inheritance_set
if x not in hidden_classes
]
return interface_spec | python | def _create_interface_specification(schema_graph, graphql_types, hidden_classes, cls_name):
"""Return a function that specifies the interfaces implemented by the given type."""
def interface_spec():
"""Return a list of GraphQL interface types implemented by the type named 'cls_name'."""
abstract_inheritance_set = (
superclass_name
for superclass_name in sorted(list(schema_graph.get_inheritance_set(cls_name)))
if (superclass_name not in hidden_classes and
schema_graph.get_element_by_class_name(superclass_name).abstract)
)
return [
graphql_types[x]
for x in abstract_inheritance_set
if x not in hidden_classes
]
return interface_spec | [
"def",
"_create_interface_specification",
"(",
"schema_graph",
",",
"graphql_types",
",",
"hidden_classes",
",",
"cls_name",
")",
":",
"def",
"interface_spec",
"(",
")",
":",
"\"\"\"Return a list of GraphQL interface types implemented by the type named 'cls_name'.\"\"\"",
"abstra... | Return a function that specifies the interfaces implemented by the given type. | [
"Return",
"a",
"function",
"that",
"specifies",
"the",
"interfaces",
"implemented",
"by",
"the",
"given",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L192-L209 | train | 227,774 |
kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/graphql_schema.py | _create_union_types_specification | def _create_union_types_specification(schema_graph, graphql_types, hidden_classes, base_name):
"""Return a function that gives the types in the union type rooted at base_name."""
# When edges point to vertices of type base_name, and base_name is both non-abstract and
# has subclasses, we need to represent the edge endpoint type with a union type based on
# base_name and its subclasses. This function calculates what types that union should include.
def types_spec():
"""Return a list of GraphQL types that this class' corresponding union type includes."""
return [
graphql_types[x]
for x in sorted(list(schema_graph.get_subclass_set(base_name)))
if x not in hidden_classes
]
return types_spec | python | def _create_union_types_specification(schema_graph, graphql_types, hidden_classes, base_name):
"""Return a function that gives the types in the union type rooted at base_name."""
# When edges point to vertices of type base_name, and base_name is both non-abstract and
# has subclasses, we need to represent the edge endpoint type with a union type based on
# base_name and its subclasses. This function calculates what types that union should include.
def types_spec():
"""Return a list of GraphQL types that this class' corresponding union type includes."""
return [
graphql_types[x]
for x in sorted(list(schema_graph.get_subclass_set(base_name)))
if x not in hidden_classes
]
return types_spec | [
"def",
"_create_union_types_specification",
"(",
"schema_graph",
",",
"graphql_types",
",",
"hidden_classes",
",",
"base_name",
")",
":",
"# When edges point to vertices of type base_name, and base_name is both non-abstract and",
"# has subclasses, we need to represent the edge endpoint ty... | Return a function that gives the types in the union type rooted at base_name. | [
"Return",
"a",
"function",
"that",
"gives",
"the",
"types",
"in",
"the",
"union",
"type",
"rooted",
"at",
"base_name",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L212-L225 | train | 227,775 |
def get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
                                         hidden_classes):
    """Return a GraphQL schema object corresponding to the schema of the given schema graph.

    Args:
        schema_graph: SchemaGraph
        class_to_field_type_overrides: dict, class name -> {field name -> field type},
                                       (string -> {string -> GraphQLType}). Used to override the
                                       type of a field in the class where it's first defined and
                                       all the class's subclasses.
        hidden_classes: set of strings, classes to not include in the GraphQL schema.

    Returns:
        tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
        The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
    """
    _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides,
                                                               schema_graph)

    # Overrides apply to the class where the field is first defined and to every subclass.
    # Remember that get_subclass_set(class_name) includes class_name itself.
    inherited_overrides = _get_inherited_field_types(class_to_field_type_overrides, schema_graph)

    # A base vertex class without properties is meaningless and only makes the schema less
    # syntactically sweet, so it is hidden.
    if not schema_graph.get_element_by_class_name(ORIENTDB_BASE_VERTEX_CLASS_NAME).properties:
        hidden_classes.add(ORIENTDB_BASE_VERTEX_CLASS_NAME)

    named_types = OrderedDict()
    equivalence_hints = OrderedDict()

    # Pass 1: construct the analogous GraphQL type for every visible vertex class.
    for vertex_name in sorted(schema_graph.vertex_class_names):
        if vertex_name in hidden_classes:
            continue
        vertex_element = schema_graph.get_element_by_class_name(vertex_name)

        overrides_for_class = inherited_overrides.setdefault(vertex_name, dict())

        # Field definitions must be lazily bound: edges create circular type references
        # (if an edge connects types A and B, then A holds a List[B] field and B holds a
        # List[A] field). The GraphQL library therefore accepts a callable that it invokes
        # only after all types exist in their initial blank state. Because Python lambdas
        # capture variables by reference rather than by value, the callable is constructed
        # by a helper function that receives the loop variables as arguments.
        field_spec = _create_field_specification(
            schema_graph, named_types, overrides_for_class, hidden_classes, vertex_name)

        if vertex_element.abstract:
            # Abstract classes become GraphQL interfaces. Note that "fields" is a kwarg in
            # the interface constructor, while it is positional for object types below.
            graphql_type = GraphQLInterfaceType(vertex_name, fields=field_spec)
        else:
            # Concrete classes become GraphQL object types, declaring the interfaces
            # (abstract superclasses) they implement -- also lazily bound, as above.
            interface_spec = _create_interface_specification(
                schema_graph, named_types, hidden_classes, vertex_name)

            # N.B.: The "is_type_of" argument below only circumvents a sanity check inside
            # the GraphQL library. The library assumes its own execution system will be
            # used and complains without a way to tell apart implementations of the same
            # interface. This compiler translates GraphQL to database queries instead, so
            # the argument is irrelevant.
            graphql_type = GraphQLObjectType(vertex_name,
                                             field_spec,
                                             interfaces=interface_spec,
                                             is_type_of=lambda: None)

        named_types[vertex_name] = graphql_type

    # Pass 2: construct union types for concrete vertex classes that have subclasses. Such
    # a class needs a union representation when it appears as an edge endpoint, since
    # GraphQL fragments cannot coerce concrete types.
    for vertex_name in sorted(schema_graph.vertex_class_names):
        if vertex_name in hidden_classes:
            continue
        vertex_element = schema_graph.get_element_by_class_name(vertex_name)
        subclasses = schema_graph.get_subclass_set(vertex_name)
        if not vertex_element.abstract and len(subclasses) > 1:
            union_name = _get_union_type_name(subclasses)
            # The union's member types are also lazily bound, for the same reason as above.
            union_members_spec = _create_union_types_specification(
                schema_graph, named_types, hidden_classes, vertex_name)
            union_type = GraphQLUnionType(union_name, types=union_members_spec)
            named_types[union_name] = union_type
            equivalence_hints[named_types[vertex_name]] = union_type

    # Pass 3: include every abstract non-vertex class whose only non-abstract subclasses
    # are vertices, exposing it as a GraphQL interface.
    for non_graph_name in sorted(schema_graph.non_graph_class_names):
        if non_graph_name in hidden_classes:
            continue
        if not schema_graph.get_element_by_class_name(non_graph_name).abstract:
            continue

        subclasses = schema_graph.get_subclass_set(non_graph_name)
        # A subclass set of size 1 contains only the class itself -- nothing to expose.
        if len(subclasses) > 1:
            only_vertex_subclasses = True
            for subclass_name in subclasses:
                if subclass_name == non_graph_name:
                    continue
                subclass = schema_graph.get_element_by_class_name(subclass_name)
                if not subclass.abstract and not subclass.is_vertex:
                    only_vertex_subclasses = False
                    break

            if only_vertex_subclasses:
                overrides_for_class = inherited_overrides.setdefault(non_graph_name, dict())
                field_spec = _create_field_specification(
                    schema_graph, named_types, overrides_for_class, hidden_classes,
                    non_graph_name)
                named_types[non_graph_name] = GraphQLInterfaceType(non_graph_name,
                                                                   fields=field_spec)

    if not named_types:
        raise EmptySchemaError(u'After evaluating all subclasses of V, we were not able to find '
                               u'visible schema data to import into the GraphQL schema object')

    # The root query type consists of every non-union type: all concrete classes (as GraphQL
    # object types) and all abstract classes (as GraphQL interfaces), in sorted order.
    root_query_type = GraphQLObjectType('RootSchemaQuery', OrderedDict([
        (type_name, GraphQLField(graphql_type))
        for type_name, graphql_type in sorted(six.iteritems(named_types), key=lambda x: x[0])
        if not isinstance(graphql_type, GraphQLUnionType)
    ]))

    schema = GraphQLSchema(root_query_type, directives=DIRECTIVES)

    # GraphQLSchema discovers its types by recursively walking the root type's fields. Union
    # types only ever appear as edge field types, so a union with no in or out edges will not
    # appear in the schema; such unions (and their keys) are dropped from the type
    # equivalence hints as well.
    return schema, _get_referenced_type_equivalences(named_types, equivalence_hints)
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py | workaround_lowering_pass | def workaround_lowering_pass(ir_blocks, query_metadata_table):
"""Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary."""
new_ir_blocks = []
for block in ir_blocks:
if isinstance(block, Filter):
new_block = _process_filter_block(query_metadata_table, block)
else:
new_block = block
new_ir_blocks.append(new_block)
return new_ir_blocks | python | def workaround_lowering_pass(ir_blocks, query_metadata_table):
"""Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary."""
new_ir_blocks = []
for block in ir_blocks:
if isinstance(block, Filter):
new_block = _process_filter_block(query_metadata_table, block)
else:
new_block = block
new_ir_blocks.append(new_block)
return new_ir_blocks | [
"def",
"workaround_lowering_pass",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"new_ir_blocks",
"=",
"[",
"]",
"for",
"block",
"in",
"ir_blocks",
":",
"if",
"isinstance",
"(",
"block",
",",
"Filter",
")",
":",
"new_block",
"=",
"_process_filter_bloc... | Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary. | [
"Extract",
"locations",
"from",
"TernaryConditionals",
"and",
"rewrite",
"their",
"Filter",
"blocks",
"as",
"necessary",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L19-L30 | train | 227,777 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py | _process_filter_block | def _process_filter_block(query_metadata_table, block):
"""Rewrite the provided Filter block if necessary."""
# For a given Filter block with BinaryComposition predicate expression X,
# let L be the set of all Locations referenced in any TernaryConditional
# predicate expression enclosed in X.
# For each location l in L, we construct a tautological expression that looks like:
# ((l IS NULL) OR (l IS NOT NULL))
# and then join the original BinaryComposition X with all such expressions with ANDs.
# We set this new BinaryComposition expression as the predicate of the Filter block.
base_predicate = block.predicate
# These variables are used by the visitor functions below.
ternary_conditionals = []
# "problematic_locations" is a list and not a set,
# to preserve ordering and generate a deterministic order of added clauses.
# We expect the maximum size of this list to be a small constant number,
# so the linear "in" operator is really not a concern.
problematic_locations = []
def find_ternary_conditionals(expression):
"""Visitor function that extracts all enclosed TernaryConditional expressions."""
if isinstance(expression, TernaryConditional):
ternary_conditionals.append(expression)
return expression
def extract_locations_visitor(expression):
"""Visitor function that extracts all the problematic locations."""
if isinstance(expression, (ContextField, ContextFieldExistence)):
# We get the location at the vertex, ignoring property fields.
# The vertex-level location is sufficient to work around the OrientDB bug,
# and we want as few location as possible overall.
location_at_vertex = expression.location.at_vertex()
if location_at_vertex not in problematic_locations:
problematic_locations.append(location_at_vertex)
return expression
# We aren't modifying the base predicate itself, just traversing it.
# The returned "updated" value must be the exact same as the original.
return_value = base_predicate.visit_and_update(find_ternary_conditionals)
if return_value is not base_predicate:
raise AssertionError(u'Read-only visitor function "find_ternary_conditionals" '
u'caused state to change: '
u'{} {}'.format(base_predicate, return_value))
for ternary in ternary_conditionals:
# We aren't modifying the ternary itself, just traversing it.
# The returned "updated" value must be the exact same as the original.
return_value = ternary.visit_and_update(extract_locations_visitor)
if return_value is not ternary:
raise AssertionError(u'Read-only visitor function "extract_locations_visitor" '
u'caused state to change: '
u'{} {}'.format(ternary, return_value))
tautologies = [
_create_tautological_expression_for_location(query_metadata_table, location)
for location in problematic_locations
]
if not tautologies:
return block
final_predicate = base_predicate
for tautology in tautologies:
final_predicate = BinaryComposition(u'&&', final_predicate, tautology)
return Filter(final_predicate) | python | def _process_filter_block(query_metadata_table, block):
"""Rewrite the provided Filter block if necessary."""
# For a given Filter block with BinaryComposition predicate expression X,
# let L be the set of all Locations referenced in any TernaryConditional
# predicate expression enclosed in X.
# For each location l in L, we construct a tautological expression that looks like:
# ((l IS NULL) OR (l IS NOT NULL))
# and then join the original BinaryComposition X with all such expressions with ANDs.
# We set this new BinaryComposition expression as the predicate of the Filter block.
base_predicate = block.predicate
# These variables are used by the visitor functions below.
ternary_conditionals = []
# "problematic_locations" is a list and not a set,
# to preserve ordering and generate a deterministic order of added clauses.
# We expect the maximum size of this list to be a small constant number,
# so the linear "in" operator is really not a concern.
problematic_locations = []
def find_ternary_conditionals(expression):
"""Visitor function that extracts all enclosed TernaryConditional expressions."""
if isinstance(expression, TernaryConditional):
ternary_conditionals.append(expression)
return expression
def extract_locations_visitor(expression):
"""Visitor function that extracts all the problematic locations."""
if isinstance(expression, (ContextField, ContextFieldExistence)):
# We get the location at the vertex, ignoring property fields.
# The vertex-level location is sufficient to work around the OrientDB bug,
# and we want as few location as possible overall.
location_at_vertex = expression.location.at_vertex()
if location_at_vertex not in problematic_locations:
problematic_locations.append(location_at_vertex)
return expression
# We aren't modifying the base predicate itself, just traversing it.
# The returned "updated" value must be the exact same as the original.
return_value = base_predicate.visit_and_update(find_ternary_conditionals)
if return_value is not base_predicate:
raise AssertionError(u'Read-only visitor function "find_ternary_conditionals" '
u'caused state to change: '
u'{} {}'.format(base_predicate, return_value))
for ternary in ternary_conditionals:
# We aren't modifying the ternary itself, just traversing it.
# The returned "updated" value must be the exact same as the original.
return_value = ternary.visit_and_update(extract_locations_visitor)
if return_value is not ternary:
raise AssertionError(u'Read-only visitor function "extract_locations_visitor" '
u'caused state to change: '
u'{} {}'.format(ternary, return_value))
tautologies = [
_create_tautological_expression_for_location(query_metadata_table, location)
for location in problematic_locations
]
if not tautologies:
return block
final_predicate = base_predicate
for tautology in tautologies:
final_predicate = BinaryComposition(u'&&', final_predicate, tautology)
return Filter(final_predicate) | [
"def",
"_process_filter_block",
"(",
"query_metadata_table",
",",
"block",
")",
":",
"# For a given Filter block with BinaryComposition predicate expression X,",
"# let L be the set of all Locations referenced in any TernaryConditional",
"# predicate expression enclosed in X.",
"# For each loc... | Rewrite the provided Filter block if necessary. | [
"Rewrite",
"the",
"provided",
"Filter",
"block",
"if",
"necessary",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L33-L98 | train | 227,778 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py | _create_tautological_expression_for_location | def _create_tautological_expression_for_location(query_metadata_table, location):
"""For a given location, create a BinaryComposition that always evaluates to 'true'."""
location_type = query_metadata_table.get_location_info(location).type
location_exists = BinaryComposition(
u'!=', ContextField(location, location_type), NullLiteral)
location_does_not_exist = BinaryComposition(
u'=', ContextField(location, location_type), NullLiteral)
return BinaryComposition(u'||', location_exists, location_does_not_exist) | python | def _create_tautological_expression_for_location(query_metadata_table, location):
"""For a given location, create a BinaryComposition that always evaluates to 'true'."""
location_type = query_metadata_table.get_location_info(location).type
location_exists = BinaryComposition(
u'!=', ContextField(location, location_type), NullLiteral)
location_does_not_exist = BinaryComposition(
u'=', ContextField(location, location_type), NullLiteral)
return BinaryComposition(u'||', location_exists, location_does_not_exist) | [
"def",
"_create_tautological_expression_for_location",
"(",
"query_metadata_table",
",",
"location",
")",
":",
"location_type",
"=",
"query_metadata_table",
".",
"get_location_info",
"(",
"location",
")",
".",
"type",
"location_exists",
"=",
"BinaryComposition",
"(",
"u'!... | For a given location, create a BinaryComposition that always evaluates to 'true'. | [
"For",
"a",
"given",
"location",
"create",
"a",
"BinaryComposition",
"that",
"always",
"evaluates",
"to",
"true",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L101-L109 | train | 227,779 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_only_element_from_collection | def get_only_element_from_collection(one_element_collection):
"""Assert that the collection has exactly one element, then return that element."""
if len(one_element_collection) != 1:
raise AssertionError(u'Expected a collection with exactly one element, but got: {}'
.format(one_element_collection))
return funcy.first(one_element_collection) | python | def get_only_element_from_collection(one_element_collection):
"""Assert that the collection has exactly one element, then return that element."""
if len(one_element_collection) != 1:
raise AssertionError(u'Expected a collection with exactly one element, but got: {}'
.format(one_element_collection))
return funcy.first(one_element_collection) | [
"def",
"get_only_element_from_collection",
"(",
"one_element_collection",
")",
":",
"if",
"len",
"(",
"one_element_collection",
")",
"!=",
"1",
":",
"raise",
"AssertionError",
"(",
"u'Expected a collection with exactly one element, but got: {}'",
".",
"format",
"(",
"one_el... | Assert that the collection has exactly one element, then return that element. | [
"Assert",
"that",
"the",
"collection",
"has",
"exactly",
"one",
"element",
"then",
"return",
"that",
"element",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L37-L42 | train | 227,780 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_ast_field_name | def get_ast_field_name(ast):
"""Return the normalized field name for the given AST node."""
replacements = {
# We always rewrite the following field names into their proper underlying counterparts.
TYPENAME_META_FIELD_NAME: '@class'
}
base_field_name = ast.name.value
normalized_name = replacements.get(base_field_name, base_field_name)
return normalized_name | python | def get_ast_field_name(ast):
"""Return the normalized field name for the given AST node."""
replacements = {
# We always rewrite the following field names into their proper underlying counterparts.
TYPENAME_META_FIELD_NAME: '@class'
}
base_field_name = ast.name.value
normalized_name = replacements.get(base_field_name, base_field_name)
return normalized_name | [
"def",
"get_ast_field_name",
"(",
"ast",
")",
":",
"replacements",
"=",
"{",
"# We always rewrite the following field names into their proper underlying counterparts.",
"TYPENAME_META_FIELD_NAME",
":",
"'@class'",
"}",
"base_field_name",
"=",
"ast",
".",
"name",
".",
"value",... | Return the normalized field name for the given AST node. | [
"Return",
"the",
"normalized",
"field",
"name",
"for",
"the",
"given",
"AST",
"node",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L45-L53 | train | 227,781 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_field_type_from_schema | def get_field_type_from_schema(schema_type, field_name):
"""Return the type of the field in the given type, accounting for field name normalization."""
if field_name == '@class':
return GraphQLString
else:
if field_name not in schema_type.fields:
raise AssertionError(u'Field {} passed validation but was not present on type '
u'{}'.format(field_name, schema_type))
# Validation guarantees that the field must exist in the schema.
return schema_type.fields[field_name].type | python | def get_field_type_from_schema(schema_type, field_name):
"""Return the type of the field in the given type, accounting for field name normalization."""
if field_name == '@class':
return GraphQLString
else:
if field_name not in schema_type.fields:
raise AssertionError(u'Field {} passed validation but was not present on type '
u'{}'.format(field_name, schema_type))
# Validation guarantees that the field must exist in the schema.
return schema_type.fields[field_name].type | [
"def",
"get_field_type_from_schema",
"(",
"schema_type",
",",
"field_name",
")",
":",
"if",
"field_name",
"==",
"'@class'",
":",
"return",
"GraphQLString",
"else",
":",
"if",
"field_name",
"not",
"in",
"schema_type",
".",
"fields",
":",
"raise",
"AssertionError",
... | Return the type of the field in the given type, accounting for field name normalization. | [
"Return",
"the",
"type",
"of",
"the",
"field",
"in",
"the",
"given",
"type",
"accounting",
"for",
"field",
"name",
"normalization",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L63-L73 | train | 227,782 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_vertex_field_type | def get_vertex_field_type(current_schema_type, vertex_field_name):
"""Return the type of the vertex within the specified vertex field name of the given type."""
# According to the schema, the vertex field itself is of type GraphQLList, and this is
# what get_field_type_from_schema returns. We care about what the type *inside* the list is,
# i.e., the type on the other side of the edge (hence .of_type).
# Validation guarantees that the field must exist in the schema.
if not is_vertex_field_name(vertex_field_name):
raise AssertionError(u'Trying to load the vertex field type of a non-vertex field: '
u'{} {}'.format(current_schema_type, vertex_field_name))
raw_field_type = get_field_type_from_schema(current_schema_type, vertex_field_name)
if not isinstance(strip_non_null_from_type(raw_field_type), GraphQLList):
raise AssertionError(u'Found an edge whose schema type was not GraphQLList: '
u'{} {} {}'.format(current_schema_type, vertex_field_name,
raw_field_type))
return raw_field_type.of_type | python | def get_vertex_field_type(current_schema_type, vertex_field_name):
"""Return the type of the vertex within the specified vertex field name of the given type."""
# According to the schema, the vertex field itself is of type GraphQLList, and this is
# what get_field_type_from_schema returns. We care about what the type *inside* the list is,
# i.e., the type on the other side of the edge (hence .of_type).
# Validation guarantees that the field must exist in the schema.
if not is_vertex_field_name(vertex_field_name):
raise AssertionError(u'Trying to load the vertex field type of a non-vertex field: '
u'{} {}'.format(current_schema_type, vertex_field_name))
raw_field_type = get_field_type_from_schema(current_schema_type, vertex_field_name)
if not isinstance(strip_non_null_from_type(raw_field_type), GraphQLList):
raise AssertionError(u'Found an edge whose schema type was not GraphQLList: '
u'{} {} {}'.format(current_schema_type, vertex_field_name,
raw_field_type))
return raw_field_type.of_type | [
"def",
"get_vertex_field_type",
"(",
"current_schema_type",
",",
"vertex_field_name",
")",
":",
"# According to the schema, the vertex field itself is of type GraphQLList, and this is",
"# what get_field_type_from_schema returns. We care about what the type *inside* the list is,",
"# i.e., the t... | Return the type of the vertex within the specified vertex field name of the given type. | [
"Return",
"the",
"type",
"of",
"the",
"vertex",
"within",
"the",
"specified",
"vertex",
"field",
"name",
"of",
"the",
"given",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L76-L91 | train | 227,783 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_edge_direction_and_name | def get_edge_direction_and_name(vertex_field_name):
"""Get the edge direction and name from a non-root vertex field name."""
edge_direction = None
edge_name = None
if vertex_field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX):
edge_direction = OUTBOUND_EDGE_DIRECTION
edge_name = vertex_field_name[len(OUTBOUND_EDGE_FIELD_PREFIX):]
elif vertex_field_name.startswith(INBOUND_EDGE_FIELD_PREFIX):
edge_direction = INBOUND_EDGE_DIRECTION
edge_name = vertex_field_name[len(INBOUND_EDGE_FIELD_PREFIX):]
else:
raise AssertionError(u'Unreachable condition reached:', vertex_field_name)
validate_safe_string(edge_name)
return edge_direction, edge_name | python | def get_edge_direction_and_name(vertex_field_name):
"""Get the edge direction and name from a non-root vertex field name."""
edge_direction = None
edge_name = None
if vertex_field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX):
edge_direction = OUTBOUND_EDGE_DIRECTION
edge_name = vertex_field_name[len(OUTBOUND_EDGE_FIELD_PREFIX):]
elif vertex_field_name.startswith(INBOUND_EDGE_FIELD_PREFIX):
edge_direction = INBOUND_EDGE_DIRECTION
edge_name = vertex_field_name[len(INBOUND_EDGE_FIELD_PREFIX):]
else:
raise AssertionError(u'Unreachable condition reached:', vertex_field_name)
validate_safe_string(edge_name)
return edge_direction, edge_name | [
"def",
"get_edge_direction_and_name",
"(",
"vertex_field_name",
")",
":",
"edge_direction",
"=",
"None",
"edge_name",
"=",
"None",
"if",
"vertex_field_name",
".",
"startswith",
"(",
"OUTBOUND_EDGE_FIELD_PREFIX",
")",
":",
"edge_direction",
"=",
"OUTBOUND_EDGE_DIRECTION",
... | Get the edge direction and name from a non-root vertex field name. | [
"Get",
"the",
"edge",
"direction",
"and",
"name",
"from",
"a",
"non",
"-",
"root",
"vertex",
"field",
"name",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L101-L116 | train | 227,784 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | is_vertex_field_type | def is_vertex_field_type(graphql_type):
"""Return True if the argument is a vertex field type, and False otherwise."""
# This will need to change if we ever support complex embedded types or edge field types.
underlying_type = strip_non_null_from_type(graphql_type)
return isinstance(underlying_type, (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType)) | python | def is_vertex_field_type(graphql_type):
"""Return True if the argument is a vertex field type, and False otherwise."""
# This will need to change if we ever support complex embedded types or edge field types.
underlying_type = strip_non_null_from_type(graphql_type)
return isinstance(underlying_type, (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType)) | [
"def",
"is_vertex_field_type",
"(",
"graphql_type",
")",
":",
"# This will need to change if we ever support complex embedded types or edge field types.",
"underlying_type",
"=",
"strip_non_null_from_type",
"(",
"graphql_type",
")",
"return",
"isinstance",
"(",
"underlying_type",
"... | Return True if the argument is a vertex field type, and False otherwise. | [
"Return",
"True",
"if",
"the",
"argument",
"is",
"a",
"vertex",
"field",
"type",
"and",
"False",
"otherwise",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L127-L131 | train | 227,785 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | ensure_unicode_string | def ensure_unicode_string(value):
"""Ensure the value is a string, and return it as unicode."""
if not isinstance(value, six.string_types):
raise TypeError(u'Expected string value, got: {}'.format(value))
return six.text_type(value) | python | def ensure_unicode_string(value):
"""Ensure the value is a string, and return it as unicode."""
if not isinstance(value, six.string_types):
raise TypeError(u'Expected string value, got: {}'.format(value))
return six.text_type(value) | [
"def",
"ensure_unicode_string",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected string value, got: {}'",
".",
"format",
"(",
"value",
")",
")",
"return",
"six",
... | Ensure the value is a string, and return it as unicode. | [
"Ensure",
"the",
"value",
"is",
"a",
"string",
"and",
"return",
"it",
"as",
"unicode",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L140-L144 | train | 227,786 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | get_uniquely_named_objects_by_name | def get_uniquely_named_objects_by_name(object_list):
"""Return dict of name -> object pairs from a list of objects with unique names.
Args:
object_list: list of objects, each X of which has a unique name accessible as X.name.value
Returns:
dict, { X.name.value: X for x in object_list }
If the list is empty or None, returns an empty dict.
"""
if not object_list:
return dict()
result = dict()
for obj in object_list:
name = obj.name.value
if name in result:
raise GraphQLCompilationError(u'Found duplicate object key: '
u'{} {}'.format(name, object_list))
result[name] = obj
return result | python | def get_uniquely_named_objects_by_name(object_list):
"""Return dict of name -> object pairs from a list of objects with unique names.
Args:
object_list: list of objects, each X of which has a unique name accessible as X.name.value
Returns:
dict, { X.name.value: X for x in object_list }
If the list is empty or None, returns an empty dict.
"""
if not object_list:
return dict()
result = dict()
for obj in object_list:
name = obj.name.value
if name in result:
raise GraphQLCompilationError(u'Found duplicate object key: '
u'{} {}'.format(name, object_list))
result[name] = obj
return result | [
"def",
"get_uniquely_named_objects_by_name",
"(",
"object_list",
")",
":",
"if",
"not",
"object_list",
":",
"return",
"dict",
"(",
")",
"result",
"=",
"dict",
"(",
")",
"for",
"obj",
"in",
"object_list",
":",
"name",
"=",
"obj",
".",
"name",
".",
"value",
... | Return dict of name -> object pairs from a list of objects with unique names.
Args:
object_list: list of objects, each X of which has a unique name accessible as X.name.value
Returns:
dict, { X.name.value: X for x in object_list }
If the list is empty or None, returns an empty dict. | [
"Return",
"dict",
"of",
"name",
"-",
">",
"object",
"pairs",
"from",
"a",
"list",
"of",
"objects",
"with",
"unique",
"names",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L147-L168 | train | 227,787 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | validate_safe_string | def validate_safe_string(value):
"""Ensure the provided string does not have illegal characters."""
# The following strings are explicitly allowed, despite having otherwise-illegal chars.
legal_strings_with_special_chars = frozenset({'@rid', '@class', '@this', '%'})
if not isinstance(value, six.string_types):
raise TypeError(u'Expected string value, got: {} {}'.format(
type(value).__name__, value))
if not value:
raise GraphQLCompilationError(u'Empty strings are not allowed!')
if value[0] in string.digits:
raise GraphQLCompilationError(u'String values cannot start with a digit: {}'.format(value))
if not set(value).issubset(VARIABLE_ALLOWED_CHARS) and \
value not in legal_strings_with_special_chars:
raise GraphQLCompilationError(u'Encountered illegal characters in string: {}'.format(value)) | python | def validate_safe_string(value):
"""Ensure the provided string does not have illegal characters."""
# The following strings are explicitly allowed, despite having otherwise-illegal chars.
legal_strings_with_special_chars = frozenset({'@rid', '@class', '@this', '%'})
if not isinstance(value, six.string_types):
raise TypeError(u'Expected string value, got: {} {}'.format(
type(value).__name__, value))
if not value:
raise GraphQLCompilationError(u'Empty strings are not allowed!')
if value[0] in string.digits:
raise GraphQLCompilationError(u'String values cannot start with a digit: {}'.format(value))
if not set(value).issubset(VARIABLE_ALLOWED_CHARS) and \
value not in legal_strings_with_special_chars:
raise GraphQLCompilationError(u'Encountered illegal characters in string: {}'.format(value)) | [
"def",
"validate_safe_string",
"(",
"value",
")",
":",
"# The following strings are explicitly allowed, despite having otherwise-illegal chars.",
"legal_strings_with_special_chars",
"=",
"frozenset",
"(",
"{",
"'@rid'",
",",
"'@class'",
",",
"'@this'",
",",
"'%'",
"}",
")",
... | Ensure the provided string does not have illegal characters. | [
"Ensure",
"the",
"provided",
"string",
"does",
"not",
"have",
"illegal",
"characters",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L177-L194 | train | 227,788 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | validate_edge_direction | def validate_edge_direction(edge_direction):
"""Ensure the provided edge direction is either "in" or "out"."""
if not isinstance(edge_direction, six.string_types):
raise TypeError(u'Expected string edge_direction, got: {} {}'.format(
type(edge_direction), edge_direction))
if edge_direction not in ALLOWED_EDGE_DIRECTIONS:
raise ValueError(u'Unrecognized edge direction: {}'.format(edge_direction)) | python | def validate_edge_direction(edge_direction):
"""Ensure the provided edge direction is either "in" or "out"."""
if not isinstance(edge_direction, six.string_types):
raise TypeError(u'Expected string edge_direction, got: {} {}'.format(
type(edge_direction), edge_direction))
if edge_direction not in ALLOWED_EDGE_DIRECTIONS:
raise ValueError(u'Unrecognized edge direction: {}'.format(edge_direction)) | [
"def",
"validate_edge_direction",
"(",
"edge_direction",
")",
":",
"if",
"not",
"isinstance",
"(",
"edge_direction",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected string edge_direction, got: {} {}'",
".",
"format",
"(",
"type",
"(... | Ensure the provided edge direction is either "in" or "out". | [
"Ensure",
"the",
"provided",
"edge",
"direction",
"is",
"either",
"in",
"or",
"out",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L205-L212 | train | 227,789 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | validate_marked_location | def validate_marked_location(location):
"""Validate that a Location object is safe for marking, and not at a field."""
if not isinstance(location, (Location, FoldScopeLocation)):
raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'.format(
type(location).__name__, location))
if location.field is not None:
raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location)) | python | def validate_marked_location(location):
"""Validate that a Location object is safe for marking, and not at a field."""
if not isinstance(location, (Location, FoldScopeLocation)):
raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'.format(
type(location).__name__, location))
if location.field is not None:
raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location)) | [
"def",
"validate_marked_location",
"(",
"location",
")",
":",
"if",
"not",
"isinstance",
"(",
"location",
",",
"(",
"Location",
",",
"FoldScopeLocation",
")",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Location or FoldScopeLocation location, got: {} {}'",
".",
"... | Validate that a Location object is safe for marking, and not at a field. | [
"Validate",
"that",
"a",
"Location",
"object",
"is",
"safe",
"for",
"marking",
"and",
"not",
"at",
"a",
"field",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L215-L222 | train | 227,790 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/helpers.py | invert_dict | def invert_dict(invertible_dict):
"""Invert a dict. A dict is invertible if values are unique and hashable."""
inverted = {}
for k, v in six.iteritems(invertible_dict):
if not isinstance(v, Hashable):
raise TypeError(u'Expected an invertible dict, but value at key {} has type {}'.format(
k, type(v).__name__))
if v in inverted:
raise TypeError(u'Expected an invertible dict, but keys '
u'{} and {} map to the same value'.format(
inverted[v], k))
inverted[v] = k
return inverted | python | def invert_dict(invertible_dict):
"""Invert a dict. A dict is invertible if values are unique and hashable."""
inverted = {}
for k, v in six.iteritems(invertible_dict):
if not isinstance(v, Hashable):
raise TypeError(u'Expected an invertible dict, but value at key {} has type {}'.format(
k, type(v).__name__))
if v in inverted:
raise TypeError(u'Expected an invertible dict, but keys '
u'{} and {} map to the same value'.format(
inverted[v], k))
inverted[v] = k
return inverted | [
"def",
"invert_dict",
"(",
"invertible_dict",
")",
":",
"inverted",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"six",
".",
"iteritems",
"(",
"invertible_dict",
")",
":",
"if",
"not",
"isinstance",
"(",
"v",
",",
"Hashable",
")",
":",
"raise",
"TypeErro... | Invert a dict. A dict is invertible if values are unique and hashable. | [
"Invert",
"a",
"dict",
".",
"A",
"dict",
"is",
"invertible",
"if",
"values",
"are",
"unique",
"and",
"hashable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L230-L242 | train | 227,791 |
kensho-technologies/graphql-compiler | setup.py | read_file | def read_file(filename):
"""Read package file as text to get name and version"""
# intentionally *not* adding an encoding option to open
# see here:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, 'graphql_compiler', filename), 'r') as f:
return f.read() | python | def read_file(filename):
"""Read package file as text to get name and version"""
# intentionally *not* adding an encoding option to open
# see here:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, 'graphql_compiler', filename), 'r') as f:
return f.read() | [
"def",
"read_file",
"(",
"filename",
")",
":",
"# intentionally *not* adding an encoding option to open",
"# see here:",
"# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690",
"here",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"dir... | Read package file as text to get name and version | [
"Read",
"package",
"file",
"as",
"text",
"to",
"get",
"name",
"and",
"version"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L13-L20 | train | 227,792 |
kensho-technologies/graphql-compiler | setup.py | find_version | def find_version():
"""Only define version in one place"""
version_file = read_file('__init__.py')
version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']',
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.') | python | def find_version():
"""Only define version in one place"""
version_file = read_file('__init__.py')
version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']',
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError('Unable to find version string.') | [
"def",
"find_version",
"(",
")",
":",
"version_file",
"=",
"read_file",
"(",
"'__init__.py'",
")",
"version_match",
"=",
"re",
".",
"search",
"(",
"r'^__version__ = [\"\\']([^\"\\']*)[\"\\']'",
",",
"version_file",
",",
"re",
".",
"M",
")",
"if",
"version_match",
... | Only define version in one place | [
"Only",
"define",
"version",
"in",
"one",
"place"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L23-L30 | train | 227,793 |
kensho-technologies/graphql-compiler | setup.py | find_name | def find_name():
"""Only define name in one place"""
name_file = read_file('__init__.py')
name_match = re.search(r'^__package_name__ = ["\']([^"\']*)["\']',
name_file, re.M)
if name_match:
return name_match.group(1)
raise RuntimeError('Unable to find name string.') | python | def find_name():
"""Only define name in one place"""
name_file = read_file('__init__.py')
name_match = re.search(r'^__package_name__ = ["\']([^"\']*)["\']',
name_file, re.M)
if name_match:
return name_match.group(1)
raise RuntimeError('Unable to find name string.') | [
"def",
"find_name",
"(",
")",
":",
"name_file",
"=",
"read_file",
"(",
"'__init__.py'",
")",
"name_match",
"=",
"re",
".",
"search",
"(",
"r'^__package_name__ = [\"\\']([^\"\\']*)[\"\\']'",
",",
"name_file",
",",
"re",
".",
"M",
")",
"if",
"name_match",
":",
"... | Only define name in one place | [
"Only",
"define",
"name",
"in",
"one",
"place"
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L33-L40 | train | 227,794 |
kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_class_with_while.py | workaround_type_coercions_in_recursions | def workaround_type_coercions_in_recursions(match_query):
"""Lower CoerceType blocks into Filter blocks within Recurse steps."""
# This step is required to work around an OrientDB bug that causes queries with both
# "while:" and "class:" in the same query location to fail to parse correctly.
#
# This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129
#
# Instead of "class:", we use "INSTANCEOF" in the "where:" clause to get correct behavior.
# However, we don't want to switch all coercions to this format, since the "class:" clause
# provides valuable info to the MATCH query scheduler about how to schedule efficiently.
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_match_step = match_step
has_coerce_type = match_step.coerce_type_block is not None
has_recurse_root = isinstance(match_step.root_block, Recurse)
if has_coerce_type and has_recurse_root:
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_match_step = match_step._replace(coerce_type_block=None,
where_block=new_where_block)
new_traversal.append(new_match_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def workaround_type_coercions_in_recursions(match_query):
"""Lower CoerceType blocks into Filter blocks within Recurse steps."""
# This step is required to work around an OrientDB bug that causes queries with both
# "while:" and "class:" in the same query location to fail to parse correctly.
#
# This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129
#
# Instead of "class:", we use "INSTANCEOF" in the "where:" clause to get correct behavior.
# However, we don't want to switch all coercions to this format, since the "class:" clause
# provides valuable info to the MATCH query scheduler about how to schedule efficiently.
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_match_step = match_step
has_coerce_type = match_step.coerce_type_block is not None
has_recurse_root = isinstance(match_step.root_block, Recurse)
if has_coerce_type and has_recurse_root:
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_match_step = match_step._replace(coerce_type_block=None,
where_block=new_where_block)
new_traversal.append(new_match_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"workaround_type_coercions_in_recursions",
"(",
"match_query",
")",
":",
"# This step is required to work around an OrientDB bug that causes queries with both",
"# \"while:\" and \"class:\" in the same query location to fail to parse correctly.",
"#",
"# This bug is reported upstream: https... | Lower CoerceType blocks into Filter blocks within Recurse steps. | [
"Lower",
"CoerceType",
"blocks",
"into",
"Filter",
"blocks",
"within",
"Recurse",
"steps",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py#L11-L42 | train | 227,795 |
kensho-technologies/graphql-compiler | graphql_compiler/tool.py | main | def main():
"""Read a GraphQL query from standard input, and output it pretty-printed to standard output."""
query = ' '.join(sys.stdin.readlines())
sys.stdout.write(pretty_print_graphql(query)) | python | def main():
"""Read a GraphQL query from standard input, and output it pretty-printed to standard output."""
query = ' '.join(sys.stdin.readlines())
sys.stdout.write(pretty_print_graphql(query)) | [
"def",
"main",
"(",
")",
":",
"query",
"=",
"' '",
".",
"join",
"(",
"sys",
".",
"stdin",
".",
"readlines",
"(",
")",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"pretty_print_graphql",
"(",
"query",
")",
")"
] | Read a GraphQL query from standard input, and output it pretty-printed to standard output. | [
"Read",
"a",
"GraphQL",
"query",
"from",
"standard",
"input",
"and",
"output",
"it",
"pretty",
"-",
"printed",
"to",
"standard",
"output",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/tool.py#L12-L16 | train | 227,796 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/gremlin_formatting.py | _safe_gremlin_string | def _safe_gremlin_string(value):
"""Sanitize and represent a string argument in Gremlin."""
if not isinstance(value, six.string_types):
if isinstance(value, bytes): # should only happen in py3
value = value.decode('utf-8')
else:
raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: '
u'{}'.format(value))
# Using JSON encoding means that all unicode literals and special chars
# (e.g. newlines and backslashes) are replaced by appropriate escape sequences.
# However, the quoted result is wrapped in double quotes, and $ signs are not escaped,
# so that would allow arbitrary code execution in Gremlin.
# We will therefore turn the double-quoted string into a single-quoted one to avoid this risk.
escaped_and_quoted = json.dumps(value)
# Double-quoted string literals in Gremlin/Groovy allow
# arbitrary code execution via string interpolation and closures.
# To avoid this, we perform the following steps:
# - we strip the wrapping double quotes;
# - we un-escape any double-quotes in the string, by replacing \" with ";
# - we escape any single-quotes in the string, by replacing ' with \';
# - finally, we wrap the string in single quotes.
# http://www.groovy-lang.org/syntax.html#_double_quoted_string
if not escaped_and_quoted[0] == escaped_and_quoted[-1] == '"':
raise AssertionError(u'Unreachable state reached: {} {}'.format(value, escaped_and_quoted))
no_quotes = escaped_and_quoted[1:-1]
re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'')
final_escaped_value = '\'' + re_escaped + '\''
return final_escaped_value | python | def _safe_gremlin_string(value):
"""Sanitize and represent a string argument in Gremlin."""
if not isinstance(value, six.string_types):
if isinstance(value, bytes): # should only happen in py3
value = value.decode('utf-8')
else:
raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: '
u'{}'.format(value))
# Using JSON encoding means that all unicode literals and special chars
# (e.g. newlines and backslashes) are replaced by appropriate escape sequences.
# However, the quoted result is wrapped in double quotes, and $ signs are not escaped,
# so that would allow arbitrary code execution in Gremlin.
# We will therefore turn the double-quoted string into a single-quoted one to avoid this risk.
escaped_and_quoted = json.dumps(value)
# Double-quoted string literals in Gremlin/Groovy allow
# arbitrary code execution via string interpolation and closures.
# To avoid this, we perform the following steps:
# - we strip the wrapping double quotes;
# - we un-escape any double-quotes in the string, by replacing \" with ";
# - we escape any single-quotes in the string, by replacing ' with \';
# - finally, we wrap the string in single quotes.
# http://www.groovy-lang.org/syntax.html#_double_quoted_string
if not escaped_and_quoted[0] == escaped_and_quoted[-1] == '"':
raise AssertionError(u'Unreachable state reached: {} {}'.format(value, escaped_and_quoted))
no_quotes = escaped_and_quoted[1:-1]
re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'')
final_escaped_value = '\'' + re_escaped + '\''
return final_escaped_value | [
"def",
"_safe_gremlin_string",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"# should only happen in py3",
"value",
"=",
"value",
".",
... | Sanitize and represent a string argument in Gremlin. | [
"Sanitize",
"and",
"represent",
"a",
"string",
"argument",
"in",
"Gremlin",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L18-L48 | train | 227,797 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/gremlin_formatting.py | _safe_gremlin_list | def _safe_gremlin_list(inner_type, argument_value):
"""Represent the list of "inner_type" objects in Gremlin form."""
if not isinstance(argument_value, list):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
u'{}'.format(argument_value))
stripped_type = strip_non_null_from_type(inner_type)
components = (
_safe_gremlin_argument(stripped_type, x)
for x in argument_value
)
return u'[' + u','.join(components) + u']' | python | def _safe_gremlin_list(inner_type, argument_value):
"""Represent the list of "inner_type" objects in Gremlin form."""
if not isinstance(argument_value, list):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
u'{}'.format(argument_value))
stripped_type = strip_non_null_from_type(inner_type)
components = (
_safe_gremlin_argument(stripped_type, x)
for x in argument_value
)
return u'[' + u','.join(components) + u']' | [
"def",
"_safe_gremlin_list",
"(",
"inner_type",
",",
"argument_value",
")",
":",
"if",
"not",
"isinstance",
"(",
"argument_value",
",",
"list",
")",
":",
"raise",
"GraphQLInvalidArgumentError",
"(",
"u'Attempting to represent a non-list as a list: '",
"u'{}'",
".",
"for... | Represent the list of "inner_type" objects in Gremlin form. | [
"Represent",
"the",
"list",
"of",
"inner_type",
"objects",
"in",
"Gremlin",
"form",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L81-L92 | train | 227,798 |
kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/gremlin_formatting.py | _safe_gremlin_argument | def _safe_gremlin_argument(expected_type, argument_value):
"""Return a Gremlin string representing the given argument value."""
if GraphQLString.is_same_type(expected_type):
return _safe_gremlin_string(argument_value)
elif GraphQLID.is_same_type(expected_type):
# IDs can be strings or numbers, but the GraphQL library coerces them to strings.
# We will follow suit and treat them as strings.
if not isinstance(argument_value, six.string_types):
if isinstance(argument_value, bytes): # should only happen in py3
argument_value = argument_value.decode('utf-8')
else:
argument_value = six.text_type(argument_value)
return _safe_gremlin_string(argument_value)
elif GraphQLFloat.is_same_type(expected_type):
return represent_float_as_str(argument_value)
elif GraphQLInt.is_same_type(expected_type):
# Special case: in Python, isinstance(True, int) returns True.
# Safeguard against this with an explicit check against bool type.
if isinstance(argument_value, bool):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: '
u'{}'.format(argument_value))
return type_check_and_str(int, argument_value)
elif GraphQLBoolean.is_same_type(expected_type):
return type_check_and_str(bool, argument_value)
elif GraphQLDecimal.is_same_type(expected_type):
return _safe_gremlin_decimal(argument_value)
elif GraphQLDate.is_same_type(expected_type):
return _safe_gremlin_date_and_datetime(expected_type, (datetime.date,), argument_value)
elif GraphQLDateTime.is_same_type(expected_type):
return _safe_gremlin_date_and_datetime(expected_type,
(datetime.datetime, arrow.Arrow), argument_value)
elif isinstance(expected_type, GraphQLList):
return _safe_gremlin_list(expected_type.of_type, argument_value)
else:
raise AssertionError(u'Could not safely represent the requested GraphQL type: '
u'{} {}'.format(expected_type, argument_value)) | python | def _safe_gremlin_argument(expected_type, argument_value):
"""Return a Gremlin string representing the given argument value."""
if GraphQLString.is_same_type(expected_type):
return _safe_gremlin_string(argument_value)
elif GraphQLID.is_same_type(expected_type):
# IDs can be strings or numbers, but the GraphQL library coerces them to strings.
# We will follow suit and treat them as strings.
if not isinstance(argument_value, six.string_types):
if isinstance(argument_value, bytes): # should only happen in py3
argument_value = argument_value.decode('utf-8')
else:
argument_value = six.text_type(argument_value)
return _safe_gremlin_string(argument_value)
elif GraphQLFloat.is_same_type(expected_type):
return represent_float_as_str(argument_value)
elif GraphQLInt.is_same_type(expected_type):
# Special case: in Python, isinstance(True, int) returns True.
# Safeguard against this with an explicit check against bool type.
if isinstance(argument_value, bool):
raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: '
u'{}'.format(argument_value))
return type_check_and_str(int, argument_value)
elif GraphQLBoolean.is_same_type(expected_type):
return type_check_and_str(bool, argument_value)
elif GraphQLDecimal.is_same_type(expected_type):
return _safe_gremlin_decimal(argument_value)
elif GraphQLDate.is_same_type(expected_type):
return _safe_gremlin_date_and_datetime(expected_type, (datetime.date,), argument_value)
elif GraphQLDateTime.is_same_type(expected_type):
return _safe_gremlin_date_and_datetime(expected_type,
(datetime.datetime, arrow.Arrow), argument_value)
elif isinstance(expected_type, GraphQLList):
return _safe_gremlin_list(expected_type.of_type, argument_value)
else:
raise AssertionError(u'Could not safely represent the requested GraphQL type: '
u'{} {}'.format(expected_type, argument_value)) | [
"def",
"_safe_gremlin_argument",
"(",
"expected_type",
",",
"argument_value",
")",
":",
"if",
"GraphQLString",
".",
"is_same_type",
"(",
"expected_type",
")",
":",
"return",
"_safe_gremlin_string",
"(",
"argument_value",
")",
"elif",
"GraphQLID",
".",
"is_same_type",
... | Return a Gremlin string representing the given argument value. | [
"Return",
"a",
"Gremlin",
"string",
"representing",
"the",
"given",
"argument",
"value",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L95-L131 | train | 227,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.