INSTRUCTION
stringlengths 1
46.3k
| RESPONSE
stringlengths 75
80.2k
|
|---|---|
Finaliza la seleccion.
Marca como seleccionados todos los objetos que se encuentran
dentro del recuadro de seleccion.
|
def _finish_selecting(self, event):
    """Finish the rubber-band selection.

    Hides the selection rectangle and announces the selected region
    through a virtual <<RegionSelected>> event on the canvas.
    """
    self._selecting = False
    cv = self._canvas
    end_x = cv.canvasx(event.x)
    end_y = cv.canvasy(event.y)
    # Park the rubber-band rectangle off-canvas and hide it.
    cv.coords(self._sobject, -1, -1, -1, -1)
    cv.itemconfigure(self._sobject, state=tk.HIDDEN)
    # The region spans from the drag start point to the release point.
    cv.region_selected = (self._sstart[0], self._sstart[1], end_x, end_y)
    cv.event_generate('<<RegionSelected>>')
|
Create a callback to manage mousewheel events
orient: string (possible values: ('x', 'y'))
widget: widget that implement tk xview and yview methods
|
def make_onmousewheel_cb(widget, orient, factor=1):
    """Create a callback to manage mousewheel events.

    widget: widget that implements the tk xview and yview methods.
    orient: string, either 'x' or 'y', selecting which view to scroll.
    factor: scroll speed multiplier.
    """
    view_command = getattr(widget, orient + 'view')
    system = platform.system()
    if system in ('Linux', 'OpenBSD', 'FreeBSD'):
        # X11 reports wheel motion as button 4 (up) / button 5 (down).
        def on_mousewheel(event):
            if event.num == 4:
                view_command('scroll', -factor, 'units')
            elif event.num == 5:
                view_command('scroll', factor, 'units')
    elif system == 'Windows':
        # Windows reports signed deltas in multiples of 120.
        def on_mousewheel(event):
            view_command('scroll', -int((event.delta / 120) * factor), 'units')
    elif system == 'Darwin':
        # macOS already delivers the delta in scroll units.
        def on_mousewheel(event):
            view_command('scroll', event.delta, 'units')
    else:
        # FIXME: unknown platform scroll method
        def on_mousewheel(event):
            pass
    return on_mousewheel
|
- json list of key, value pairs:
Example: [["A", "Option 1 Label"], ["B", "Option 2 Label"]]
- space separated string with a list of options:
Example: "Option1 Opt2 Opt3"
will be converted to a key, value pair of the following form:
(("Option1", "Option1"), ("Opt2", "Opt2"), ("Opt3", "Opt3"))
- an iterable of key, value pairs
|
def __obj2choices(self, values):
    """Normalize *values* into a choices structure.

    Accepted inputs:
    - json list of key, value pairs:
      Example: [["A", "Option 1 Label"], ["B", "Option 2 Label"]]
    - space separated string with a list of options:
      Example: "Option1 Opt2 Opt3"
      will be converted to a key, value pair of the following form:
      (("Option1", "Option1"), ("Opt2", "Opt2"), ("Opt3", "Opt3"))
    - an iterable of key, value pairs (returned unchanged)
    """
    choices = values
    # choices from string: use isinstance instead of type() comparison.
    if isinstance(values, str):
        # Prefer JSON decoding; fall back to the raw string on failure.
        try:
            obj = json.loads(values)
        except ValueError:
            obj = values
        if isinstance(obj, list):
            choices = obj
        else:
            # NOTE(review): assumes decoded JSON is list or string; a
            # bare JSON number here would raise AttributeError (same as
            # the original code) -- confirm inputs are constrained.
            choices = obj.split()
    return choices
|
choices: iterable of key, value pairs
|
def __choices2tkvalues(self, choices):
    """Return the display value (second element) of each choice pair.

    choices: iterable of (key, value) pairs.
    """
    return [value for _key, value in choices]
|
Generate coords for a matrix of rects
|
def matrix_coords(rows, cols, rowh, colw, ox=0, oy=0):
    """Generate (index, x0, y0, x1, y1) coords for a matrix of rects."""
    for index, row, col in rowmajor(rows, cols):
        left = ox + col * colw
        top = oy + row * rowh
        yield (index, left, top, left + colw, top + rowh)
|
Marks the specified month day with a visual marker
(typically by making the number bold).
If only day is specified and the calendar month and year
are changed, the marked day remains marked.
You can be more specific setting month and year parameters.
|
def mark_day(self, day, month=None, year=None):
    """Mark the given month day with a visual marker (typically bold).

    If only day is given, the mark survives month/year navigation;
    pass month and/or year to restrict the mark to a specific date.
    """
    self._remark_date(day, month, year, highlight=True)
|
Draws calendar.
|
def _draw_calendar(self, canvas, redraw=False):
    """Draw (or redraw) the calendar on the canvas.

    Draws the month/year labels, the weekday header, the 6x7 cell
    matrix and the day numbers, then re-applies day marks.

    canvas: target canvas widget.
    redraw: when True, reposition/reconfigure the existing canvas
            items instead of creating new ones.
    """
    options = self.__options
    # Update labels:
    name = self._cal.formatmonthname(self._date.year, self._date.month, 0,
                                     withyear=False)
    self._lmonth.configure(text=name.title())
    self._lyear.configure(text=str(self._date.year))
    # Update calendar: 7 columns, 1 header row + 6 week rows.
    ch = canvas.winfo_height()
    cw = canvas.winfo_width()
    rowh = ch / 7.0
    colw = cw / 7.0
    # Header background
    if self._rheader is None:
        self._rheader = canvas.create_rectangle(0, 0, cw, rowh, width=0,
                                                fill=options['headerbg'])
    else:
        canvas.itemconfigure(self._rheader, fill=options['headerbg'])
        canvas.coords(self._rheader, 0, 0, cw, rowh)
    # Header text (abbreviated weekday names)
    ox = 0
    oy = rowh / 2.0
    coffset = colw / 2.0
    cols = self._cal.formatweekheader(3).split()
    for i in range(0, 7):
        x = ox + i * colw + coffset
        if redraw:
            item = self._theader[i]
            canvas.coords(item, x, oy)
            canvas.itemconfigure(item, text=cols[i],
                                 fill=options['headerfg'])
        else:
            # BUGFIX: the text was created with the header *background*
            # color ('headerbg'), making it invisible against the header
            # rectangle; use 'headerfg' as the redraw branch does.
            self._theader[i] = canvas.create_text(x, oy, text=cols[i],
                                                  fill=options['headerfg'])
    # background matrix
    oy = rowh
    ox = 0
    for i, x, y, x1, y1 in matrix_coords(6, 7, rowh, colw, ox, oy):
        # Shrink by 1px so the cell outlines do not overlap.
        x1 -= 1
        y1 -= 1
        if redraw:
            rec = self._recmat[i]
            canvas.coords(rec, x, y, x1, y1)
            canvas.itemconfigure(rec, fill=options['calendarbg'])
        else:
            rec = canvas.create_rectangle(x, y, x1, y1, width=1,
                                          fill=options['calendarbg'],
                                          outline=options['calendarbg'],
                                          activeoutline=options['selectbg'],
                                          activewidth=1, tags='cell')
            self._recmat[i] = rec
    # text matrix (day numbers, centered in each cell)
    weeks = self._weeks
    xoffset = colw / 2.0
    yoffset = rowh / 2.0
    oy = rowh
    ox = 0
    for i, x, y, x1, y1 in matrix_coords(6, 7, rowh, colw, ox, oy):
        # Use xoffset here (it was computed but unused; the code
        # previously reused coffset, which holds the same value).
        x += xoffset
        y += yoffset
        # day text; 0 in the weeks matrix means "no day in this cell".
        txt = ""
        f, c = i2rc(i, 7)
        if f < len(weeks):
            day = weeks[f][c]
            txt = "{0}".format(day) if day != 0 else ""
        if redraw:
            item = self._txtmat[i]
            canvas.coords(item, x, y)
            canvas.itemconfigure(item, text=txt)
        else:
            self._txtmat[i] = canvas.create_text(x, y, text=txt,
                                                 state=tk.DISABLED)
    # Mark days
    self._mark_days()
|
Return a dictionary that represents the Tcl array
|
def get(self):
    '''Return a dictionary that represents the Tcl array.'''
    return {name: var.get()
            for name, var in self._elementvars.items()}
|
Update inplace widgets position when doing vertical scroll
|
def yview(self, *args):
    """Update inplace widgets position when doing vertical scroll"""
    # Defer repositioning of the in-place editors until the scroll
    # has actually happened.
    self.after_idle(self.__updateWnds)
    # NOTE(review): the Treeview.yview result is discarded; when called
    # with no args tk expects the (first, last) fractions returned --
    # confirm scrollbar queries still work through this override.
    ttk.Treeview.yview(self, *args)
|
Update inplace widgets position when doing horizontal scroll
|
def xview(self, *args):
    """Update inplace widgets position when doing horizontal scroll"""
    # Defer repositioning of the in-place editors until the scroll
    # has actually happened.
    self.after_idle(self.__updateWnds)
    # NOTE(review): return value discarded, same caveat as yview.
    ttk.Treeview.xview(self, *args)
|
Checks if the focus has changed
|
def __check_focus(self, event):
    """Checks if the focus has changed"""
    #print('Event:', event.type, event.x, event.y)
    changed = False
    if not self._curfocus:
        # Nothing was focused before: any focus counts as a change.
        changed = True
    elif self._curfocus != self.focus():
        # Focus moved to a different item: drop the old editors first.
        self.__clear_inplace_widgets()
        changed = True
    newfocus = self.focus()
    if changed:
        if newfocus:
            #print('Focus changed to:', newfocus)
            self._curfocus= newfocus
            self.__focus(newfocus)
        # Reposition editors even when focus was simply cleared.
        self.__updateWnds()
|
Called when focus item has changed
|
def __focus(self, item):
    """Called when the focused item has changed.

    Generates <<TreeviewInplaceEdit>> for every displayed column so
    editors get (re)created, and wires Tab/Shift-Tab focus navigation
    on each in-place editor widget.
    """
    cols = self.__get_display_columns()
    for col in cols:
        self.__event_info = (col, item)
        self.event_generate('<<TreeviewInplaceEdit>>')
        if col in self._inplace_widgets:
            w = self._inplace_widgets[col]
            # BUGFIX: bind w as a default argument. A plain closure
            # late-binds the loop variable, so every Tab callback ended
            # up moving focus relative to the widget of the *last*
            # column instead of the one the event fired on.
            w.bind('<Key-Tab>',
                   lambda e, w=w: w.tk_focusNext().focus_set())
            w.bind('<Shift-Key-Tab>',
                   lambda e, w=w: w.tk_focusPrev().focus_set())
|
Remove all inplace edit widgets.
|
def __clear_inplace_widgets(self):
    """Remove all inplace edit widgets."""
    for col in self.__get_display_columns():
        if col not in self._inplace_widgets:
            continue
        # Unmap the editor and forget its "shown" flag, if any.
        self._inplace_widgets[col].place_forget()
        self._inplace_widgets_show.pop(col, None)
|
Run parent install, and then save the install dir in the script.
|
def run(self):
    """Run parent install, and then save the install dir in the script."""
    install.run(self)
    # Collect stale launcher scripts left behind by older releases.
    base = os.path.join(self.install_scripts, 'pygubu')
    stale = [base + ext for ext in ('.py', '.pyw')]
    if platform.system() == 'Windows':
        # Old Windows releases also shipped a .bat launcher.
        stale.append(os.path.join(self.install_scripts,
                                  'pygubu-designer.bat'))
    for path in stale:
        if os.path.exists(path):
            os.remove(path)
|
Populate a frame with a list of all editable properties
|
def _create_properties(self):
    """Populate a frame with a list of all editable properties"""
    self._rcbag = {} # bag for row/column prop editors
    self._fgrid = f = ttk.Labelframe(self._sframe.innerframe,
                                     text=_('Grid options:'), padding=5)
    f.grid(sticky='nswe')
    # hack to resize correctly when properties are hidden
    label = ttk.Label(f)
    label.grid()
    label_tpl = "{0}:"
    row = 0
    col = 0
    groups = (
        ('00', properties.GRID_PROPERTIES, properties.LAYOUT_OPTIONS),
    )
    # One "label: editor" row per grid property; the (gcode+name) key
    # lets the pair be shown/hidden later via self._propbag.
    for gcode, plist, propdescr in groups:
        for name in plist:
            kwdata = propdescr[name]
            labeltext = label_tpl.format(name)
            label = ttk.Label(self._fgrid, text=labeltext, anchor=tk.W)
            label.grid(row=row, column=col, sticky=tk.EW, pady=2)
            widget = self._create_editor(self._fgrid, name, kwdata)
            widget.grid(row=row, column=col+1, sticky=tk.EW, pady=2)
            row += 1
            self._propbag[gcode+name] = (label, widget)
    # Grid row/col properties
    #labels
    gcode = '01'
    self._fgrc = fgrc = ttk.LabelFrame(self._sframe.innerframe,
                                       text=_('Grid row/column options:'),
                                       padding=5)
    fgrc.grid(row=1, column=0, sticky=tk.NSEW, pady='10 0')
    #hack to resize correctly when properties are hidden
    label = ttk.Label(fgrc)
    label.grid()
    row = col = 0
    icol = 1
    headers = []
    # Column headers: one per row/column property name.
    for pname in properties.GRID_RC_PROPERTIES:
        label = ttk.Label(fgrc, text=pname)
        label.grid(row=row, column=icol)
        headers.append(label)
        icol += 1
    self._internal = {'grc_headers': headers}
    #
    name_format = '{}_{}_{}' # {row/column}_{number}_{name}
    MAX_RC = 50
    #rowconfig
    row += 1
    trow_label = _('Row {0}:')
    tcol_label = _('Column {0}:')
    # NOTE(review): editors are pre-created for a fixed maximum of
    # MAX_RC rows/columns. Row editors land in grid rows [1, MAX_RC],
    # column editors in [1+MAX_RC, 2*MAX_RC].
    for index in range(0, MAX_RC):
        labeltext = trow_label.format(index)
        label = ttk.Label(fgrc, text=labeltext)
        label.grid(row=row, column=0)
        labeltext = tcol_label.format(index)
        labelc = ttk.Label(fgrc, text=labeltext)
        labelc.grid(row=row + MAX_RC, column=0, sticky=tk.E, pady=2)
        icol = 1
        for pname in properties.GRID_RC_PROPERTIES:
            kwdata = properties.LAYOUT_OPTIONS[pname]
            alias = name_format.format('row', index, pname)
            widget = self._create_editor(fgrc, alias, kwdata)
            widget.grid(row=row, column=icol, pady=2, sticky='ew')
            self._rcbag[alias] = (label, widget)
            alias = name_format.format('column', index, pname)
            widget = self._create_editor(fgrc, alias, kwdata)
            widget.grid(row=row + MAX_RC, column=icol, pady=2, sticky='ew')
            self._rcbag[alias] = (labelc, widget)
            icol += 1
        row += 1
|
Populate a frame with a list of all editable properties
|
def _create_properties(self):
    """Populate a frame with a list of all editable properties"""
    self._frame = f = ttk.Labelframe(self._sframe.innerframe,
                                     text=_('Widget properties'))
    f.grid(sticky='nswe')
    label_tpl = "{0}:"
    row = 0
    col = 0
    # (group code, group title, property name list, descriptions)
    groups = (
        ('00', _('Required'), properties.WIDGET_REQUIRED_OPTIONS,
         properties.REQUIRED_OPTIONS),
        ('01', _('Standard'), properties.WIDGET_STANDARD_OPTIONS,
         properties.TK_WIDGET_OPTIONS),
        ('02', _('Specific'), properties.WIDGET_SPECIFIC_OPTIONS,
         properties.TK_WIDGET_OPTIONS),
        ('03', _('Custom'), properties.WIDGET_CUSTOM_OPTIONS,
         properties.CUSTOM_OPTIONS),
    )
    for gcode, gname, plist, propdescr in groups:
        # Bold section header; first group gets smaller top padding.
        padding = '0 0 0 5' if row == 0 else '0 5 0 5'
        label = ttk.Label(self._frame, text=gname,
                          font='TkDefaultFont 10 bold', padding=padding,
                          foreground='#000059')
        label.grid(row=row, column=0, sticky='we', columnspan=2)
        row += 1
        # One "label: editor" row per property, keyed by gcode+name so
        # pairs can be shown/hidden later via self._propbag.
        for name in plist:
            kwdata = propdescr[name]
            labeltext = label_tpl.format(name)
            label = ttk.Label(self._frame, text=labeltext, anchor=tk.W)
            label.grid(row=row, column=col, sticky=tk.EW, pady=2)
            widget = self._create_editor(self._frame, name, kwdata)
            widget.grid(row=row, column=col+1, sticky=tk.EW, pady=2)
            row += 1
            self._propbag[gcode+name] = (label, widget)
            logger.debug('Created property: {0}-{1}'.format(gname,name))
|
Hide all properties from property editor.
|
def hide_all(self):
    """Hide every property editor and clear the current selection."""
    self.current = None
    # grid_remove (vs grid_forget) keeps the grid options so the
    # editors can be re-shown in place later.
    for label, widget in self._propbag.values():
        label.grid_remove()
        widget.grid_remove()
|
Creates dict with properties marked as readonly
|
def _get_init_args(self):
    """Return a dict with the current values of the read-only properties."""
    return {prop: self.properties[prop]
            for prop in self.ro_properties
            if prop in self.properties}
|
Calculate menu width and height.
|
def _calculate_menu_wh(self):
    """ Calculate menu width and height."""
    # Two accumulators: (w, h) use the root menu font, (iw, ih) use the
    # per-item fonts; the larger of the two wins at the end.
    w = iw = 50
    h = ih = 0
    # menu.index returns None if there are no choices
    index = self._menu.index(tk.END)
    index = index if index is not None else 0
    count = index + 1
    # First calculate using the font parameters of root menu:
    font = self._menu.cget('font')
    font = self._get_font(font)
    for i in range(0, count):
        mtype = self._menu.type(i)
        if mtype == 'tearoff':
            continue
        label = 'default'
        ifont = 'TkMenuFont'
        if mtype != 'separator':
            label = self._menu.entrycget(i, 'label')
            ifont = self._menu.entrycget(i, 'font')
        wpx = font.measure(label)
        hpx = font.metrics('linespace')
        w += wpx
        if hpx > h:
            # NOTE(review): doubles the line height -- presumably extra
            # breathing room for the popup; confirm factor is intended.
            h = hpx * 2
        # Calculate using font configured for each subitem
        ifont = self._get_font(ifont)
        wpx = ifont.measure(label)
        hpx = ifont.metrics('linespace')
        iw += wpx
        if hpx > ih:
            ih = hpx * 2
    # Then compare 2 sizes and use the greatest
    w = max(w, iw, 100)
    h = max(h, ih, 25)
    # Final size includes a 25% margin.
    self._cwidth = w + int(w * 0.25)
    self._cheight = h + int(h * 0.25)
|
Returns True if mouse is over a resizer
|
def _over_resizer(self, x, y):
    """Return True if the canvas position (x, y) is over a resizer handle."""
    items = self.canvas.find_overlapping(x, y, x, y)
    if items:
        # Only the topmost item under the cursor is considered.
        return 'resizer' in self.canvas.gettags(items[0])
    return False
|
Resizes preview that is currently dragged
|
def resize_preview(self, dw, dh):
    """Resize the preview currently being dragged by (dw, dh)."""
    if self._objects_moving:
        # Identify which preview the dragged canvas item belongs to:
        # it carries a tag of the form 'preview_<ident>'.
        canvas_id = self._objects_moving[0]
        for tag in self.canvas.gettags(canvas_id):
            if tag.startswith('preview_'):
                ident = tag.split('preview_')[1]
                self.previews[ident].resize_by(dw, dh)
                self.move_previews()
                break
    self._update_cregion()
|
Move previews after a resize event
|
def move_previews(self):
    """Move every preview to its recalculated vertical position."""
    # _calc_preview_ypos yields the target top y for each preview in
    # dict iteration order.
    targets = self._calc_preview_ypos()
    for idx, preview in enumerate(self.previews.values()):
        preview.move_by(0, targets[idx] - preview.y)
    self._update_cregion()
    self.show_selected(self._sel_id, self._sel_widget)
|
Calculates the previews positions on canvas
|
def _calc_preview_ypos(self):
    """Return the y coordinate where each preview should start.

    The list has one extra trailing entry: the position of the next
    free slot after the last preview.
    """
    positions = [10]
    for preview in self.previews.values():
        positions.append(positions[-1] + preview.height() + self.padding)
    return positions
|
Returns the next coordinates for a preview
|
def _get_slot(self):
    """Return (x, y) canvas coordinates for the next preview slot."""
    # Stack previews vertically below the existing ones.
    y = 10 + sum(p.height() + self.padding
                 for p in self.previews.values())
    return 10, y
|
Call this before closing tk root
|
def clear_cache(cls):
    """Call this before closing tk root"""
    #Prevent tkinter errors on python 2 ??
    # Null the entries in place first (so any other reference to this
    # dict also sees the images released), then drop the dict itself.
    for key in cls._cached:
        cls._cached[key] = None
    cls._cached = {}
|
Register a image file using key
|
def register(cls, key, filename):
    """Register an image file under *key*.

    filename: path of the image file to load lazily on first get().
    """
    if key in cls._stock:
        # Replacing an existing resource is an anomaly worth surfacing:
        # log it at WARNING level, not INFO (and use lazy %-args).
        logger.warning('Warning, replacing resource %s', key)
    cls._stock[key] = {'type': 'custom', 'filename': filename}
    logger.info('%s registered as %s', filename, key)
|
Register a image data using key
|
def register_from_data(cls, key, format, data):
    """Register in-memory image data under *key*.

    format: image format string understood by tk.PhotoImage.
    data: the raw (or base64) image data.
    """
    if key in cls._stock:
        # Replacement logged at WARNING level (was INFO), lazy %-args.
        logger.warning('Warning, replacing resource %s', key)
    cls._stock[key] = {'type': 'data', 'data': data, 'format': format}
    logger.info('%s registered as %s', 'data', key)
|
Register an already created image using key
|
def register_created(cls, key, image):
    """Register an already created image object under *key*."""
    if key in cls._stock:
        # Replacement logged at WARNING level (was INFO), lazy %-args.
        logger.warning('Warning, replacing resource %s', key)
    cls._stock[key] = {'type': 'created', 'image': image}
    # NOTE(review): message says 'data' (copy-paste from
    # register_from_data) -- kept for log compatibility.
    logger.info('%s registered as %s', 'data', key)
|
List files from dir_path and register images with
filename as key (without extension)
Additionally a prefix for the key can be provided,
so the resulting key will be prefix + filename
|
def register_from_dir(cls, dir_path, prefix=''):
    """Register every known image file found in dir_path.

    The key for each image is its file name without extension,
    optionally prepended with prefix (key = prefix + name).
    """
    for entry in os.listdir(dir_path):
        name, ext = os.path.splitext(entry)
        # Only register extensions the class knows how to load.
        if ext in cls._formats:
            cls.register('{0}{1}'.format(prefix, name),
                         os.path.join(dir_path, entry))
|
Load image from file or return the cached instance.
|
def _load_image(cls, rkey):
    """Load the registered image for rkey, cache it and return it."""
    entry = cls._stock[rkey]
    itype = entry['type']
    if itype in ('stock', 'data'):
        # In-memory data: build the PhotoImage from it.
        img = tk.PhotoImage(format=entry['format'], data=entry['data'])
    elif itype == 'created':
        # Already an image object; use it directly.
        img = entry['image']
    else:
        # 'custom' and any other type: load from the registered file.
        img = tk.PhotoImage(file=entry['filename'])
    cls._cached[rkey] = img
    logger.info('Loaded resource %s.' % rkey)
    return img
|
Get image previously registered with key rkey.
If key not exist, raise StockImageException
|
def get(cls, rkey):
    """Return the image registered under rkey.

    Cached instances are returned directly; otherwise the image is
    loaded first. Raises StockImageException for unknown keys.
    """
    if rkey in cls._cached:
        logger.info('Resource %s is in cache.' % rkey)
        return cls._cached[rkey]
    if rkey not in cls._stock:
        raise StockImageException('StockImage: %s not registered.' % rkey)
    return cls._load_image(rkey)
|
Sets treeview columns and other params
|
def config_treeview(self):
    """Sets treeview columns and other params"""
    tree = self.treeview
    # Double click opens/edits the clicked item.
    tree.bind('<Double-1>', self.on_treeview_double_click)
    # add='+' keeps any previously installed selection handlers.
    tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+')
|
Returns the top level parent for treeitem.
|
def get_toplevel_parent(self, treeitem):
    """Return the top level ancestor of treeitem (or treeitem itself)."""
    tv = self.treeview
    top_items = tv.get_children()
    # Walk up the tree until we land on a root-level item.
    current = treeitem
    while current not in top_items:
        current = tv.parent(current)
    return current
|
Create a preview of the selected treeview item
|
def draw_widget(self, item):
    """Create a preview of the selected treeview item"""
    if item:
        # The filter hides items; remove it so the whole subtree is
        # serialized, remembering it to restore afterwards.
        self.filter_remove(remember=True)
        selected_id = self.treedata[item]['id']
        # Previews are always drawn from the toplevel ancestor down.
        item = self.get_toplevel_parent(item)
        widget_id = self.treedata[item]['id']
        wclass = self.treedata[item]['class']
        xmlnode = self.tree_node_to_xml('', item)
        self.previewer.draw(item, widget_id, xmlnode, wclass)
        # Highlight the originally selected widget inside the preview.
        self.previewer.show_selected(item, selected_id)
        self.filter_restore()
|
Removes selected items from treeview
|
def on_treeview_delete_selection(self, event=None):
    """Removes selected items from treeview"""
    tv = self.treeview
    selection = tv.selection()
    # Need to remove filter
    self.filter_remove(remember=True)
    toplevel_items = tv.get_children()
    # Parents whose previews must be redrawn after the deletes.
    parents_to_redraw = set()
    for item in selection:
        try:
            parent = ''
            if item not in toplevel_items:
                parent = self.get_toplevel_parent(item)
            else:
                # Toplevel item: its preview disappears entirely.
                self.previewer.delete(item)
            del self.treedata[item]
            tv.delete(item)
            self.app.set_changed()
            if parent:
                self._update_max_grid_rc(parent)
                parents_to_redraw.add(parent)
            self.widget_editor.hide_all()
        except tk.TclError:
            # Selection of parent and child items ??
            # TODO: notify something here
            pass
    # redraw widgets
    for item in parents_to_redraw:
        self.draw_widget(item)
    # restore filter
    self.filter_restore()
|
Traverses treeview and generates a ElementTree object
|
def tree_to_xml(self):
    """Traverse the treeview and build an ElementTree for the whole UI."""
    # The filter must be removed, otherwise hidden items are not saved.
    self.filter_remove(remember=True)
    root = ET.Element('interface')
    for item in self.treeview.get_children():
        root.append(self.tree_node_to_xml('', item))
    # restore filter
    self.filter_restore()
    return ET.ElementTree(root)
|
Converts a treeview item and children to xml nodes
|
def tree_node_to_xml(self, parent, item):
"""Converts a treeview item and children to xml nodes"""
tree = self.treeview
data = self.treedata[item]
node = data.to_xml_node()
children = tree.get_children(item)
for child in children:
cnode = ET.Element('child')
cwidget = self.tree_node_to_xml(item, child)
cnode.append(cwidget)
node.append(cnode)
return node
|
Insert a item on the treeview and fills columns from data
|
def _insert_item(self, root, data, from_file=False):
    """Insert a item on the treeview and fills columns from data"""
    tree = self.treeview
    treelabel = data.get_id()
    row = col = ''
    if root != '' and 'layout' in data:
        row = data.get_layout_property('row')
        col = data.get_layout_property('column')
        # fix row position when using copy and paste
        # If collision, increase by 1
        row_count = self.get_max_row(root)
        if not from_file and (row_count > int(row) and int(col) == 0):
            row = str(row_count + 1)
            data.set_layout_property('row', row)
    # Pick an icon: class-specific 16x16 image, falling back to the
    # generic tk default icon (best effort, missing images are ignored).
    image = ''
    try:
        image = StockImage.get('16x16-tk.default')
    except StockImageException:
        # TODO: notify something here
        pass
    try:
        image = StockImage.get('16x16-{0}'.format(data.get_class()))
    except StockImageException:
        # TODO: notify something here
        pass
    values = (data.get_class(), row, col)
    item = tree.insert(root, 'end', text=treelabel, values=values,
                       image=image)
    # Observe the data object so update_event keeps the row in sync.
    data.attach(self)
    self.treedata[item] = data
    # Update grid r/c data
    self._update_max_grid_rc(root, from_file=True)
    self.app.set_changed()
    return item
|
Copies selected items to clipboard.
|
def copy_to_clipboard(self):
    """Serialize the selected items as XML and place it on the clipboard."""
    tree = self.treeview
    selection = tree.selection()
    if not selection:
        return
    # Remove the filter so hidden children are serialized too.
    self.filter_remove(remember=True)
    root = ET.Element('selection')
    for item in selection:
        root.append(self.tree_node_to_xml('', item))
    # python2 issue: the 'unicode' pseudo-encoding is python3 only.
    try:
        text = ET.tostring(root, encoding='unicode')
    except LookupError:
        text = ET.tostring(root, encoding='UTF-8')
    tree.clipboard_clear()
    tree.clipboard_append(text)
    self.filter_restore()
|
Adds a new item to the treeview.
|
def add_widget(self, wclass):
    """Adds a new item to the treeview."""
    tree = self.treeview
    # get the selected item:
    selected_item = ''
    tsel = tree.selection()
    if tsel:
        selected_item = tsel[0]
    # Need to remove filter if set
    self.filter_remove()
    root = selected_item
    # check if the widget can be added at selected point
    if not self._validate_add(root, wclass, False):
        # if not try to add at item parent level
        parent = tree.parent(root)
        if parent != root:
            if self._validate_add(parent, wclass):
                root = parent
            else:
                return
        else:
            return
    # root item should be set at this point
    # setup properties
    widget_id = self.get_unique_id(wclass)
    data = WidgetDescr(wclass, widget_id)
    # setup default values for properties
    for pname in builder.CLASS_MAP[wclass].builder.properties:
        pdescription = {}
        if pname in properties.WIDGET_PROPERTIES:
            pdescription = properties.WIDGET_PROPERTIES[pname]
        # Class-specific overrides are merged on top of the generic
        # property description.
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_property(pname, default_value)
        # default text for widgets with text prop:
        if pname in ('text', 'label'):
            data.set_property(pname, widget_id)
    #
    # default grid properties
    #
    # is_container = builder.CLASS_MAP[wclass].builder.container
    for prop_name in properties.GRID_PROPERTIES:
        pdescription = properties.LAYOUT_OPTIONS[prop_name]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_layout_property(prop_name, default_value)
    # Place the new widget on the next free grid row, column 0.
    rownum = '0'
    if root:
        rownum = str(self.get_max_row(root)+1)
    data.set_layout_property('row', rownum)
    data.set_layout_property('column', '0')
    item = self._insert_item(root, data)
    # Do redraw
    self.draw_widget(item)
    # Select and show the item created
    tree.after_idle(lambda: tree.selection_set(item))
    tree.after_idle(lambda: tree.focus(item))
    tree.after_idle(lambda: tree.see(item))
|
Load file into treeview
|
def load_file(self, filename):
    """Load file into treeview"""
    self.counter.clear()
    # python2 issues
    # Parse normally first; retry with an explicit UTF-8 parser when
    # the default parser chokes on the encoding.
    try:
        etree = ET.parse(filename)
    except ET.ParseError:
        parser = ET.XMLParser(encoding='UTF-8')
        etree = ET.parse(filename, parser)
    eroot = etree.getroot()
    # Start from a clean state before populating.
    self.remove_all()
    self.previewer.remove_all()
    self.widget_editor.hide_all()
    # Images referenced by the ui file are resolved next to it.
    self.previewer.resource_paths.append(os.path.dirname(filename))
    for element in eroot:
        self.populate_tree('', eroot, element,from_file=True)
    children = self.treeview.get_children('')
    for child in children:
        self.draw_widget(child)
    self.previewer.show_selected(None, None)
|
Reads xml nodes and populates tree item
|
def populate_tree(self, master, parent, element, from_file=False):
    """Recursively read xml nodes and populate the treeview.

    Returns the treeview item created for *element*; raises when the
    widget class is not registered in the builder.
    """
    data = WidgetDescr(None, None)
    data.from_xml_node(element)
    cname = data.get_class()
    uniqueid = self.get_unique_id(cname, data.get_id())
    data.set_property('id', uniqueid)
    if cname in builder.CLASS_MAP:
        pwidget = self._insert_item(master, data, from_file=from_file)
        # Recurse into every <child>/<object> pair.
        for child in element.findall('./child'):
            child_object = child.find('./object')
            self.populate_tree(pwidget, child, child_object,
                               from_file=from_file)
        return pwidget
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
|
Updates tree columns when itemdata is changed.
|
def update_event(self, hint, obj):
    """Updates tree columns when itemdata is changed."""
    tree = self.treeview
    data = obj
    item = self.get_item_by_data(obj)
    if item:
        # Sync the item label with the widget id.
        if data.get_id() != tree.item(item, 'text'):
            tree.item(item, text=data.get_id())
        # if tree.parent(item) != '' and 'layout' in data:
        if tree.parent(item) != '':
            # Non-toplevel items show (class, row, column) columns;
            # only rewrite them when row/col actually changed.
            row = data.get_layout_property('row')
            col = data.get_layout_property('column')
            values = tree.item(item, 'values')
            if (row != values[1] or col != values[2]):
                values = (data.get_class(), row, col)
                tree.item(item, values=values)
        self.draw_widget(item)
        self.app.set_changed()
|
Filters treeview
|
def filter_by(self, string):
    """Filters treeview"""
    # Reattach everything first so the match runs over the full tree.
    self._reatach()
    if string == '':
        self.filter_remove()
        return
    self._expand_all()
    self.treeview.selection_set('')
    children = self.treeview.get_children('')
    # Collect non-matching items (with parent and index so they can be
    # reinserted later), then detach them from the view.
    for item in children:
        _, detached = self._detach(item)
        if detached:
            self._detached.extend(detached)
    for i, p, idx in self._detached:
        # txt = self.treeview.item(i, 'text')
        self.treeview.detach(i)
    self.filter_on = True
|
Reinsert the hidden items.
|
def _reatach(self):
    """Reinsert the items hidden by the last filter run."""
    for item, parent, index in self._detached:
        # Either the item or its parent may have been deleted while
        # the filter was active.
        if self.treeview.exists(item) and self.treeview.exists(parent):
            self.treeview.move(item, parent, index)
    self._detached = []
|
Hide items from treeview that do not match the search string.
|
def _detach(self, item):
    """Hide items from treeview that do not match the search string.

    Returns (match_found, to_detach) where match_found tells whether
    this item or any descendant matched, and to_detach is the list of
    (item, parent, index) tuples that should be detached.
    """
    to_detach = []
    children_det = []
    children_match = False
    match_found = False
    value = self.filtervar.get()
    # An item matches if the filter string appears in its label text
    # or in its widget class name (case-insensitive).
    txt = self.treeview.item(item, 'text').lower()
    if value in txt:
        match_found = True
    else:
        class_txt = self.treedata[item].get_class().lower()
        if value in class_txt:
            match_found = True
    # Remember where the item lives so it can be reattached later.
    parent = self.treeview.parent(item)
    idx = self.treeview.index(item)
    children = self.treeview.get_children(item)
    if children:
        for child in children:
            match, detach = self._detach(child)
            children_match = children_match | match
            if detach:
                children_det.extend(detach)
    if match_found:
        # The item stays visible; only its non-matching children go.
        if children_det:
            to_detach.extend(children_det)
    else:
        if children_match:
            # Keep the item as an ancestor of a matching descendant,
            # detaching only the non-matching children.
            if children_det:
                to_detach.extend(children_det)
        else:
            # No match anywhere in this subtree: detach it whole.
            to_detach.append((item, parent, idx))
    match_found = match_found | children_match
    return match_found, to_detach
|
Creates all gui widgets
|
def _create_ui(self):
    """Creates all gui widgets

    Builds the main window from the pygubu ui file, wires the tree
    editor, previewer and status bar, and installs all application
    and widget key bindings.
    """
    self.preview = None
    self.about_dialog = None
    self.preferences = None
    self.builder = pygubu.Builder(translator)
    self.currentfile = None
    self.is_changed = False
    uifile = os.path.join(FILE_PATH, "ui/pygubu-ui.ui")
    self.builder.add_from_file(uifile)
    self.builder.add_resource_path(os.path.join(FILE_PATH, "images"))
    #build main ui
    self.builder.get_object('mainwindow', self.master)
    toplevel = self.master.winfo_toplevel()
    menu = self.builder.get_object('mainmenu', toplevel)
    toplevel['menu'] = menu
    #project name
    self.project_name = self.builder.get_object('projectname_lbl')
    #Class selector values
    self.widgetlist_sf = self.builder.get_object("widgetlist_sf")
    self.widgetlist = self.builder.get_object("widgetlist")
    self.configure_widget_list()
    #widget tree
    self.treeview = self.builder.get_object('treeview1')
    self.bindings_frame = self.builder.get_object('bindingsframe')
    self.bindings_tree = self.builder.get_object('bindingstree')
    #Preview
    previewc = self.builder.get_object('preview_canvas')
    self.previewer = PreviewHelper(previewc)
    #tree editor
    self.tree_editor = WidgetsTreeEditor(self)
    self.builder.connect_callbacks(self)
    #Status bar
    # INFO-level log records are mirrored into the status bar, both
    # for the designer and the pygubu builder loggers.
    self.statusbar = self.builder.get_object('statusbar')
    handler = StatusBarHandler(self.statusbar)
    handler.setLevel(logging.INFO)
    logger.addHandler(handler)
    pygubu.builder.logger.addHandler(handler)
    #app grid
    self.set_resizable()
    #
    #Application bindings
    #
    master = self.master
    master.bind_all(
        '<Control-KeyPress-n>',
        lambda e: self.on_file_menuitem_clicked('file_new'))
    master.bind_all(
        '<Control-KeyPress-o>',
        lambda e: self.on_file_menuitem_clicked('file_open'))
    master.bind_all(
        '<Control-KeyPress-s>',
        lambda e: self.on_file_menuitem_clicked('file_save'))
    master.bind_all(
        '<Control-KeyPress-q>',
        lambda e: self.on_file_menuitem_clicked('file_quit'))
    master.bind_all(
        '<Control-KeyPress-i>',
        lambda e: self.on_edit_menuitem_clicked('edit_item_up'))
    master.bind_all(
        '<Control-KeyPress-k>',
        lambda e: self.on_edit_menuitem_clicked('edit_item_down'))
    master.bind_all(
        '<F5>',
        lambda e: self.tree_editor.preview_in_toplevel())
    master.bind_all(
        '<F6>',
        lambda e: self.previewer.close_toplevel_previews())
    #
    # Widget bindings
    #
    self.tree_editor.treeview.bind(
        '<Control-KeyPress-c>',
        lambda e: self.tree_editor.copy_to_clipboard())
    self.tree_editor.treeview.bind(
        '<Control-KeyPress-v>',
        lambda e: self.tree_editor.paste_from_clipboard())
    self.tree_editor.treeview.bind(
        '<Control-KeyPress-x>',
        lambda e: self.tree_editor.cut_to_clipboard())
    self.tree_editor.treeview.bind(
        '<KeyPress-Delete>',
        lambda e: self.on_edit_menuitem_clicked('edit_item_delete'))
    def clear_key_pressed(event, newevent):
        # when KeyPress, not Ctrl-KeyPress, generate event.
        if event.keysym_num == ord(event.char):
            self.tree_editor.treeview.event_generate(newevent)
    # Plain i/k navigate the tree like Up/Down.
    self.tree_editor.treeview.bind('<i>',
                                   lambda e: clear_key_pressed(e, '<Up>'))
    self.tree_editor.treeview.bind('<k>',
                                   lambda e: clear_key_pressed(e, '<Down>'))
    #grid move bindings
    self.tree_editor.treeview.bind(
        '<Alt-KeyPress-i>',
        lambda e: self.on_edit_menuitem_clicked('grid_up'))
    self.tree_editor.treeview.bind(
        '<Alt-KeyPress-k>',
        lambda e: self.on_edit_menuitem_clicked('grid_down'))
    self.tree_editor.treeview.bind(
        '<Alt-KeyPress-j>',
        lambda e: self.on_edit_menuitem_clicked('grid_left'))
    self.tree_editor.treeview.bind(
        '<Alt-KeyPress-l>',
        lambda e: self.on_edit_menuitem_clicked('grid_right'))
    # On preferences save binding
    self.master.bind('<<PygubuDesignerPreferencesSaved>>', self.on_preferences_saved)
    #
    # Setup tkk styles
    #
    self._setup_styles()
    #
    # Setup dynamic theme submenu
    #
    self._setup_theme_menu()
    #app config
    top = self.master.winfo_toplevel()
    try:
        top.wm_iconname('pygubu')
        top.tk.call('wm', 'iconphoto', '.', StockImage.get('pygubu'))
    except StockImageException as e:
        # The window icon is optional; ignore a missing image.
        pass
    self.set_title(_('Pygubu a GUI builder for tkinter'))
    self.set_size('640x480')
|
Load xml into treeview
|
def load_file(self, filename):
    """Load xml into treeview"""
    # Delegate the actual parsing/population to the tree editor,
    # then refresh the window state for the newly opened project.
    self.tree_editor.load_file(filename)
    self.project_name.configure(text=filename)
    self.currentfile = filename
    self.is_changed = False
|
Lower the IR into an IR form that can be represented in Gremlin queries.
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
list of IR blocks suitable for outputting as Gremlin
|
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    """Transform the frontend IR into a form that can be emitted as Gremlin queries.

    Args:
        ir_blocks: list of IR blocks produced by the compiler frontend
        query_metadata_table: QueryMetadataTable object containing all metadata collected
                              during query processing, including location metadata
                              (e.g. which locations are folded or optional)
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union,
                                used to work around GraphQL's lack of concrete-type
                                inheritance and Gremlin's lack of inheritance-awareness.
                                Each key type is treated as the most-derived common
                                supertype of the types in its value union. Hints are not
                                expanded recursively, and only type-level correctness is
                                enforced -- incorrect hints lead to incorrect queries.

    Returns:
        list of IR blocks suitable for outputting as Gremlin
    """
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)

    # Assemble the ordered sequence of lowering passes; each pass consumes and
    # returns a complete list of IR blocks.
    lowering_passes = [
        lambda blocks: lower_context_field_existence(blocks, query_metadata_table),
        optimize_boolean_expression_comparisons,
    ]
    if type_equivalence_hints:
        lowering_passes.append(
            lambda blocks: lower_coerce_type_block_type_data(blocks, type_equivalence_hints))
    lowering_passes.extend([
        lower_coerce_type_blocks,
        rewrite_filters_in_optional_blocks,
        merge_consecutive_filter_clauses,
        lower_folded_outputs,
    ])

    for lowering_pass in lowering_passes:
        ir_blocks = lowering_pass(ir_blocks)
    return ir_blocks
|
Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion.
|
def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
    """Rewrite CoerceType blocks so they explicitly list every type allowed by the coercion."""
    # Reject hints that are not (interface/object type) -> (union type) pairs.
    for hint_key, hint_value in six.iteritems(type_equivalence_hints):
        key_is_valid = isinstance(hint_key, (GraphQLInterfaceType, GraphQLObjectType))
        value_is_valid = isinstance(hint_value, GraphQLUnionType)
        if not (key_is_valid and value_is_valid):
            msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
                   u'was unexpected, expected a hint in the form '
                   u'GraphQLInterfaceType -> GraphQLUnionType or '
                   u'GraphQLObjectType -> GraphQLUnionType'.format(hint_key.name,
                                                                   str(type(hint_key)),
                                                                   hint_value.name,
                                                                   str(type(hint_value))))
            raise GraphQLCompilationError(msg)

    # CoerceType blocks carry type names rather than GraphQL type objects, so
    # translate the hints into a name -> {names of equivalent types} lookup.
    equivalent_type_names = {
        hint_key.name: {union_member.name for union_member in hint_value.types}
        for hint_key, hint_value in six.iteritems(type_equivalence_hints)
    }

    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            coercion_target = get_only_element_from_collection(block.target_class)
            if coercion_target in equivalent_type_names:
                # Expand the coercion to cover all types equivalent to the target.
                new_block = CoerceType(equivalent_type_names[coercion_target])
        new_ir_blocks.append(new_block)
    return new_ir_blocks
|
Lower CoerceType blocks into Filter blocks with a type-check predicate.
|
def lower_coerce_type_blocks(ir_blocks):
    """Replace each CoerceType block with an equivalent type-checking Filter block."""
    def _coercion_as_filter(coerce_block):
        # A coercion succeeds when the vertex's '@class' is one of the target types.
        type_check = BinaryComposition(
            u'contains', Literal(list(coerce_block.target_class)), LocalField('@class'))
        return Filter(type_check)

    return [
        _coercion_as_filter(block) if isinstance(block, CoerceType) else block
        for block in ir_blocks
    ]
|
In optional contexts, add a check for null that allows non-existent optional data through.
Optional traversals in Gremlin represent missing optional data by setting the current vertex
to null until the exit from the optional scope. Therefore, filtering and type coercions
(which should have been lowered into filters by this point) must check for null before
applying their filtering predicates. Since missing optional data isn't filtered,
the new filtering predicate should be "(it == null) || existing_predicate".
Args:
ir_blocks: list of IR blocks to lower into Gremlin-compatible form
Returns:
new list of IR blocks with this lowering step applied
|
def rewrite_filters_in_optional_blocks(ir_blocks):
    """In optional contexts, add a check for null that allows non-existent optional data through.

    Optional traversals in Gremlin represent missing optional data by setting the current vertex
    to null until the exit from the optional scope. Therefore, filtering and type coercions
    (which should have been lowered into filters by this point) must check for null before
    applying their filtering predicates. Since missing optional data isn't filtered,
    the new filtering predicate should be "(it == null) || existing_predicate".

    Args:
        ir_blocks: list of IR blocks to lower into Gremlin-compatible form

    Returns:
        new list of IR blocks with this lowering step applied
    """
    new_ir_blocks = []
    # Number of currently-open optional traversal scopes; filters only need the
    # null-check guard while this depth is greater than zero.
    optional_context_depth = 0

    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            # This pass runs after coercions were lowered to Filter blocks, so any
            # surviving CoerceType block indicates a broken lowering pipeline.
            raise AssertionError(u'Found a CoerceType block after all such blocks should have been '
                                 u'lowered to Filter blocks: {}'.format(ir_blocks))
        elif isinstance(block, Traverse) and block.optional:
            # Entering an optional scope.
            optional_context_depth += 1
        elif isinstance(block, Backtrack) and block.optional:
            # Leaving an optional scope; the depth must never go negative.
            optional_context_depth -= 1
            if optional_context_depth < 0:
                raise AssertionError(u'Reached negative optional context depth for blocks: '
                                     u'{}'.format(ir_blocks))
        elif isinstance(block, Filter) and optional_context_depth > 0:
            # Inside an optional scope: let null (missing optional) vertices pass
            # by OR-ing a null-check in front of the original predicate.
            null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral)
            new_block = Filter(BinaryComposition(u'||', null_check, block.predicate))
        else:
            pass

        new_ir_blocks.append(new_block)

    return new_ir_blocks
|
Convert Filter/Traverse blocks and LocalField expressions within @fold to Gremlin objects.
|
def _convert_folded_blocks(folded_ir_blocks):
    """Convert Filter/Traverse blocks and LocalField expressions within @fold to Gremlin objects."""
    def _to_folded_local_field(expression):
        """Swap each LocalField for its Gremlin-fold-specific counterpart."""
        if isinstance(expression, LocalField):
            return GremlinFoldedLocalField(expression.field_name)
        return expression

    converted_blocks = []
    for block in folded_ir_blocks:
        if isinstance(block, Filter):
            rewritten_predicate = block.predicate.visit_and_update(_to_folded_local_field)
            converted_blocks.append(GremlinFoldedFilter(rewritten_predicate))
        elif isinstance(block, Traverse):
            converted_blocks.append(GremlinFoldedTraverse.from_traverse(block))
        elif isinstance(block, (MarkLocation, Backtrack)):
            # MarkLocation and Backtrack emit no Gremlin code inside folds, so drop them.
            continue
        else:
            raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: '
                                 u'{} {} {}'.format(type(block), block, folded_ir_blocks))
    return converted_blocks
|
Lower standard folded output fields into GremlinFoldedContextField objects.
|
def lower_folded_outputs(ir_blocks):
    """Lower standard folded output fields into GremlinFoldedContextField objects.

    Args:
        ir_blocks: list of IR blocks, where the last non-folded block must be
                   a ConstructResult

    Returns:
        new list of IR blocks whose ConstructResult emits
        GremlinFoldedContextField expressions for folded outputs
    """
    folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)

    if not remaining_ir_blocks:
        raise AssertionError(u'Expected at least one non-folded block to remain: {} {} '
                             u'{}'.format(folds, remaining_ir_blocks, ir_blocks))
    output_block = remaining_ir_blocks[-1]
    if not isinstance(output_block, ConstructResult):
        raise AssertionError(u'Expected the last non-folded block to be ConstructResult, '
                             u'but instead was: {} {} '
                             u'{}'.format(type(output_block), output_block, ir_blocks))

    # Turn folded Filter blocks into GremlinFoldedFilter blocks.
    # Keyed by the name of the location where the fold is rooted.
    converted_folds = {
        base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks)
        for base_fold_location, folded_ir_blocks in six.iteritems(folds)
    }

    new_output_fields = dict()
    for output_name, output_expression in six.iteritems(output_block.fields):
        new_output_expression = output_expression

        # Turn FoldedContextField expressions into GremlinFoldedContextField ones.
        if isinstance(output_expression, FoldedContextField):
            # Get the matching folded IR blocks and put them in the new context field.
            base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0]
            folded_ir_blocks = converted_folds[base_fold_location_name]
            new_output_expression = GremlinFoldedContextField(
                output_expression.fold_scope_location, folded_ir_blocks,
                output_expression.field_type)

        new_output_fields[output_name] = new_output_expression

    # Rebuild the block list with the rewritten ConstructResult at the end.
    new_ir_blocks = remaining_ir_blocks[:-1]
    new_ir_blocks.append(ConstructResult(new_output_fields))
    return new_ir_blocks
|
Validate that the GremlinFoldedContextField is correctly representable.
|
def validate(self):
    """Validate that the GremlinFoldedContextField is correctly representable."""
    if not isinstance(self.fold_scope_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(self.fold_scope_location), self.fold_scope_location))

    # Only these lowered block kinds may appear inside a fold at this stage.
    allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack)
    for folded_block in self.folded_ir_blocks:
        if not isinstance(folded_block, allowed_block_types):
            raise AssertionError(
                u'Found invalid block of type {} in folded_ir_blocks: {} '
                u'Allowed types are {}.'
                .format(type(folded_block), self.folded_ir_blocks, allowed_block_types))

    if not isinstance(self.field_type, GraphQLList):
        raise ValueError(u'Invalid value of "field_type", expected a list type but got: '
                         u'{}'.format(self.field_type))

    # Lists of lists cannot currently be produced from a @fold output.
    element_type = strip_non_null_from_type(self.field_type.of_type)
    if isinstance(element_type, GraphQLList):
        raise GraphQLCompilationError(
            u'Outputting list-valued fields in a @fold context is currently '
            u'not supported: {} {}'.format(self.fold_scope_location, self.field_type.of_type))
|
Return a unicode object with the Gremlin representation of this expression.
|
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this expression.

    The generated snippet evaluates to a list: an empty list when the folded
    edge is absent, or the collected output-field values of the fold's
    vertices, optionally filtered/traversed by the folded IR blocks.
    """
    self.validate()

    edge_direction, edge_name = self.fold_scope_location.get_first_folded_edge()
    validate_safe_string(edge_name)

    # Traversing an edge in one direction means reading the vertex
    # at its opposite end, hence the inversion below.
    inverse_direction_table = {
        'out': 'in',
        'in': 'out',
    }
    inverse_direction = inverse_direction_table[edge_direction]

    base_location_name, _ = self.fold_scope_location.base_location.get_location_name()
    validate_safe_string(base_location_name)
    _, field_name = self.fold_scope_location.get_location_name()
    validate_safe_string(field_name)

    if not self.folded_ir_blocks:
        # There is no filtering nor type coercions applied to this @fold scope.
        #
        # This template generates code like:
        # (
        #     (m.base.in_Animal_ParentOf == null) ?
        #     [] : (
        #         m.base.in_Animal_ParentOf.collect{entry -> entry.outV.next().uuid}
        #     )
        # )
        template = (
            u'((m.{base_location_name}.{direction}_{edge_name} == null) ? [] : ('
            u'm.{base_location_name}.{direction}_{edge_name}.collect{{'
            u'entry -> entry.{inverse_direction}V.next().{field_name}{maybe_format}'
            u'}}'
            u'))'
        )
        filter_and_traverse_data = ''
    else:
        # There is filtering or type coercions in this @fold scope.
        #
        # This template generates code like:
        # (
        #     (m.base.in_Animal_ParentOf == null) ?
        #     [] : (
        #         m.base.in_Animal_ParentOf
        #              .collect{entry -> entry.outV.next()}
        #              .findAll{it.alias.contains($wanted)}
        #              .collect{it.uuid}
        #     )
        # )
        template = (
            u'((m.{base_location_name}.{direction}_{edge_name} == null) ? [] : ('
            u'm.{base_location_name}.{direction}_{edge_name}.collect{{'
            u'entry -> entry.{inverse_direction}V.next()'
            u'}}'
            u'.{filters_and_traverses}'
            u'.collect{{entry -> entry.{field_name}{maybe_format}}}'
            u'))'
        )
        filter_and_traverse_data = u'.'.join(block.to_gremlin()
                                             for block in self.folded_ir_blocks)

    # Date and datetime outputs must be serialized with the standard formats.
    maybe_format = ''
    inner_type = strip_non_null_from_type(self.field_type.of_type)
    if GraphQLDate.is_same_type(inner_type):
        maybe_format = '.format("' + STANDARD_DATE_FORMAT + '")'
    elif GraphQLDateTime.is_same_type(inner_type):
        maybe_format = '.format("' + STANDARD_DATETIME_FORMAT + '")'

    template_data = {
        'base_location_name': base_location_name,
        'direction': edge_direction,
        'edge_name': edge_name,
        'field_name': field_name,
        'inverse_direction': inverse_direction,
        'maybe_format': maybe_format,
        'filters_and_traverses': filter_and_traverse_data,
    }
    return template.format(**template_data)
|
Create a GremlinFoldedTraverse block as a copy of the given Traverse block.
|
def from_traverse(cls, traverse_block):
    """Create a GremlinFoldedTraverse block as a copy of the given Traverse block."""
    # Guard clause: only genuine Traverse blocks may be converted.
    if not isinstance(traverse_block, Traverse):
        raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse '
                             u'with block of type {}'.format(type(traverse_block)))
    return cls(traverse_block.direction, traverse_block.edge_name)
|
Return a unicode object with the Gremlin representation of this block.
|
def to_gremlin(self):
    """Return a unicode object with the Gremlin representation of this block."""
    self.validate()
    # The vertex on the far side of the edge is reached through the
    # opposite-direction endpoint accessor.
    inverse_direction = 'in' if self.direction == 'out' else 'out'
    return (u'collectMany{{entry -> entry.{direction}_{edge_name}'
            u'.collect{{edge -> edge.{inverse_direction}V.next()}}}}'
            .format(direction=self.direction,
                    edge_name=self.edge_name,
                    inverse_direction=inverse_direction))
|
Filter union types with no edges from the type equivalence hints dict.
|
def _get_referenced_type_equivalences(graphql_types, type_equivalence_hints):
    """Filter union types with no edges from the type equivalence hints dict."""
    # Collect the names of all types that appear as an edge endpoint,
    # i.e. as the element type of some List-typed field.
    edge_endpoint_type_names = set()
    for graphql_type in graphql_types.values():
        if not isinstance(graphql_type, (GraphQLObjectType, GraphQLInterfaceType)):
            continue
        for field in graphql_type.fields.values():
            if isinstance(field.type, GraphQLList):
                edge_endpoint_type_names.add(field.type.of_type.name)

    # Keep only the hints whose union type is actually referenced by an edge.
    return {
        original: union
        for original, union in type_equivalence_hints.items()
        if union.name in edge_endpoint_type_names
    }
|
Return a dictionary describing the field type overrides in subclasses.
|
def _get_inherited_field_types(class_to_field_type_overrides, schema_graph):
"""Return a dictionary describing the field type overrides in subclasses."""
inherited_field_type_overrides = dict()
for superclass_name, field_type_overrides in class_to_field_type_overrides.items():
for subclass_name in schema_graph.get_subclass_set(superclass_name):
inherited_field_type_overrides.setdefault(subclass_name, dict())
inherited_field_type_overrides[subclass_name].update(field_type_overrides)
return inherited_field_type_overrides
|
Assert that the fields we want to override are not defined in superclasses.
|
def _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides,
                                                               schema_graph):
    """Assert that the fields we want to override are not defined in superclasses."""
    for class_name, field_type_overrides in six.iteritems(class_to_field_type_overrides):
        # get_inheritance_set() includes the class itself; skip it, since
        # overriding a field where it is first defined is allowed.
        strict_superclass_names = (
            name
            for name in schema_graph.get_inheritance_set(class_name)
            if name != class_name
        )
        for superclass_name in strict_superclass_names:
            superclass = schema_graph.get_element_by_class_name(superclass_name)
            for field_name in field_type_overrides:
                if field_name in superclass.properties:
                    raise AssertionError(
                        u'Attempting to override field "{}" from class "{}", but the field is '
                        u'defined in superclass "{}"'
                        .format(field_name, class_name, superclass_name))
|
Return the best GraphQL type representation for an OrientDB property descriptor.
|
def _property_descriptor_to_graphql_type(property_obj):
    """Return the best GraphQL type representation for an OrientDB property descriptor.

    Returns None when the property cannot be represented in GraphQL, in which
    case the caller is expected to hide it from the schema.
    """
    scalar_types = {
        PROPERTY_TYPE_BOOLEAN_ID: GraphQLBoolean,
        PROPERTY_TYPE_DATE_ID: GraphQLDate,
        PROPERTY_TYPE_DATETIME_ID: GraphQLDateTime,
        PROPERTY_TYPE_DECIMAL_ID: GraphQLDecimal,
        PROPERTY_TYPE_DOUBLE_ID: GraphQLFloat,
        PROPERTY_TYPE_FLOAT_ID: GraphQLFloat,
        PROPERTY_TYPE_INTEGER_ID: GraphQLInt,
        PROPERTY_TYPE_STRING_ID: GraphQLString,
    }
    collection_types = {
        PROPERTY_TYPE_EMBEDDED_SET_ID: GraphQLList,
        PROPERTY_TYPE_EMBEDDED_LIST_ID: GraphQLList,
    }

    type_id = property_obj.type_id
    if type_id in scalar_types:
        return scalar_types[type_id]

    if type_id in collection_types:
        element_type_id = property_obj.qualifier
        # There are properties that are embedded collections of non-primitive types,
        # for example, ProxyEventSet.scalar_parameters.
        # The GraphQL compiler does not currently support these.
        if element_type_id in scalar_types:
            return collection_types[type_id](scalar_types[element_type_id])

    # We weren't able to represent this property in GraphQL, so we'll hide it instead.
    return None
|
Construct a unique union type name based on the type names being unioned.
|
def _get_union_type_name(type_names_to_union):
"""Construct a unique union type name based on the type names being unioned."""
if not type_names_to_union:
raise AssertionError(u'Expected a non-empty list of type names to union, received: '
u'{}'.format(type_names_to_union))
return u'Union__' + u'__'.join(sorted(type_names_to_union))
|
Return a dict from field name to GraphQL field type, for the specified graph class.
|
def _get_fields_for_class(schema_graph, graphql_types, field_type_overrides, hidden_classes,
                          cls_name):
    """Return a dict from field name to GraphQL field type, for the specified graph class.

    Args:
        schema_graph: SchemaGraph describing the database schema
        graphql_types: dict, type name -> GraphQL type object for already-registered types
        field_type_overrides: dict, field name -> GraphQL type, overrides for this class
        hidden_classes: set of class names excluded from the GraphQL schema
        cls_name: string, name of the graph class whose fields are being computed

    Returns:
        dict, field name -> GraphQL type, covering both property fields and edge fields
    """
    properties = schema_graph.get_element_by_class_name(cls_name).properties

    # Add leaf GraphQL fields (class properties).
    all_properties = {
        property_name: _property_descriptor_to_graphql_type(property_obj)
        for property_name, property_obj in six.iteritems(properties)
    }
    # Drop properties whose type could not be represented in GraphQL (None).
    result = {
        property_name: graphql_representation
        for property_name, graphql_representation in six.iteritems(all_properties)
        if graphql_representation is not None
    }

    # Add edge GraphQL fields (edges to other vertex classes).
    # Each edge field is named "<direction>_<EdgeClassName>" and is paired with
    # the name of the vertex class at the opposite end of the edge.
    schema_element = schema_graph.get_element_by_class_name(cls_name)
    outbound_edges = (
        ('out_{}'.format(out_edge_name),
         schema_graph.get_element_by_class_name(out_edge_name).properties[
             EDGE_DESTINATION_PROPERTY_NAME].qualifier)
        for out_edge_name in schema_element.out_connections
    )
    inbound_edges = (
        ('in_{}'.format(in_edge_name),
         schema_graph.get_element_by_class_name(in_edge_name).properties[
             EDGE_SOURCE_PROPERTY_NAME].qualifier)
        for in_edge_name in schema_element.in_connections
    )
    for field_name, to_type_name in chain(outbound_edges, inbound_edges):
        edge_endpoint_type_name = None
        subclasses = schema_graph.get_subclass_set(to_type_name)
        to_type_abstract = schema_graph.get_element_by_class_name(to_type_name).abstract

        if not to_type_abstract and len(subclasses) > 1:
            # If the edge endpoint type has no subclasses, it can't be coerced into any other type.
            # If the edge endpoint type is abstract (an interface type), we can already
            # coerce it to the proper type with a GraphQL fragment. However, if the endpoint type
            # is non-abstract and has subclasses, we need to return its subclasses as an union type.
            # This is because GraphQL fragments cannot be applied on concrete types, and
            # GraphQL does not support inheritance of concrete types.
            type_names_to_union = [
                subclass
                for subclass in subclasses
                if subclass not in hidden_classes
            ]
            if type_names_to_union:
                edge_endpoint_type_name = _get_union_type_name(type_names_to_union)
        else:
            if to_type_name not in hidden_classes:
                edge_endpoint_type_name = to_type_name

        if edge_endpoint_type_name is not None:
            # If we decided to not hide this edge due to its endpoint type being non-representable,
            # represent the edge field as the GraphQL type List(edge_endpoint_type_name).
            result[field_name] = GraphQLList(graphql_types[edge_endpoint_type_name])

    # Apply the caller-provided overrides; overriding a non-existent field is a bug.
    for field_name, field_type in six.iteritems(field_type_overrides):
        if field_name not in result:
            raise AssertionError(u'Attempting to override field "{}" from class "{}", but the '
                                 u'class does not contain said field'.format(field_name, cls_name))
        else:
            result[field_name] = field_type

    return result
|
Return a function that specifies the fields present on the given type.
|
def _create_field_specification(schema_graph, graphql_types, field_type_overrides,
                                hidden_classes, cls_name):
    """Return a function that specifies the fields present on the given type."""
    def field_maker_func():
        """Create and return the fields for the given GraphQL type."""
        class_fields = _get_fields_for_class(
            schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name)
        result = EXTENDED_META_FIELD_DEFINITIONS.copy()
        # Sort by field name so the generated schema is deterministic.
        result.update(OrderedDict(
            (field_name, GraphQLField(field_type))
            for field_name, field_type in sorted(six.iteritems(class_fields))
        ))
        return result
    return field_maker_func
|
Return a function that specifies the interfaces implemented by the given type.
|
def _create_interface_specification(schema_graph, graphql_types, hidden_classes, cls_name):
"""Return a function that specifies the interfaces implemented by the given type."""
def interface_spec():
"""Return a list of GraphQL interface types implemented by the type named 'cls_name'."""
abstract_inheritance_set = (
superclass_name
for superclass_name in sorted(list(schema_graph.get_inheritance_set(cls_name)))
if (superclass_name not in hidden_classes and
schema_graph.get_element_by_class_name(superclass_name).abstract)
)
return [
graphql_types[x]
for x in abstract_inheritance_set
if x not in hidden_classes
]
return interface_spec
|
Return a function that gives the types in the union type rooted at base_name.
|
def _create_union_types_specification(schema_graph, graphql_types, hidden_classes, base_name):
"""Return a function that gives the types in the union type rooted at base_name."""
# When edges point to vertices of type base_name, and base_name is both non-abstract and
# has subclasses, we need to represent the edge endpoint type with a union type based on
# base_name and its subclasses. This function calculates what types that union should include.
def types_spec():
"""Return a list of GraphQL types that this class' corresponding union type includes."""
return [
graphql_types[x]
for x in sorted(list(schema_graph.get_subclass_set(base_name)))
if x not in hidden_classes
]
return types_spec
|
Return a GraphQL schema object corresponding to the schema of the given schema graph.
Args:
schema_graph: SchemaGraph
class_to_field_type_overrides: dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
|
def get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
                                         hidden_classes):
    """Return a GraphQL schema object corresponding to the schema of the given schema graph.

    Args:
        schema_graph: SchemaGraph
        class_to_field_type_overrides: dict, class name -> {field name -> field type},
                                       (string -> {string -> GraphQLType}). Used to override the
                                       type of a field in the class where it's first defined and
                                       all the class's subclasses.
        hidden_classes: set of strings, classes to not include in the GraphQL schema.
                        NOTE: this set is mutated by this function (e.g. the base vertex
                        class may be added to it).

    Returns:
        tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
        The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).

    Raises:
        EmptySchemaError: if no visible schema data remains after hiding classes.
    """
    _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides,
                                                               schema_graph)
    # The field types of subclasses must also be overridden.
    # Remember that the result returned by get_subclass_set(class_name) includes class_name itself.
    inherited_field_type_overrides = _get_inherited_field_types(class_to_field_type_overrides,
                                                                schema_graph)

    # We remove the base vertex class from the schema if it has no properties.
    # If it has no properties, it's meaningless and makes the schema less syntactically sweet.
    if not schema_graph.get_element_by_class_name(ORIENTDB_BASE_VERTEX_CLASS_NAME).properties:
        hidden_classes.add(ORIENTDB_BASE_VERTEX_CLASS_NAME)

    graphql_types = OrderedDict()
    type_equivalence_hints = OrderedDict()

    # For each vertex class, construct its analogous GraphQL type representation.
    for vertex_cls_name in sorted(schema_graph.vertex_class_names):
        vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name)
        if vertex_cls_name in hidden_classes:
            continue

        inherited_field_type_overrides.setdefault(vertex_cls_name, dict())
        field_type_overrides = inherited_field_type_overrides[vertex_cls_name]

        # We have to use delayed type binding here, because some of the type references
        # are circular: if an edge connects vertices of types A and B, then
        # GraphQL type A has a List[B] field, and type B has a List[A] field.
        # To avoid the circular dependency, GraphQL allows us to initialize the types
        # initially without their field information, and fill in their field information
        # later using a lambda function as the second argument to GraphQLObjectType.
        # This lambda function will be called on each type after all types are created
        # in their initial blank state.
        #
        # However, 'cls_name' is a variable that would not be correctly bound
        # if we naively tried to construct a lambda in-place, because Python lambdas
        # are not closures. Instead, call a function with 'cls_name' as an argument,
        # and have that function construct and return the required lambda.
        field_specification_lambda = _create_field_specification(
            schema_graph, graphql_types, field_type_overrides, hidden_classes, vertex_cls_name)

        # Abstract classes are interfaces, concrete classes are object types.
        current_graphql_type = None
        if vertex_cls.abstract:
            # "fields" is a kwarg in the interface constructor, even though
            # it's a positional arg in the object type constructor.
            current_graphql_type = GraphQLInterfaceType(vertex_cls_name,
                                                        fields=field_specification_lambda)
        else:
            # For similar reasons as the field_specification_lambda,
            # we need to create an interface specification lambda function that
            # specifies the interfaces implemented by this type.
            interface_specification_lambda = _create_interface_specification(
                schema_graph, graphql_types, hidden_classes, vertex_cls_name)

            # N.B.: Ignore the "is_type_of" argument below, it is simply a circumvention of
            #       a sanity check inside the GraphQL library. The library assumes that we'll use
            #       its execution system, so it complains that we don't provide a means to
            #       differentiate between different implementations of the same interface.
            #       We don't care, because we compile the GraphQL query to a database query.
            current_graphql_type = GraphQLObjectType(vertex_cls_name,
                                                     field_specification_lambda,
                                                     interfaces=interface_specification_lambda,
                                                     is_type_of=lambda: None)

        graphql_types[vertex_cls_name] = current_graphql_type

    # For each vertex class, construct all union types representations.
    for vertex_cls_name in sorted(schema_graph.vertex_class_names):
        vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name)
        if vertex_cls_name in hidden_classes:
            continue

        vertex_cls_subclasses = schema_graph.get_subclass_set(vertex_cls_name)
        if not vertex_cls.abstract and len(vertex_cls_subclasses) > 1:
            # In addition to creating this class' corresponding GraphQL type, we'll need a
            # union type to represent it when it appears as the endpoint of an edge.
            union_type_name = _get_union_type_name(vertex_cls_subclasses)

            # For similar reasons as the field_specification_lambda,
            # we need to create a union type specification lambda function that specifies
            # the types that this union type is composed of.
            type_specification_lambda = _create_union_types_specification(
                schema_graph, graphql_types, hidden_classes, vertex_cls_name)

            union_type = GraphQLUnionType(union_type_name, types=type_specification_lambda)
            graphql_types[union_type_name] = union_type
            type_equivalence_hints[graphql_types[vertex_cls_name]] = union_type

    # Include all abstract non-vertex classes whose only non-abstract subclasses are vertices.
    for non_graph_cls_name in sorted(schema_graph.non_graph_class_names):
        if non_graph_cls_name in hidden_classes:
            continue
        if not schema_graph.get_element_by_class_name(non_graph_cls_name).abstract:
            continue

        cls_subclasses = schema_graph.get_subclass_set(non_graph_cls_name)
        # No need to add the possible abstract class if it doesn't have subclasses besides itself.
        if len(cls_subclasses) > 1:
            all_non_abstract_subclasses_are_vertices = True

            # Check all non-abstract subclasses are vertices.
            for subclass_name in cls_subclasses:
                subclass = schema_graph.get_element_by_class_name(subclass_name)
                if subclass_name != non_graph_cls_name:
                    if not subclass.abstract and not subclass.is_vertex:
                        all_non_abstract_subclasses_are_vertices = False
                        break

            if all_non_abstract_subclasses_are_vertices:
                # Add abstract class as an interface.
                inherited_field_type_overrides.setdefault(non_graph_cls_name, dict())
                field_type_overrides = inherited_field_type_overrides[non_graph_cls_name]
                field_specification_lambda = _create_field_specification(
                    schema_graph, graphql_types, field_type_overrides, hidden_classes,
                    non_graph_cls_name)
                graphql_type = GraphQLInterfaceType(non_graph_cls_name,
                                                    fields=field_specification_lambda)
                graphql_types[non_graph_cls_name] = graphql_type

    if not graphql_types:
        raise EmptySchemaError(u'After evaluating all subclasses of V, we were not able to find '
                               u'visible schema data to import into the GraphQL schema object')

    # Create the root query GraphQL type. Consists of all non-union classes, i.e.
    # all non-abstract classes (as GraphQL types) and all abstract classes (as GraphQL interfaces).
    RootSchemaQuery = GraphQLObjectType('RootSchemaQuery', OrderedDict([
        (name, GraphQLField(value))
        for name, value in sorted(six.iteritems(graphql_types), key=lambda x: x[0])
        if not isinstance(value, GraphQLUnionType)
    ]))

    schema = GraphQLSchema(RootSchemaQuery, directives=DIRECTIVES)

    # Note that the GraphQLSchema reconstructs the set of types in the schema by recursively
    # searching through the fields of the RootSchemaQuery. Since union types can only appear in the
    # fields of other types as edges, union types with no in or out edges will not appear in the
    # schema. Therefore, we remove these unions and their keys from the type equivalence hints.
    return schema, _get_referenced_type_equivalences(graphql_types,
                                                     type_equivalence_hints)
|
Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary.
|
def workaround_lowering_pass(ir_blocks, query_metadata_table):
    """Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary."""
    # Only Filter blocks may need rewriting; every other block passes through unchanged.
    return [
        _process_filter_block(query_metadata_table, block) if isinstance(block, Filter) else block
        for block in ir_blocks
    ]
|
Rewrite the provided Filter block if necessary.
|
def _process_filter_block(query_metadata_table, block):
    """Rewrite the provided Filter block if necessary.

    Args:
        query_metadata_table: QueryMetadataTable with metadata for all query locations
        block: Filter block whose predicate may contain TernaryConditional expressions

    Returns:
        the original block unchanged when no problematic locations are found,
        otherwise a new Filter block with tautological clauses AND-ed onto the predicate
    """
    # For a given Filter block with BinaryComposition predicate expression X,
    # let L be the set of all Locations referenced in any TernaryConditional
    # predicate expression enclosed in X.
    # For each location l in L, we construct a tautological expression that looks like:
    #    ((l IS NULL) OR (l IS NOT NULL))
    # and then join the original BinaryComposition X with all such expressions with ANDs.
    # We set this new BinaryComposition expression as the predicate of the Filter block.
    base_predicate = block.predicate

    # These variables are used by the visitor functions below.
    ternary_conditionals = []
    # "problematic_locations" is a list and not a set,
    # to preserve ordering and generate a deterministic order of added clauses.
    # We expect the maximum size of this list to be a small constant number,
    # so the linear "in" operator is really not a concern.
    problematic_locations = []

    def find_ternary_conditionals(expression):
        """Visitor function that extracts all enclosed TernaryConditional expressions."""
        if isinstance(expression, TernaryConditional):
            ternary_conditionals.append(expression)
        return expression

    def extract_locations_visitor(expression):
        """Visitor function that extracts all the problematic locations."""
        if isinstance(expression, (ContextField, ContextFieldExistence)):
            # We get the location at the vertex, ignoring property fields.
            # The vertex-level location is sufficient to work around the OrientDB bug,
            # and we want as few location as possible overall.
            location_at_vertex = expression.location.at_vertex()
            if location_at_vertex not in problematic_locations:
                problematic_locations.append(location_at_vertex)
        return expression

    # We aren't modifying the base predicate itself, just traversing it.
    # The returned "updated" value must be the exact same as the original.
    return_value = base_predicate.visit_and_update(find_ternary_conditionals)
    if return_value is not base_predicate:
        raise AssertionError(u'Read-only visitor function "find_ternary_conditionals" '
                             u'caused state to change: '
                             u'{} {}'.format(base_predicate, return_value))

    for ternary in ternary_conditionals:
        # We aren't modifying the ternary itself, just traversing it.
        # The returned "updated" value must be the exact same as the original.
        return_value = ternary.visit_and_update(extract_locations_visitor)
        if return_value is not ternary:
            raise AssertionError(u'Read-only visitor function "extract_locations_visitor" '
                                 u'caused state to change: '
                                 u'{} {}'.format(ternary, return_value))

    # Build one tautology per problematic location, in deterministic order.
    tautologies = [
        _create_tautological_expression_for_location(query_metadata_table, location)
        for location in problematic_locations
    ]
    if not tautologies:
        return block

    final_predicate = base_predicate
    for tautology in tautologies:
        final_predicate = BinaryComposition(u'&&', final_predicate, tautology)
    return Filter(final_predicate)
|
For a given location, create a BinaryComposition that always evaluates to 'true'.
|
def _create_tautological_expression_for_location(query_metadata_table, location):
    """For a given location, create a BinaryComposition that always evaluates to 'true'."""
    field_type = query_metadata_table.get_location_info(location).type
    # Build "(location != null) || (location = null)", which is trivially always true.
    exists_clause = BinaryComposition(
        u'!=', ContextField(location, field_type), NullLiteral)
    not_exists_clause = BinaryComposition(
        u'=', ContextField(location, field_type), NullLiteral)
    return BinaryComposition(u'||', exists_clause, not_exists_clause)
|
Assert that the collection has exactly one element, then return that element.
|
def get_only_element_from_collection(one_element_collection):
    """Assert that the collection has exactly one element, then return that element.

    Args:
        one_element_collection: sized iterable (e.g. list, set, tuple) expected to
                                contain exactly one element

    Returns:
        the single element of the collection

    Raises:
        AssertionError: if the collection does not contain exactly one element
    """
    if len(one_element_collection) != 1:
        raise AssertionError(u'Expected a collection with exactly one element, but got: {}'
                             .format(one_element_collection))
    # next(iter(...)) retrieves the first element of any iterable (including sets)
    # without needing the third-party funcy helper.
    return next(iter(one_element_collection))
|
Return the normalized field name for the given AST node.
|
def get_ast_field_name(ast):
    """Return the normalized field name for the given AST node."""
    raw_name = ast.name.value
    # The __typename meta field is always rewritten to its underlying counterpart.
    if raw_name == TYPENAME_META_FIELD_NAME:
        return '@class'
    return raw_name
|
Return the type of the field in the given type, accounting for field name normalization.
|
def get_field_type_from_schema(schema_type, field_name):
    """Return the type of the field in the given type, accounting for field name normalization."""
    # '@class' is the normalized form of the __typename meta field; it is always a string.
    if field_name == '@class':
        return GraphQLString

    if field_name not in schema_type.fields:
        raise AssertionError(u'Field {} passed validation but was not present on type '
                             u'{}'.format(field_name, schema_type))
    # Validation guarantees that the field must exist in the schema.
    return schema_type.fields[field_name].type
|
Return the type of the vertex within the specified vertex field name of the given type.
|
def get_vertex_field_type(current_schema_type, vertex_field_name):
    """Return the type of the vertex within the specified vertex field name of the given type."""
    if not is_vertex_field_name(vertex_field_name):
        raise AssertionError(u'Trying to load the vertex field type of a non-vertex field: '
                             u'{} {}'.format(current_schema_type, vertex_field_name))

    # The schema models a vertex field as a GraphQLList, which is what
    # get_field_type_from_schema returns. The interesting type is the one *inside*
    # the list -- the type on the other side of the edge (hence .of_type).
    # Validation guarantees that the field must exist in the schema.
    field_type = get_field_type_from_schema(current_schema_type, vertex_field_name)
    if not isinstance(strip_non_null_from_type(field_type), GraphQLList):
        raise AssertionError(u'Found an edge whose schema type was not GraphQLList: '
                             u'{} {} {}'.format(current_schema_type, vertex_field_name,
                                                field_type))
    return field_type.of_type
|
Get the edge direction and name from a non-root vertex field name.
|
def get_edge_direction_and_name(vertex_field_name):
    """Get the edge direction and name from a non-root vertex field name."""
    # Try the outbound prefix first, then the inbound one; anything else is unreachable
    # because validation only admits prefixed vertex field names here.
    prefix_direction_pairs = (
        (OUTBOUND_EDGE_FIELD_PREFIX, OUTBOUND_EDGE_DIRECTION),
        (INBOUND_EDGE_FIELD_PREFIX, INBOUND_EDGE_DIRECTION),
    )
    for prefix, direction in prefix_direction_pairs:
        if vertex_field_name.startswith(prefix):
            edge_name = vertex_field_name[len(prefix):]
            validate_safe_string(edge_name)
            return direction, edge_name

    raise AssertionError(u'Unreachable condition reached:', vertex_field_name)
|
Return True if the argument is a vertex field type, and False otherwise.
|
def is_vertex_field_type(graphql_type):
    """Return True if the argument is a vertex field type, and False otherwise."""
    # This will need to change if we ever support complex embedded types or edge field types.
    vertex_type_classes = (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType)
    return isinstance(strip_non_null_from_type(graphql_type), vertex_type_classes)
|
Ensure the value is a string, and return it as unicode.
|
def ensure_unicode_string(value):
    """Ensure the value is a string, and return it as unicode."""
    if isinstance(value, six.string_types):
        return six.text_type(value)
    raise TypeError(u'Expected string value, got: {}'.format(value))
|
Return dict of name -> object pairs from a list of objects with unique names.
Args:
object_list: list of objects, each X of which has a unique name accessible as X.name.value
Returns:
dict, { X.name.value: X for x in object_list }
If the list is empty or None, returns an empty dict.
|
def get_uniquely_named_objects_by_name(object_list):
    """Return dict of name -> object pairs from a list of objects with unique names.

    Args:
        object_list: list of objects, each X of which has a unique name accessible as X.name.value

    Returns:
        dict, { X.name.value: X for x in object_list }
        If the list is empty or None, returns an empty dict.
    """
    result = dict()
    if not object_list:
        return result

    for current_object in object_list:
        current_name = current_object.name.value
        if current_name in result:
            # Duplicate names would silently shadow each other; fail loudly instead.
            raise GraphQLCompilationError(u'Found duplicate object key: '
                                          u'{} {}'.format(current_name, object_list))
        result[current_name] = current_object
    return result
|
Ensure the provided string does not have illegal characters.
|
def validate_safe_string(value):
    """Ensure the provided string does not have illegal characters."""
    # These exact strings are explicitly permitted despite containing otherwise-illegal chars.
    allowed_despite_special_chars = frozenset({'@rid', '@class', '@this', '%'})

    if not isinstance(value, six.string_types):
        raise TypeError(u'Expected string value, got: {} {}'.format(
            type(value).__name__, value))
    if not value:
        raise GraphQLCompilationError(u'Empty strings are not allowed!')
    if value[0] in string.digits:
        raise GraphQLCompilationError(u'String values cannot start with a digit: {}'.format(value))

    contains_illegal_chars = not set(value).issubset(VARIABLE_ALLOWED_CHARS)
    if contains_illegal_chars and value not in allowed_despite_special_chars:
        raise GraphQLCompilationError(u'Encountered illegal characters in string: {}'.format(value))
|
Ensure the provided edge direction is either "in" or "out".
|
def validate_edge_direction(edge_direction):
    """Ensure the provided edge direction is either "in" or "out"."""
    if not isinstance(edge_direction, six.string_types):
        raise TypeError(u'Expected string edge_direction, got: {} {}'.format(
            type(edge_direction), edge_direction))

    if edge_direction not in ALLOWED_EDGE_DIRECTIONS:
        raise ValueError(u'Unrecognized edge direction: {}'.format(edge_direction))
|
Validate that a Location object is safe for marking, and not at a field.
|
def validate_marked_location(location):
    """Validate that a Location object is safe for marking, and not at a field."""
    markable_location_types = (Location, FoldScopeLocation)
    if not isinstance(location, markable_location_types):
        raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'.format(
            type(location).__name__, location))

    # Marks must point at a vertex, never at a property field.
    if location.field is not None:
        raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location))
|
Invert a dict. A dict is invertible if values are unique and hashable.
|
def invert_dict(invertible_dict):
    """Invert a dict. A dict is invertible if values are unique and hashable."""
    result = {}
    for key, value in six.iteritems(invertible_dict):
        if not isinstance(value, Hashable):
            raise TypeError(u'Expected an invertible dict, but value at key {} has type {}'.format(
                key, type(value).__name__))
        if value in result:
            # Two keys sharing a value means the inversion would lose information.
            raise TypeError(u'Expected an invertible dict, but keys '
                            u'{} and {} map to the same value'.format(
                                result[value], key))
        result[value] = key
    return result
|
Read package file as text to get name and version
|
def read_file(filename):
    """Read package file as text to get name and version"""
    # intentionally *not* adding an encoding option to open
    # see here:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    package_root = os.path.abspath(os.path.dirname(__file__))
    target_path = os.path.join(package_root, 'graphql_compiler', filename)
    with codecs.open(target_path, 'r') as f:
        return f.read()
|
Only define version in one place
|
def find_version():
    """Only define version in one place"""
    contents = read_file('__init__.py')
    match = re.search(r'^__version__ = ["\']([^"\']*)["\']', contents, re.M)
    if match is None:
        raise RuntimeError('Unable to find version string.')
    return match.group(1)
|
Only define name in one place
|
def find_name():
    """Only define name in one place"""
    contents = read_file('__init__.py')
    match = re.search(r'^__package_name__ = ["\']([^"\']*)["\']', contents, re.M)
    if match is None:
        raise RuntimeError('Unable to find name string.')
    return match.group(1)
|
Lower CoerceType blocks into Filter blocks within Recurse steps.
|
def workaround_type_coercions_in_recursions(match_query):
    """Lower CoerceType blocks into Filter blocks within Recurse steps."""
    # Works around an OrientDB bug that makes queries with both "while:" and "class:"
    # at the same query location fail to parse correctly.
    #
    # Reported upstream: https://github.com/orientechnologies/orientdb/issues/8129
    #
    # Instead of "class:", "INSTANCEOF" in the "where:" clause gives correct behavior.
    # Only Recurse steps are rewritten: elsewhere, the "class:" clause gives the MATCH
    # query scheduler valuable information for efficient scheduling.
    new_match_traversals = []
    for traversal in match_query.match_traversals:
        rewritten_steps = []
        for match_step in traversal:
            needs_lowering = (match_step.coerce_type_block is not None and
                              isinstance(match_step.root_block, Recurse))
            if needs_lowering:
                lowered_where = convert_coerce_type_and_add_to_where_block(
                    match_step.coerce_type_block, match_step.where_block)
                rewritten_steps.append(match_step._replace(coerce_type_block=None,
                                                           where_block=lowered_where))
            else:
                rewritten_steps.append(match_step)
        new_match_traversals.append(rewritten_steps)
    return match_query._replace(match_traversals=new_match_traversals)
|
Read a GraphQL query from standard input, and output it pretty-printed to standard output.
|
def main():
    """Read a GraphQL query from standard input, and output it pretty-printed to standard output."""
    raw_query = ' '.join(sys.stdin.readlines())
    sys.stdout.write(pretty_print_graphql(raw_query))
|
Sanitize and represent a string argument in Gremlin.
|
def _safe_gremlin_string(value):
    """Sanitize and represent a string argument in Gremlin.

    Args:
        value: string (or bytes in py3, decoded as utf-8), the value to sanitize

    Returns:
        string, a single-quoted Gremlin/Groovy string literal safe to embed in a query

    Raises:
        GraphQLInvalidArgumentError: if the value is not a string type
    """
    if not isinstance(value, six.string_types):
        if isinstance(value, bytes):  # should only happen in py3
            value = value.decode('utf-8')
        else:
            raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: '
                                              u'{}'.format(value))

    # Using JSON encoding means that all unicode literals and special chars
    # (e.g. newlines and backslashes) are replaced by appropriate escape sequences.
    # However, the quoted result is wrapped in double quotes, and $ signs are not escaped,
    # so that would allow arbitrary code execution in Gremlin.
    # We will therefore turn the double-quoted string into a single-quoted one to avoid this risk.
    escaped_and_quoted = json.dumps(value)

    # Double-quoted string literals in Gremlin/Groovy allow
    # arbitrary code execution via string interpolation and closures.
    # To avoid this, we perform the following steps:
    # - we strip the wrapping double quotes;
    # - we un-escape any double-quotes in the string, by replacing \" with ";
    # - we escape any single-quotes in the string, by replacing ' with \';
    # - finally, we wrap the string in single quotes.
    # http://www.groovy-lang.org/syntax.html#_double_quoted_string
    # NOTE: the order of the two .replace() calls below is load-bearing -- un-escaping
    # double quotes must happen before escaping single quotes.
    if not escaped_and_quoted[0] == escaped_and_quoted[-1] == '"':
        raise AssertionError(u'Unreachable state reached: {} {}'.format(value, escaped_and_quoted))
    no_quotes = escaped_and_quoted[1:-1]
    re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'')
    final_escaped_value = '\'' + re_escaped + '\''
    return final_escaped_value
|
Represent the list of "inner_type" objects in Gremlin form.
|
def _safe_gremlin_list(inner_type, argument_value):
    """Represent the list of "inner_type" objects in Gremlin form."""
    if not isinstance(argument_value, list):
        raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: '
                                          u'{}'.format(argument_value))

    element_type = strip_non_null_from_type(inner_type)
    # Each element is sanitized individually, then the results are joined into a
    # Gremlin list literal.
    rendered_elements = (
        _safe_gremlin_argument(element_type, element)
        for element in argument_value
    )
    return u'[' + u','.join(rendered_elements) + u']'
|
Return a Gremlin string representing the given argument value.
|
def _safe_gremlin_argument(expected_type, argument_value):
    """Return a Gremlin string representing the given argument value.

    Args:
        expected_type: GraphQL type object for the argument, which selects the
                       sanitization strategy via the is_same_type dispatch chain below
        argument_value: the value to represent; its accepted Python type depends
                        on expected_type

    Returns:
        string, a sanitized Gremlin representation of the argument value

    Raises:
        GraphQLInvalidArgumentError: if the value does not match the expected type
        AssertionError: if the expected type is not one this function can represent
    """
    # NOTE: branch order matters -- e.g. GraphQLID must be handled separately from
    # GraphQLString, and the bool check inside the Int branch must precede the int check
    # since isinstance(True, int) is True in Python.
    if GraphQLString.is_same_type(expected_type):
        return _safe_gremlin_string(argument_value)
    elif GraphQLID.is_same_type(expected_type):
        # IDs can be strings or numbers, but the GraphQL library coerces them to strings.
        # We will follow suit and treat them as strings.
        if not isinstance(argument_value, six.string_types):
            if isinstance(argument_value, bytes):  # should only happen in py3
                argument_value = argument_value.decode('utf-8')
            else:
                argument_value = six.text_type(argument_value)
        return _safe_gremlin_string(argument_value)
    elif GraphQLFloat.is_same_type(expected_type):
        return represent_float_as_str(argument_value)
    elif GraphQLInt.is_same_type(expected_type):
        # Special case: in Python, isinstance(True, int) returns True.
        # Safeguard against this with an explicit check against bool type.
        if isinstance(argument_value, bool):
            raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: '
                                              u'{}'.format(argument_value))
        return type_check_and_str(int, argument_value)
    elif GraphQLBoolean.is_same_type(expected_type):
        return type_check_and_str(bool, argument_value)
    elif GraphQLDecimal.is_same_type(expected_type):
        return _safe_gremlin_decimal(argument_value)
    elif GraphQLDate.is_same_type(expected_type):
        return _safe_gremlin_date_and_datetime(expected_type, (datetime.date,), argument_value)
    elif GraphQLDateTime.is_same_type(expected_type):
        return _safe_gremlin_date_and_datetime(expected_type,
                                               (datetime.datetime, arrow.Arrow), argument_value)
    elif isinstance(expected_type, GraphQLList):
        return _safe_gremlin_list(expected_type.of_type, argument_value)
    else:
        raise AssertionError(u'Could not safely represent the requested GraphQL type: '
                             u'{} {}'.format(expected_type, argument_value))
|
Insert the arguments into the compiled Gremlin query to form a complete query.
The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output.
Double-quoted strings allow inline interpolation with the $ symbol, see here for details:
http://www.groovy-lang.org/syntax.html#all-strings
If the compiler needs to emit a literal '$' character as part of the Gremlin query,
it must be doubled ('$$') to avoid being interpreted as a query parameter.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a Gremlin query with inserted argument data
|
def insert_arguments_into_gremlin_query(compilation_result, arguments):
    """Insert the arguments into the compiled Gremlin query to form a complete query.

    The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output.
    Double-quoted strings allow inline interpolation with the $ symbol, see here for details:
        http://www.groovy-lang.org/syntax.html#all-strings

    If the compiler needs to emit a literal '$' character as part of the Gremlin query,
    it must be doubled ('$$') to avoid being interpreted as a query parameter.

    Args:
        compilation_result: a CompilationResult object derived from the GraphQL compiler
        arguments: dict, mapping argument name to its value, for every parameter the query expects.

    Returns:
        string, a Gremlin query with inserted argument data
    """
    if compilation_result.language != GREMLIN_LANGUAGE:
        raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result))

    argument_types = compilation_result.input_metadata

    # The arguments are assumed to have already been validated against the query.
    safe_arguments = {
        name: _safe_gremlin_argument(argument_types[name], raw_value)
        for name, raw_value in six.iteritems(arguments)
    }
    return Template(compilation_result.query).substitute(safe_arguments)
|
Get the location name from a location that is expected to point to a vertex.
|
def _get_vertex_location_name(location):
"""Get the location name from a location that is expected to point to a vertex."""
mark_name, field_name = location.get_location_name()
if field_name is not None:
raise AssertionError(u'Location unexpectedly pointed to a field: {}'.format(location))
return mark_name
|
Transform the very first MATCH step into a MATCH query string.
|
def _first_step_to_match(match_step):
    """Transform the very first MATCH step into a MATCH query string."""
    clauses = []

    if match_step.root_block is not None:
        if not isinstance(match_step.root_block, QueryRoot):
            raise AssertionError(u'Expected None or QueryRoot root block, received: '
                                 u'{} {}'.format(match_step.root_block, match_step))
        match_step.root_block.validate()
        start_class = get_only_element_from_collection(match_step.root_block.start_class)
        clauses.append(u'class: %s' % (start_class,))

    # MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'.
    if match_step.coerce_type_block is not None:
        raise AssertionError(u'Invalid MATCH step: {}'.format(match_step))

    if match_step.where_block:
        match_step.where_block.validate()
        clauses.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),))

    if match_step.as_block is None:
        raise AssertionError(u'Found a MATCH step without a corresponding Location. '
                             u'This should never happen: {}'.format(match_step))
    match_step.as_block.validate()
    clauses.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),))

    # Doubled braces are deliberate: MATCH step bodies are wrapped in literal {{ }}.
    return u'{{ %s }}' % (u', '.join(clauses),)
|
Transform any subsequent (non-first) MATCH step into a MATCH query string.
|
def _subsequent_step_to_match(match_step):
    """Transform any subsequent (non-first) MATCH step into a MATCH query string."""
    if not isinstance(match_step.root_block, (Traverse, Recurse)):
        raise AssertionError(u'Expected Traverse root block, received: '
                             u'{} {}'.format(match_step.root_block, match_step))
    match_step.root_block.validate()

    is_recursing = isinstance(match_step.root_block, Recurse)
    traversal_command = u'.%s(\'%s\')' % (match_step.root_block.direction,
                                          match_step.root_block.edge_name)

    clauses = []
    if match_step.coerce_type_block:
        coerce_type_set = match_step.coerce_type_block.target_class
        if len(coerce_type_set) != 1:
            raise AssertionError(u'Found MATCH type coercion block with more than one target class:'
                                 u' {} {}'.format(coerce_type_set, match_step))
        clauses.append(u'class: %s' % (next(iter(coerce_type_set)),))

    if is_recursing:
        # In MATCH, "$depth < 1" means "include the source vertex and its immediate neighbors."
        # Yes, the "<" is intentional -- it's not supposed to be a "<=".
        clauses.append(u'while: ($depth < %d)' % (match_step.root_block.depth,))

    if match_step.where_block:
        match_step.where_block.validate()
        clauses.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),))

    if not is_recursing and match_step.root_block.optional:
        clauses.append(u'optional: true')

    if match_step.as_block:
        match_step.as_block.validate()
        clauses.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),))

    # Doubled braces are deliberate: MATCH step bodies are wrapped in literal {{ }}.
    return u'%s {{ %s }}' % (traversal_command, u', '.join(clauses))
|
Emit MATCH query code for an entire MATCH traversal sequence.
|
def _represent_match_traversal(match_traversal):
    """Emit MATCH query code for an entire MATCH traversal sequence."""
    # The first step is rendered differently from all subsequent ones.
    pieces = [_first_step_to_match(match_traversal[0])]
    pieces.extend(_subsequent_step_to_match(step) for step in match_traversal[1:])
    return u''.join(pieces)
|
Emit a LET clause corresponding to the IR blocks for a @fold scope.
|
def _represent_fold(fold_location, fold_ir_blocks):
    """Emit a LET clause corresponding to the IR blocks for a @fold scope."""
    start_let_template = u'$%(mark_name)s = %(base_location)s'
    traverse_edge_template = u'.%(direction)s("%(edge_name)s")'
    base_template = start_let_template + traverse_edge_template

    edge_direction, edge_name = fold_location.get_first_folded_edge()
    mark_name, _ = fold_location.get_location_name()
    base_location_name, _ = fold_location.base_location.get_location_name()
    for name in (mark_name, base_location_name, edge_direction, edge_name):
        validate_safe_string(name)

    result_parts = [base_template % {
        'mark_name': mark_name,
        'base_location': base_location_name,
        'direction': edge_direction,
        'edge_name': edge_name,
    }]

    for block in fold_ir_blocks:
        if isinstance(block, Filter):
            result_parts.append(u'[' + block.predicate.to_match() + u']')
        elif isinstance(block, Traverse):
            result_parts.append(traverse_edge_template % {
                'direction': block.direction,
                'edge_name': block.edge_name,
            })
        elif isinstance(block, MarkLocation):
            # MarkLocation blocks inside a fold do not result in any MATCH output.
            pass
        else:
            raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: '
                                 u'{} {} {}'.format(type(block), block, fold_ir_blocks))

    # Workaround for OrientDB's inconsistent return type when filtering a list.
    # https://github.com/orientechnologies/orientdb/issues/7811
    result_parts.append('.asList()')
    return u''.join(result_parts)
|
Transform a ConstructResult block into a MATCH query string.
|
def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (
u'%s AS `%s`' % (output_block.fields[key].to_match(), key)
for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order.
)
return u'SELECT %s FROM' % (u', '.join(selections),)
|
Transform a Filter block into a MATCH query string.
|
def _construct_where_to_match(where_block):
    """Transform a Filter block into a MATCH query string."""
    predicate = where_block.predicate
    # A tautological predicate indicates a compiler bug upstream -- fail loudly.
    if predicate == TrueLiteral:
        raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}'
                             .format(where_block))
    return u'WHERE ' + predicate.to_match()
|
Return a MATCH query string from a list of IR blocks.
|
def emit_code_from_single_match_query(match_query):
    """Return a MATCH query string from a list of IR blocks."""
    if not match_query.match_traversals:
        raise AssertionError(u'Unexpected falsy value for match_query.match_traversals received: '
                             u'{} {}'.format(match_query.match_traversals, match_query))

    query_data = deque([u'MATCH '])

    # Render the MATCH traversal steps, comma-separated.
    traversal_strings = [
        _represent_match_traversal(traversal)
        for traversal in match_query.match_traversals
    ]
    query_data.append(traversal_strings[0])
    for traversal_string in traversal_strings[1:]:
        query_data.append(u', ')
        query_data.append(traversal_string)

    # Wrap the MATCH in a SELECT: open paren on the left, RETURN and close paren on the right.
    query_data.appendleft(u' (')
    query_data.append(u'RETURN $matches)')

    # Render the LET clauses for any @fold scopes in the query.
    # Sorted for a deterministic order of clauses.
    fold_strings = sorted(
        _represent_fold(fold_location, fold_ir_blocks)
        for fold_location, fold_ir_blocks in six.iteritems(match_query.folds)
    )
    if fold_strings:
        query_data.append(u' LET ')
        query_data.append(fold_strings[0])
        for fold_string in fold_strings[1:]:
            query_data.append(u', ')
            query_data.append(fold_string)

    # Prepend the SELECT clause with the proper output data.
    query_data.appendleft(_construct_output_to_match(match_query.output_block))

    # Append the WHERE clause with the proper filters, if any.
    if match_query.where_block is not None:
        query_data.append(_construct_where_to_match(match_query.where_block))

    return u' '.join(query_data)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.