idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
226,800
def _realize(self, master, element):
    """Build a widget from an xml element, using *master* as its parent.

    Recursively realizes the element's ``<child>/<object>`` sub-elements,
    registers the builder object under its unique id in ``self.objects``,
    and returns the builder object (not the raw tk widget).
    Raises Exception when the element's class is not in CLASS_MAP.
    """
    data = data_xmlnode_to_dict(element, self.translator)
    cname = data['class']
    uniqueid = data['id']
    # Lazily import the widget class module on first use.
    if cname not in CLASS_MAP:
        self._import_class(cname)
    if cname in CLASS_MAP:
        self._pre_process_data(data)
        parent = CLASS_MAP[cname].builder.factory(self, data)
        widget = parent.realize(master)
        self.objects[uniqueid] = parent
        xpath = "./child"
        children = element.findall(xpath)
        for child in children:
            child_xml = child.find('./object')
            # NOTE(review): the loop variable is rebound to the realized
            # builder object here; intentional but easy to misread.
            child = self._realize(parent, child_xml)
            parent.add_child(child)
        parent.configure()
        parent.layout()
        return parent
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
Builds a widget from xml element using master as parent .
209
12
226,801
def connect_callbacks(self, callbacks_bag):
    """Connect callbacks specified in *callbacks_bag* with the callbacks
    defined in the ui definition.

    Returns a list with the names of the callbacks that could not be
    connected, or None when everything was connected.
    """
    pending = []
    for builderobj in self.objects.values():
        for missing in (builderobj.connect_commands(callbacks_bag),
                        builderobj.connect_bindings(callbacks_bag)):
            if missing is not None:
                pending.extend(missing)
    if not pending:
        return None
    pending = list(set(pending))
    msg = 'Missing callbacks for commands: {}'.format(pending)
    logger.warning(msg)
    return pending
Connect callbacks specified in callbacks_bag with callbacks defined in the ui definition . Return a list with the name of the callbacks not connected .
135
32
226,802
def _start_selecting(self, event):
    """Start the region-selection process at the mouse position.

    Creates the dashed blue rubber-band rectangle on first use and makes
    it visible; the rectangle is later resized by ``_keep_selecting`` and
    consumed by ``_finish_selecting``.
    """
    self._selecting = True
    canvas = self._canvas
    # Translate window coords to canvas coords (accounts for scrolling).
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    self._sstart = (x, y)
    if not self._sobject:
        self._sobject = canvas.create_rectangle(
            self._sstart[0], self._sstart[1], x, y,
            dash=(3, 5), outline='#0000ff')
    canvas.itemconfigure(self._sobject, state=tk.NORMAL)
Comienza con el proceso de seleccion .
141
14
226,803
def _keep_selecting(self, event):
    """Continue the selection process: resize the selection rectangle so
    it follows the current mouse position."""
    canvas = self._canvas
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    canvas.coords(self._sobject, self._sstart[0], self._sstart[1], x, y)
Continua con el proceso de seleccion . Crea o redimensiona el cuadro de seleccion de acuerdo con la posicion del raton .
75
41
226,804
def _finish_selecting(self, event):
    """Finish the selection.

    Hides the rubber-band rectangle, stores the selected region on the
    canvas as ``region_selected`` and fires the <<RegionSelected>>
    virtual event so listeners can act on the enclosed objects.
    """
    self._selecting = False
    canvas = self._canvas
    x = canvas.canvasx(event.x)
    y = canvas.canvasy(event.y)
    # Collapse the rectangle off-canvas and hide it for later reuse.
    canvas.coords(self._sobject, -1, -1, -1, -1)
    canvas.itemconfigure(self._sobject, state=tk.HIDDEN)
    sel_region = self._sstart[0], self._sstart[1], x, y
    canvas.region_selected = sel_region
    canvas.event_generate('<<RegionSelected>>')
Finaliza la seleccion . Marca como seleccionados todos los objetos que se encuentran dentro del recuadro de seleccion .
146
42
226,805
def matrix_coords(rows, cols, rowh, colw, ox=0, oy=0):
    """Generate (index, x0, y0, x1, y1) coords for a matrix of rects.

    Cells are visited in row-major order; (ox, oy) is the origin offset.
    """
    for index, row, col in rowmajor(rows, cols):
        left = ox + col * colw
        top = oy + row * rowh
        yield (index, left, top, left + colw, top + rowh)
Generate coords for a matrix of rects
86
10
226,806
def get(self):
    """Return a dictionary that represents the Tcl array.

    Maps each element name to the current value of its associated
    tk variable.
    """
    # Dict comprehension replaces the manual build-up loop.
    return {name: var.get() for name, var in self._elementvars.items()}
Return a dictionary that represents the Tcl array
44
9
226,807
def yview(self, *args):
    """Update inplace widgets position when doing vertical scroll."""
    # Reposition the inplace editors after the scroll is processed.
    self.after_idle(self.__updateWnds)
    ttk.Treeview.yview(self, *args)
Update inplace widgets position when doing vertical scroll
39
9
226,808
def xview(self, *args):
    """Update inplace widgets position when doing horizontal scroll."""
    # Reposition the inplace editors after the scroll is processed.
    self.after_idle(self.__updateWnds)
    ttk.Treeview.xview(self, *args)
Update inplace widgets position when doing horizontal scroll
39
9
226,809
def __check_focus(self, event):
    """Check if the treeview focus has changed and, if so, (re)create
    the inplace edit widgets for the newly focused item."""
    # print('Event:', event.type, event.x, event.y)
    changed = False
    if not self._curfocus:
        changed = True
    elif self._curfocus != self.focus():
        # Focus moved to another item: drop the old editors first.
        self.__clear_inplace_widgets()
        changed = True
    newfocus = self.focus()
    if changed:
        if newfocus:
            # print('Focus changed to:', newfocus)
            self._curfocus = newfocus
            self.__focus(newfocus)
        self.__updateWnds()
Checks if the focus has changed
123
7
226,810
def __focus(self, item):
    """Called when the focused item has changed.

    Fires <<TreeviewInplaceEdit>> for every display column so editors
    can be created, then wires Tab/Shift-Tab navigation between the
    inplace widgets.
    """
    cols = self.__get_display_columns()
    for col in cols:
        self.__event_info = (col, item)
        self.event_generate('<<TreeviewInplaceEdit>>')
        if col in self._inplace_widgets:
            w = self._inplace_widgets[col]
            # Bind ``w`` as a default argument: a plain closure over the
            # loop variable would be late-bound, making every handler
            # act on the widget of the *last* column.
            w.bind('<Key-Tab>',
                   lambda e, w=w: w.tk_focusNext().focus_set())
            w.bind('<Shift-Key-Tab>',
                   lambda e, w=w: w.tk_focusPrev().focus_set())
Called when focus item has changed
147
7
226,811
def __clear_inplace_widgets ( self ) : cols = self . __get_display_columns ( ) #print('Clear:', cols) for c in cols : if c in self . _inplace_widgets : widget = self . _inplace_widgets [ c ] widget . place_forget ( ) self . _inplace_widgets_show . pop ( c , None )
Remove all inplace edit widgets .
92
7
226,812
def run ( self ) : install . run ( self ) # # Remove old pygubu.py from scripts path if exists spath = os . path . join ( self . install_scripts , 'pygubu' ) for ext in ( '.py' , '.pyw' ) : filename = spath + ext if os . path . exists ( filename ) : os . remove ( filename ) # # Remove old pygubu-designer.bat if platform . system ( ) == 'Windows' : spath = os . path . join ( self . install_scripts , 'pygubu-designer.bat' ) if os . path . exists ( spath ) : os . remove ( spath )
Run parent install and then save the install dir in the script .
154
13
226,813
def hide_all(self):
    """Hide every property row in the property editor and clear the
    currently edited object."""
    self.current = None
    # Iterate values directly -- the property-name key was unused.
    for label, widget in self._propbag.values():
        label.grid_remove()
        widget.grid_remove()
Hide all properties from property editor .
47
7
226,814
def _get_init_args ( self ) : args = { } for rop in self . ro_properties : if rop in self . properties : args [ rop ] = self . properties [ rop ] return args
Creates dict with properties marked as readonly
48
9
226,815
def _calculate_menu_wh(self):
    """Estimate the menu width and height in pixels and store the
    values (plus a 25% margin) in ``self._cwidth`` / ``self._cheight``.

    Two estimates are computed -- one with the root menu font and one
    with each entry's own font -- and the larger one wins.
    """
    w = iw = 50
    h = ih = 0
    # menu.index returns None if there are no choices
    index = self._menu.index(tk.END)
    index = index if index is not None else 0
    count = index + 1
    # First calculate using the font parameters of the root menu:
    font = self._menu.cget('font')
    font = self._get_font(font)
    for i in range(0, count):
        mtype = self._menu.type(i)
        if mtype == 'tearoff':
            continue
        label = 'default'
        ifont = 'TkMenuFont'
        if mtype != 'separator':
            label = self._menu.entrycget(i, 'label')
            ifont = self._menu.entrycget(i, 'font')
        wpx = font.measure(label)
        hpx = font.metrics('linespace')
        w += wpx
        if hpx > h:
            h = hpx * 2
        # Then calculate using the font configured for each subitem:
        ifont = self._get_font(ifont)
        wpx = ifont.measure(label)
        hpx = ifont.metrics('linespace')
        iw += wpx
        if hpx > ih:
            ih = hpx * 2
    # Compare the 2 sizes and use the greatest (with sane minimums).
    w = max(w, iw, 100)
    h = max(h, ih, 25)
    self._cwidth = w + int(w * 0.25)
    self._cheight = h + int(h * 0.25)
Calculate menu width and height.
358
9
226,816
def _over_resizer ( self , x , y ) : over_resizer = False c = self . canvas ids = c . find_overlapping ( x , y , x , y ) if ids : o = ids [ 0 ] tags = c . gettags ( o ) if 'resizer' in tags : over_resizer = True return over_resizer
Returns True if mouse is over a resizer
83
9
226,817
def resize_preview(self, dw, dh):
    """Resize the preview that is currently being dragged by (dw, dh).

    The dragged preview is identified through its 'preview_<ident>' tag
    on the first canvas object being moved; after resizing, the other
    previews are repositioned.
    """
    # identify preview
    if self._objects_moving:
        id_ = self._objects_moving[0]
        tags = self.canvas.gettags(id_)
        for tag in tags:
            if tag.startswith('preview_'):
                _, ident = tag.split('preview_')
                preview = self.previews[ident]
                preview.resize_by(dw, dh)
                self.move_previews()
                break
    self._update_cregion()
Resizes preview that is currently dragged
117
7
226,818
def move_previews(self):
    """Move previews to their new vertical positions after a resize
    event, then refresh the selection indicator."""
    # calculate new positions
    min_y = self._calc_preview_ypos()
    for idx, (key, p) in enumerate(self.previews.items()):
        new_dy = min_y[idx] - p.y
        self.previews[key].move_by(0, new_dy)
    self._update_cregion()
    self.show_selected(self._sel_id, self._sel_widget)
Move previews after a resize event
113
6
226,819
def _calc_preview_ypos ( self ) : y = 10 min_y = [ y ] for k , p in self . previews . items ( ) : y += p . height ( ) + self . padding min_y . append ( y ) return min_y
Calculates the previews positions on canvas
60
8
226,820
def _get_slot ( self ) : x = y = 10 for k , p in self . previews . items ( ) : y += p . height ( ) + self . padding return x , y
Returns the next coordinates for a preview
42
7
226,821
def clear_cache(cls):
    """Clear the image cache. Call this before closing the tk root.

    Each cached image slot is released explicitly before the cache dict
    itself is replaced -- reportedly prevents tkinter errors on
    python 2 (original comment was unsure: "??").
    """
    for key in cls._cached:
        cls._cached[key] = None
    cls._cached = {}
Call this before closing tk root
50
7
226,822
def register(cls, key, filename):
    """Register an image file under *key* for deferred loading.

    If the key is already registered, the previous resource is replaced.
    """
    if key in cls._stock:
        # Lazy %-style args: formatting is skipped if the level is off.
        logger.info('Warning, replacing resource %s', str(key))
    cls._stock[key] = {'type': 'custom', 'filename': filename}
    logger.info('%s registered as %s', filename, key)
Register a image file using key
79
6
226,823
def register_from_data(cls, key, format, data):
    """Register in-memory image *data* (in the given tk image *format*)
    under *key* for deferred loading.

    If the key is already registered, the previous resource is replaced.
    NOTE: the parameter name ``format`` shadows the builtin; it is kept
    for backward compatibility with keyword-argument callers.
    """
    if key in cls._stock:
        # Lazy %-style args: formatting is skipped if the level is off.
        logger.info('Warning, replacing resource %s', str(key))
    cls._stock[key] = {'type': 'data', 'data': data, 'format': format}
    logger.info('%s registered as %s', 'data', key)
Register a image data using key
93
6
226,824
def register_created(cls, key, image):
    """Register an already created (tk) image object under *key*.

    If the key is already registered, the previous resource is replaced.
    """
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'created', 'image': image}
    # NOTE(review): logs the literal 'data' instead of something
    # descriptive; looks copy-pasted from register_from_data.
    logger.info('%s registered as %s' % ('data', key))
Register an already created image using key
83
7
226,825
def _load_image(cls, rkey):
    """Load the image registered under *rkey*, cache it, and return it.

    The source depends on how the resource was registered: in-memory
    data ('stock'/'data' types), an already created image ('created'),
    or an image file (any other type).
    """
    v = cls._stock[rkey]
    img = None
    itype = v['type']
    if itype in ('stock', 'data'):
        img = tk.PhotoImage(format=v['format'], data=v['data'])
    elif itype == 'created':
        img = v['image']
    else:
        img = tk.PhotoImage(file=v['filename'])
    cls._cached[rkey] = img
    logger.info('Loaded resource %s.' % rkey)
    return img
Load image from file or return the cached instance .
141
10
226,826
def get(cls, rkey):
    """Return the image previously registered with key *rkey*, loading
    it on first use.

    Raises StockImageException if the key was never registered.
    """
    if rkey in cls._cached:
        logger.info('Resource %s is in cache.' % rkey)
        return cls._cached[rkey]
    if rkey in cls._stock:
        return cls._load_image(rkey)
    raise StockImageException('StockImage: %s not registered.' % rkey)
Get image previously registered with key rkey . If key not exist raise StockImageException
93
17
226,827
def config_treeview(self):
    """Set up the treeview event bindings (and other params)."""
    tv = self.treeview
    tv.bind('<Double-1>', self.on_treeview_double_click)
    tv.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+')
Sets treeview columns and other params
65
8
226,828
def get_toplevel_parent(self, treeitem):
    """Return the top level (root) ancestor of *treeitem* in the
    treeview; the item itself when it is already top level."""
    tv = self.treeview
    toplevel_items = tv.get_children()
    item = treeitem
    while item not in toplevel_items:
        item = tv.parent(item)
    return item
Returns the top level parent for treeitem .
60
9
226,829
def draw_widget(self, item):
    """Create a preview of the toplevel widget containing *item* and
    highlight *item* inside it."""
    if item:
        # Filter must be off so hidden children are included in the xml.
        self.filter_remove(remember=True)
        selected_id = self.treedata[item]['id']
        item = self.get_toplevel_parent(item)
        widget_id = self.treedata[item]['id']
        wclass = self.treedata[item]['class']
        xmlnode = self.tree_node_to_xml('', item)
        self.previewer.draw(item, widget_id, xmlnode, wclass)
        self.previewer.show_selected(item, selected_id)
        self.filter_restore()
Create a preview of the selected treeview item
146
9
226,830
def on_treeview_delete_selection(self, event=None):
    """Remove the selected items from the treeview (and their previews).

    Toplevel items are deleted together with their preview; for child
    items, the toplevel parent's grid info is updated and the parent is
    redrawn afterwards.
    """
    tv = self.treeview
    selection = tv.selection()
    # Need to remove filter
    self.filter_remove(remember=True)
    toplevel_items = tv.get_children()
    parents_to_redraw = set()
    for item in selection:
        try:
            parent = ''
            if item not in toplevel_items:
                parent = self.get_toplevel_parent(item)
            else:
                # Toplevel item: remove its preview as well.
                self.previewer.delete(item)
            del self.treedata[item]
            tv.delete(item)
            self.app.set_changed()
            if parent:
                self._update_max_grid_rc(parent)
                parents_to_redraw.add(parent)
            self.widget_editor.hide_all()
        except tk.TclError:
            # Selection of parent and child items ??
            # TODO: notify something here
            pass
    # redraw widgets
    for item in parents_to_redraw:
        self.draw_widget(item)
    # restore filter
    self.filter_restore()
Removes selected items from treeview
232
7
226,831
def tree_to_xml(self):
    """Traverse the treeview and generate an ElementTree object for
    the whole ui definition."""
    # Need to remove the filter or hidden items will not be saved.
    self.filter_remove(remember=True)
    root = ET.Element('interface')
    for item in self.treeview.get_children():
        root.append(self.tree_node_to_xml('', item))
    # restore filter
    self.filter_restore()
    return ET.ElementTree(root)
Traverses treeview and generates a ElementTree object
104
11
226,832
def tree_node_to_xml(self, parent, item):
    """Convert treeview *item* and its children (recursively) to xml
    nodes; each child is wrapped in a <child> element."""
    node = self.treedata[item].to_xml_node()
    for child in self.treeview.get_children(item):
        cnode = ET.Element('child')
        cnode.append(self.tree_node_to_xml(item, child))
        node.append(cnode)
    return node
Converts a treeview item and children to xml nodes
103
11
226,833
def _insert_item(self, root, data, from_file=False):
    """Insert an item into the treeview and fill its columns from *data*.

    Adjusts the grid row to avoid collisions when pasting, picks the
    class icon, attaches self as observer of *data*, and updates the
    parent's max grid row/column info. Returns the new treeview item id.
    """
    tree = self.treeview
    treelabel = data.get_id()
    row = col = ''
    if root != '' and 'layout' in data:
        row = data.get_layout_property('row')
        col = data.get_layout_property('column')
        # fix row position when using copy and paste
        # If collision, increase by 1
        row_count = self.get_max_row(root)
        if not from_file and (row_count > int(row) and int(col) == 0):
            row = str(row_count + 1)
            data.set_layout_property('row', row)
    image = ''
    try:
        image = StockImage.get('16x16-tk.default')
    except StockImageException:
        # TODO: notify something here
        pass
    try:
        # Prefer a class-specific icon over the generic default one.
        image = StockImage.get('16x16-{0}'.format(data.get_class()))
    except StockImageException:
        # TODO: notify something here
        pass
    values = (data.get_class(), row, col)
    item = tree.insert(root, 'end', text=treelabel,
                       values=values, image=image)
    data.attach(self)
    self.treedata[item] = data
    # Update grid r/c data
    self._update_max_grid_rc(root, from_file=True)
    self.app.set_changed()
    return item
Insert a item on the treeview and fills columns from data
330
12
226,834
def copy_to_clipboard ( self ) : tree = self . treeview # get the selected item: selection = tree . selection ( ) if selection : self . filter_remove ( remember = True ) root = ET . Element ( 'selection' ) for item in selection : node = self . tree_node_to_xml ( '' , item ) root . append ( node ) # python2 issue try : text = ET . tostring ( root , encoding = 'unicode' ) except LookupError : text = ET . tostring ( root , encoding = 'UTF-8' ) tree . clipboard_clear ( ) tree . clipboard_append ( text ) self . filter_restore ( )
Copies selected items to clipboard .
147
7
226,835
def add_widget(self, wclass):
    """Add a new item of widget class *wclass* to the treeview.

    The widget is inserted under the current selection when allowed,
    otherwise at the selection's parent level; default widget and
    layout properties are filled in before inserting, redrawing the
    preview and selecting the new item.
    """
    tree = self.treeview
    # get the selected item:
    selected_item = ''
    tsel = tree.selection()
    if tsel:
        selected_item = tsel[0]
    # Need to remove filter if set
    self.filter_remove()
    root = selected_item
    # check if the widget can be added at selected point
    if not self._validate_add(root, wclass, False):
        # if not, try to add at item parent level
        parent = tree.parent(root)
        if parent != root:
            if self._validate_add(parent, wclass):
                root = parent
            else:
                return
        else:
            return
    # root item should be set at this point
    # setup properties
    widget_id = self.get_unique_id(wclass)
    data = WidgetDescr(wclass, widget_id)
    # setup default values for properties
    for pname in builder.CLASS_MAP[wclass].builder.properties:
        pdescription = {}
        if pname in properties.WIDGET_PROPERTIES:
            pdescription = properties.WIDGET_PROPERTIES[pname]
        if wclass in pdescription:
            # Merge the class-specific overrides into the description.
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_property(pname, default_value)
        # default text for widgets with text prop:
        if pname in ('text', 'label'):
            data.set_property(pname, widget_id)
    #
    # default grid properties
    #
    # is_container = builder.CLASS_MAP[wclass].builder.container
    for prop_name in properties.GRID_PROPERTIES:
        pdescription = properties.LAYOUT_OPTIONS[prop_name]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_layout_property(prop_name, default_value)
    # Place the new widget on the next free grid row, first column.
    rownum = '0'
    if root:
        rownum = str(self.get_max_row(root) + 1)
    data.set_layout_property('row', rownum)
    data.set_layout_property('column', '0')
    item = self._insert_item(root, data)
    # Do redraw
    self.draw_widget(item)
    # Select and show the item created
    tree.after_idle(lambda: tree.selection_set(item))
    tree.after_idle(lambda: tree.focus(item))
    tree.after_idle(lambda: tree.see(item))
Adds a new item to the treeview .
593
9
226,836
def load_file ( self , filename ) : self . counter . clear ( ) # python2 issues try : etree = ET . parse ( filename ) except ET . ParseError : parser = ET . XMLParser ( encoding = 'UTF-8' ) etree = ET . parse ( filename , parser ) eroot = etree . getroot ( ) self . remove_all ( ) self . previewer . remove_all ( ) self . widget_editor . hide_all ( ) self . previewer . resource_paths . append ( os . path . dirname ( filename ) ) for element in eroot : self . populate_tree ( '' , eroot , element , from_file = True ) children = self . treeview . get_children ( '' ) for child in children : self . draw_widget ( child ) self . previewer . show_selected ( None , None )
Load file into treeview
189
5
226,837
def populate_tree(self, master, parent, element, from_file=False):
    """Read an xml <object> node and populate a treeview item (and its
    children, recursively) under *master*. Returns the new item id.

    Raises Exception when the node's class is not in builder.CLASS_MAP.
    """
    data = WidgetDescr(None, None)
    data.from_xml_node(element)
    cname = data.get_class()
    # Make sure the id is unique within the tree.
    uniqueid = self.get_unique_id(cname, data.get_id())
    data.set_property('id', uniqueid)
    if cname in builder.CLASS_MAP:
        pwidget = self._insert_item(master, data, from_file=from_file)
        xpath = "./child"
        children = element.findall(xpath)
        for child in children:
            child_object = child.find('./object')
            cwidget = self.populate_tree(
                pwidget, child, child_object, from_file=from_file)
        return pwidget
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
Reads xml nodes and populates tree item
202
9
226,838
def update_event(self, hint, obj):
    """Observer callback: update the tree columns when item data changes.

    Refreshes the item label and the row/column values when they
    differ, redraws the preview and marks the project as changed.
    """
    tree = self.treeview
    data = obj
    item = self.get_item_by_data(obj)
    if item:
        if data.get_id() != tree.item(item, 'text'):
            tree.item(item, text=data.get_id())
        # if tree.parent(item) != '' and 'layout' in data:
        if tree.parent(item) != '':
            row = data.get_layout_property('row')
            col = data.get_layout_property('column')
            values = tree.item(item, 'values')
            if (row != values[1] or col != values[2]):
                values = (data.get_class(), row, col)
                tree.item(item, values=values)
        self.draw_widget(item)
        self.app.set_changed()
Updates tree columns when item data is changed.
200
11
226,839
def _reatach ( self ) : for item , p , idx in self . _detached : # The item may have been deleted. if self . treeview . exists ( item ) and self . treeview . exists ( p ) : self . treeview . move ( item , p , idx ) self . _detached = [ ]
Reinsert the hidden items .
73
6
226,840
def _detach(self, item):
    """Decide which items under *item* must be hidden because they do
    not match the search string.

    Returns (match_found, to_detach) where to_detach is a list of
    (item, parent, index) tuples ready to be detached. A matching item
    keeps itself visible but may still hide non-matching children.
    """
    to_detach = []
    children_det = []
    children_match = False
    match_found = False
    value = self.filtervar.get()
    # An item matches if the filter string occurs in its label text...
    txt = self.treeview.item(item, 'text').lower()
    if value in txt:
        match_found = True
    else:
        # ...or in its class name.
        class_txt = self.treedata[item].get_class().lower()
        if value in class_txt:
            match_found = True
    parent = self.treeview.parent(item)
    idx = self.treeview.index(item)
    children = self.treeview.get_children(item)
    if children:
        # Recurse first: a child match keeps this item visible too.
        for child in children:
            match, detach = self._detach(child)
            children_match = children_match | match
            if detach:
                children_det.extend(detach)
    if match_found:
        if children_det:
            to_detach.extend(children_det)
    else:
        if children_match:
            if children_det:
                to_detach.extend(children_det)
        else:
            # Neither the item nor its subtree match: hide the item.
            to_detach.append((item, parent, idx))
    match_found = match_found | children_match
    return match_found, to_detach
Hide items from treeview that do not match the search string .
268
13
226,841
def load_file(self, filename):
    """Load the xml ui definition file into the treeview and reset the
    project change state."""
    self.tree_editor.load_file(filename)
    # Show the loaded file name in the project title label.
    self.project_name.configure(text=filename)
    self.currentfile = filename
    self.is_changed = False
Load xml into treeview
47
5
226,842
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    """Lower the IR into an IR form that can be represented in Gremlin queries.

    Args:
        ir_blocks: list of IR blocks produced by the compiler frontend.
        query_metadata_table: metadata object describing the query
            being compiled (passed through to the lowering passes).
        type_equivalence_hints: optional dict of GraphQL type ->
            equivalent union type, used to rewrite type coercions.

    Returns:
        the lowered list of IR blocks.
    """
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)
    ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
    ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)
    # Equivalence-hint rewriting must run before coercions are lowered.
    if type_equivalence_hints:
        ir_blocks = lower_coerce_type_block_type_data(
            ir_blocks, type_equivalence_hints)
    ir_blocks = lower_coerce_type_blocks(ir_blocks)
    ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
    ir_blocks = merge_consecutive_filter_clauses(ir_blocks)
    ir_blocks = lower_folded_outputs(ir_blocks)
    return ir_blocks
Lower the IR into an IR form that can be represented in Gremlin queries .
209
16
226,843
def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
    """Rewrite CoerceType blocks to explicitly state which types are
    allowed in the coercion, based on the type equivalence hints.

    Raises GraphQLCompilationError when a hint is not of the form
    GraphQLInterfaceType/GraphQLObjectType -> GraphQLUnionType.
    """
    allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
    allowed_value_type_spec = GraphQLUnionType
    # Validate that the type_equivalence_hints parameter has correct types.
    for key, value in six.iteritems(type_equivalence_hints):
        if (not isinstance(key, allowed_key_type_spec) or
                not isinstance(value, allowed_value_type_spec)):
            msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
                   u'was unexpected, expected a hint in the form '
                   u'GraphQLInterfaceType -> GraphQLUnionType or '
                   u'GraphQLObjectType -> GraphQLUnionType'.format(
                       key.name, str(type(key)), value.name, str(type(value))))
            raise GraphQLCompilationError(msg)
    # CoerceType blocks only know the name of the type to which they coerce,
    # and not its corresponding GraphQL type object. Convert the type equivalence
    # hints into a dict of type name -> set of names of equivalent types,
    # which can be used more readily.
    equivalent_type_names = {
        key.name: {x.name for x in value.types}
        for key, value in six.iteritems(type_equivalence_hints)
    }
    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            target_class = get_only_element_from_collection(block.target_class)
            if target_class in equivalent_type_names:
                # Coerce to the set of equivalent types instead.
                new_block = CoerceType(equivalent_type_names[target_class])
        new_ir_blocks.append(new_block)
    return new_ir_blocks
Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion .
428
18
226,844
def lower_coerce_type_blocks(ir_blocks):
    """Lower CoerceType blocks into Filter blocks with a type-check
    predicate over the '@class' field."""
    lowered = []
    for block in ir_blocks:
        if isinstance(block, CoerceType):
            type_check = BinaryComposition(
                u'contains',
                Literal(list(block.target_class)),
                LocalField('@class'))
            lowered.append(Filter(type_check))
        else:
            lowered.append(block)
    return lowered
Lower CoerceType blocks into Filter blocks with a type - check predicate .
108
16
226,845
def rewrite_filters_in_optional_blocks(ir_blocks):
    """In optional contexts, add a null check that lets non-existent
    optional data pass through the Filter blocks.

    Tracks the optional traversal depth; each Filter inside an optional
    scope becomes (current vertex is null) OR (original predicate).
    Raises AssertionError on CoerceType blocks (they must already have
    been lowered) and on unbalanced optional Backtracks.
    """
    new_ir_blocks = []
    optional_context_depth = 0
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            raise AssertionError(u'Found a CoerceType block after all such blocks should have been '
                                 u'lowered to Filter blocks: {}'.format(ir_blocks))
        elif isinstance(block, Traverse) and block.optional:
            optional_context_depth += 1
        elif isinstance(block, Backtrack) and block.optional:
            optional_context_depth -= 1
            if optional_context_depth < 0:
                raise AssertionError(u'Reached negative optional context depth for blocks: '
                                     u'{}'.format(ir_blocks))
        elif isinstance(block, Filter) and optional_context_depth > 0:
            # Let null (missing optional) vertices through the filter.
            null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral)
            new_block = Filter(BinaryComposition(u'||', null_check, block.predicate))
        else:
            pass
        new_ir_blocks.append(new_block)
    return new_ir_blocks
In optional contexts add a check for null that allows non - existent optional data through .
270
18
226,846
def lower_folded_outputs(ir_blocks):
    """Lower standard folded output fields into GremlinFoldedContextField objects.

    Splits the folds out of the IR, converts each fold's blocks, and
    rebuilds the final ConstructResult so folded outputs carry their
    folded IR blocks with them.
    """
    folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)
    if not remaining_ir_blocks:
        raise AssertionError(u'Expected at least one non-folded block to remain: {} {} '
                             u'{}'.format(folds, remaining_ir_blocks, ir_blocks))
    output_block = remaining_ir_blocks[-1]
    if not isinstance(output_block, ConstructResult):
        raise AssertionError(u'Expected the last non-folded block to be ConstructResult, '
                             u'but instead was: {} {} '
                             u'{}'.format(type(output_block), output_block, ir_blocks))
    # Turn folded Filter blocks into GremlinFoldedFilter blocks.
    converted_folds = {
        base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks)
        for base_fold_location, folded_ir_blocks in six.iteritems(folds)
    }
    new_output_fields = dict()
    for output_name, output_expression in six.iteritems(output_block.fields):
        new_output_expression = output_expression
        # Turn FoldedContextField expressions into GremlinFoldedContextField ones.
        if isinstance(output_expression, FoldedContextField):
            # Get the matching folded IR blocks and put them in the new context field.
            base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0]
            folded_ir_blocks = converted_folds[base_fold_location_name]
            new_output_expression = GremlinFoldedContextField(
                output_expression.fold_scope_location, folded_ir_blocks,
                output_expression.field_type)
        new_output_fields[output_name] = new_output_expression
    # Replace the final ConstructResult with one using the new fields.
    new_ir_blocks = remaining_ir_blocks[:-1]
    new_ir_blocks.append(ConstructResult(new_output_fields))
    return new_ir_blocks
Lower standard folded output fields into GremlinFoldedContextField objects .
469
14
226,847
def validate(self):
    """Validate that the GremlinFoldedContextField is correctly representable.

    Checks the fold location type, the types of the folded IR blocks,
    and that the field type is a (non-nested) GraphQL list.
    """
    if not isinstance(self.fold_scope_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(self.fold_scope_location), self.fold_scope_location))
    allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack)
    for block in self.folded_ir_blocks:
        if not isinstance(block, allowed_block_types):
            raise AssertionError(
                u'Found invalid block of type {} in folded_ir_blocks: {} '
                u'Allowed types are {}.'.format(
                    type(block), self.folded_ir_blocks, allowed_block_types))
    if not isinstance(self.field_type, GraphQLList):
        raise ValueError(u'Invalid value of "field_type", expected a list type but got: '
                         u'{}'.format(self.field_type))
    # Nested lists inside a fold are not supported by the compiler.
    inner_type = strip_non_null_from_type(self.field_type.of_type)
    if isinstance(inner_type, GraphQLList):
        raise GraphQLCompilationError(
            u'Outputting list-valued fields in a @fold context is currently '
            u'not supported: {} {}'.format(
                self.fold_scope_location, self.field_type.of_type))
Validate that the GremlinFoldedContextField is correctly representable .
317
15
226,848
def from_traverse(cls, traverse_block):
    """Create a GremlinFoldedTraverse block copying the direction and
    edge name of the given Traverse block."""
    if not isinstance(traverse_block, Traverse):
        raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse '
                             u'with block of type {}'.format(type(traverse_block)))
    return cls(traverse_block.direction, traverse_block.edge_name)
Create a GremlinFoldedTraverse block as a copy of the given Traverse block .
87
19
226,849
def _get_referenced_type_equivalences ( graphql_types , type_equivalence_hints ) : referenced_types = set ( ) for graphql_type in graphql_types . values ( ) : if isinstance ( graphql_type , ( GraphQLObjectType , GraphQLInterfaceType ) ) : for _ , field in graphql_type . fields . items ( ) : if isinstance ( field . type , GraphQLList ) : referenced_types . add ( field . type . of_type . name ) return { original : union for original , union in type_equivalence_hints . items ( ) if union . name in referenced_types }
Filter union types with no edges from the type equivalence hints dict .
150
14
226,850
def _get_inherited_field_types ( class_to_field_type_overrides , schema_graph ) : inherited_field_type_overrides = dict ( ) for superclass_name , field_type_overrides in class_to_field_type_overrides . items ( ) : for subclass_name in schema_graph . get_subclass_set ( superclass_name ) : inherited_field_type_overrides . setdefault ( subclass_name , dict ( ) ) inherited_field_type_overrides [ subclass_name ] . update ( field_type_overrides ) return inherited_field_type_overrides
Return a dictionary describing the field type overrides in subclasses .
150
13
226,851
def _validate_overriden_fields_are_not_defined_in_superclasses(
        class_to_field_type_overrides, schema_graph):
    """Assert that the fields we want to override are not defined in superclasses.

    Raises AssertionError when an override targets a field that any
    strict superclass already defines.
    """
    for class_name, field_type_overrides in six.iteritems(class_to_field_type_overrides):
        for superclass_name in schema_graph.get_inheritance_set(class_name):
            if superclass_name != class_name:
                superclass = schema_graph.get_element_by_class_name(superclass_name)
                for field_name in field_type_overrides:
                    if field_name in superclass.properties:
                        raise AssertionError(
                            u'Attempting to override field "{}" from class "{}", but the field is '
                            u'defined in superclass "{}"'.format(
                                field_name, class_name, superclass_name))
Assert that the fields we want to override are not defined in superclasses .
199
16
226,852
def _property_descriptor_to_graphql_type(property_obj):
    """Return the best GraphQL type representation for an OrientDB property descriptor.

    Scalar property types map directly; embedded sets/lists of scalars
    map to a GraphQLList of the scalar type. Returns None when the
    property cannot be represented in GraphQL (it is then hidden).
    """
    property_type = property_obj.type_id
    scalar_types = {
        PROPERTY_TYPE_BOOLEAN_ID: GraphQLBoolean,
        PROPERTY_TYPE_DATE_ID: GraphQLDate,
        PROPERTY_TYPE_DATETIME_ID: GraphQLDateTime,
        PROPERTY_TYPE_DECIMAL_ID: GraphQLDecimal,
        PROPERTY_TYPE_DOUBLE_ID: GraphQLFloat,
        PROPERTY_TYPE_FLOAT_ID: GraphQLFloat,
        PROPERTY_TYPE_INTEGER_ID: GraphQLInt,
        PROPERTY_TYPE_STRING_ID: GraphQLString,
    }
    result = scalar_types.get(property_type, None)
    if result:
        return result
    mapping_types = {
        PROPERTY_TYPE_EMBEDDED_SET_ID: GraphQLList,
        PROPERTY_TYPE_EMBEDDED_LIST_ID: GraphQLList,
    }
    wrapping_type = mapping_types.get(property_type, None)
    if wrapping_type:
        linked_property_obj = property_obj.qualifier
        # There are properties that are embedded collections of non-primitive types,
        # for example, ProxyEventSet.scalar_parameters.
        # The GraphQL compiler does not currently support these.
        if linked_property_obj in scalar_types:
            return wrapping_type(scalar_types[linked_property_obj])
    # We weren't able to represent this property in GraphQL, so we'll hide it instead.
    return None
Return the best GraphQL type representation for an OrientDB property descriptor .
361
14
226,853
def _get_union_type_name ( type_names_to_union ) : if not type_names_to_union : raise AssertionError ( u'Expected a non-empty list of type names to union, received: ' u'{}' . format ( type_names_to_union ) ) return u'Union__' + u'__' . join ( sorted ( type_names_to_union ) )
Construct a unique union type name based on the type names being unioned .
95
15
226,854
def _get_fields_for_class ( schema_graph , graphql_types , field_type_overrides , hidden_classes , cls_name ) : properties = schema_graph . get_element_by_class_name ( cls_name ) . properties # Add leaf GraphQL fields (class properties). all_properties = { property_name : _property_descriptor_to_graphql_type ( property_obj ) for property_name , property_obj in six . iteritems ( properties ) } result = { property_name : graphql_representation for property_name , graphql_representation in six . iteritems ( all_properties ) if graphql_representation is not None } # Add edge GraphQL fields (edges to other vertex classes). schema_element = schema_graph . get_element_by_class_name ( cls_name ) outbound_edges = ( ( 'out_{}' . format ( out_edge_name ) , schema_graph . get_element_by_class_name ( out_edge_name ) . properties [ EDGE_DESTINATION_PROPERTY_NAME ] . qualifier ) for out_edge_name in schema_element . out_connections ) inbound_edges = ( ( 'in_{}' . format ( in_edge_name ) , schema_graph . get_element_by_class_name ( in_edge_name ) . properties [ EDGE_SOURCE_PROPERTY_NAME ] . qualifier ) for in_edge_name in schema_element . in_connections ) for field_name , to_type_name in chain ( outbound_edges , inbound_edges ) : edge_endpoint_type_name = None subclasses = schema_graph . get_subclass_set ( to_type_name ) to_type_abstract = schema_graph . get_element_by_class_name ( to_type_name ) . abstract if not to_type_abstract and len ( subclasses ) > 1 : # If the edge endpoint type has no subclasses, it can't be coerced into any other type. # If the edge endpoint type is abstract (an interface type), we can already # coerce it to the proper type with a GraphQL fragment. However, if the endpoint type # is non-abstract and has subclasses, we need to return its subclasses as an union type. # This is because GraphQL fragments cannot be applied on concrete types, and # GraphQL does not support inheritance of concrete types. 
type_names_to_union = [ subclass for subclass in subclasses if subclass not in hidden_classes ] if type_names_to_union : edge_endpoint_type_name = _get_union_type_name ( type_names_to_union ) else : if to_type_name not in hidden_classes : edge_endpoint_type_name = to_type_name if edge_endpoint_type_name is not None : # If we decided to not hide this edge due to its endpoint type being non-representable, # represent the edge field as the GraphQL type List(edge_endpoint_type_name). result [ field_name ] = GraphQLList ( graphql_types [ edge_endpoint_type_name ] ) for field_name , field_type in six . iteritems ( field_type_overrides ) : if field_name not in result : raise AssertionError ( u'Attempting to override field "{}" from class "{}", but the ' u'class does not contain said field' . format ( field_name , cls_name ) ) else : result [ field_name ] = field_type return result
Return a dict from field name to GraphQL field type for the specified graph class .
813
17
226,855
def _create_field_specification ( schema_graph , graphql_types , field_type_overrides , hidden_classes , cls_name ) : def field_maker_func ( ) : """Create and return the fields for the given GraphQL type.""" result = EXTENDED_META_FIELD_DEFINITIONS . copy ( ) result . update ( OrderedDict ( [ ( name , GraphQLField ( value ) ) for name , value in sorted ( six . iteritems ( _get_fields_for_class ( schema_graph , graphql_types , field_type_overrides , hidden_classes , cls_name ) ) , key = lambda x : x [ 0 ] ) ] ) ) return result return field_maker_func
Return a function that specifies the fields present on the given type .
167
13
226,856
def _create_interface_specification ( schema_graph , graphql_types , hidden_classes , cls_name ) : def interface_spec ( ) : """Return a list of GraphQL interface types implemented by the type named 'cls_name'.""" abstract_inheritance_set = ( superclass_name for superclass_name in sorted ( list ( schema_graph . get_inheritance_set ( cls_name ) ) ) if ( superclass_name not in hidden_classes and schema_graph . get_element_by_class_name ( superclass_name ) . abstract ) ) return [ graphql_types [ x ] for x in abstract_inheritance_set if x not in hidden_classes ] return interface_spec
Return a function that specifies the interfaces implemented by the given type .
164
13
226,857
def _create_union_types_specification ( schema_graph , graphql_types , hidden_classes , base_name ) : # When edges point to vertices of type base_name, and base_name is both non-abstract and # has subclasses, we need to represent the edge endpoint type with a union type based on # base_name and its subclasses. This function calculates what types that union should include. def types_spec ( ) : """Return a list of GraphQL types that this class' corresponding union type includes.""" return [ graphql_types [ x ] for x in sorted ( list ( schema_graph . get_subclass_set ( base_name ) ) ) if x not in hidden_classes ] return types_spec
Return a function that gives the types in the union type rooted at base_name .
161
17
226,858
def workaround_lowering_pass ( ir_blocks , query_metadata_table ) : new_ir_blocks = [ ] for block in ir_blocks : if isinstance ( block , Filter ) : new_block = _process_filter_block ( query_metadata_table , block ) else : new_block = block new_ir_blocks . append ( new_block ) return new_ir_blocks
Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary .
87
17
226,859
def _process_filter_block ( query_metadata_table , block ) : # For a given Filter block with BinaryComposition predicate expression X, # let L be the set of all Locations referenced in any TernaryConditional # predicate expression enclosed in X. # For each location l in L, we construct a tautological expression that looks like: # ((l IS NULL) OR (l IS NOT NULL)) # and then join the original BinaryComposition X with all such expressions with ANDs. # We set this new BinaryComposition expression as the predicate of the Filter block. base_predicate = block . predicate # These variables are used by the visitor functions below. ternary_conditionals = [ ] # "problematic_locations" is a list and not a set, # to preserve ordering and generate a deterministic order of added clauses. # We expect the maximum size of this list to be a small constant number, # so the linear "in" operator is really not a concern. problematic_locations = [ ] def find_ternary_conditionals ( expression ) : """Visitor function that extracts all enclosed TernaryConditional expressions.""" if isinstance ( expression , TernaryConditional ) : ternary_conditionals . append ( expression ) return expression def extract_locations_visitor ( expression ) : """Visitor function that extracts all the problematic locations.""" if isinstance ( expression , ( ContextField , ContextFieldExistence ) ) : # We get the location at the vertex, ignoring property fields. # The vertex-level location is sufficient to work around the OrientDB bug, # and we want as few location as possible overall. location_at_vertex = expression . location . at_vertex ( ) if location_at_vertex not in problematic_locations : problematic_locations . append ( location_at_vertex ) return expression # We aren't modifying the base predicate itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = base_predicate . 
visit_and_update ( find_ternary_conditionals ) if return_value is not base_predicate : raise AssertionError ( u'Read-only visitor function "find_ternary_conditionals" ' u'caused state to change: ' u'{} {}' . format ( base_predicate , return_value ) ) for ternary in ternary_conditionals : # We aren't modifying the ternary itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = ternary . visit_and_update ( extract_locations_visitor ) if return_value is not ternary : raise AssertionError ( u'Read-only visitor function "extract_locations_visitor" ' u'caused state to change: ' u'{} {}' . format ( ternary , return_value ) ) tautologies = [ _create_tautological_expression_for_location ( query_metadata_table , location ) for location in problematic_locations ] if not tautologies : return block final_predicate = base_predicate for tautology in tautologies : final_predicate = BinaryComposition ( u'&&' , final_predicate , tautology ) return Filter ( final_predicate )
Rewrite the provided Filter block if necessary .
733
9
226,860
def _create_tautological_expression_for_location ( query_metadata_table , location ) : location_type = query_metadata_table . get_location_info ( location ) . type location_exists = BinaryComposition ( u'!=' , ContextField ( location , location_type ) , NullLiteral ) location_does_not_exist = BinaryComposition ( u'=' , ContextField ( location , location_type ) , NullLiteral ) return BinaryComposition ( u'||' , location_exists , location_does_not_exist )
For a given location create a BinaryComposition that always evaluates to true .
126
15
226,861
def get_only_element_from_collection ( one_element_collection ) : if len ( one_element_collection ) != 1 : raise AssertionError ( u'Expected a collection with exactly one element, but got: {}' . format ( one_element_collection ) ) return funcy . first ( one_element_collection )
Assert that the collection has exactly one element , then return that element .
74
14
226,862
def get_ast_field_name ( ast ) : replacements = { # We always rewrite the following field names into their proper underlying counterparts. TYPENAME_META_FIELD_NAME : '@class' } base_field_name = ast . name . value normalized_name = replacements . get ( base_field_name , base_field_name ) return normalized_name
Return the normalized field name for the given AST node .
81
11
226,863
def get_field_type_from_schema ( schema_type , field_name ) : if field_name == '@class' : return GraphQLString else : if field_name not in schema_type . fields : raise AssertionError ( u'Field {} passed validation but was not present on type ' u'{}' . format ( field_name , schema_type ) ) # Validation guarantees that the field must exist in the schema. return schema_type . fields [ field_name ] . type
Return the type of the field in the given type accounting for field name normalization .
111
17
226,864
def get_vertex_field_type ( current_schema_type , vertex_field_name ) : # According to the schema, the vertex field itself is of type GraphQLList, and this is # what get_field_type_from_schema returns. We care about what the type *inside* the list is, # i.e., the type on the other side of the edge (hence .of_type). # Validation guarantees that the field must exist in the schema. if not is_vertex_field_name ( vertex_field_name ) : raise AssertionError ( u'Trying to load the vertex field type of a non-vertex field: ' u'{} {}' . format ( current_schema_type , vertex_field_name ) ) raw_field_type = get_field_type_from_schema ( current_schema_type , vertex_field_name ) if not isinstance ( strip_non_null_from_type ( raw_field_type ) , GraphQLList ) : raise AssertionError ( u'Found an edge whose schema type was not GraphQLList: ' u'{} {} {}' . format ( current_schema_type , vertex_field_name , raw_field_type ) ) return raw_field_type . of_type
Return the type of the vertex within the specified vertex field name of the given type .
293
17
226,865
def get_edge_direction_and_name ( vertex_field_name ) : edge_direction = None edge_name = None if vertex_field_name . startswith ( OUTBOUND_EDGE_FIELD_PREFIX ) : edge_direction = OUTBOUND_EDGE_DIRECTION edge_name = vertex_field_name [ len ( OUTBOUND_EDGE_FIELD_PREFIX ) : ] elif vertex_field_name . startswith ( INBOUND_EDGE_FIELD_PREFIX ) : edge_direction = INBOUND_EDGE_DIRECTION edge_name = vertex_field_name [ len ( INBOUND_EDGE_FIELD_PREFIX ) : ] else : raise AssertionError ( u'Unreachable condition reached:' , vertex_field_name ) validate_safe_string ( edge_name ) return edge_direction , edge_name
Get the edge direction and name from a non - root vertex field name .
202
15
226,866
def is_vertex_field_type ( graphql_type ) : # This will need to change if we ever support complex embedded types or edge field types. underlying_type = strip_non_null_from_type ( graphql_type ) return isinstance ( underlying_type , ( GraphQLInterfaceType , GraphQLObjectType , GraphQLUnionType ) )
Return True if the argument is a vertex field type and False otherwise .
78
14
226,867
def ensure_unicode_string ( value ) : if not isinstance ( value , six . string_types ) : raise TypeError ( u'Expected string value, got: {}' . format ( value ) ) return six . text_type ( value )
Ensure the value is a string and return it as unicode .
55
14
226,868
def get_uniquely_named_objects_by_name ( object_list ) : if not object_list : return dict ( ) result = dict ( ) for obj in object_list : name = obj . name . value if name in result : raise GraphQLCompilationError ( u'Found duplicate object key: ' u'{} {}' . format ( name , object_list ) ) result [ name ] = obj return result
Return dict of name - > object pairs from a list of objects with unique names .
93
17
226,869
def validate_safe_string ( value ) : # The following strings are explicitly allowed, despite having otherwise-illegal chars. legal_strings_with_special_chars = frozenset ( { '@rid' , '@class' , '@this' , '%' } ) if not isinstance ( value , six . string_types ) : raise TypeError ( u'Expected string value, got: {} {}' . format ( type ( value ) . __name__ , value ) ) if not value : raise GraphQLCompilationError ( u'Empty strings are not allowed!' ) if value [ 0 ] in string . digits : raise GraphQLCompilationError ( u'String values cannot start with a digit: {}' . format ( value ) ) if not set ( value ) . issubset ( VARIABLE_ALLOWED_CHARS ) and value not in legal_strings_with_special_chars : raise GraphQLCompilationError ( u'Encountered illegal characters in string: {}' . format ( value ) )
Ensure the provided string does not have illegal characters .
227
11
226,870
def validate_edge_direction ( edge_direction ) : if not isinstance ( edge_direction , six . string_types ) : raise TypeError ( u'Expected string edge_direction, got: {} {}' . format ( type ( edge_direction ) , edge_direction ) ) if edge_direction not in ALLOWED_EDGE_DIRECTIONS : raise ValueError ( u'Unrecognized edge direction: {}' . format ( edge_direction ) )
Ensure the provided edge direction is either in or out .
100
12
226,871
def validate_marked_location ( location ) : if not isinstance ( location , ( Location , FoldScopeLocation ) ) : raise TypeError ( u'Expected Location or FoldScopeLocation location, got: {} {}' . format ( type ( location ) . __name__ , location ) ) if location . field is not None : raise GraphQLCompilationError ( u'Cannot mark location at a field: {}' . format ( location ) )
Validate that a Location object is safe for marking and not at a field .
95
16
226,872
def invert_dict ( invertible_dict ) : inverted = { } for k , v in six . iteritems ( invertible_dict ) : if not isinstance ( v , Hashable ) : raise TypeError ( u'Expected an invertible dict, but value at key {} has type {}' . format ( k , type ( v ) . __name__ ) ) if v in inverted : raise TypeError ( u'Expected an invertible dict, but keys ' u'{} and {} map to the same value' . format ( inverted [ v ] , k ) ) inverted [ v ] = k return inverted
Invert a dict . A dict is invertible if values are unique and hashable .
136
19
226,873
def read_file ( filename ) : # intentionally *not* adding an encoding option to open # see here: # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 here = os . path . abspath ( os . path . dirname ( __file__ ) ) with codecs . open ( os . path . join ( here , 'graphql_compiler' , filename ) , 'r' ) as f : return f . read ( )
Read package file as text to get name and version
106
10
226,874
def find_version ( ) : version_file = read_file ( '__init__.py' ) version_match = re . search ( r'^__version__ = ["\']([^"\']*)["\']' , version_file , re . M ) if version_match : return version_match . group ( 1 ) raise RuntimeError ( 'Unable to find version string.' )
Only define version in one place
87
6
226,875
def find_name ( ) : name_file = read_file ( '__init__.py' ) name_match = re . search ( r'^__package_name__ = ["\']([^"\']*)["\']' , name_file , re . M ) if name_match : return name_match . group ( 1 ) raise RuntimeError ( 'Unable to find name string.' )
Only define name in one place
89
6
226,876
def workaround_type_coercions_in_recursions ( match_query ) : # This step is required to work around an OrientDB bug that causes queries with both # "while:" and "class:" in the same query location to fail to parse correctly. # # This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129 # # Instead of "class:", we use "INSTANCEOF" in the "where:" clause to get correct behavior. # However, we don't want to switch all coercions to this format, since the "class:" clause # provides valuable info to the MATCH query scheduler about how to schedule efficiently. new_match_traversals = [ ] for current_traversal in match_query . match_traversals : new_traversal = [ ] for match_step in current_traversal : new_match_step = match_step has_coerce_type = match_step . coerce_type_block is not None has_recurse_root = isinstance ( match_step . root_block , Recurse ) if has_coerce_type and has_recurse_root : new_where_block = convert_coerce_type_and_add_to_where_block ( match_step . coerce_type_block , match_step . where_block ) new_match_step = match_step . _replace ( coerce_type_block = None , where_block = new_where_block ) new_traversal . append ( new_match_step ) new_match_traversals . append ( new_traversal ) return match_query . _replace ( match_traversals = new_match_traversals )
Lower CoerceType blocks into Filter blocks within Recurse steps .
387
14
226,877
def main ( ) : query = ' ' . join ( sys . stdin . readlines ( ) ) sys . stdout . write ( pretty_print_graphql ( query ) )
Read a GraphQL query from standard input and output it pretty - printed to standard output .
39
18
226,878
def _safe_gremlin_string ( value ) : if not isinstance ( value , six . string_types ) : if isinstance ( value , bytes ) : # should only happen in py3 value = value . decode ( 'utf-8' ) else : raise GraphQLInvalidArgumentError ( u'Attempting to convert a non-string into a string: ' u'{}' . format ( value ) ) # Using JSON encoding means that all unicode literals and special chars # (e.g. newlines and backslashes) are replaced by appropriate escape sequences. # However, the quoted result is wrapped in double quotes, and $ signs are not escaped, # so that would allow arbitrary code execution in Gremlin. # We will therefore turn the double-quoted string into a single-quoted one to avoid this risk. escaped_and_quoted = json . dumps ( value ) # Double-quoted string literals in Gremlin/Groovy allow # arbitrary code execution via string interpolation and closures. # To avoid this, we perform the following steps: # - we strip the wrapping double quotes; # - we un-escape any double-quotes in the string, by replacing \" with "; # - we escape any single-quotes in the string, by replacing ' with \'; # - finally, we wrap the string in single quotes. # http://www.groovy-lang.org/syntax.html#_double_quoted_string if not escaped_and_quoted [ 0 ] == escaped_and_quoted [ - 1 ] == '"' : raise AssertionError ( u'Unreachable state reached: {} {}' . format ( value , escaped_and_quoted ) ) no_quotes = escaped_and_quoted [ 1 : - 1 ] re_escaped = no_quotes . replace ( '\\"' , '"' ) . replace ( '\'' , '\\\'' ) final_escaped_value = '\'' + re_escaped + '\'' return final_escaped_value
Sanitize and represent a string argument in Gremlin .
443
12
226,879
def _safe_gremlin_list ( inner_type , argument_value ) : if not isinstance ( argument_value , list ) : raise GraphQLInvalidArgumentError ( u'Attempting to represent a non-list as a list: ' u'{}' . format ( argument_value ) ) stripped_type = strip_non_null_from_type ( inner_type ) components = ( _safe_gremlin_argument ( stripped_type , x ) for x in argument_value ) return u'[' + u',' . join ( components ) + u']'
Represent the list of inner_type objects in Gremlin form .
124
13
226,880
def _safe_gremlin_argument ( expected_type , argument_value ) : if GraphQLString . is_same_type ( expected_type ) : return _safe_gremlin_string ( argument_value ) elif GraphQLID . is_same_type ( expected_type ) : # IDs can be strings or numbers, but the GraphQL library coerces them to strings. # We will follow suit and treat them as strings. if not isinstance ( argument_value , six . string_types ) : if isinstance ( argument_value , bytes ) : # should only happen in py3 argument_value = argument_value . decode ( 'utf-8' ) else : argument_value = six . text_type ( argument_value ) return _safe_gremlin_string ( argument_value ) elif GraphQLFloat . is_same_type ( expected_type ) : return represent_float_as_str ( argument_value ) elif GraphQLInt . is_same_type ( expected_type ) : # Special case: in Python, isinstance(True, int) returns True. # Safeguard against this with an explicit check against bool type. if isinstance ( argument_value , bool ) : raise GraphQLInvalidArgumentError ( u'Attempting to represent a non-int as an int: ' u'{}' . format ( argument_value ) ) return type_check_and_str ( int , argument_value ) elif GraphQLBoolean . is_same_type ( expected_type ) : return type_check_and_str ( bool , argument_value ) elif GraphQLDecimal . is_same_type ( expected_type ) : return _safe_gremlin_decimal ( argument_value ) elif GraphQLDate . is_same_type ( expected_type ) : return _safe_gremlin_date_and_datetime ( expected_type , ( datetime . date , ) , argument_value ) elif GraphQLDateTime . is_same_type ( expected_type ) : return _safe_gremlin_date_and_datetime ( expected_type , ( datetime . datetime , arrow . Arrow ) , argument_value ) elif isinstance ( expected_type , GraphQLList ) : return _safe_gremlin_list ( expected_type . of_type , argument_value ) else : raise AssertionError ( u'Could not safely represent the requested GraphQL type: ' u'{} {}' . format ( expected_type , argument_value ) )
Return a Gremlin string representing the given argument value .
552
11
226,881
def insert_arguments_into_gremlin_query ( compilation_result , arguments ) : if compilation_result . language != GREMLIN_LANGUAGE : raise AssertionError ( u'Unexpected query output language: {}' . format ( compilation_result ) ) base_query = compilation_result . query argument_types = compilation_result . input_metadata # The arguments are assumed to have already been validated against the query. sanitized_arguments = { key : _safe_gremlin_argument ( argument_types [ key ] , value ) for key , value in six . iteritems ( arguments ) } return Template ( base_query ) . substitute ( sanitized_arguments )
Insert the arguments into the compiled Gremlin query to form a complete query .
149
15
226,882
def _get_vertex_location_name ( location ) : mark_name , field_name = location . get_location_name ( ) if field_name is not None : raise AssertionError ( u'Location unexpectedly pointed to a field: {}' . format ( location ) ) return mark_name
Get the location name from a location that is expected to point to a vertex .
66
16
226,883
def _first_step_to_match ( match_step ) : parts = [ ] if match_step . root_block is not None : if not isinstance ( match_step . root_block , QueryRoot ) : raise AssertionError ( u'Expected None or QueryRoot root block, received: ' u'{} {}' . format ( match_step . root_block , match_step ) ) match_step . root_block . validate ( ) start_class = get_only_element_from_collection ( match_step . root_block . start_class ) parts . append ( u'class: %s' % ( start_class , ) ) # MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'. if match_step . coerce_type_block is not None : raise AssertionError ( u'Invalid MATCH step: {}' . format ( match_step ) ) if match_step . where_block : match_step . where_block . validate ( ) parts . append ( u'where: (%s)' % ( match_step . where_block . predicate . to_match ( ) , ) ) if match_step . as_block is None : raise AssertionError ( u'Found a MATCH step without a corresponding Location. ' u'This should never happen: {}' . format ( match_step ) ) else : match_step . as_block . validate ( ) parts . append ( u'as: %s' % ( _get_vertex_location_name ( match_step . as_block . location ) , ) ) return u'{{ %s }}' % ( u', ' . join ( parts ) , )
Transform the very first MATCH step into a MATCH query string .
374
14
226,884
def _represent_match_traversal ( match_traversal ) : output = [ ] output . append ( _first_step_to_match ( match_traversal [ 0 ] ) ) for step in match_traversal [ 1 : ] : output . append ( _subsequent_step_to_match ( step ) ) return u'' . join ( output )
Emit MATCH query code for an entire MATCH traversal sequence .
82
15
226,885
def _represent_fold ( fold_location , fold_ir_blocks ) : start_let_template = u'$%(mark_name)s = %(base_location)s' traverse_edge_template = u'.%(direction)s("%(edge_name)s")' base_template = start_let_template + traverse_edge_template edge_direction , edge_name = fold_location . get_first_folded_edge ( ) mark_name , _ = fold_location . get_location_name ( ) base_location_name , _ = fold_location . base_location . get_location_name ( ) validate_safe_string ( mark_name ) validate_safe_string ( base_location_name ) validate_safe_string ( edge_direction ) validate_safe_string ( edge_name ) template_data = { 'mark_name' : mark_name , 'base_location' : base_location_name , 'direction' : edge_direction , 'edge_name' : edge_name , } final_string = base_template % template_data for block in fold_ir_blocks : if isinstance ( block , Filter ) : final_string += u'[' + block . predicate . to_match ( ) + u']' elif isinstance ( block , Traverse ) : template_data = { 'direction' : block . direction , 'edge_name' : block . edge_name , } final_string += traverse_edge_template % template_data elif isinstance ( block , MarkLocation ) : # MarkLocation blocks inside a fold do not result in any MATCH output. pass else : raise AssertionError ( u'Found an unexpected IR block in the folded IR blocks: ' u'{} {} {}' . format ( type ( block ) , block , fold_ir_blocks ) ) # Workaround for OrientDB's inconsistent return type when filtering a list. # https://github.com/orientechnologies/orientdb/issues/7811 final_string += '.asList()' return final_string
Emit a LET clause corresponding to the IR blocks for a @fold scope .
451
12
226,886
def _construct_output_to_match ( output_block ) : output_block . validate ( ) selections = ( u'%s AS `%s`' % ( output_block . fields [ key ] . to_match ( ) , key ) for key in sorted ( output_block . fields . keys ( ) ) # Sort keys for deterministic output order. ) return u'SELECT %s FROM' % ( u', ' . join ( selections ) , )
Transform a ConstructResult block into a MATCH query string .
99
12
226,887
def _construct_where_to_match ( where_block ) : if where_block . predicate == TrueLiteral : raise AssertionError ( u'Received WHERE block with TrueLiteral predicate: {}' . format ( where_block ) ) return u'WHERE ' + where_block . predicate . to_match ( )
Transform a Filter block into a MATCH query string .
73
11
226,888
def emit_code_from_multiple_match_queries ( match_queries ) : optional_variable_base_name = '$optional__' union_variable_name = '$result' query_data = deque ( [ u'SELECT EXPAND(' , union_variable_name , u')' , u' LET ' ] ) optional_variables = [ ] sub_queries = [ emit_code_from_single_match_query ( match_query ) for match_query in match_queries ] for ( i , sub_query ) in enumerate ( sub_queries ) : variable_name = optional_variable_base_name + str ( i ) variable_assignment = variable_name + u' = (' sub_query_end = u'),' query_data . append ( variable_assignment ) query_data . append ( sub_query ) query_data . append ( sub_query_end ) optional_variables . append ( variable_name ) query_data . append ( union_variable_name ) query_data . append ( u' = UNIONALL(' ) query_data . append ( u', ' . join ( optional_variables ) ) query_data . append ( u')' ) return u' ' . join ( query_data )
Return a MATCH query string from a list of MatchQuery namedtuples .
278
16
226,889
def emit_code_from_ir ( compound_match_query , compiler_metadata ) : # If the compound match query contains only one match query, # just call `emit_code_from_single_match_query` # If there are multiple match queries, construct the query string for each # individual query and combine them as follows. # # SELECT EXPAND($result) # LET # $optional__0 = ( # <query_string_0> # ), # $optional__1 = ( # <query_string_1> # ), # $optional__2 = ( # <query_string_2> # ), # # . . . # # $result = UNIONALL($optional__0, $optional__1, . . . ) match_queries = compound_match_query . match_queries if len ( match_queries ) == 1 : query_string = emit_code_from_single_match_query ( match_queries [ 0 ] ) elif len ( match_queries ) > 1 : query_string = emit_code_from_multiple_match_queries ( match_queries ) else : raise AssertionError ( u'Received CompoundMatchQuery with an empty list of MatchQueries: ' u'{}' . format ( match_queries ) ) return query_string
Return a MATCH query string from a CompoundMatchQuery .
288
13
226,890
def _serialize_date ( value ) : if not isinstance ( value , date ) : raise ValueError ( u'The received object was not a date: ' u'{} {}' . format ( type ( value ) , value ) ) return value . isoformat ( )
Serialize a Date object to its proper ISO - 8601 representation .
59
14
226,891
def _serialize_datetime ( value ) : if not isinstance ( value , ( datetime , arrow . Arrow ) ) : raise ValueError ( u'The received object was not a datetime: ' u'{} {}' . format ( type ( value ) , value ) ) return value . isoformat ( )
Serialize a DateTime object to its proper ISO - 8601 representation .
68
15
226,892
def _parse_datetime_value ( value ) : if value . endswith ( 'Z' ) : # Arrow doesn't support the "Z" literal to denote UTC time. # Strip the "Z" and add an explicit time zone instead. value = value [ : - 1 ] + '+00:00' return arrow . get ( value , 'YYYY-MM-DDTHH:mm:ssZ' ) . datetime
Deserialize a DateTime object from its proper ISO - 8601 representation .
94
16
226,893
def insert_meta_fields_into_existing_schema ( graphql_schema ) : root_type_name = graphql_schema . get_query_type ( ) . name for type_name , type_obj in six . iteritems ( graphql_schema . get_type_map ( ) ) : if type_name . startswith ( '__' ) or type_name == root_type_name : # Ignore the types that are built into GraphQL itself, as well as the root query type. continue if not isinstance ( type_obj , ( GraphQLObjectType , GraphQLInterfaceType ) ) : # Ignore definitions that are not interfaces or types. continue for meta_field_name , meta_field in six . iteritems ( EXTENDED_META_FIELD_DEFINITIONS ) : if meta_field_name in type_obj . fields : raise AssertionError ( u'Unexpectedly encountered an existing field named {} while ' u'attempting to add a meta-field of the same name. Make sure ' u'you are not attempting to add meta-fields twice.' . format ( meta_field_name ) ) type_obj . fields [ meta_field_name ] = meta_field
Add compiler - specific meta - fields into all interfaces and types of the specified schema .
269
17
226,894
def validate_context_for_visiting_vertex_field ( parent_location , vertex_field_name , context ) : if is_in_fold_innermost_scope ( context ) : raise GraphQLCompilationError ( u'Traversing inside a @fold block after filtering on {} or outputting fields ' u'is not supported! Parent location: {}, vertex field name: {}' . format ( COUNT_META_FIELD_NAME , parent_location , vertex_field_name ) )
Ensure that the current context allows for visiting a vertex field .
111
13
226,895
def pretty_print_graphql(query, use_four_spaces=True):
    """Take a GraphQL query, pretty print it, and return it.

    Args:
        query: string, the GraphQL query text to format
        use_four_spaces: bool, whether to widen indentation to four spaces

    Returns:
        string, the canonically formatted query
    """
    # The custom visitor fixes directive argument order to produce the
    # canonical representation.
    printed = visit(parse(query), CustomPrintingVisitor())

    if not use_four_spaces:
        return printed

    # Four-space indentation makes the output easier to edit when embedded
    # in Python source files.
    return fix_indentation_depth(printed)
Take a GraphQL query pretty print it and return it .
93
12
226,896
def fix_indentation_depth ( query ) : lines = query . split ( '\n' ) final_lines = [ ] for line in lines : consecutive_spaces = 0 for char in line : if char == ' ' : consecutive_spaces += 1 else : break if consecutive_spaces % 2 != 0 : raise AssertionError ( u'Indentation was not a multiple of two: ' u'{}' . format ( consecutive_spaces ) ) final_lines . append ( ( ' ' * consecutive_spaces ) + line [ consecutive_spaces : ] ) return '\n' . join ( final_lines )
Make indentation use 4 spaces rather than the 2 spaces GraphQL normally uses .
140
16
226,897
def leave_Directive(self, node, *args):
    """Call when exiting a directive node in the ast.

    Returns the printed directive, with its arguments reordered to match the
    argument order declared in the directive's schema definition.
    """
    # Map each printed argument string back to its argument name.
    # Taking [0] is ok here because the GraphQL parser checks for the
    # existence of ':' in directive arguments.
    args_by_name = {arg.split(':', 1)[0]: arg for arg in node.arguments}

    directive = DIRECTIVES_BY_NAME.get(node.name)
    if not directive:
        # Unknown directive: keep the arguments exactly as written.
        final_args = node.arguments
    else:
        # Emit arguments in the order the directive schema declares them.
        schema_ordered = []
        matched_names = set()
        for defined_arg_name in six.iterkeys(directive.args):
            if defined_arg_name in args_by_name:
                # The argument was present in the query; print it in schema order.
                matched_names.add(defined_arg_name)
                schema_ordered.append(args_by_name[defined_arg_name])

        # Arguments absent from the directive schema are printed after all
        # the arguments that were in the schema.
        extras = [
            printed
            for name, printed in six.iteritems(args_by_name)
            if name not in matched_names
        ]
        final_args = schema_ordered + extras

    return '@' + node.name + wrap('(', join(final_args, ', '), ')')
Call when exiting a directive node in the ast .
307
10
226,898
def toposort_classes(classes):
    """Sort class metadatas so that a superclass is always before the subclass.

    Args:
        classes: list of dicts, each a class descriptor with a 'name' key

    Returns:
        list of dicts, the same descriptors in topological dependency order

    Raises:
        AssertionError: if a dependency cycle is encountered
    """
    def _topolist(class_name, by_name, emitted, on_path):
        """Return this class's dependencies in topological order, then the class itself.

        Args:
            class_name: string, name of the class to process
            by_name: dict, class_name -> descriptor
            emitted: set of strings, classes already placed in the output
            on_path: set of strings, classes on the current recursion path

        Returns:
            list of dicts, list of classes sorted in topological order
        """
        # A class already in the output needs no further work.
        if class_name in emitted:
            return []
        # Seeing a class twice on the same recursion path means a cycle.
        if class_name in on_path:
            raise AssertionError('Encountered self-reference in dependency chain of {}'
                                 .format(class_name))

        cls = by_name[class_name]
        # Dependencies are the superclasses plus any classes reachable
        # through linked properties.
        dependencies = _list_superclasses(cls)
        for prop in cls.get('properties', []):
            if 'linkedClass' in prop:
                dependencies.append(prop['linkedClass'])

        ordered = []
        on_path.add(class_name)
        for dependency in dependencies:
            ordered.extend(_topolist(dependency, by_name, emitted, on_path))
        on_path.remove(class_name)

        # Place the class itself after all of its dependencies.
        ordered.append(by_name[class_name])
        emitted.add(class_name)
        return ordered

    by_name = {cls['name']: cls for cls in classes}
    emitted = set()
    toposorted = []
    for name in by_name.keys():
        toposorted.extend(_topolist(name, by_name, emitted, set()))
    return toposorted
Sort class metadatas so that a superclass is always before the subclass
462
16
226,899
def _list_superclasses ( class_def ) : superclasses = class_def . get ( 'superClasses' , [ ] ) if superclasses : # Make sure to duplicate the list return list ( superclasses ) sup = class_def . get ( 'superClass' , None ) if sup : return [ sup ] else : return [ ]
Return a list of the superclasses of the given class
74
11