Dataset columns (min/max shown where reported):

    id                int32       0 to 252k
    repo              string      lengths 7 to 55
    path              string      lengths 4 to 127
    func_name         string      lengths 1 to 88
    original_string   string      lengths 75 to 19.8k
    language          string      1 distinct value (python)
    code              string      lengths 51 to 19.8k
    code_tokens       sequence
    docstring         string      lengths 3 to 17.3k
    docstring_tokens  sequence
    sha               string      lengths 40 to 40
    url               string      lengths 87 to 242
246,700
alejandroautalan/pygubu
pygubudesigner/previewer.py
PreviewHelper._get_slot
def _get_slot(self):
    "Returns the next coordinates for a preview"
    x = y = 10
    for k, p in self.previews.items():
        y += p.height() + self.padding
    return x, y
python
def _get_slot(self):
    "Returns the next coordinates for a preview"
    x = y = 10
    for k, p in self.previews.items():
        y += p.height() + self.padding
    return x, y
[ "def", "_get_slot", "(", "self", ")", ":", "x", "=", "y", "=", "10", "for", "k", ",", "p", "in", "self", ".", "previews", ".", "items", "(", ")", ":", "y", "+=", "p", ".", "height", "(", ")", "+", "self", ".", "padding", "return", "x", ",", "y" ]
Returns the next coordinates for a preview
[ "Returns", "the", "next", "coordinates", "for", "a", "preview" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/previewer.py#L511-L517
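The slot computation above is plain vertical stacking: x stays fixed at 10 while y grows by each existing preview's height plus the configured padding. A minimal standalone sketch of the same arithmetic, with a hypothetical stand-in class (FakePreview, next_slot and the padding default are illustrations, not pygubu API):

    class FakePreview:
        """Stand-in for a preview pane; only height() matters for slot math."""
        def __init__(self, h):
            self._h = h

        def height(self):
            return self._h

    def next_slot(previews, padding=10):
        # Same stacking rule as PreviewHelper._get_slot: start at (10, 10),
        # then push y down by each preview's height plus padding.
        x = y = 10
        for p in previews.values():
            y += p.height() + padding
        return x, y

    print(next_slot({}))                                   # (10, 10)
    print(next_slot({'a': FakePreview(100)}))              # (10, 120)
    print(next_slot({'a': FakePreview(100),
                     'b': FakePreview(50)}))               # (10, 180)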
246,701
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage.clear_cache
def clear_cache(cls):
    """Call this before closing tk root"""
    #Prevent tkinter errors on python 2 ??
    for key in cls._cached:
        cls._cached[key] = None
    cls._cached = {}
python
def clear_cache(cls):
    #Prevent tkinter errors on python 2 ??
    for key in cls._cached:
        cls._cached[key] = None
    cls._cached = {}
[ "def", "clear_cache", "(", "cls", ")", ":", "#Prevent tkinter errors on python 2 ??", "for", "key", "in", "cls", ".", "_cached", ":", "cls", ".", "_cached", "[", "key", "]", "=", "None", "cls", ".", "_cached", "=", "{", "}" ]
Call this before closing tk root
[ "Call", "this", "before", "closing", "tk", "root" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L54-L59
246,702
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage.register
def register(cls, key, filename):
    """Register a image file using key"""
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'custom', 'filename': filename}
    logger.info('%s registered as %s' % (filename, key))
python
def register(cls, key, filename):
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'custom', 'filename': filename}
    logger.info('%s registered as %s' % (filename, key))
[ "def", "register", "(", "cls", ",", "key", ",", "filename", ")", ":", "if", "key", "in", "cls", ".", "_stock", ":", "logger", ".", "info", "(", "'Warning, replacing resource '", "+", "str", "(", "key", ")", ")", "cls", ".", "_stock", "[", "key", "]", "=", "{", "'type'", ":", "'custom'", ",", "'filename'", ":", "filename", "}", "logger", ".", "info", "(", "'%s registered as %s'", "%", "(", "filename", ",", "key", ")", ")" ]
Register a image file using key
[ "Register", "a", "image", "file", "using", "key" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L62-L68
246,703
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage.register_from_data
def register_from_data(cls, key, format, data):
    """Register a image data using key"""
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'data', 'data': data, 'format': format}
    logger.info('%s registered as %s' % ('data', key))
python
def register_from_data(cls, key, format, data):
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'data', 'data': data, 'format': format}
    logger.info('%s registered as %s' % ('data', key))
[ "def", "register_from_data", "(", "cls", ",", "key", ",", "format", ",", "data", ")", ":", "if", "key", "in", "cls", ".", "_stock", ":", "logger", ".", "info", "(", "'Warning, replacing resource '", "+", "str", "(", "key", ")", ")", "cls", ".", "_stock", "[", "key", "]", "=", "{", "'type'", ":", "'data'", ",", "'data'", ":", "data", ",", "'format'", ":", "format", "}", "logger", ".", "info", "(", "'%s registered as %s'", "%", "(", "'data'", ",", "key", ")", ")" ]
Register a image data using key
[ "Register", "a", "image", "data", "using", "key" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L71-L77
246,704
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage.register_created
def register_created(cls, key, image):
    """Register an already created image using key"""
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'created', 'image': image}
    logger.info('%s registered as %s' % ('data', key))
python
def register_created(cls, key, image):
    if key in cls._stock:
        logger.info('Warning, replacing resource ' + str(key))
    cls._stock[key] = {'type': 'created', 'image': image}
    logger.info('%s registered as %s' % ('data', key))
[ "def", "register_created", "(", "cls", ",", "key", ",", "image", ")", ":", "if", "key", "in", "cls", ".", "_stock", ":", "logger", ".", "info", "(", "'Warning, replacing resource '", "+", "str", "(", "key", ")", ")", "cls", ".", "_stock", "[", "key", "]", "=", "{", "'type'", ":", "'created'", ",", "'image'", ":", "image", "}", "logger", ".", "info", "(", "'%s registered as %s'", "%", "(", "'data'", ",", "key", ")", ")" ]
Register an already created image using key
[ "Register", "an", "already", "created", "image", "using", "key" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L80-L86
246,705
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage._load_image
def _load_image(cls, rkey):
    """Load image from file or return the cached instance."""
    v = cls._stock[rkey]
    img = None
    itype = v['type']
    if itype in ('stock', 'data'):
        img = tk.PhotoImage(format=v['format'], data=v['data'])
    elif itype == 'created':
        img = v['image']
    else:
        img = tk.PhotoImage(file=v['filename'])
    cls._cached[rkey] = img
    logger.info('Loaded resource %s.' % rkey)
    return img
python
def _load_image(cls, rkey):
    v = cls._stock[rkey]
    img = None
    itype = v['type']
    if itype in ('stock', 'data'):
        img = tk.PhotoImage(format=v['format'], data=v['data'])
    elif itype == 'created':
        img = v['image']
    else:
        img = tk.PhotoImage(file=v['filename'])
    cls._cached[rkey] = img
    logger.info('Loaded resource %s.' % rkey)
    return img
[ "def", "_load_image", "(", "cls", ",", "rkey", ")", ":", "v", "=", "cls", ".", "_stock", "[", "rkey", "]", "img", "=", "None", "itype", "=", "v", "[", "'type'", "]", "if", "itype", "in", "(", "'stock'", ",", "'data'", ")", ":", "img", "=", "tk", ".", "PhotoImage", "(", "format", "=", "v", "[", "'format'", "]", ",", "data", "=", "v", "[", "'data'", "]", ")", "elif", "itype", "==", "'created'", ":", "img", "=", "v", "[", "'image'", "]", "else", ":", "img", "=", "tk", ".", "PhotoImage", "(", "file", "=", "v", "[", "'filename'", "]", ")", "cls", ".", "_cached", "[", "rkey", "]", "=", "img", "logger", ".", "info", "(", "'Loaded resource %s.'", "%", "rkey", ")", "return", "img" ]
Load image from file or return the cached instance.
[ "Load", "image", "from", "file", "or", "return", "the", "cached", "instance", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L107-L121
246,706
alejandroautalan/pygubu
pygubu/stockimage.py
StockImage.get
def get(cls, rkey):
    """Get image previously registered with key rkey.

    If key not exist, raise StockImageException
    """
    if rkey in cls._cached:
        logger.info('Resource %s is in cache.' % rkey)
        return cls._cached[rkey]
    if rkey in cls._stock:
        img = cls._load_image(rkey)
        return img
    else:
        raise StockImageException('StockImage: %s not registered.' % rkey)
python
def get(cls, rkey):
    if rkey in cls._cached:
        logger.info('Resource %s is in cache.' % rkey)
        return cls._cached[rkey]
    if rkey in cls._stock:
        img = cls._load_image(rkey)
        return img
    else:
        raise StockImageException('StockImage: %s not registered.' % rkey)
[ "def", "get", "(", "cls", ",", "rkey", ")", ":", "if", "rkey", "in", "cls", ".", "_cached", ":", "logger", ".", "info", "(", "'Resource %s is in cache.'", "%", "rkey", ")", "return", "cls", ".", "_cached", "[", "rkey", "]", "if", "rkey", "in", "cls", ".", "_stock", ":", "img", "=", "cls", ".", "_load_image", "(", "rkey", ")", "return", "img", "else", ":", "raise", "StockImageException", "(", "'StockImage: %s not registered.'", "%", "rkey", ")" ]
Get image previously registered with key rkey. If key not exist, raise StockImageException
[ "Get", "image", "previously", "registered", "with", "key", "rkey", ".", "If", "key", "not", "exist", "raise", "StockImageException" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubu/stockimage.py#L124-L136
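Taken together, the StockImage classmethods in the rows above form a small registry-plus-cache protocol: register/register_from_data/register_created record how to produce an image under a key, get() serves it (loading and caching on first use), and clear_cache() drops the Tk image references before the root is destroyed. A hedged usage sketch under assumed conditions ('app.icon' and 'icon.gif' are made up; a tk.Tk root must exist before get(), since loading creates tk.PhotoImage objects, and a missing file would raise tk.TclError):

    import tkinter as tk
    from pygubu.stockimage import StockImage, StockImageException

    root = tk.Tk()

    StockImage.register('app.icon', 'icon.gif')    # lazy: only the filename is stored
    try:
        icon = StockImage.get('app.icon')          # first get() loads via tk.PhotoImage and caches
        assert icon is StockImage.get('app.icon')  # second get() is served from the cache
    except StockImageException:
        pass  # raised only when the key was never registered

    StockImage.clear_cache()   # per the docstring: call before closing tk root
    root.destroy()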
246,707
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.config_treeview
def config_treeview(self):
    """Sets treeview columns and other params"""
    tree = self.treeview
    tree.bind('<Double-1>', self.on_treeview_double_click)
    tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+')
python
def config_treeview(self):
    tree = self.treeview
    tree.bind('<Double-1>', self.on_treeview_double_click)
    tree.bind('<<TreeviewSelect>>', self.on_treeview_select, add='+')
[ "def", "config_treeview", "(", "self", ")", ":", "tree", "=", "self", ".", "treeview", "tree", ".", "bind", "(", "'<Double-1>'", ",", "self", ".", "on_treeview_double_click", ")", "tree", ".", "bind", "(", "'<<TreeviewSelect>>'", ",", "self", ".", "on_treeview_select", ",", "add", "=", "'+'", ")" ]
Sets treeview columns and other params
[ "Sets", "treeview", "columns", "and", "other", "params" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L79-L83
246,708
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.get_toplevel_parent
def get_toplevel_parent(self, treeitem):
    """Returns the top level parent for treeitem."""
    tv = self.treeview
    toplevel_items = tv.get_children()
    item = treeitem
    while not (item in toplevel_items):
        item = tv.parent(item)
    return item
python
def get_toplevel_parent(self, treeitem):
    tv = self.treeview
    toplevel_items = tv.get_children()
    item = treeitem
    while not (item in toplevel_items):
        item = tv.parent(item)
    return item
[ "def", "get_toplevel_parent", "(", "self", ",", "treeitem", ")", ":", "tv", "=", "self", ".", "treeview", "toplevel_items", "=", "tv", ".", "get_children", "(", ")", "item", "=", "treeitem", "while", "not", "(", "item", "in", "toplevel_items", ")", ":", "item", "=", "tv", ".", "parent", "(", "item", ")", "return", "item" ]
Returns the top level parent for treeitem.
[ "Returns", "the", "top", "level", "parent", "for", "treeitem", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L85-L94
246,709
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.draw_widget
def draw_widget(self, item):
    """Create a preview of the selected treeview item"""
    if item:
        self.filter_remove(remember=True)
        selected_id = self.treedata[item]['id']
        item = self.get_toplevel_parent(item)
        widget_id = self.treedata[item]['id']
        wclass = self.treedata[item]['class']
        xmlnode = self.tree_node_to_xml('', item)
        self.previewer.draw(item, widget_id, xmlnode, wclass)
        self.previewer.show_selected(item, selected_id)
        self.filter_restore()
python
def draw_widget(self, item):
    if item:
        self.filter_remove(remember=True)
        selected_id = self.treedata[item]['id']
        item = self.get_toplevel_parent(item)
        widget_id = self.treedata[item]['id']
        wclass = self.treedata[item]['class']
        xmlnode = self.tree_node_to_xml('', item)
        self.previewer.draw(item, widget_id, xmlnode, wclass)
        self.previewer.show_selected(item, selected_id)
        self.filter_restore()
[ "def", "draw_widget", "(", "self", ",", "item", ")", ":", "if", "item", ":", "self", ".", "filter_remove", "(", "remember", "=", "True", ")", "selected_id", "=", "self", ".", "treedata", "[", "item", "]", "[", "'id'", "]", "item", "=", "self", ".", "get_toplevel_parent", "(", "item", ")", "widget_id", "=", "self", ".", "treedata", "[", "item", "]", "[", "'id'", "]", "wclass", "=", "self", ".", "treedata", "[", "item", "]", "[", "'class'", "]", "xmlnode", "=", "self", ".", "tree_node_to_xml", "(", "''", ",", "item", ")", "self", ".", "previewer", ".", "draw", "(", "item", ",", "widget_id", ",", "xmlnode", ",", "wclass", ")", "self", ".", "previewer", ".", "show_selected", "(", "item", ",", "selected_id", ")", "self", ".", "filter_restore", "(", ")" ]
Create a preview of the selected treeview item
[ "Create", "a", "preview", "of", "the", "selected", "treeview", "item" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L96-L107
246,710
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.on_treeview_delete_selection
def on_treeview_delete_selection(self, event=None):
    """Removes selected items from treeview"""
    tv = self.treeview
    selection = tv.selection()

    # Need to remove filter
    self.filter_remove(remember=True)

    toplevel_items = tv.get_children()
    parents_to_redraw = set()
    for item in selection:
        try:
            parent = ''
            if item not in toplevel_items:
                parent = self.get_toplevel_parent(item)
            else:
                self.previewer.delete(item)
            del self.treedata[item]
            tv.delete(item)
            self.app.set_changed()
            if parent:
                self._update_max_grid_rc(parent)
                parents_to_redraw.add(parent)
            self.widget_editor.hide_all()
        except tk.TclError:
            # Selection of parent and child items ??
            # TODO: notify something here
            pass
    # redraw widgets
    for item in parents_to_redraw:
        self.draw_widget(item)
    # restore filter
    self.filter_restore()
python
def on_treeview_delete_selection(self, event=None):
    tv = self.treeview
    selection = tv.selection()

    # Need to remove filter
    self.filter_remove(remember=True)

    toplevel_items = tv.get_children()
    parents_to_redraw = set()
    for item in selection:
        try:
            parent = ''
            if item not in toplevel_items:
                parent = self.get_toplevel_parent(item)
            else:
                self.previewer.delete(item)
            del self.treedata[item]
            tv.delete(item)
            self.app.set_changed()
            if parent:
                self._update_max_grid_rc(parent)
                parents_to_redraw.add(parent)
            self.widget_editor.hide_all()
        except tk.TclError:
            # Selection of parent and child items ??
            # TODO: notify something here
            pass
    # redraw widgets
    for item in parents_to_redraw:
        self.draw_widget(item)
    # restore filter
    self.filter_restore()
[ "def", "on_treeview_delete_selection", "(", "self", ",", "event", "=", "None", ")", ":", "tv", "=", "self", ".", "treeview", "selection", "=", "tv", ".", "selection", "(", ")", "# Need to remove filter", "self", ".", "filter_remove", "(", "remember", "=", "True", ")", "toplevel_items", "=", "tv", ".", "get_children", "(", ")", "parents_to_redraw", "=", "set", "(", ")", "for", "item", "in", "selection", ":", "try", ":", "parent", "=", "''", "if", "item", "not", "in", "toplevel_items", ":", "parent", "=", "self", ".", "get_toplevel_parent", "(", "item", ")", "else", ":", "self", ".", "previewer", ".", "delete", "(", "item", ")", "del", "self", ".", "treedata", "[", "item", "]", "tv", ".", "delete", "(", "item", ")", "self", ".", "app", ".", "set_changed", "(", ")", "if", "parent", ":", "self", ".", "_update_max_grid_rc", "(", "parent", ")", "parents_to_redraw", ".", "add", "(", "parent", ")", "self", ".", "widget_editor", ".", "hide_all", "(", ")", "except", "tk", ".", "TclError", ":", "# Selection of parent and child items ??", "# TODO: notify something here", "pass", "# redraw widgets", "for", "item", "in", "parents_to_redraw", ":", "self", ".", "draw_widget", "(", "item", ")", "# restore filter", "self", ".", "filter_restore", "(", ")" ]
Removes selected items from treeview
[ "Removes", "selected", "items", "from", "treeview" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L134-L167
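The tk.TclError guard above covers selections that contain both a parent and one of its children: deleting the parent removes the child too, so the later tv.delete(child) call fails. The failure mode in isolation (a minimal sketch, independent of pygubu):

    import tkinter as tk
    from tkinter import ttk

    root = tk.Tk()
    tv = ttk.Treeview(root)
    parent = tv.insert('', 'end', text='parent')
    child = tv.insert(parent, 'end', text='child')

    tv.delete(parent)       # also removes 'child'
    try:
        tv.delete(child)    # the item is already gone
    except tk.TclError as e:
        print('TclError:', e)
    root.destroy()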
246,711
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.tree_to_xml
def tree_to_xml(self):
    """Traverses treeview and generates a ElementTree object"""
    # Need to remove filter or hidden items will not be saved.
    self.filter_remove(remember=True)

    tree = self.treeview
    root = ET.Element('interface')
    items = tree.get_children()
    for item in items:
        node = self.tree_node_to_xml('', item)
        root.append(node)

    # restore filter
    self.filter_restore()

    return ET.ElementTree(root)
python
def tree_to_xml(self):
    # Need to remove filter or hidden items will not be saved.
    self.filter_remove(remember=True)

    tree = self.treeview
    root = ET.Element('interface')
    items = tree.get_children()
    for item in items:
        node = self.tree_node_to_xml('', item)
        root.append(node)

    # restore filter
    self.filter_restore()

    return ET.ElementTree(root)
[ "def", "tree_to_xml", "(", "self", ")", ":", "# Need to remove filter or hidden items will not be saved.", "self", ".", "filter_remove", "(", "remember", "=", "True", ")", "tree", "=", "self", ".", "treeview", "root", "=", "ET", ".", "Element", "(", "'interface'", ")", "items", "=", "tree", ".", "get_children", "(", ")", "for", "item", "in", "items", ":", "node", "=", "self", ".", "tree_node_to_xml", "(", "''", ",", "item", ")", "root", ".", "append", "(", "node", ")", "# restore filter", "self", ".", "filter_restore", "(", ")", "return", "ET", ".", "ElementTree", "(", "root", ")" ]
Traverses treeview and generates a ElementTree object
[ "Traverses", "treeview", "and", "generates", "a", "ElementTree", "object" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L169-L185
246,712
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.tree_node_to_xml
def tree_node_to_xml(self, parent, item):
    """Converts a treeview item and children to xml nodes"""
    tree = self.treeview
    data = self.treedata[item]
    node = data.to_xml_node()
    children = tree.get_children(item)
    for child in children:
        cnode = ET.Element('child')
        cwidget = self.tree_node_to_xml(item, child)
        cnode.append(cwidget)
        node.append(cnode)
    return node
python
def tree_node_to_xml(self, parent, item):
    tree = self.treeview
    data = self.treedata[item]
    node = data.to_xml_node()
    children = tree.get_children(item)
    for child in children:
        cnode = ET.Element('child')
        cwidget = self.tree_node_to_xml(item, child)
        cnode.append(cwidget)
        node.append(cnode)
    return node
[ "def", "tree_node_to_xml", "(", "self", ",", "parent", ",", "item", ")", ":", "tree", "=", "self", ".", "treeview", "data", "=", "self", ".", "treedata", "[", "item", "]", "node", "=", "data", ".", "to_xml_node", "(", ")", "children", "=", "tree", ".", "get_children", "(", "item", ")", "for", "child", "in", "children", ":", "cnode", "=", "ET", ".", "Element", "(", "'child'", ")", "cwidget", "=", "self", ".", "tree_node_to_xml", "(", "item", ",", "child", ")", "cnode", ".", "append", "(", "cwidget", ")", "node", ".", "append", "(", "cnode", ")", "return", "node" ]
Converts a treeview item and children to xml nodes
[ "Converts", "a", "treeview", "item", "and", "children", "to", "xml", "nodes" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L187-L201
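tree_node_to_xml recurses depth-first: each treeview item becomes a widget node (via data.to_xml_node()), and every child widget is wrapped in an intermediate child element before being appended. A sketch of the nesting it produces, using hand-built ElementTree nodes in place of to_xml_node() output (the class/id attributes are illustrative):

    import xml.etree.ElementTree as ET

    # Hand-built stand-ins for what to_xml_node() returns per widget.
    frame = ET.Element('object', {'class': 'ttk.Frame', 'id': 'frame1'})
    button = ET.Element('object', {'class': 'ttk.Button', 'id': 'button1'})

    # Same wrapping rule as tree_node_to_xml: children go inside <child> elements.
    child_wrapper = ET.Element('child')
    child_wrapper.append(button)
    frame.append(child_wrapper)

    print(ET.tostring(frame, encoding='unicode'))
    # <object class="ttk.Frame" id="frame1"><child><object class="ttk.Button" id="button1" /></child></object>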
246,713
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor._insert_item
def _insert_item(self, root, data, from_file=False):
    """Insert a item on the treeview and fills columns from data"""
    tree = self.treeview
    treelabel = data.get_id()
    row = col = ''
    if root != '' and 'layout' in data:
        row = data.get_layout_property('row')
        col = data.get_layout_property('column')

        # fix row position when using copy and paste
        # If collision, increase by 1
        row_count = self.get_max_row(root)
        if not from_file and (row_count > int(row) and int(col) == 0):
            row = str(row_count + 1)
            data.set_layout_property('row', row)

    image = ''
    try:
        image = StockImage.get('16x16-tk.default')
    except StockImageException:
        # TODO: notify something here
        pass

    try:
        image = StockImage.get('16x16-{0}'.format(data.get_class()))
    except StockImageException:
        # TODO: notify something here
        pass

    values = (data.get_class(), row, col)
    item = tree.insert(root, 'end', text=treelabel, values=values, image=image)
    data.attach(self)
    self.treedata[item] = data

    # Update grid r/c data
    self._update_max_grid_rc(root, from_file=True)
    self.app.set_changed()

    return item
python
def _insert_item(self, root, data, from_file=False):
    tree = self.treeview
    treelabel = data.get_id()
    row = col = ''
    if root != '' and 'layout' in data:
        row = data.get_layout_property('row')
        col = data.get_layout_property('column')

        # fix row position when using copy and paste
        # If collision, increase by 1
        row_count = self.get_max_row(root)
        if not from_file and (row_count > int(row) and int(col) == 0):
            row = str(row_count + 1)
            data.set_layout_property('row', row)

    image = ''
    try:
        image = StockImage.get('16x16-tk.default')
    except StockImageException:
        # TODO: notify something here
        pass

    try:
        image = StockImage.get('16x16-{0}'.format(data.get_class()))
    except StockImageException:
        # TODO: notify something here
        pass

    values = (data.get_class(), row, col)
    item = tree.insert(root, 'end', text=treelabel, values=values, image=image)
    data.attach(self)
    self.treedata[item] = data

    # Update grid r/c data
    self._update_max_grid_rc(root, from_file=True)
    self.app.set_changed()

    return item
[ "def", "_insert_item", "(", "self", ",", "root", ",", "data", ",", "from_file", "=", "False", ")", ":", "tree", "=", "self", ".", "treeview", "treelabel", "=", "data", ".", "get_id", "(", ")", "row", "=", "col", "=", "''", "if", "root", "!=", "''", "and", "'layout'", "in", "data", ":", "row", "=", "data", ".", "get_layout_property", "(", "'row'", ")", "col", "=", "data", ".", "get_layout_property", "(", "'column'", ")", "# fix row position when using copy and paste", "# If collision, increase by 1", "row_count", "=", "self", ".", "get_max_row", "(", "root", ")", "if", "not", "from_file", "and", "(", "row_count", ">", "int", "(", "row", ")", "and", "int", "(", "col", ")", "==", "0", ")", ":", "row", "=", "str", "(", "row_count", "+", "1", ")", "data", ".", "set_layout_property", "(", "'row'", ",", "row", ")", "image", "=", "''", "try", ":", "image", "=", "StockImage", ".", "get", "(", "'16x16-tk.default'", ")", "except", "StockImageException", ":", "# TODO: notify something here", "pass", "try", ":", "image", "=", "StockImage", ".", "get", "(", "'16x16-{0}'", ".", "format", "(", "data", ".", "get_class", "(", ")", ")", ")", "except", "StockImageException", ":", "# TODO: notify something here", "pass", "values", "=", "(", "data", ".", "get_class", "(", ")", ",", "row", ",", "col", ")", "item", "=", "tree", ".", "insert", "(", "root", ",", "'end'", ",", "text", "=", "treelabel", ",", "values", "=", "values", ",", "image", "=", "image", ")", "data", ".", "attach", "(", "self", ")", "self", ".", "treedata", "[", "item", "]", "=", "data", "# Update grid r/c data", "self", ".", "_update_max_grid_rc", "(", "root", ",", "from_file", "=", "True", ")", "self", ".", "app", ".", "set_changed", "(", ")", "return", "item" ]
Insert a item on the treeview and fills columns from data
[ "Insert", "a", "item", "on", "the", "treeview", "and", "fills", "columns", "from", "data" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L203-L243
246,714
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.copy_to_clipboard
def copy_to_clipboard(self):
    """
    Copies selected items to clipboard.
    """
    tree = self.treeview
    # get the selected item:
    selection = tree.selection()
    if selection:
        self.filter_remove(remember=True)
        root = ET.Element('selection')
        for item in selection:
            node = self.tree_node_to_xml('', item)
            root.append(node)
        # python2 issue
        try:
            text = ET.tostring(root, encoding='unicode')
        except LookupError:
            text = ET.tostring(root, encoding='UTF-8')
        tree.clipboard_clear()
        tree.clipboard_append(text)
        self.filter_restore()
python
def copy_to_clipboard(self):
    tree = self.treeview
    # get the selected item:
    selection = tree.selection()
    if selection:
        self.filter_remove(remember=True)
        root = ET.Element('selection')
        for item in selection:
            node = self.tree_node_to_xml('', item)
            root.append(node)
        # python2 issue
        try:
            text = ET.tostring(root, encoding='unicode')
        except LookupError:
            text = ET.tostring(root, encoding='UTF-8')
        tree.clipboard_clear()
        tree.clipboard_append(text)
        self.filter_restore()
[ "def", "copy_to_clipboard", "(", "self", ")", ":", "tree", "=", "self", ".", "treeview", "# get the selected item:", "selection", "=", "tree", ".", "selection", "(", ")", "if", "selection", ":", "self", ".", "filter_remove", "(", "remember", "=", "True", ")", "root", "=", "ET", ".", "Element", "(", "'selection'", ")", "for", "item", "in", "selection", ":", "node", "=", "self", ".", "tree_node_to_xml", "(", "''", ",", "item", ")", "root", ".", "append", "(", "node", ")", "# python2 issue", "try", ":", "text", "=", "ET", ".", "tostring", "(", "root", ",", "encoding", "=", "'unicode'", ")", "except", "LookupError", ":", "text", "=", "ET", ".", "tostring", "(", "root", ",", "encoding", "=", "'UTF-8'", ")", "tree", ".", "clipboard_clear", "(", ")", "tree", ".", "clipboard_append", "(", "text", ")", "self", ".", "filter_restore", "(", ")" ]
Copies selected items to clipboard.
[ "Copies", "selected", "items", "to", "clipboard", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L255-L275
246,715
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.add_widget
def add_widget(self, wclass):
    """Adds a new item to the treeview."""
    tree = self.treeview
    # get the selected item:
    selected_item = ''
    tsel = tree.selection()
    if tsel:
        selected_item = tsel[0]

    # Need to remove filter if set
    self.filter_remove()

    root = selected_item
    # check if the widget can be added at selected point
    if not self._validate_add(root, wclass, False):
        # if not try to add at item parent level
        parent = tree.parent(root)
        if parent != root:
            if self._validate_add(parent, wclass):
                root = parent
            else:
                return
        else:
            return

    # root item should be set at this point
    # setup properties
    widget_id = self.get_unique_id(wclass)
    data = WidgetDescr(wclass, widget_id)

    # setup default values for properties
    for pname in builder.CLASS_MAP[wclass].builder.properties:
        pdescription = {}
        if pname in properties.WIDGET_PROPERTIES:
            pdescription = properties.WIDGET_PROPERTIES[pname]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_property(pname, default_value)
        # default text for widgets with text prop:
        if pname in ('text', 'label'):
            data.set_property(pname, widget_id)

    #
    # default grid properties
    #
    # is_container = builder.CLASS_MAP[wclass].builder.container
    for prop_name in properties.GRID_PROPERTIES:
        pdescription = properties.LAYOUT_OPTIONS[prop_name]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_layout_property(prop_name, default_value)

    rownum = '0'
    if root:
        rownum = str(self.get_max_row(root)+1)
    data.set_layout_property('row', rownum)
    data.set_layout_property('column', '0')

    item = self._insert_item(root, data)

    # Do redraw
    self.draw_widget(item)

    # Select and show the item created
    tree.after_idle(lambda: tree.selection_set(item))
    tree.after_idle(lambda: tree.focus(item))
    tree.after_idle(lambda: tree.see(item))
python
def add_widget(self, wclass):
    tree = self.treeview
    # get the selected item:
    selected_item = ''
    tsel = tree.selection()
    if tsel:
        selected_item = tsel[0]

    # Need to remove filter if set
    self.filter_remove()

    root = selected_item
    # check if the widget can be added at selected point
    if not self._validate_add(root, wclass, False):
        # if not try to add at item parent level
        parent = tree.parent(root)
        if parent != root:
            if self._validate_add(parent, wclass):
                root = parent
            else:
                return
        else:
            return

    # root item should be set at this point
    # setup properties
    widget_id = self.get_unique_id(wclass)
    data = WidgetDescr(wclass, widget_id)

    # setup default values for properties
    for pname in builder.CLASS_MAP[wclass].builder.properties:
        pdescription = {}
        if pname in properties.WIDGET_PROPERTIES:
            pdescription = properties.WIDGET_PROPERTIES[pname]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_property(pname, default_value)
        # default text for widgets with text prop:
        if pname in ('text', 'label'):
            data.set_property(pname, widget_id)

    #
    # default grid properties
    #
    # is_container = builder.CLASS_MAP[wclass].builder.container
    for prop_name in properties.GRID_PROPERTIES:
        pdescription = properties.LAYOUT_OPTIONS[prop_name]
        if wclass in pdescription:
            pdescription = dict(pdescription, **pdescription[wclass])
        default_value = str(pdescription.get('default', ''))
        data.set_layout_property(prop_name, default_value)

    rownum = '0'
    if root:
        rownum = str(self.get_max_row(root)+1)
    data.set_layout_property('row', rownum)
    data.set_layout_property('column', '0')

    item = self._insert_item(root, data)

    # Do redraw
    self.draw_widget(item)

    # Select and show the item created
    tree.after_idle(lambda: tree.selection_set(item))
    tree.after_idle(lambda: tree.focus(item))
    tree.after_idle(lambda: tree.see(item))
[ "def", "add_widget", "(", "self", ",", "wclass", ")", ":", "tree", "=", "self", ".", "treeview", "# get the selected item:", "selected_item", "=", "''", "tsel", "=", "tree", ".", "selection", "(", ")", "if", "tsel", ":", "selected_item", "=", "tsel", "[", "0", "]", "# Need to remove filter if set", "self", ".", "filter_remove", "(", ")", "root", "=", "selected_item", "# check if the widget can be added at selected point", "if", "not", "self", ".", "_validate_add", "(", "root", ",", "wclass", ",", "False", ")", ":", "# if not try to add at item parent level", "parent", "=", "tree", ".", "parent", "(", "root", ")", "if", "parent", "!=", "root", ":", "if", "self", ".", "_validate_add", "(", "parent", ",", "wclass", ")", ":", "root", "=", "parent", "else", ":", "return", "else", ":", "return", "# root item should be set at this point", "# setup properties", "widget_id", "=", "self", ".", "get_unique_id", "(", "wclass", ")", "data", "=", "WidgetDescr", "(", "wclass", ",", "widget_id", ")", "# setup default values for properties", "for", "pname", "in", "builder", ".", "CLASS_MAP", "[", "wclass", "]", ".", "builder", ".", "properties", ":", "pdescription", "=", "{", "}", "if", "pname", "in", "properties", ".", "WIDGET_PROPERTIES", ":", "pdescription", "=", "properties", ".", "WIDGET_PROPERTIES", "[", "pname", "]", "if", "wclass", "in", "pdescription", ":", "pdescription", "=", "dict", "(", "pdescription", ",", "*", "*", "pdescription", "[", "wclass", "]", ")", "default_value", "=", "str", "(", "pdescription", ".", "get", "(", "'default'", ",", "''", ")", ")", "data", ".", "set_property", "(", "pname", ",", "default_value", ")", "# default text for widgets with text prop:", "if", "pname", "in", "(", "'text'", ",", "'label'", ")", ":", "data", ".", "set_property", "(", "pname", ",", "widget_id", ")", "#", "# default grid properties", "#", "# is_container = builder.CLASS_MAP[wclass].builder.container", "for", "prop_name", "in", "properties", ".", "GRID_PROPERTIES", ":", "pdescription", "=", "properties", ".", "LAYOUT_OPTIONS", "[", "prop_name", "]", "if", "wclass", "in", "pdescription", ":", "pdescription", "=", "dict", "(", "pdescription", ",", "*", "*", "pdescription", "[", "wclass", "]", ")", "default_value", "=", "str", "(", "pdescription", ".", "get", "(", "'default'", ",", "''", ")", ")", "data", ".", "set_layout_property", "(", "prop_name", ",", "default_value", ")", "rownum", "=", "'0'", "if", "root", ":", "rownum", "=", "str", "(", "self", ".", "get_max_row", "(", "root", ")", "+", "1", ")", "data", ".", "set_layout_property", "(", "'row'", ",", "rownum", ")", "data", ".", "set_layout_property", "(", "'column'", ",", "'0'", ")", "item", "=", "self", ".", "_insert_item", "(", "root", ",", "data", ")", "# Do redraw", "self", ".", "draw_widget", "(", "item", ")", "# Select and show the item created", "tree", ".", "after_idle", "(", "lambda", ":", "tree", ".", "selection_set", "(", "item", ")", ")", "tree", ".", "after_idle", "(", "lambda", ":", "tree", ".", "focus", "(", "item", ")", ")", "tree", ".", "after_idle", "(", "lambda", ":", "tree", ".", "see", "(", "item", ")", ")" ]
Adds a new item to the treeview.
[ "Adds", "a", "new", "item", "to", "the", "treeview", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L422-L492
246,716
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.load_file
def load_file(self, filename):
    """Load file into treeview"""
    self.counter.clear()
    # python2 issues
    try:
        etree = ET.parse(filename)
    except ET.ParseError:
        parser = ET.XMLParser(encoding='UTF-8')
        etree = ET.parse(filename, parser)
    eroot = etree.getroot()

    self.remove_all()
    self.previewer.remove_all()
    self.widget_editor.hide_all()

    self.previewer.resource_paths.append(os.path.dirname(filename))
    for element in eroot:
        self.populate_tree('', eroot, element, from_file=True)

    children = self.treeview.get_children('')
    for child in children:
        self.draw_widget(child)
    self.previewer.show_selected(None, None)
python
def load_file(self, filename):
    self.counter.clear()
    # python2 issues
    try:
        etree = ET.parse(filename)
    except ET.ParseError:
        parser = ET.XMLParser(encoding='UTF-8')
        etree = ET.parse(filename, parser)
    eroot = etree.getroot()

    self.remove_all()
    self.previewer.remove_all()
    self.widget_editor.hide_all()

    self.previewer.resource_paths.append(os.path.dirname(filename))
    for element in eroot:
        self.populate_tree('', eroot, element, from_file=True)

    children = self.treeview.get_children('')
    for child in children:
        self.draw_widget(child)
    self.previewer.show_selected(None, None)
[ "def", "load_file", "(", "self", ",", "filename", ")", ":", "self", ".", "counter", ".", "clear", "(", ")", "# python2 issues", "try", ":", "etree", "=", "ET", ".", "parse", "(", "filename", ")", "except", "ET", ".", "ParseError", ":", "parser", "=", "ET", ".", "XMLParser", "(", "encoding", "=", "'UTF-8'", ")", "etree", "=", "ET", ".", "parse", "(", "filename", ",", "parser", ")", "eroot", "=", "etree", ".", "getroot", "(", ")", "self", ".", "remove_all", "(", ")", "self", ".", "previewer", ".", "remove_all", "(", ")", "self", ".", "widget_editor", ".", "hide_all", "(", ")", "self", ".", "previewer", ".", "resource_paths", ".", "append", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ")", "for", "element", "in", "eroot", ":", "self", ".", "populate_tree", "(", "''", ",", "eroot", ",", "element", ",", "from_file", "=", "True", ")", "children", "=", "self", ".", "treeview", ".", "get_children", "(", "''", ")", "for", "child", "in", "children", ":", "self", ".", "draw_widget", "(", "child", ")", "self", ".", "previewer", ".", "show_selected", "(", "None", ",", "None", ")" ]
Load file into treeview
[ "Load", "file", "into", "treeview" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L501-L523
246,717
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.populate_tree
def populate_tree(self, master, parent, element, from_file=False):
    """Reads xml nodes and populates tree item"""
    data = WidgetDescr(None, None)
    data.from_xml_node(element)
    cname = data.get_class()
    uniqueid = self.get_unique_id(cname, data.get_id())
    data.set_property('id', uniqueid)

    if cname in builder.CLASS_MAP:
        pwidget = self._insert_item(master, data, from_file=from_file)
        xpath = "./child"
        children = element.findall(xpath)
        for child in children:
            child_object = child.find('./object')
            cwidget = self.populate_tree(pwidget, child, child_object, from_file=from_file)
        return pwidget
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
python
def populate_tree(self, master, parent, element, from_file=False):
    data = WidgetDescr(None, None)
    data.from_xml_node(element)
    cname = data.get_class()
    uniqueid = self.get_unique_id(cname, data.get_id())
    data.set_property('id', uniqueid)

    if cname in builder.CLASS_MAP:
        pwidget = self._insert_item(master, data, from_file=from_file)
        xpath = "./child"
        children = element.findall(xpath)
        for child in children:
            child_object = child.find('./object')
            cwidget = self.populate_tree(pwidget, child, child_object, from_file=from_file)
        return pwidget
    else:
        raise Exception('Class "{0}" not mapped'.format(cname))
[ "def", "populate_tree", "(", "self", ",", "master", ",", "parent", ",", "element", ",", "from_file", "=", "False", ")", ":", "data", "=", "WidgetDescr", "(", "None", ",", "None", ")", "data", ".", "from_xml_node", "(", "element", ")", "cname", "=", "data", ".", "get_class", "(", ")", "uniqueid", "=", "self", ".", "get_unique_id", "(", "cname", ",", "data", ".", "get_id", "(", ")", ")", "data", ".", "set_property", "(", "'id'", ",", "uniqueid", ")", "if", "cname", "in", "builder", ".", "CLASS_MAP", ":", "pwidget", "=", "self", ".", "_insert_item", "(", "master", ",", "data", ",", "from_file", "=", "from_file", ")", "xpath", "=", "\"./child\"", "children", "=", "element", ".", "findall", "(", "xpath", ")", "for", "child", "in", "children", ":", "child_object", "=", "child", ".", "find", "(", "'./object'", ")", "cwidget", "=", "self", ".", "populate_tree", "(", "pwidget", ",", "child", ",", "child_object", ",", "from_file", "=", "from_file", ")", "return", "pwidget", "else", ":", "raise", "Exception", "(", "'Class \"{0}\" not mapped'", ".", "format", "(", "cname", ")", ")" ]
Reads xml nodes and populates tree item
[ "Reads", "xml", "nodes", "and", "populates", "tree", "item" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L525-L544
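populate_tree mirrors tree_node_to_xml in reverse: for each ./child under the current object element it descends into the nested ./object and recurses. The XPath pattern on its own, against a minimal hand-written document (the class/id values are illustrative):

    import xml.etree.ElementTree as ET

    doc = ET.fromstring(
        '<object class="ttk.Frame" id="frame1">'
        '  <child><object class="ttk.Button" id="button1"/></child>'
        '</object>'
    )
    for child in doc.findall('./child'):
        nested = child.find('./object')
        print(nested.get('class'), nested.get('id'))  # ttk.Button button1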
246,718
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor.update_event
def update_event(self, hint, obj):
    """Updates tree colums when itemdata is changed."""
    tree = self.treeview
    data = obj
    item = self.get_item_by_data(obj)
    if item:
        if data.get_id() != tree.item(item, 'text'):
            tree.item(item, text=data.get_id())
        # if tree.parent(item) != '' and 'layout' in data:
        if tree.parent(item) != '':
            row = data.get_layout_property('row')
            col = data.get_layout_property('column')
            values = tree.item(item, 'values')
            if (row != values[1] or col != values[2]):
                values = (data.get_class(), row, col)
                tree.item(item, values=values)
        self.draw_widget(item)
        self.app.set_changed()
python
def update_event(self, hint, obj):
    tree = self.treeview
    data = obj
    item = self.get_item_by_data(obj)
    if item:
        if data.get_id() != tree.item(item, 'text'):
            tree.item(item, text=data.get_id())
        # if tree.parent(item) != '' and 'layout' in data:
        if tree.parent(item) != '':
            row = data.get_layout_property('row')
            col = data.get_layout_property('column')
            values = tree.item(item, 'values')
            if (row != values[1] or col != values[2]):
                values = (data.get_class(), row, col)
                tree.item(item, values=values)
        self.draw_widget(item)
        self.app.set_changed()
[ "def", "update_event", "(", "self", ",", "hint", ",", "obj", ")", ":", "tree", "=", "self", ".", "treeview", "data", "=", "obj", "item", "=", "self", ".", "get_item_by_data", "(", "obj", ")", "if", "item", ":", "if", "data", ".", "get_id", "(", ")", "!=", "tree", ".", "item", "(", "item", ",", "'text'", ")", ":", "tree", ".", "item", "(", "item", ",", "text", "=", "data", ".", "get_id", "(", ")", ")", "# if tree.parent(item) != '' and 'layout' in data:", "if", "tree", ".", "parent", "(", "item", ")", "!=", "''", ":", "row", "=", "data", ".", "get_layout_property", "(", "'row'", ")", "col", "=", "data", ".", "get_layout_property", "(", "'column'", ")", "values", "=", "tree", ".", "item", "(", "item", ",", "'values'", ")", "if", "(", "row", "!=", "values", "[", "1", "]", "or", "col", "!=", "values", "[", "2", "]", ")", ":", "values", "=", "(", "data", ".", "get_class", "(", ")", ",", "row", ",", "col", ")", "tree", ".", "item", "(", "item", ",", "values", "=", "values", ")", "self", ".", "draw_widget", "(", "item", ")", "self", ".", "app", ".", "set_changed", "(", ")" ]
Updates tree colums when itemdata is changed.
[ "Updates", "tree", "colums", "when", "itemdata", "is", "changed", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L586-L604
246,719
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor._reatach
def _reatach(self):
    """Reinsert the hidden items."""
    for item, p, idx in self._detached:
        # The item may have been deleted.
        if self.treeview.exists(item) and self.treeview.exists(p):
            self.treeview.move(item, p, idx)
    self._detached = []
python
def _reatach(self):
    for item, p, idx in self._detached:
        # The item may have been deleted.
        if self.treeview.exists(item) and self.treeview.exists(p):
            self.treeview.move(item, p, idx)
    self._detached = []
[ "def", "_reatach", "(", "self", ")", ":", "for", "item", ",", "p", ",", "idx", "in", "self", ".", "_detached", ":", "# The item may have been deleted.", "if", "self", ".", "treeview", ".", "exists", "(", "item", ")", "and", "self", ".", "treeview", ".", "exists", "(", "p", ")", ":", "self", ".", "treeview", ".", "move", "(", "item", ",", "p", ",", "idx", ")", "self", ".", "_detached", "=", "[", "]" ]
Reinsert the hidden items.
[ "Reinsert", "the", "hidden", "items", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L741-L747
246,720
alejandroautalan/pygubu
pygubudesigner/uitreeeditor.py
WidgetsTreeEditor._detach
def _detach(self, item):
    """Hide items from treeview that do not match the search string."""
    to_detach = []
    children_det = []
    children_match = False
    match_found = False

    value = self.filtervar.get()
    txt = self.treeview.item(item, 'text').lower()
    if value in txt:
        match_found = True
    else:
        class_txt = self.treedata[item].get_class().lower()
        if value in class_txt:
            match_found = True

    parent = self.treeview.parent(item)
    idx = self.treeview.index(item)
    children = self.treeview.get_children(item)
    if children:
        for child in children:
            match, detach = self._detach(child)
            children_match = children_match | match
            if detach:
                children_det.extend(detach)

    if match_found:
        if children_det:
            to_detach.extend(children_det)
    else:
        if children_match:
            if children_det:
                to_detach.extend(children_det)
        else:
            to_detach.append((item, parent, idx))
    match_found = match_found | children_match

    return match_found, to_detach
python
def _detach(self, item):
    to_detach = []
    children_det = []
    children_match = False
    match_found = False

    value = self.filtervar.get()
    txt = self.treeview.item(item, 'text').lower()
    if value in txt:
        match_found = True
    else:
        class_txt = self.treedata[item].get_class().lower()
        if value in class_txt:
            match_found = True

    parent = self.treeview.parent(item)
    idx = self.treeview.index(item)
    children = self.treeview.get_children(item)
    if children:
        for child in children:
            match, detach = self._detach(child)
            children_match = children_match | match
            if detach:
                children_det.extend(detach)

    if match_found:
        if children_det:
            to_detach.extend(children_det)
    else:
        if children_match:
            if children_det:
                to_detach.extend(children_det)
        else:
            to_detach.append((item, parent, idx))
    match_found = match_found | children_match

    return match_found, to_detach
[ "def", "_detach", "(", "self", ",", "item", ")", ":", "to_detach", "=", "[", "]", "children_det", "=", "[", "]", "children_match", "=", "False", "match_found", "=", "False", "value", "=", "self", ".", "filtervar", ".", "get", "(", ")", "txt", "=", "self", ".", "treeview", ".", "item", "(", "item", ",", "'text'", ")", ".", "lower", "(", ")", "if", "value", "in", "txt", ":", "match_found", "=", "True", "else", ":", "class_txt", "=", "self", ".", "treedata", "[", "item", "]", ".", "get_class", "(", ")", ".", "lower", "(", ")", "if", "value", "in", "class_txt", ":", "match_found", "=", "True", "parent", "=", "self", ".", "treeview", ".", "parent", "(", "item", ")", "idx", "=", "self", ".", "treeview", ".", "index", "(", "item", ")", "children", "=", "self", ".", "treeview", ".", "get_children", "(", "item", ")", "if", "children", ":", "for", "child", "in", "children", ":", "match", ",", "detach", "=", "self", ".", "_detach", "(", "child", ")", "children_match", "=", "children_match", "|", "match", "if", "detach", ":", "children_det", ".", "extend", "(", "detach", ")", "if", "match_found", ":", "if", "children_det", ":", "to_detach", ".", "extend", "(", "children_det", ")", "else", ":", "if", "children_match", ":", "if", "children_det", ":", "to_detach", ".", "extend", "(", "children_det", ")", "else", ":", "to_detach", ".", "append", "(", "(", "item", ",", "parent", ",", "idx", ")", ")", "match_found", "=", "match_found", "|", "children_match", "return", "match_found", ",", "to_detach" ]
Hide items from treeview that do not match the search string.
[ "Hide", "items", "from", "treeview", "that", "do", "not", "match", "the", "search", "string", "." ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/uitreeeditor.py#L749-L785
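_detach returns (match_found, to_detach) without touching the tree itself: a caller is expected to hide the collected (item, parent, index) tuples and remember them so _reatach can move each one back to its recorded parent and position. A hedged sketch of that calling pattern (filter_apply is a hypothetical driver name; the real caller in pygubu may differ):

    def filter_apply(self):
        # Hypothetical driver for the helpers above: collect non-matching
        # items from every toplevel node, then hide them with detach().
        detached = []
        for item in self.treeview.get_children():
            _match, to_detach = self._detach(item)
            detached.extend(to_detach)
        for item, _parent, _idx in detached:
            self.treeview.detach(item)
        # _reatach() later moves each saved (item, parent, index) tuple back.
        self._detached = detached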
246,721
alejandroautalan/pygubu
pygubudesigner/main.py
PygubuUI.load_file
def load_file(self, filename):
    """Load xml into treeview"""
    self.tree_editor.load_file(filename)
    self.project_name.configure(text=filename)
    self.currentfile = filename
    self.is_changed = False
python
def load_file(self, filename):
    self.tree_editor.load_file(filename)
    self.project_name.configure(text=filename)
    self.currentfile = filename
    self.is_changed = False
[ "def", "load_file", "(", "self", ",", "filename", ")", ":", "self", ".", "tree_editor", ".", "load_file", "(", "filename", ")", "self", ".", "project_name", ".", "configure", "(", "text", "=", "filename", ")", "self", ".", "currentfile", "=", "filename", "self", ".", "is_changed", "=", "False" ]
Load xml into treeview
[ "Load", "xml", "into", "treeview" ]
41c8fb37ef973736ec5d68cbe1cd4ecb78712e40
https://github.com/alejandroautalan/pygubu/blob/41c8fb37ef973736ec5d68cbe1cd4ecb78712e40/pygubudesigner/main.py#L514-L520
246,722
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/__init__.py
lower_ir
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    """Lower the IR into an IR form that can be represented in Gremlin queries.

    Args:
        ir_blocks: list of IR blocks to lower into Gremlin-compatible form
        query_metadata_table: QueryMetadataTable object containing all metadata collected during
                              query processing, including location metadata (e.g. which locations
                              are folded or optional).
        type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                                Used as a workaround for GraphQL's lack of support for
                                inheritance across "types" (i.e. non-interfaces), as well as a
                                workaround for Gremlin's total lack of inheritance-awareness.
                                The key-value pairs in the dict specify that the "key" type
                                is equivalent to the "value" type, i.e. that the GraphQL type or
                                interface in the key is the most-derived common supertype
                                of every GraphQL type in the "value" GraphQL union.
                                Recursive expansion of type equivalence hints is not performed,
                                and only type-level correctness of this argument is enforced.
                                See README.md for more details on everything this parameter does.
                                ***** Be very careful with this option, as bad input here will
                                lead to incorrect output queries being generated. *****

    Returns:
        list of IR blocks suitable for outputting as Gremlin
    """
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)

    ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
    ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)

    if type_equivalence_hints:
        ir_blocks = lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints)

    ir_blocks = lower_coerce_type_blocks(ir_blocks)
    ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
    ir_blocks = merge_consecutive_filter_clauses(ir_blocks)

    ir_blocks = lower_folded_outputs(ir_blocks)

    return ir_blocks
python
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
    sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table)

    ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table)
    ir_blocks = optimize_boolean_expression_comparisons(ir_blocks)

    if type_equivalence_hints:
        ir_blocks = lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints)

    ir_blocks = lower_coerce_type_blocks(ir_blocks)
    ir_blocks = rewrite_filters_in_optional_blocks(ir_blocks)
    ir_blocks = merge_consecutive_filter_clauses(ir_blocks)

    ir_blocks = lower_folded_outputs(ir_blocks)

    return ir_blocks
[ "def", "lower_ir", "(", "ir_blocks", ",", "query_metadata_table", ",", "type_equivalence_hints", "=", "None", ")", ":", "sanity_check_ir_blocks_from_frontend", "(", "ir_blocks", ",", "query_metadata_table", ")", "ir_blocks", "=", "lower_context_field_existence", "(", "ir_blocks", ",", "query_metadata_table", ")", "ir_blocks", "=", "optimize_boolean_expression_comparisons", "(", "ir_blocks", ")", "if", "type_equivalence_hints", ":", "ir_blocks", "=", "lower_coerce_type_block_type_data", "(", "ir_blocks", ",", "type_equivalence_hints", ")", "ir_blocks", "=", "lower_coerce_type_blocks", "(", "ir_blocks", ")", "ir_blocks", "=", "rewrite_filters_in_optional_blocks", "(", "ir_blocks", ")", "ir_blocks", "=", "merge_consecutive_filter_clauses", "(", "ir_blocks", ")", "ir_blocks", "=", "lower_folded_outputs", "(", "ir_blocks", ")", "return", "ir_blocks" ]
Lower the IR into an IR form that can be represented in Gremlin queries.

Args:
    ir_blocks: list of IR blocks to lower into Gremlin-compatible form
    query_metadata_table: QueryMetadataTable object containing all metadata collected during
                          query processing, including location metadata (e.g. which locations
                          are folded or optional).
    type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
                            Used as a workaround for GraphQL's lack of support for
                            inheritance across "types" (i.e. non-interfaces), as well as a
                            workaround for Gremlin's total lack of inheritance-awareness.
                            The key-value pairs in the dict specify that the "key" type
                            is equivalent to the "value" type, i.e. that the GraphQL type or
                            interface in the key is the most-derived common supertype
                            of every GraphQL type in the "value" GraphQL union.
                            Recursive expansion of type equivalence hints is not performed,
                            and only type-level correctness of this argument is enforced.
                            See README.md for more details on everything this parameter does.
                            ***** Be very careful with this option, as bad input here will
                            lead to incorrect output queries being generated. *****

Returns:
    list of IR blocks suitable for outputting as Gremlin
[ "Lower", "the", "IR", "into", "an", "IR", "form", "that", "can", "be", "represented", "in", "Gremlin", "queries", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/__init__.py#L13-L53
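lower_ir is a classic pass pipeline: each lowering step takes the block list and returns a rewritten one, and the order matters (for instance, coercions must already be lowered to Filter blocks before rewrite_filters_in_optional_blocks runs). The same shape in miniature, with made-up passes standing in for the real lowering functions:

    def drop_nones(blocks):
        return [b for b in blocks if b is not None]

    def double_ints(blocks):
        return [b * 2 if isinstance(b, int) else b for b in blocks]

    def run_pipeline(blocks, passes):
        # Apply each lowering pass in order, threading the block list through.
        for lowering_pass in passes:
            blocks = lowering_pass(blocks)
        return blocks

    print(run_pipeline([1, None, 2], [drop_nones, double_ints]))  # [2, 4]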
246,723
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
lower_coerce_type_block_type_data
def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
    """Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion."""
    allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
    allowed_value_type_spec = GraphQLUnionType

    # Validate that the type_equivalence_hints parameter has correct types.
    for key, value in six.iteritems(type_equivalence_hints):
        if (not isinstance(key, allowed_key_type_spec) or
                not isinstance(value, allowed_value_type_spec)):
            msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
                   u'was unexpected, expected a hint in the form '
                   u'GraphQLInterfaceType -> GraphQLUnionType or '
                   u'GraphQLObjectType -> GraphQLUnionType'.format(key.name, str(type(key)),
                                                                   value.name, str(type(value))))
            raise GraphQLCompilationError(msg)

    # CoerceType blocks only know the name of the type to which they coerce,
    # and not its corresponding GraphQL type object. Convert the type equivalence hints into
    # a dict of type name -> set of names of equivalent types, which can be used more readily.
    equivalent_type_names = {
        key.name: {x.name for x in value.types}
        for key, value in six.iteritems(type_equivalence_hints)
    }

    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            target_class = get_only_element_from_collection(block.target_class)
            if target_class in equivalent_type_names:
                new_block = CoerceType(equivalent_type_names[target_class])

        new_ir_blocks.append(new_block)

    return new_ir_blocks
python
def lower_coerce_type_block_type_data(ir_blocks, type_equivalence_hints):
    allowed_key_type_spec = (GraphQLInterfaceType, GraphQLObjectType)
    allowed_value_type_spec = GraphQLUnionType

    # Validate that the type_equivalence_hints parameter has correct types.
    for key, value in six.iteritems(type_equivalence_hints):
        if (not isinstance(key, allowed_key_type_spec) or
                not isinstance(value, allowed_value_type_spec)):
            msg = (u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '
                   u'was unexpected, expected a hint in the form '
                   u'GraphQLInterfaceType -> GraphQLUnionType or '
                   u'GraphQLObjectType -> GraphQLUnionType'.format(key.name, str(type(key)),
                                                                   value.name, str(type(value))))
            raise GraphQLCompilationError(msg)

    # CoerceType blocks only know the name of the type to which they coerce,
    # and not its corresponding GraphQL type object. Convert the type equivalence hints into
    # a dict of type name -> set of names of equivalent types, which can be used more readily.
    equivalent_type_names = {
        key.name: {x.name for x in value.types}
        for key, value in six.iteritems(type_equivalence_hints)
    }

    new_ir_blocks = []
    for block in ir_blocks:
        new_block = block
        if isinstance(block, CoerceType):
            target_class = get_only_element_from_collection(block.target_class)
            if target_class in equivalent_type_names:
                new_block = CoerceType(equivalent_type_names[target_class])

        new_ir_blocks.append(new_block)

    return new_ir_blocks
[ "def", "lower_coerce_type_block_type_data", "(", "ir_blocks", ",", "type_equivalence_hints", ")", ":", "allowed_key_type_spec", "=", "(", "GraphQLInterfaceType", ",", "GraphQLObjectType", ")", "allowed_value_type_spec", "=", "GraphQLUnionType", "# Validate that the type_equivalence_hints parameter has correct types.", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "type_equivalence_hints", ")", ":", "if", "(", "not", "isinstance", "(", "key", ",", "allowed_key_type_spec", ")", "or", "not", "isinstance", "(", "value", ",", "allowed_value_type_spec", ")", ")", ":", "msg", "=", "(", "u'Invalid type equivalence hints received! Hint {} ({}) -> {} ({}) '", "u'was unexpected, expected a hint in the form '", "u'GraphQLInterfaceType -> GraphQLUnionType or '", "u'GraphQLObjectType -> GraphQLUnionType'", ".", "format", "(", "key", ".", "name", ",", "str", "(", "type", "(", "key", ")", ")", ",", "value", ".", "name", ",", "str", "(", "type", "(", "value", ")", ")", ")", ")", "raise", "GraphQLCompilationError", "(", "msg", ")", "# CoerceType blocks only know the name of the type to which they coerce,", "# and not its corresponding GraphQL type object. Convert the type equivalence hints into", "# a dict of type name -> set of names of equivalent types, which can be used more readily.", "equivalent_type_names", "=", "{", "key", ".", "name", ":", "{", "x", ".", "name", "for", "x", "in", "value", ".", "types", "}", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "type_equivalence_hints", ")", "}", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "new_block", "=", "block", "if", "isinstance", "(", "block", ",", "CoerceType", ")", ":", "target_class", "=", "get_only_element_from_collection", "(", "block", ".", "target_class", ")", "if", "target_class", "in", "equivalent_type_names", ":", "new_block", "=", "CoerceType", "(", "equivalent_type_names", "[", "target_class", "]", ")", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
Rewrite CoerceType blocks to explicitly state which types are allowed in the coercion.
[ "Rewrite", "CoerceType", "blocks", "to", "explicitly", "state", "which", "types", "are", "allowed", "in", "the", "coercion", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L31-L65
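As a worked illustration of the hint-normalization step inside lower_coerce_type_block_type_data — the toy type names below are invented, and only the graphql-core library is assumed — the GraphQLObjectType keys and GraphQLUnionType values collapse into a plain name-to-names dict:

from graphql import GraphQLField, GraphQLObjectType, GraphQLString, GraphQLUnionType

# Hypothetical toy types, purely to show how the hints get normalized.
food = GraphQLObjectType('Food', {'name': GraphQLField(GraphQLString)})
species = GraphQLObjectType('Species', {'name': GraphQLField(GraphQLString)})
union = GraphQLUnionType('Union__Food__Species', types=[food, species])

hints = {food: union}
equivalent_type_names = {
    key.name: {x.name for x in value.types}
    for key, value in hints.items()
}
assert equivalent_type_names == {'Food': {'Food', 'Species'}}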
246,724
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
lower_coerce_type_blocks
def lower_coerce_type_blocks(ir_blocks): """Lower CoerceType blocks into Filter blocks with a type-check predicate.""" new_ir_blocks = [] for block in ir_blocks: new_block = block if isinstance(block, CoerceType): predicate = BinaryComposition( u'contains', Literal(list(block.target_class)), LocalField('@class')) new_block = Filter(predicate) new_ir_blocks.append(new_block) return new_ir_blocks
python
def lower_coerce_type_blocks(ir_blocks): new_ir_blocks = [] for block in ir_blocks: new_block = block if isinstance(block, CoerceType): predicate = BinaryComposition( u'contains', Literal(list(block.target_class)), LocalField('@class')) new_block = Filter(predicate) new_ir_blocks.append(new_block) return new_ir_blocks
[ "def", "lower_coerce_type_blocks", "(", "ir_blocks", ")", ":", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "new_block", "=", "block", "if", "isinstance", "(", "block", ",", "CoerceType", ")", ":", "predicate", "=", "BinaryComposition", "(", "u'contains'", ",", "Literal", "(", "list", "(", "block", ".", "target_class", ")", ")", ",", "LocalField", "(", "'@class'", ")", ")", "new_block", "=", "Filter", "(", "predicate", ")", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
Lower CoerceType blocks into Filter blocks with a type-check predicate.
[ "Lower", "CoerceType", "blocks", "into", "Filter", "blocks", "with", "a", "type", "-", "check", "predicate", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L68-L81
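The Filter produced above encodes a membership test of the vertex's '@class' property against the allowed class names. Here is a minimal stand-alone sketch of that predicate in plain Python; the dict-based vertex representation is invented for illustration, mirroring how OrientDB exposes the class name under '@class':

def type_check_predicate(target_classes):
    """Sketch of the lowered filter: keep vertices whose '@class' is allowed."""
    allowed = list(target_classes)  # mirrors Literal(list(block.target_class))
    return lambda vertex: vertex['@class'] in allowed

pred = type_check_predicate({'Animal'})
assert pred({'@class': 'Animal', 'name': 'Hedgehog'})
assert not pred({'@class': 'Event'})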
246,725
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
rewrite_filters_in_optional_blocks
def rewrite_filters_in_optional_blocks(ir_blocks): """In optional contexts, add a check for null that allows non-existent optional data through. Optional traversals in Gremlin represent missing optional data by setting the current vertex to null until the exit from the optional scope. Therefore, filtering and type coercions (which should have been lowered into filters by this point) must check for null before applying their filtering predicates. Since missing optional data isn't filtered, the new filtering predicate should be "(it == null) || existing_predicate". Args: ir_blocks: list of IR blocks to lower into Gremlin-compatible form Returns: new list of IR blocks with this lowering step applied """ new_ir_blocks = [] optional_context_depth = 0 for block in ir_blocks: new_block = block if isinstance(block, CoerceType): raise AssertionError(u'Found a CoerceType block after all such blocks should have been ' u'lowered to Filter blocks: {}'.format(ir_blocks)) elif isinstance(block, Traverse) and block.optional: optional_context_depth += 1 elif isinstance(block, Backtrack) and block.optional: optional_context_depth -= 1 if optional_context_depth < 0: raise AssertionError(u'Reached negative optional context depth for blocks: ' u'{}'.format(ir_blocks)) elif isinstance(block, Filter) and optional_context_depth > 0: null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral) new_block = Filter(BinaryComposition(u'||', null_check, block.predicate)) else: pass new_ir_blocks.append(new_block) return new_ir_blocks
python
def rewrite_filters_in_optional_blocks(ir_blocks): new_ir_blocks = [] optional_context_depth = 0 for block in ir_blocks: new_block = block if isinstance(block, CoerceType): raise AssertionError(u'Found a CoerceType block after all such blocks should have been ' u'lowered to Filter blocks: {}'.format(ir_blocks)) elif isinstance(block, Traverse) and block.optional: optional_context_depth += 1 elif isinstance(block, Backtrack) and block.optional: optional_context_depth -= 1 if optional_context_depth < 0: raise AssertionError(u'Reached negative optional context depth for blocks: ' u'{}'.format(ir_blocks)) elif isinstance(block, Filter) and optional_context_depth > 0: null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral) new_block = Filter(BinaryComposition(u'||', null_check, block.predicate)) else: pass new_ir_blocks.append(new_block) return new_ir_blocks
[ "def", "rewrite_filters_in_optional_blocks", "(", "ir_blocks", ")", ":", "new_ir_blocks", "=", "[", "]", "optional_context_depth", "=", "0", "for", "block", "in", "ir_blocks", ":", "new_block", "=", "block", "if", "isinstance", "(", "block", ",", "CoerceType", ")", ":", "raise", "AssertionError", "(", "u'Found a CoerceType block after all such blocks should have been '", "u'lowered to Filter blocks: {}'", ".", "format", "(", "ir_blocks", ")", ")", "elif", "isinstance", "(", "block", ",", "Traverse", ")", "and", "block", ".", "optional", ":", "optional_context_depth", "+=", "1", "elif", "isinstance", "(", "block", ",", "Backtrack", ")", "and", "block", ".", "optional", ":", "optional_context_depth", "-=", "1", "if", "optional_context_depth", "<", "0", ":", "raise", "AssertionError", "(", "u'Reached negative optional context depth for blocks: '", "u'{}'", ".", "format", "(", "ir_blocks", ")", ")", "elif", "isinstance", "(", "block", ",", "Filter", ")", "and", "optional_context_depth", ">", "0", ":", "null_check", "=", "BinaryComposition", "(", "u'='", ",", "LocalField", "(", "'@this'", ")", ",", "NullLiteral", ")", "new_block", "=", "Filter", "(", "BinaryComposition", "(", "u'||'", ",", "null_check", ",", "block", ".", "predicate", ")", ")", "else", ":", "pass", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
In optional contexts, add a check for null that allows non-existent optional data through. Optional traversals in Gremlin represent missing optional data by setting the current vertex to null until the exit from the optional scope. Therefore, filtering and type coercions (which should have been lowered into filters by this point) must check for null before applying their filtering predicates. Since missing optional data isn't filtered, the new filtering predicate should be "(it == null) || existing_predicate". Args: ir_blocks: list of IR blocks to lower into Gremlin-compatible form Returns: new list of IR blocks with this lowering step applied
[ "In", "optional", "contexts", "add", "a", "check", "for", "null", "that", "allows", "non", "-", "existent", "optional", "data", "through", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L84-L122
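The essential rewrite here is wrapping each in-optional predicate as "(it == null) || existing_predicate". A minimal plain-Python sketch of that guard follows; modeling the Gremlin 'it' vertex as a dict-or-None is an assumption made for illustration:

def with_null_guard(predicate):
    """Sketch of the rewrite: missing optional data (a null vertex) passes through."""
    return lambda it: (it is None) or predicate(it)

is_adult = with_null_guard(lambda it: it['age'] >= 18)
assert is_adult(None)            # absent optional vertex is not filtered out
assert is_adult({'age': 30})
assert not is_adult({'age': 5})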
246,726
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
lower_folded_outputs
def lower_folded_outputs(ir_blocks): """Lower standard folded output fields into GremlinFoldedContextField objects.""" folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) if not remaining_ir_blocks: raise AssertionError(u'Expected at least one non-folded block to remain: {} {} ' u'{}'.format(folds, remaining_ir_blocks, ir_blocks)) output_block = remaining_ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected the last non-folded block to be ConstructResult, ' u'but instead was: {} {} ' u'{}'.format(type(output_block), output_block, ir_blocks)) # Turn folded Filter blocks into GremlinFoldedFilter blocks. converted_folds = { base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks) for base_fold_location, folded_ir_blocks in six.iteritems(folds) } new_output_fields = dict() for output_name, output_expression in six.iteritems(output_block.fields): new_output_expression = output_expression # Turn FoldedContextField expressions into GremlinFoldedContextField ones. if isinstance(output_expression, FoldedContextField): # Get the matching folded IR blocks and put them in the new context field. base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0] folded_ir_blocks = converted_folds[base_fold_location_name] new_output_expression = GremlinFoldedContextField( output_expression.fold_scope_location, folded_ir_blocks, output_expression.field_type) new_output_fields[output_name] = new_output_expression new_ir_blocks = remaining_ir_blocks[:-1] new_ir_blocks.append(ConstructResult(new_output_fields)) return new_ir_blocks
python
def lower_folded_outputs(ir_blocks): folds, remaining_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) if not remaining_ir_blocks: raise AssertionError(u'Expected at least one non-folded block to remain: {} {} ' u'{}'.format(folds, remaining_ir_blocks, ir_blocks)) output_block = remaining_ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected the last non-folded block to be ConstructResult, ' u'but instead was: {} {} ' u'{}'.format(type(output_block), output_block, ir_blocks)) # Turn folded Filter blocks into GremlinFoldedFilter blocks. converted_folds = { base_fold_location.get_location_name()[0]: _convert_folded_blocks(folded_ir_blocks) for base_fold_location, folded_ir_blocks in six.iteritems(folds) } new_output_fields = dict() for output_name, output_expression in six.iteritems(output_block.fields): new_output_expression = output_expression # Turn FoldedContextField expressions into GremlinFoldedContextField ones. if isinstance(output_expression, FoldedContextField): # Get the matching folded IR blocks and put them in the new context field. base_fold_location_name = output_expression.fold_scope_location.get_location_name()[0] folded_ir_blocks = converted_folds[base_fold_location_name] new_output_expression = GremlinFoldedContextField( output_expression.fold_scope_location, folded_ir_blocks, output_expression.field_type) new_output_fields[output_name] = new_output_expression new_ir_blocks = remaining_ir_blocks[:-1] new_ir_blocks.append(ConstructResult(new_output_fields)) return new_ir_blocks
[ "def", "lower_folded_outputs", "(", "ir_blocks", ")", ":", "folds", ",", "remaining_ir_blocks", "=", "extract_folds_from_ir_blocks", "(", "ir_blocks", ")", "if", "not", "remaining_ir_blocks", ":", "raise", "AssertionError", "(", "u'Expected at least one non-folded block to remain: {} {} '", "u'{}'", ".", "format", "(", "folds", ",", "remaining_ir_blocks", ",", "ir_blocks", ")", ")", "output_block", "=", "remaining_ir_blocks", "[", "-", "1", "]", "if", "not", "isinstance", "(", "output_block", ",", "ConstructResult", ")", ":", "raise", "AssertionError", "(", "u'Expected the last non-folded block to be ConstructResult, '", "u'but instead was: {} {} '", "u'{}'", ".", "format", "(", "type", "(", "output_block", ")", ",", "output_block", ",", "ir_blocks", ")", ")", "# Turn folded Filter blocks into GremlinFoldedFilter blocks.", "converted_folds", "=", "{", "base_fold_location", ".", "get_location_name", "(", ")", "[", "0", "]", ":", "_convert_folded_blocks", "(", "folded_ir_blocks", ")", "for", "base_fold_location", ",", "folded_ir_blocks", "in", "six", ".", "iteritems", "(", "folds", ")", "}", "new_output_fields", "=", "dict", "(", ")", "for", "output_name", ",", "output_expression", "in", "six", ".", "iteritems", "(", "output_block", ".", "fields", ")", ":", "new_output_expression", "=", "output_expression", "# Turn FoldedContextField expressions into GremlinFoldedContextField ones.", "if", "isinstance", "(", "output_expression", ",", "FoldedContextField", ")", ":", "# Get the matching folded IR blocks and put them in the new context field.", "base_fold_location_name", "=", "output_expression", ".", "fold_scope_location", ".", "get_location_name", "(", ")", "[", "0", "]", "folded_ir_blocks", "=", "converted_folds", "[", "base_fold_location_name", "]", "new_output_expression", "=", "GremlinFoldedContextField", "(", "output_expression", ".", "fold_scope_location", ",", "folded_ir_blocks", ",", "output_expression", ".", "field_type", ")", "new_output_fields", "[", "output_name", "]", "=", "new_output_expression", "new_ir_blocks", "=", "remaining_ir_blocks", "[", ":", "-", "1", "]", "new_ir_blocks", ".", "append", "(", "ConstructResult", "(", "new_output_fields", ")", ")", "return", "new_ir_blocks" ]
Lower standard folded output fields into GremlinFoldedContextField objects.
[ "Lower", "standard", "folded", "output", "fields", "into", "GremlinFoldedContextField", "objects", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L319-L355
246,727
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
GremlinFoldedContextField.validate
def validate(self): """Validate that the GremlinFoldedContextField is correctly representable.""" if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format( type(self.fold_scope_location), self.fold_scope_location)) allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack) for block in self.folded_ir_blocks: if not isinstance(block, allowed_block_types): raise AssertionError( u'Found invalid block of type {} in folded_ir_blocks: {} ' u'Allowed types are {}.' .format(type(block), self.folded_ir_blocks, allowed_block_types)) if not isinstance(self.field_type, GraphQLList): raise ValueError(u'Invalid value of "field_type", expected a list type but got: ' u'{}'.format(self.field_type)) inner_type = strip_non_null_from_type(self.field_type.of_type) if isinstance(inner_type, GraphQLList): raise GraphQLCompilationError( u'Outputting list-valued fields in a @fold context is currently ' u'not supported: {} {}'.format(self.fold_scope_location, self.field_type.of_type))
python
def validate(self): if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format( type(self.fold_scope_location), self.fold_scope_location)) allowed_block_types = (GremlinFoldedFilter, GremlinFoldedTraverse, Backtrack) for block in self.folded_ir_blocks: if not isinstance(block, allowed_block_types): raise AssertionError( u'Found invalid block of type {} in folded_ir_blocks: {} ' u'Allowed types are {}.' .format(type(block), self.folded_ir_blocks, allowed_block_types)) if not isinstance(self.field_type, GraphQLList): raise ValueError(u'Invalid value of "field_type", expected a list type but got: ' u'{}'.format(self.field_type)) inner_type = strip_non_null_from_type(self.field_type.of_type) if isinstance(inner_type, GraphQLList): raise GraphQLCompilationError( u'Outputting list-valued fields in a @fold context is currently ' u'not supported: {} {}'.format(self.fold_scope_location, self.field_type.of_type))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "fold_scope_location", ",", "FoldScopeLocation", ")", ":", "raise", "TypeError", "(", "u'Expected FoldScopeLocation fold_scope_location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "fold_scope_location", ")", ",", "self", ".", "fold_scope_location", ")", ")", "allowed_block_types", "=", "(", "GremlinFoldedFilter", ",", "GremlinFoldedTraverse", ",", "Backtrack", ")", "for", "block", "in", "self", ".", "folded_ir_blocks", ":", "if", "not", "isinstance", "(", "block", ",", "allowed_block_types", ")", ":", "raise", "AssertionError", "(", "u'Found invalid block of type {} in folded_ir_blocks: {} '", "u'Allowed types are {}.'", ".", "format", "(", "type", "(", "block", ")", ",", "self", ".", "folded_ir_blocks", ",", "allowed_block_types", ")", ")", "if", "not", "isinstance", "(", "self", ".", "field_type", ",", "GraphQLList", ")", ":", "raise", "ValueError", "(", "u'Invalid value of \"field_type\", expected a list type but got: '", "u'{}'", ".", "format", "(", "self", ".", "field_type", ")", ")", "inner_type", "=", "strip_non_null_from_type", "(", "self", ".", "field_type", ".", "of_type", ")", "if", "isinstance", "(", "inner_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Outputting list-valued fields in a @fold context is currently '", "u'not supported: {} {}'", ".", "format", "(", "self", ".", "fold_scope_location", ",", "self", ".", "field_type", ".", "of_type", ")", ")" ]
Validate that the GremlinFoldedContextField is correctly representable.
[ "Validate", "that", "the", "GremlinFoldedContextField", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L137-L159
246,728
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py
GremlinFoldedTraverse.from_traverse
def from_traverse(cls, traverse_block): """Create a GremlinFoldedTraverse block as a copy of the given Traverse block.""" if isinstance(traverse_block, Traverse): return cls(traverse_block.direction, traverse_block.edge_name) else: raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse ' u'with block of type {}'.format(type(traverse_block)))
python
def from_traverse(cls, traverse_block): if isinstance(traverse_block, Traverse): return cls(traverse_block.direction, traverse_block.edge_name) else: raise AssertionError(u'Tried to initialize an instance of GremlinFoldedTraverse ' u'with block of type {}'.format(type(traverse_block)))
[ "def", "from_traverse", "(", "cls", ",", "traverse_block", ")", ":", "if", "isinstance", "(", "traverse_block", ",", "Traverse", ")", ":", "return", "cls", "(", "traverse_block", ".", "direction", ",", "traverse_block", ".", "edge_name", ")", "else", ":", "raise", "AssertionError", "(", "u'Tried to initialize an instance of GremlinFoldedTraverse '", "u'with block of type {}'", ".", "format", "(", "type", "(", "traverse_block", ")", ")", ")" ]
Create a GremlinFoldedTraverse block as a copy of the given Traverse block.
[ "Create", "a", "GremlinFoldedTraverse", "block", "as", "a", "copy", "of", "the", "given", "Traverse", "block", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_gremlin/ir_lowering.py#L258-L264
246,729
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_get_referenced_type_equivalences
def _get_referenced_type_equivalences(graphql_types, type_equivalence_hints): """Filter union types with no edges from the type equivalence hints dict.""" referenced_types = set() for graphql_type in graphql_types.values(): if isinstance(graphql_type, (GraphQLObjectType, GraphQLInterfaceType)): for _, field in graphql_type.fields.items(): if isinstance(field.type, GraphQLList): referenced_types.add(field.type.of_type.name) return { original: union for original, union in type_equivalence_hints.items() if union.name in referenced_types }
python
def _get_referenced_type_equivalences(graphql_types, type_equivalence_hints): referenced_types = set() for graphql_type in graphql_types.values(): if isinstance(graphql_type, (GraphQLObjectType, GraphQLInterfaceType)): for _, field in graphql_type.fields.items(): if isinstance(field.type, GraphQLList): referenced_types.add(field.type.of_type.name) return { original: union for original, union in type_equivalence_hints.items() if union.name in referenced_types }
[ "def", "_get_referenced_type_equivalences", "(", "graphql_types", ",", "type_equivalence_hints", ")", ":", "referenced_types", "=", "set", "(", ")", "for", "graphql_type", "in", "graphql_types", ".", "values", "(", ")", ":", "if", "isinstance", "(", "graphql_type", ",", "(", "GraphQLObjectType", ",", "GraphQLInterfaceType", ")", ")", ":", "for", "_", ",", "field", "in", "graphql_type", ".", "fields", ".", "items", "(", ")", ":", "if", "isinstance", "(", "field", ".", "type", ",", "GraphQLList", ")", ":", "referenced_types", ".", "add", "(", "field", ".", "type", ".", "of_type", ".", "name", ")", "return", "{", "original", ":", "union", "for", "original", ",", "union", "in", "type_equivalence_hints", ".", "items", "(", ")", "if", "union", ".", "name", "in", "referenced_types", "}" ]
Filter union types with no edges from the type equivalence hints dict.
[ "Filter", "union", "types", "with", "no", "edges", "from", "the", "type", "equivalence", "hints", "dict", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L24-L36
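A toy demonstration of the pruning, with invented type names and only graphql-core assumed: a union that never appears as the inner type of a List-typed field is dropped from the hints.

from graphql import (GraphQLField, GraphQLList, GraphQLObjectType,
                     GraphQLString, GraphQLUnionType)

species = GraphQLObjectType('Species', {'name': GraphQLField(GraphQLString)})
used = GraphQLUnionType('Union__Species', types=[species])
unused = GraphQLUnionType('Union__Unused', types=[species])
animal = GraphQLObjectType(
    'Animal', {'out_Animal_OfSpecies': GraphQLField(GraphQLList(used))})

hints = {animal: used, species: unused}
kept = _get_referenced_type_equivalences(
    {'Animal': animal, 'Species': species}, hints)
assert kept == {animal: used}   # 'Union__Unused' backs no edge field, so it is pruned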
246,730
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_get_inherited_field_types
def _get_inherited_field_types(class_to_field_type_overrides, schema_graph): """Return a dictionary describing the field type overrides in subclasses.""" inherited_field_type_overrides = dict() for superclass_name, field_type_overrides in class_to_field_type_overrides.items(): for subclass_name in schema_graph.get_subclass_set(superclass_name): inherited_field_type_overrides.setdefault(subclass_name, dict()) inherited_field_type_overrides[subclass_name].update(field_type_overrides) return inherited_field_type_overrides
python
def _get_inherited_field_types(class_to_field_type_overrides, schema_graph): inherited_field_type_overrides = dict() for superclass_name, field_type_overrides in class_to_field_type_overrides.items(): for subclass_name in schema_graph.get_subclass_set(superclass_name): inherited_field_type_overrides.setdefault(subclass_name, dict()) inherited_field_type_overrides[subclass_name].update(field_type_overrides) return inherited_field_type_overrides
[ "def", "_get_inherited_field_types", "(", "class_to_field_type_overrides", ",", "schema_graph", ")", ":", "inherited_field_type_overrides", "=", "dict", "(", ")", "for", "superclass_name", ",", "field_type_overrides", "in", "class_to_field_type_overrides", ".", "items", "(", ")", ":", "for", "subclass_name", "in", "schema_graph", ".", "get_subclass_set", "(", "superclass_name", ")", ":", "inherited_field_type_overrides", ".", "setdefault", "(", "subclass_name", ",", "dict", "(", ")", ")", "inherited_field_type_overrides", "[", "subclass_name", "]", ".", "update", "(", "field_type_overrides", ")", "return", "inherited_field_type_overrides" ]
Return a dictionary describing the field type overrides in subclasses.
[ "Return", "a", "dictionary", "describing", "the", "field", "type", "overrides", "in", "subclasses", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L39-L46
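A sketch of the inheritance propagation, with a stub in place of the real SchemaGraph — only get_subclass_set is needed here, and the stub class and type names are invented:

from graphql import GraphQLInt

class _StubSchemaGraph(object):
    """Stand-in exposing only the get_subclass_set method used above."""
    def __init__(self, subclass_sets):
        self._subclass_sets = subclass_sets

    def get_subclass_set(self, class_name):
        # Includes the class itself, matching the real SchemaGraph contract.
        return self._subclass_sets[class_name]

graph = _StubSchemaGraph({'Entity': {'Entity', 'Animal', 'Species'}})
overrides = {'Entity': {'limbs': GraphQLInt}}
result = _get_inherited_field_types(overrides, graph)
assert result == {'Entity': {'limbs': GraphQLInt},
                  'Animal': {'limbs': GraphQLInt},
                  'Species': {'limbs': GraphQLInt}}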
246,731
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_validate_overriden_fields_are_not_defined_in_superclasses
def _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides, schema_graph): """Assert that the fields we want to override are not defined in superclasses.""" for class_name, field_type_overrides in six.iteritems(class_to_field_type_overrides): for superclass_name in schema_graph.get_inheritance_set(class_name): if superclass_name != class_name: superclass = schema_graph.get_element_by_class_name(superclass_name) for field_name in field_type_overrides: if field_name in superclass.properties: raise AssertionError( u'Attempting to override field "{}" from class "{}", but the field is ' u'defined in superclass "{}"' .format(field_name, class_name, superclass_name))
python
def _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides, schema_graph): for class_name, field_type_overrides in six.iteritems(class_to_field_type_overrides): for superclass_name in schema_graph.get_inheritance_set(class_name): if superclass_name != class_name: superclass = schema_graph.get_element_by_class_name(superclass_name) for field_name in field_type_overrides: if field_name in superclass.properties: raise AssertionError( u'Attempting to override field "{}" from class "{}", but the field is ' u'defined in superclass "{}"' .format(field_name, class_name, superclass_name))
[ "def", "_validate_overriden_fields_are_not_defined_in_superclasses", "(", "class_to_field_type_overrides", ",", "schema_graph", ")", ":", "for", "class_name", ",", "field_type_overrides", "in", "six", ".", "iteritems", "(", "class_to_field_type_overrides", ")", ":", "for", "superclass_name", "in", "schema_graph", ".", "get_inheritance_set", "(", "class_name", ")", ":", "if", "superclass_name", "!=", "class_name", ":", "superclass", "=", "schema_graph", ".", "get_element_by_class_name", "(", "superclass_name", ")", "for", "field_name", "in", "field_type_overrides", ":", "if", "field_name", "in", "superclass", ".", "properties", ":", "raise", "AssertionError", "(", "u'Attempting to override field \"{}\" from class \"{}\", but the field is '", "u'defined in superclass \"{}\"'", ".", "format", "(", "field_name", ",", "class_name", ",", "superclass_name", ")", ")" ]
Assert that the fields we want to override are not defined in superclasses.
[ "Assert", "that", "the", "fields", "we", "want", "to", "override", "are", "not", "defined", "in", "superclasses", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L49-L61
246,732
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_property_descriptor_to_graphql_type
def _property_descriptor_to_graphql_type(property_obj): """Return the best GraphQL type representation for an OrientDB property descriptor.""" property_type = property_obj.type_id scalar_types = { PROPERTY_TYPE_BOOLEAN_ID: GraphQLBoolean, PROPERTY_TYPE_DATE_ID: GraphQLDate, PROPERTY_TYPE_DATETIME_ID: GraphQLDateTime, PROPERTY_TYPE_DECIMAL_ID: GraphQLDecimal, PROPERTY_TYPE_DOUBLE_ID: GraphQLFloat, PROPERTY_TYPE_FLOAT_ID: GraphQLFloat, PROPERTY_TYPE_INTEGER_ID: GraphQLInt, PROPERTY_TYPE_STRING_ID: GraphQLString, } result = scalar_types.get(property_type, None) if result: return result mapping_types = { PROPERTY_TYPE_EMBEDDED_SET_ID: GraphQLList, PROPERTY_TYPE_EMBEDDED_LIST_ID: GraphQLList, } wrapping_type = mapping_types.get(property_type, None) if wrapping_type: linked_property_obj = property_obj.qualifier # There are properties that are embedded collections of non-primitive types, # for example, ProxyEventSet.scalar_parameters. # The GraphQL compiler does not currently support these. if linked_property_obj in scalar_types: return wrapping_type(scalar_types[linked_property_obj]) # We weren't able to represent this property in GraphQL, so we'll hide it instead. return None
python
def _property_descriptor_to_graphql_type(property_obj): property_type = property_obj.type_id scalar_types = { PROPERTY_TYPE_BOOLEAN_ID: GraphQLBoolean, PROPERTY_TYPE_DATE_ID: GraphQLDate, PROPERTY_TYPE_DATETIME_ID: GraphQLDateTime, PROPERTY_TYPE_DECIMAL_ID: GraphQLDecimal, PROPERTY_TYPE_DOUBLE_ID: GraphQLFloat, PROPERTY_TYPE_FLOAT_ID: GraphQLFloat, PROPERTY_TYPE_INTEGER_ID: GraphQLInt, PROPERTY_TYPE_STRING_ID: GraphQLString, } result = scalar_types.get(property_type, None) if result: return result mapping_types = { PROPERTY_TYPE_EMBEDDED_SET_ID: GraphQLList, PROPERTY_TYPE_EMBEDDED_LIST_ID: GraphQLList, } wrapping_type = mapping_types.get(property_type, None) if wrapping_type: linked_property_obj = property_obj.qualifier # There are properties that are embedded collections of non-primitive types, # for example, ProxyEventSet.scalar_parameters. # The GraphQL compiler does not currently support these. if linked_property_obj in scalar_types: return wrapping_type(scalar_types[linked_property_obj]) # We weren't able to represent this property in GraphQL, so we'll hide it instead. return None
[ "def", "_property_descriptor_to_graphql_type", "(", "property_obj", ")", ":", "property_type", "=", "property_obj", ".", "type_id", "scalar_types", "=", "{", "PROPERTY_TYPE_BOOLEAN_ID", ":", "GraphQLBoolean", ",", "PROPERTY_TYPE_DATE_ID", ":", "GraphQLDate", ",", "PROPERTY_TYPE_DATETIME_ID", ":", "GraphQLDateTime", ",", "PROPERTY_TYPE_DECIMAL_ID", ":", "GraphQLDecimal", ",", "PROPERTY_TYPE_DOUBLE_ID", ":", "GraphQLFloat", ",", "PROPERTY_TYPE_FLOAT_ID", ":", "GraphQLFloat", ",", "PROPERTY_TYPE_INTEGER_ID", ":", "GraphQLInt", ",", "PROPERTY_TYPE_STRING_ID", ":", "GraphQLString", ",", "}", "result", "=", "scalar_types", ".", "get", "(", "property_type", ",", "None", ")", "if", "result", ":", "return", "result", "mapping_types", "=", "{", "PROPERTY_TYPE_EMBEDDED_SET_ID", ":", "GraphQLList", ",", "PROPERTY_TYPE_EMBEDDED_LIST_ID", ":", "GraphQLList", ",", "}", "wrapping_type", "=", "mapping_types", ".", "get", "(", "property_type", ",", "None", ")", "if", "wrapping_type", ":", "linked_property_obj", "=", "property_obj", ".", "qualifier", "# There are properties that are embedded collections of non-primitive types,", "# for example, ProxyEventSet.scalar_parameters.", "# The GraphQL compiler does not currently support these.", "if", "linked_property_obj", "in", "scalar_types", ":", "return", "wrapping_type", "(", "scalar_types", "[", "linked_property_obj", "]", ")", "# We weren't able to represent this property in GraphQL, so we'll hide it instead.", "return", "None" ]
Return the best GraphQL type representation for an OrientDB property descriptor.
[ "Return", "the", "best", "GraphQL", "type", "representation", "for", "an", "OrientDB", "property", "descriptor", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L64-L96
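A sketch of the mapping using a stub property descriptor; the namedtuple stand-in is invented, and the PROPERTY_TYPE_* constants plus the GraphQL scalar singletons are assumed to be in scope as they are in the surrounding module:

from collections import namedtuple

_StubProperty = namedtuple('_StubProperty', ['type_id', 'qualifier'])

string_prop = _StubProperty(PROPERTY_TYPE_STRING_ID, None)
assert _property_descriptor_to_graphql_type(string_prop) is GraphQLString

int_list_prop = _StubProperty(PROPERTY_TYPE_EMBEDDED_LIST_ID, PROPERTY_TYPE_INTEGER_ID)
list_type = _property_descriptor_to_graphql_type(int_list_prop)
assert list_type.of_type is GraphQLInt   # i.e. GraphQLList(GraphQLInt)
# Unrepresentable property types come back as None, which callers treat as "hide".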
246,733
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_get_union_type_name
def _get_union_type_name(type_names_to_union): """Construct a unique union type name based on the type names being unioned.""" if not type_names_to_union: raise AssertionError(u'Expected a non-empty list of type names to union, received: ' u'{}'.format(type_names_to_union)) return u'Union__' + u'__'.join(sorted(type_names_to_union))
python
def _get_union_type_name(type_names_to_union): if not type_names_to_union: raise AssertionError(u'Expected a non-empty list of type names to union, received: ' u'{}'.format(type_names_to_union)) return u'Union__' + u'__'.join(sorted(type_names_to_union))
[ "def", "_get_union_type_name", "(", "type_names_to_union", ")", ":", "if", "not", "type_names_to_union", ":", "raise", "AssertionError", "(", "u'Expected a non-empty list of type names to union, received: '", "u'{}'", ".", "format", "(", "type_names_to_union", ")", ")", "return", "u'Union__'", "+", "u'__'", ".", "join", "(", "sorted", "(", "type_names_to_union", ")", ")" ]
Construct a unique union type name based on the type names being unioned.
[ "Construct", "a", "unique", "union", "type", "name", "based", "on", "the", "type", "names", "being", "unioned", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L99-L104
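Because the input names are sorted before joining, the resulting name is deterministic regardless of the set's iteration order; for example:

name = _get_union_type_name({'Species', 'Food', 'FoodOrSpecies'})
assert name == u'Union__Food__FoodOrSpecies__Species'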
246,734
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_get_fields_for_class
def _get_fields_for_class(schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name): """Return a dict from field name to GraphQL field type, for the specified graph class.""" properties = schema_graph.get_element_by_class_name(cls_name).properties # Add leaf GraphQL fields (class properties). all_properties = { property_name: _property_descriptor_to_graphql_type(property_obj) for property_name, property_obj in six.iteritems(properties) } result = { property_name: graphql_representation for property_name, graphql_representation in six.iteritems(all_properties) if graphql_representation is not None } # Add edge GraphQL fields (edges to other vertex classes). schema_element = schema_graph.get_element_by_class_name(cls_name) outbound_edges = ( ('out_{}'.format(out_edge_name), schema_graph.get_element_by_class_name(out_edge_name).properties[ EDGE_DESTINATION_PROPERTY_NAME].qualifier) for out_edge_name in schema_element.out_connections ) inbound_edges = ( ('in_{}'.format(in_edge_name), schema_graph.get_element_by_class_name(in_edge_name).properties[ EDGE_SOURCE_PROPERTY_NAME].qualifier) for in_edge_name in schema_element.in_connections ) for field_name, to_type_name in chain(outbound_edges, inbound_edges): edge_endpoint_type_name = None subclasses = schema_graph.get_subclass_set(to_type_name) to_type_abstract = schema_graph.get_element_by_class_name(to_type_name).abstract if not to_type_abstract and len(subclasses) > 1: # If the edge endpoint type has no subclasses, it can't be coerced into any other type. # If the edge endpoint type is abstract (an interface type), we can already # coerce it to the proper type with a GraphQL fragment. However, if the endpoint type # is non-abstract and has subclasses, we need to return its subclasses as an union type. # This is because GraphQL fragments cannot be applied on concrete types, and # GraphQL does not support inheritance of concrete types. type_names_to_union = [ subclass for subclass in subclasses if subclass not in hidden_classes ] if type_names_to_union: edge_endpoint_type_name = _get_union_type_name(type_names_to_union) else: if to_type_name not in hidden_classes: edge_endpoint_type_name = to_type_name if edge_endpoint_type_name is not None: # If we decided to not hide this edge due to its endpoint type being non-representable, # represent the edge field as the GraphQL type List(edge_endpoint_type_name). result[field_name] = GraphQLList(graphql_types[edge_endpoint_type_name]) for field_name, field_type in six.iteritems(field_type_overrides): if field_name not in result: raise AssertionError(u'Attempting to override field "{}" from class "{}", but the ' u'class does not contain said field'.format(field_name, cls_name)) else: result[field_name] = field_type return result
python
def _get_fields_for_class(schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name): properties = schema_graph.get_element_by_class_name(cls_name).properties # Add leaf GraphQL fields (class properties). all_properties = { property_name: _property_descriptor_to_graphql_type(property_obj) for property_name, property_obj in six.iteritems(properties) } result = { property_name: graphql_representation for property_name, graphql_representation in six.iteritems(all_properties) if graphql_representation is not None } # Add edge GraphQL fields (edges to other vertex classes). schema_element = schema_graph.get_element_by_class_name(cls_name) outbound_edges = ( ('out_{}'.format(out_edge_name), schema_graph.get_element_by_class_name(out_edge_name).properties[ EDGE_DESTINATION_PROPERTY_NAME].qualifier) for out_edge_name in schema_element.out_connections ) inbound_edges = ( ('in_{}'.format(in_edge_name), schema_graph.get_element_by_class_name(in_edge_name).properties[ EDGE_SOURCE_PROPERTY_NAME].qualifier) for in_edge_name in schema_element.in_connections ) for field_name, to_type_name in chain(outbound_edges, inbound_edges): edge_endpoint_type_name = None subclasses = schema_graph.get_subclass_set(to_type_name) to_type_abstract = schema_graph.get_element_by_class_name(to_type_name).abstract if not to_type_abstract and len(subclasses) > 1: # If the edge endpoint type has no subclasses, it can't be coerced into any other type. # If the edge endpoint type is abstract (an interface type), we can already # coerce it to the proper type with a GraphQL fragment. However, if the endpoint type # is non-abstract and has subclasses, we need to return its subclasses as an union type. # This is because GraphQL fragments cannot be applied on concrete types, and # GraphQL does not support inheritance of concrete types. type_names_to_union = [ subclass for subclass in subclasses if subclass not in hidden_classes ] if type_names_to_union: edge_endpoint_type_name = _get_union_type_name(type_names_to_union) else: if to_type_name not in hidden_classes: edge_endpoint_type_name = to_type_name if edge_endpoint_type_name is not None: # If we decided to not hide this edge due to its endpoint type being non-representable, # represent the edge field as the GraphQL type List(edge_endpoint_type_name). result[field_name] = GraphQLList(graphql_types[edge_endpoint_type_name]) for field_name, field_type in six.iteritems(field_type_overrides): if field_name not in result: raise AssertionError(u'Attempting to override field "{}" from class "{}", but the ' u'class does not contain said field'.format(field_name, cls_name)) else: result[field_name] = field_type return result
[ "def", "_get_fields_for_class", "(", "schema_graph", ",", "graphql_types", ",", "field_type_overrides", ",", "hidden_classes", ",", "cls_name", ")", ":", "properties", "=", "schema_graph", ".", "get_element_by_class_name", "(", "cls_name", ")", ".", "properties", "# Add leaf GraphQL fields (class properties).", "all_properties", "=", "{", "property_name", ":", "_property_descriptor_to_graphql_type", "(", "property_obj", ")", "for", "property_name", ",", "property_obj", "in", "six", ".", "iteritems", "(", "properties", ")", "}", "result", "=", "{", "property_name", ":", "graphql_representation", "for", "property_name", ",", "graphql_representation", "in", "six", ".", "iteritems", "(", "all_properties", ")", "if", "graphql_representation", "is", "not", "None", "}", "# Add edge GraphQL fields (edges to other vertex classes).", "schema_element", "=", "schema_graph", ".", "get_element_by_class_name", "(", "cls_name", ")", "outbound_edges", "=", "(", "(", "'out_{}'", ".", "format", "(", "out_edge_name", ")", ",", "schema_graph", ".", "get_element_by_class_name", "(", "out_edge_name", ")", ".", "properties", "[", "EDGE_DESTINATION_PROPERTY_NAME", "]", ".", "qualifier", ")", "for", "out_edge_name", "in", "schema_element", ".", "out_connections", ")", "inbound_edges", "=", "(", "(", "'in_{}'", ".", "format", "(", "in_edge_name", ")", ",", "schema_graph", ".", "get_element_by_class_name", "(", "in_edge_name", ")", ".", "properties", "[", "EDGE_SOURCE_PROPERTY_NAME", "]", ".", "qualifier", ")", "for", "in_edge_name", "in", "schema_element", ".", "in_connections", ")", "for", "field_name", ",", "to_type_name", "in", "chain", "(", "outbound_edges", ",", "inbound_edges", ")", ":", "edge_endpoint_type_name", "=", "None", "subclasses", "=", "schema_graph", ".", "get_subclass_set", "(", "to_type_name", ")", "to_type_abstract", "=", "schema_graph", ".", "get_element_by_class_name", "(", "to_type_name", ")", ".", "abstract", "if", "not", "to_type_abstract", "and", "len", "(", "subclasses", ")", ">", "1", ":", "# If the edge endpoint type has no subclasses, it can't be coerced into any other type.", "# If the edge endpoint type is abstract (an interface type), we can already", "# coerce it to the proper type with a GraphQL fragment. 
However, if the endpoint type", "# is non-abstract and has subclasses, we need to return its subclasses as an union type.", "# This is because GraphQL fragments cannot be applied on concrete types, and", "# GraphQL does not support inheritance of concrete types.", "type_names_to_union", "=", "[", "subclass", "for", "subclass", "in", "subclasses", "if", "subclass", "not", "in", "hidden_classes", "]", "if", "type_names_to_union", ":", "edge_endpoint_type_name", "=", "_get_union_type_name", "(", "type_names_to_union", ")", "else", ":", "if", "to_type_name", "not", "in", "hidden_classes", ":", "edge_endpoint_type_name", "=", "to_type_name", "if", "edge_endpoint_type_name", "is", "not", "None", ":", "# If we decided to not hide this edge due to its endpoint type being non-representable,", "# represent the edge field as the GraphQL type List(edge_endpoint_type_name).", "result", "[", "field_name", "]", "=", "GraphQLList", "(", "graphql_types", "[", "edge_endpoint_type_name", "]", ")", "for", "field_name", ",", "field_type", "in", "six", ".", "iteritems", "(", "field_type_overrides", ")", ":", "if", "field_name", "not", "in", "result", ":", "raise", "AssertionError", "(", "u'Attempting to override field \"{}\" from class \"{}\", but the '", "u'class does not contain said field'", ".", "format", "(", "field_name", ",", "cls_name", ")", ")", "else", ":", "result", "[", "field_name", "]", "=", "field_type", "return", "result" ]
Return a dict from field name to GraphQL field type, for the specified graph class.
[ "Return", "a", "dict", "from", "field", "name", "to", "GraphQL", "field", "type", "for", "the", "specified", "graph", "class", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L107-L172
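The edge-field naming used above is purely conventional: each edge class contributes an 'out_'-prefixed field on its source type and an 'in_'-prefixed field on its destination type. A one-line illustration with an invented edge class name:

edge_name = 'Animal_ParentOf'   # hypothetical edge class
assert ('out_{}'.format(edge_name), 'in_{}'.format(edge_name)) == \
    ('out_Animal_ParentOf', 'in_Animal_ParentOf')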
246,735
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_create_field_specification
def _create_field_specification(schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name): """Return a function that specifies the fields present on the given type.""" def field_maker_func(): """Create and return the fields for the given GraphQL type.""" result = EXTENDED_META_FIELD_DEFINITIONS.copy() result.update(OrderedDict([ (name, GraphQLField(value)) for name, value in sorted(six.iteritems(_get_fields_for_class( schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name)), key=lambda x: x[0]) ])) return result return field_maker_func
python
def _create_field_specification(schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name): def field_maker_func(): """Create and return the fields for the given GraphQL type.""" result = EXTENDED_META_FIELD_DEFINITIONS.copy() result.update(OrderedDict([ (name, GraphQLField(value)) for name, value in sorted(six.iteritems(_get_fields_for_class( schema_graph, graphql_types, field_type_overrides, hidden_classes, cls_name)), key=lambda x: x[0]) ])) return result return field_maker_func
[ "def", "_create_field_specification", "(", "schema_graph", ",", "graphql_types", ",", "field_type_overrides", ",", "hidden_classes", ",", "cls_name", ")", ":", "def", "field_maker_func", "(", ")", ":", "\"\"\"Create and return the fields for the given GraphQL type.\"\"\"", "result", "=", "EXTENDED_META_FIELD_DEFINITIONS", ".", "copy", "(", ")", "result", ".", "update", "(", "OrderedDict", "(", "[", "(", "name", ",", "GraphQLField", "(", "value", ")", ")", "for", "name", ",", "value", "in", "sorted", "(", "six", ".", "iteritems", "(", "_get_fields_for_class", "(", "schema_graph", ",", "graphql_types", ",", "field_type_overrides", ",", "hidden_classes", ",", "cls_name", ")", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "]", ")", ")", "return", "result", "return", "field_maker_func" ]
Return a function that specifies the fields present on the given type.
[ "Return", "a", "function", "that", "specifies", "the", "fields", "present", "on", "the", "given", "type", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L175-L189
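The factory-function indirection here exists because of Python's late binding: a lambda created inside a loop closes over the loop variable itself, not its value at creation time (strictly speaking lambdas are closures, and late binding is the gotcha the module's comments warn about). A self-contained demonstration:

# Naive in-place lambdas: every closure sees the final value of cls_name.
naive = [lambda: cls_name for cls_name in ['A', 'B']]
assert [f() for f in naive] == ['B', 'B']

def make_getter(cls_name):
    # cls_name is now a local of make_getter, bound once per call --
    # the same trick _create_field_specification relies on.
    return lambda: cls_name

fixed = [make_getter(cls_name) for cls_name in ['A', 'B']]
assert [f() for f in fixed] == ['A', 'B']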
246,736
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_create_interface_specification
def _create_interface_specification(schema_graph, graphql_types, hidden_classes, cls_name): """Return a function that specifies the interfaces implemented by the given type.""" def interface_spec(): """Return a list of GraphQL interface types implemented by the type named 'cls_name'.""" abstract_inheritance_set = ( superclass_name for superclass_name in sorted(list(schema_graph.get_inheritance_set(cls_name))) if (superclass_name not in hidden_classes and schema_graph.get_element_by_class_name(superclass_name).abstract) ) return [ graphql_types[x] for x in abstract_inheritance_set if x not in hidden_classes ] return interface_spec
python
def _create_interface_specification(schema_graph, graphql_types, hidden_classes, cls_name): def interface_spec(): """Return a list of GraphQL interface types implemented by the type named 'cls_name'.""" abstract_inheritance_set = ( superclass_name for superclass_name in sorted(list(schema_graph.get_inheritance_set(cls_name))) if (superclass_name not in hidden_classes and schema_graph.get_element_by_class_name(superclass_name).abstract) ) return [ graphql_types[x] for x in abstract_inheritance_set if x not in hidden_classes ] return interface_spec
[ "def", "_create_interface_specification", "(", "schema_graph", ",", "graphql_types", ",", "hidden_classes", ",", "cls_name", ")", ":", "def", "interface_spec", "(", ")", ":", "\"\"\"Return a list of GraphQL interface types implemented by the type named 'cls_name'.\"\"\"", "abstract_inheritance_set", "=", "(", "superclass_name", "for", "superclass_name", "in", "sorted", "(", "list", "(", "schema_graph", ".", "get_inheritance_set", "(", "cls_name", ")", ")", ")", "if", "(", "superclass_name", "not", "in", "hidden_classes", "and", "schema_graph", ".", "get_element_by_class_name", "(", "superclass_name", ")", ".", "abstract", ")", ")", "return", "[", "graphql_types", "[", "x", "]", "for", "x", "in", "abstract_inheritance_set", "if", "x", "not", "in", "hidden_classes", "]", "return", "interface_spec" ]
Return a function that specifies the interfaces implemented by the given type.
[ "Return", "a", "function", "that", "specifies", "the", "interfaces", "implemented", "by", "the", "given", "type", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L192-L209
246,737
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
_create_union_types_specification
def _create_union_types_specification(schema_graph, graphql_types, hidden_classes, base_name): """Return a function that gives the types in the union type rooted at base_name.""" # When edges point to vertices of type base_name, and base_name is both non-abstract and # has subclasses, we need to represent the edge endpoint type with a union type based on # base_name and its subclasses. This function calculates what types that union should include. def types_spec(): """Return a list of GraphQL types that this class' corresponding union type includes.""" return [ graphql_types[x] for x in sorted(list(schema_graph.get_subclass_set(base_name))) if x not in hidden_classes ] return types_spec
python
def _create_union_types_specification(schema_graph, graphql_types, hidden_classes, base_name): # When edges point to vertices of type base_name, and base_name is both non-abstract and # has subclasses, we need to represent the edge endpoint type with a union type based on # base_name and its subclasses. This function calculates what types that union should include. def types_spec(): """Return a list of GraphQL types that this class' corresponding union type includes.""" return [ graphql_types[x] for x in sorted(list(schema_graph.get_subclass_set(base_name))) if x not in hidden_classes ] return types_spec
[ "def", "_create_union_types_specification", "(", "schema_graph", ",", "graphql_types", ",", "hidden_classes", ",", "base_name", ")", ":", "# When edges point to vertices of type base_name, and base_name is both non-abstract and", "# has subclasses, we need to represent the edge endpoint type with a union type based on", "# base_name and its subclasses. This function calculates what types that union should include.", "def", "types_spec", "(", ")", ":", "\"\"\"Return a list of GraphQL types that this class' corresponding union type includes.\"\"\"", "return", "[", "graphql_types", "[", "x", "]", "for", "x", "in", "sorted", "(", "list", "(", "schema_graph", ".", "get_subclass_set", "(", "base_name", ")", ")", ")", "if", "x", "not", "in", "hidden_classes", "]", "return", "types_spec" ]
Return a function that gives the types in the union type rooted at base_name.
[ "Return", "a", "function", "that", "gives", "the", "types", "in", "the", "union", "type", "rooted", "at", "base_name", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L212-L225
246,738
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/graphql_schema.py
get_graphql_schema_from_schema_graph
def get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides, hidden_classes): """Return a GraphQL schema object corresponding to the schema of the given schema graph. Args: schema_graph: SchemaGraph class_to_field_type_overrides: dict, class name -> {field name -> field type}, (string -> {string -> GraphQLType}). Used to override the type of a field in the class where it's first defined and all the class's subclasses. hidden_classes: set of strings, classes to not include in the GraphQL schema. Returns: tuple of (GraphQL schema object, GraphQL type equivalence hints dict). The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}). """ _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides, schema_graph) # The field types of subclasses must also be overridden. # Remember that the result returned by get_subclass_set(class_name) includes class_name itself. inherited_field_type_overrides = _get_inherited_field_types(class_to_field_type_overrides, schema_graph) # We remove the base vertex class from the schema if it has no properties. # If it has no properties, it's meaningless and makes the schema less syntactically sweet. if not schema_graph.get_element_by_class_name(ORIENTDB_BASE_VERTEX_CLASS_NAME).properties: hidden_classes.add(ORIENTDB_BASE_VERTEX_CLASS_NAME) graphql_types = OrderedDict() type_equivalence_hints = OrderedDict() # For each vertex class, construct its analogous GraphQL type representation. for vertex_cls_name in sorted(schema_graph.vertex_class_names): vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name) if vertex_cls_name in hidden_classes: continue inherited_field_type_overrides.setdefault(vertex_cls_name, dict()) field_type_overrides = inherited_field_type_overrides[vertex_cls_name] # We have to use delayed type binding here, because some of the type references # are circular: if an edge connects vertices of types A and B, then # GraphQL type A has a List[B] field, and type B has a List[A] field. # To avoid the circular dependency, GraphQL allows us to initialize the types # initially without their field information, and fill in their field information # later using a lambda function as the second argument to GraphQLObjectType. # This lambda function will be called on each type after all types are created # in their initial blank state. # # However, 'cls_name' is a variable that would not be correctly bound # if we naively tried to construct a lambda in-place, because Python lambdas # are not closures. Instead, call a function with 'cls_name' as an argument, # and have that function construct and return the required lambda. field_specification_lambda = _create_field_specification( schema_graph, graphql_types, field_type_overrides, hidden_classes, vertex_cls_name) # Abstract classes are interfaces, concrete classes are object types. current_graphql_type = None if vertex_cls.abstract: # "fields" is a kwarg in the interface constructor, even though # it's a positional arg in the object type constructor. current_graphql_type = GraphQLInterfaceType(vertex_cls_name, fields=field_specification_lambda) else: # For similar reasons as the field_specification_lambda, # we need to create an interface specification lambda function that # specifies the interfaces implemented by this type. 
interface_specification_lambda = _create_interface_specification( schema_graph, graphql_types, hidden_classes, vertex_cls_name) # N.B.: Ignore the "is_type_of" argument below, it is simply a circumvention of # a sanity check inside the GraphQL library. The library assumes that we'll use # its execution system, so it complains that we don't provide a means to # differentiate between different implementations of the same interface. # We don't care, because we compile the GraphQL query to a database query. current_graphql_type = GraphQLObjectType(vertex_cls_name, field_specification_lambda, interfaces=interface_specification_lambda, is_type_of=lambda: None) graphql_types[vertex_cls_name] = current_graphql_type # For each vertex class, construct all union types representations. for vertex_cls_name in sorted(schema_graph.vertex_class_names): vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name) if vertex_cls_name in hidden_classes: continue vertex_cls_subclasses = schema_graph.get_subclass_set(vertex_cls_name) if not vertex_cls.abstract and len(vertex_cls_subclasses) > 1: # In addition to creating this class' corresponding GraphQL type, we'll need a # union type to represent it when it appears as the endpoint of an edge. union_type_name = _get_union_type_name(vertex_cls_subclasses) # For similar reasons as the field_specification_lambda, # we need to create a union type specification lambda function that specifies # the types that this union type is composed of. type_specification_lambda = _create_union_types_specification( schema_graph, graphql_types, hidden_classes, vertex_cls_name) union_type = GraphQLUnionType(union_type_name, types=type_specification_lambda) graphql_types[union_type_name] = union_type type_equivalence_hints[graphql_types[vertex_cls_name]] = union_type # Include all abstract non-vertex classes whose only non-abstract subclasses are vertices. for non_graph_cls_name in sorted(schema_graph.non_graph_class_names): if non_graph_cls_name in hidden_classes: continue if not schema_graph.get_element_by_class_name(non_graph_cls_name).abstract: continue cls_subclasses = schema_graph.get_subclass_set(non_graph_cls_name) # No need to add the possible abstract class if it doesn't have subclasses besides itself. if len(cls_subclasses) > 1: all_non_abstract_subclasses_are_vertices = True # Check all non-abstract subclasses are vertices. for subclass_name in cls_subclasses: subclass = schema_graph.get_element_by_class_name(subclass_name) if subclass_name != non_graph_cls_name: if not subclass.abstract and not subclass.is_vertex: all_non_abstract_subclasses_are_vertices = False break if all_non_abstract_subclasses_are_vertices: # Add abstract class as an interface. inherited_field_type_overrides.setdefault(non_graph_cls_name, dict()) field_type_overrides = inherited_field_type_overrides[non_graph_cls_name] field_specification_lambda = _create_field_specification( schema_graph, graphql_types, field_type_overrides, hidden_classes, non_graph_cls_name) graphql_type = GraphQLInterfaceType(non_graph_cls_name, fields=field_specification_lambda) graphql_types[non_graph_cls_name] = graphql_type if not graphql_types: raise EmptySchemaError(u'After evaluating all subclasses of V, we were not able to find ' u'visible schema data to import into the GraphQL schema object') # Create the root query GraphQL type. Consists of all non-union classes, i.e. # all non-abstract classes (as GraphQL types) and all abstract classes (as GraphQL interfaces). 
RootSchemaQuery = GraphQLObjectType('RootSchemaQuery', OrderedDict([ (name, GraphQLField(value)) for name, value in sorted(six.iteritems(graphql_types), key=lambda x: x[0]) if not isinstance(value, GraphQLUnionType) ])) schema = GraphQLSchema(RootSchemaQuery, directives=DIRECTIVES) # Note that the GraphQLSchema reconstructs the set of types in the schema by recursively # searching through the fields of the RootSchemaQuery. Since union types can only appear in the # fields of other types as edges, union types with no in or out edges will not appear in the # schema. Therefore, we remove these unions and their keys from the type equivalence hints. return schema, _get_referenced_type_equivalences(graphql_types, type_equivalence_hints)
python
def get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides, hidden_classes): _validate_overriden_fields_are_not_defined_in_superclasses(class_to_field_type_overrides, schema_graph) # The field types of subclasses must also be overridden. # Remember that the result returned by get_subclass_set(class_name) includes class_name itself. inherited_field_type_overrides = _get_inherited_field_types(class_to_field_type_overrides, schema_graph) # We remove the base vertex class from the schema if it has no properties. # If it has no properties, it's meaningless and makes the schema less syntactically sweet. if not schema_graph.get_element_by_class_name(ORIENTDB_BASE_VERTEX_CLASS_NAME).properties: hidden_classes.add(ORIENTDB_BASE_VERTEX_CLASS_NAME) graphql_types = OrderedDict() type_equivalence_hints = OrderedDict() # For each vertex class, construct its analogous GraphQL type representation. for vertex_cls_name in sorted(schema_graph.vertex_class_names): vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name) if vertex_cls_name in hidden_classes: continue inherited_field_type_overrides.setdefault(vertex_cls_name, dict()) field_type_overrides = inherited_field_type_overrides[vertex_cls_name] # We have to use delayed type binding here, because some of the type references # are circular: if an edge connects vertices of types A and B, then # GraphQL type A has a List[B] field, and type B has a List[A] field. # To avoid the circular dependency, GraphQL allows us to initialize the types # initially without their field information, and fill in their field information # later using a lambda function as the second argument to GraphQLObjectType. # This lambda function will be called on each type after all types are created # in their initial blank state. # # However, 'cls_name' is a variable that would not be correctly bound # if we naively tried to construct a lambda in-place, because Python lambdas # are not closures. Instead, call a function with 'cls_name' as an argument, # and have that function construct and return the required lambda. field_specification_lambda = _create_field_specification( schema_graph, graphql_types, field_type_overrides, hidden_classes, vertex_cls_name) # Abstract classes are interfaces, concrete classes are object types. current_graphql_type = None if vertex_cls.abstract: # "fields" is a kwarg in the interface constructor, even though # it's a positional arg in the object type constructor. current_graphql_type = GraphQLInterfaceType(vertex_cls_name, fields=field_specification_lambda) else: # For similar reasons as the field_specification_lambda, # we need to create an interface specification lambda function that # specifies the interfaces implemented by this type. interface_specification_lambda = _create_interface_specification( schema_graph, graphql_types, hidden_classes, vertex_cls_name) # N.B.: Ignore the "is_type_of" argument below, it is simply a circumvention of # a sanity check inside the GraphQL library. The library assumes that we'll use # its execution system, so it complains that we don't provide a means to # differentiate between different implementations of the same interface. # We don't care, because we compile the GraphQL query to a database query. current_graphql_type = GraphQLObjectType(vertex_cls_name, field_specification_lambda, interfaces=interface_specification_lambda, is_type_of=lambda: None) graphql_types[vertex_cls_name] = current_graphql_type # For each vertex class, construct all union types representations. 
for vertex_cls_name in sorted(schema_graph.vertex_class_names): vertex_cls = schema_graph.get_element_by_class_name(vertex_cls_name) if vertex_cls_name in hidden_classes: continue vertex_cls_subclasses = schema_graph.get_subclass_set(vertex_cls_name) if not vertex_cls.abstract and len(vertex_cls_subclasses) > 1: # In addition to creating this class' corresponding GraphQL type, we'll need a # union type to represent it when it appears as the endpoint of an edge. union_type_name = _get_union_type_name(vertex_cls_subclasses) # For similar reasons as the field_specification_lambda, # we need to create a union type specification lambda function that specifies # the types that this union type is composed of. type_specification_lambda = _create_union_types_specification( schema_graph, graphql_types, hidden_classes, vertex_cls_name) union_type = GraphQLUnionType(union_type_name, types=type_specification_lambda) graphql_types[union_type_name] = union_type type_equivalence_hints[graphql_types[vertex_cls_name]] = union_type # Include all abstract non-vertex classes whose only non-abstract subclasses are vertices. for non_graph_cls_name in sorted(schema_graph.non_graph_class_names): if non_graph_cls_name in hidden_classes: continue if not schema_graph.get_element_by_class_name(non_graph_cls_name).abstract: continue cls_subclasses = schema_graph.get_subclass_set(non_graph_cls_name) # No need to add the possible abstract class if it doesn't have subclasses besides itself. if len(cls_subclasses) > 1: all_non_abstract_subclasses_are_vertices = True # Check all non-abstract subclasses are vertices. for subclass_name in cls_subclasses: subclass = schema_graph.get_element_by_class_name(subclass_name) if subclass_name != non_graph_cls_name: if not subclass.abstract and not subclass.is_vertex: all_non_abstract_subclasses_are_vertices = False break if all_non_abstract_subclasses_are_vertices: # Add abstract class as an interface. inherited_field_type_overrides.setdefault(non_graph_cls_name, dict()) field_type_overrides = inherited_field_type_overrides[non_graph_cls_name] field_specification_lambda = _create_field_specification( schema_graph, graphql_types, field_type_overrides, hidden_classes, non_graph_cls_name) graphql_type = GraphQLInterfaceType(non_graph_cls_name, fields=field_specification_lambda) graphql_types[non_graph_cls_name] = graphql_type if not graphql_types: raise EmptySchemaError(u'After evaluating all subclasses of V, we were not able to find ' u'visible schema data to import into the GraphQL schema object') # Create the root query GraphQL type. Consists of all non-union classes, i.e. # all non-abstract classes (as GraphQL types) and all abstract classes (as GraphQL interfaces). RootSchemaQuery = GraphQLObjectType('RootSchemaQuery', OrderedDict([ (name, GraphQLField(value)) for name, value in sorted(six.iteritems(graphql_types), key=lambda x: x[0]) if not isinstance(value, GraphQLUnionType) ])) schema = GraphQLSchema(RootSchemaQuery, directives=DIRECTIVES) # Note that the GraphQLSchema reconstructs the set of types in the schema by recursively # searching through the fields of the RootSchemaQuery. Since union types can only appear in the # fields of other types as edges, union types with no in or out edges will not appear in the # schema. Therefore, we remove these unions and their keys from the type equivalence hints. return schema, _get_referenced_type_equivalences(graphql_types, type_equivalence_hints)
[ "def", "get_graphql_schema_from_schema_graph", "(", "schema_graph", ",", "class_to_field_type_overrides", ",", "hidden_classes", ")", ":", "_validate_overriden_fields_are_not_defined_in_superclasses", "(", "class_to_field_type_overrides", ",", "schema_graph", ")", "# The field types of subclasses must also be overridden.", "# Remember that the result returned by get_subclass_set(class_name) includes class_name itself.", "inherited_field_type_overrides", "=", "_get_inherited_field_types", "(", "class_to_field_type_overrides", ",", "schema_graph", ")", "# We remove the base vertex class from the schema if it has no properties.", "# If it has no properties, it's meaningless and makes the schema less syntactically sweet.", "if", "not", "schema_graph", ".", "get_element_by_class_name", "(", "ORIENTDB_BASE_VERTEX_CLASS_NAME", ")", ".", "properties", ":", "hidden_classes", ".", "add", "(", "ORIENTDB_BASE_VERTEX_CLASS_NAME", ")", "graphql_types", "=", "OrderedDict", "(", ")", "type_equivalence_hints", "=", "OrderedDict", "(", ")", "# For each vertex class, construct its analogous GraphQL type representation.", "for", "vertex_cls_name", "in", "sorted", "(", "schema_graph", ".", "vertex_class_names", ")", ":", "vertex_cls", "=", "schema_graph", ".", "get_element_by_class_name", "(", "vertex_cls_name", ")", "if", "vertex_cls_name", "in", "hidden_classes", ":", "continue", "inherited_field_type_overrides", ".", "setdefault", "(", "vertex_cls_name", ",", "dict", "(", ")", ")", "field_type_overrides", "=", "inherited_field_type_overrides", "[", "vertex_cls_name", "]", "# We have to use delayed type binding here, because some of the type references", "# are circular: if an edge connects vertices of types A and B, then", "# GraphQL type A has a List[B] field, and type B has a List[A] field.", "# To avoid the circular dependency, GraphQL allows us to initialize the types", "# initially without their field information, and fill in their field information", "# later using a lambda function as the second argument to GraphQLObjectType.", "# This lambda function will be called on each type after all types are created", "# in their initial blank state.", "#", "# However, 'cls_name' is a variable that would not be correctly bound", "# if we naively tried to construct a lambda in-place, because Python lambdas", "# are not closures. 
Instead, call a function with 'cls_name' as an argument,", "# and have that function construct and return the required lambda.", "field_specification_lambda", "=", "_create_field_specification", "(", "schema_graph", ",", "graphql_types", ",", "field_type_overrides", ",", "hidden_classes", ",", "vertex_cls_name", ")", "# Abstract classes are interfaces, concrete classes are object types.", "current_graphql_type", "=", "None", "if", "vertex_cls", ".", "abstract", ":", "# \"fields\" is a kwarg in the interface constructor, even though", "# it's a positional arg in the object type constructor.", "current_graphql_type", "=", "GraphQLInterfaceType", "(", "vertex_cls_name", ",", "fields", "=", "field_specification_lambda", ")", "else", ":", "# For similar reasons as the field_specification_lambda,", "# we need to create an interface specification lambda function that", "# specifies the interfaces implemented by this type.", "interface_specification_lambda", "=", "_create_interface_specification", "(", "schema_graph", ",", "graphql_types", ",", "hidden_classes", ",", "vertex_cls_name", ")", "# N.B.: Ignore the \"is_type_of\" argument below, it is simply a circumvention of", "# a sanity check inside the GraphQL library. The library assumes that we'll use", "# its execution system, so it complains that we don't provide a means to", "# differentiate between different implementations of the same interface.", "# We don't care, because we compile the GraphQL query to a database query.", "current_graphql_type", "=", "GraphQLObjectType", "(", "vertex_cls_name", ",", "field_specification_lambda", ",", "interfaces", "=", "interface_specification_lambda", ",", "is_type_of", "=", "lambda", ":", "None", ")", "graphql_types", "[", "vertex_cls_name", "]", "=", "current_graphql_type", "# For each vertex class, construct all union types representations.", "for", "vertex_cls_name", "in", "sorted", "(", "schema_graph", ".", "vertex_class_names", ")", ":", "vertex_cls", "=", "schema_graph", ".", "get_element_by_class_name", "(", "vertex_cls_name", ")", "if", "vertex_cls_name", "in", "hidden_classes", ":", "continue", "vertex_cls_subclasses", "=", "schema_graph", ".", "get_subclass_set", "(", "vertex_cls_name", ")", "if", "not", "vertex_cls", ".", "abstract", "and", "len", "(", "vertex_cls_subclasses", ")", ">", "1", ":", "# In addition to creating this class' corresponding GraphQL type, we'll need a", "# union type to represent it when it appears as the endpoint of an edge.", "union_type_name", "=", "_get_union_type_name", "(", "vertex_cls_subclasses", ")", "# For similar reasons as the field_specification_lambda,", "# we need to create a union type specification lambda function that specifies", "# the types that this union type is composed of.", "type_specification_lambda", "=", "_create_union_types_specification", "(", "schema_graph", ",", "graphql_types", ",", "hidden_classes", ",", "vertex_cls_name", ")", "union_type", "=", "GraphQLUnionType", "(", "union_type_name", ",", "types", "=", "type_specification_lambda", ")", "graphql_types", "[", "union_type_name", "]", "=", "union_type", "type_equivalence_hints", "[", "graphql_types", "[", "vertex_cls_name", "]", "]", "=", "union_type", "# Include all abstract non-vertex classes whose only non-abstract subclasses are vertices.", "for", "non_graph_cls_name", "in", "sorted", "(", "schema_graph", ".", "non_graph_class_names", ")", ":", "if", "non_graph_cls_name", "in", "hidden_classes", ":", "continue", "if", "not", "schema_graph", ".", "get_element_by_class_name", "(", 
"non_graph_cls_name", ")", ".", "abstract", ":", "continue", "cls_subclasses", "=", "schema_graph", ".", "get_subclass_set", "(", "non_graph_cls_name", ")", "# No need to add the possible abstract class if it doesn't have subclasses besides itself.", "if", "len", "(", "cls_subclasses", ")", ">", "1", ":", "all_non_abstract_subclasses_are_vertices", "=", "True", "# Check all non-abstract subclasses are vertices.", "for", "subclass_name", "in", "cls_subclasses", ":", "subclass", "=", "schema_graph", ".", "get_element_by_class_name", "(", "subclass_name", ")", "if", "subclass_name", "!=", "non_graph_cls_name", ":", "if", "not", "subclass", ".", "abstract", "and", "not", "subclass", ".", "is_vertex", ":", "all_non_abstract_subclasses_are_vertices", "=", "False", "break", "if", "all_non_abstract_subclasses_are_vertices", ":", "# Add abstract class as an interface.", "inherited_field_type_overrides", ".", "setdefault", "(", "non_graph_cls_name", ",", "dict", "(", ")", ")", "field_type_overrides", "=", "inherited_field_type_overrides", "[", "non_graph_cls_name", "]", "field_specification_lambda", "=", "_create_field_specification", "(", "schema_graph", ",", "graphql_types", ",", "field_type_overrides", ",", "hidden_classes", ",", "non_graph_cls_name", ")", "graphql_type", "=", "GraphQLInterfaceType", "(", "non_graph_cls_name", ",", "fields", "=", "field_specification_lambda", ")", "graphql_types", "[", "non_graph_cls_name", "]", "=", "graphql_type", "if", "not", "graphql_types", ":", "raise", "EmptySchemaError", "(", "u'After evaluating all subclasses of V, we were not able to find '", "u'visible schema data to import into the GraphQL schema object'", ")", "# Create the root query GraphQL type. Consists of all non-union classes, i.e.", "# all non-abstract classes (as GraphQL types) and all abstract classes (as GraphQL interfaces).", "RootSchemaQuery", "=", "GraphQLObjectType", "(", "'RootSchemaQuery'", ",", "OrderedDict", "(", "[", "(", "name", ",", "GraphQLField", "(", "value", ")", ")", "for", "name", ",", "value", "in", "sorted", "(", "six", ".", "iteritems", "(", "graphql_types", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "0", "]", ")", "if", "not", "isinstance", "(", "value", ",", "GraphQLUnionType", ")", "]", ")", ")", "schema", "=", "GraphQLSchema", "(", "RootSchemaQuery", ",", "directives", "=", "DIRECTIVES", ")", "# Note that the GraphQLSchema reconstructs the set of types in the schema by recursively", "# searching through the fields of the RootSchemaQuery. Since union types can only appear in the", "# fields of other types as edges, union types with no in or out edges will not appear in the", "# schema. Therefore, we remove these unions and their keys from the type equivalence hints.", "return", "schema", ",", "_get_referenced_type_equivalences", "(", "graphql_types", ",", "type_equivalence_hints", ")" ]
Return a GraphQL schema object corresponding to the schema of the given schema graph. Args: schema_graph: SchemaGraph class_to_field_type_overrides: dict, class name -> {field name -> field type}, (string -> {string -> GraphQLType}). Used to override the type of a field in the class where it's first defined and all the class's subclasses. hidden_classes: set of strings, classes to not include in the GraphQL schema. Returns: tuple of (GraphQL schema object, GraphQL type equivalence hints dict). The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
[ "Return", "a", "GraphQL", "schema", "object", "corresponding", "to", "the", "schema", "of", "the", "given", "schema", "graph", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/graphql_schema.py#L228-L383
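A note on the delayed type binding described in the record above: Python lambdas are closures, but they bind free variables late, so every lambda created in a loop ends up seeing the loop variable's final value. A minimal, self-contained sketch of the pitfall and of the factory-function fix the record describes (toy names only):

naive = [lambda: cls_name for cls_name in ('A', 'B', 'C')]
print([f() for f in naive])  # ['C', 'C', 'C'] -- late binding: all share one variable

def make_lambda(cls_name):
    # Calling a function rebinds cls_name in a fresh scope on every invocation.
    return lambda: cls_name

fixed = [make_lambda(cls_name) for cls_name in ('A', 'B', 'C')]
print([f() for f in fixed])  # ['A', 'B', 'C']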
246,739
kensho-technologies/graphql-compiler
graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py
workaround_lowering_pass
def workaround_lowering_pass(ir_blocks, query_metadata_table): """Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary.""" new_ir_blocks = [] for block in ir_blocks: if isinstance(block, Filter): new_block = _process_filter_block(query_metadata_table, block) else: new_block = block new_ir_blocks.append(new_block) return new_ir_blocks
python
def workaround_lowering_pass(ir_blocks, query_metadata_table): new_ir_blocks = [] for block in ir_blocks: if isinstance(block, Filter): new_block = _process_filter_block(query_metadata_table, block) else: new_block = block new_ir_blocks.append(new_block) return new_ir_blocks
[ "def", "workaround_lowering_pass", "(", "ir_blocks", ",", "query_metadata_table", ")", ":", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "if", "isinstance", "(", "block", ",", "Filter", ")", ":", "new_block", "=", "_process_filter_block", "(", "query_metadata_table", ",", "block", ")", "else", ":", "new_block", "=", "block", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
Extract locations from TernaryConditionals and rewrite their Filter blocks as necessary.
[ "Extract", "locations", "from", "TernaryConditionals", "and", "rewrite", "their", "Filter", "blocks", "as", "necessary", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L19-L30
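This pass has the usual map-over-IR-blocks shape: rebuild the block list, rewriting only the blocks of interest and passing everything else through unchanged. A generic sketch with a hypothetical stand-in Filter class (not the compiler's real IR block):

class Filter(object):
    def __init__(self, predicate):
        self.predicate = predicate

def lowering_pass(ir_blocks, rewrite_filter):
    # Copy the block list, substituting rewritten Filter blocks in place.
    return [rewrite_filter(block) if isinstance(block, Filter) else block
            for block in ir_blocks]

blocks = ['QueryRoot', Filter('name = $x'), 'ConstructResult']
result = lowering_pass(blocks, lambda b: Filter('(' + b.predicate + ')'))
print(result[1].predicate)  # (name = $x)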
246,740
kensho-technologies/graphql-compiler
graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py
_process_filter_block
def _process_filter_block(query_metadata_table, block): """Rewrite the provided Filter block if necessary.""" # For a given Filter block with BinaryComposition predicate expression X, # let L be the set of all Locations referenced in any TernaryConditional # predicate expression enclosed in X. # For each location l in L, we construct a tautological expression that looks like: # ((l IS NULL) OR (l IS NOT NULL)) # and then join the original BinaryComposition X with all such expressions with ANDs. # We set this new BinaryComposition expression as the predicate of the Filter block. base_predicate = block.predicate # These variables are used by the visitor functions below. ternary_conditionals = [] # "problematic_locations" is a list and not a set, # to preserve ordering and generate a deterministic order of added clauses. # We expect the maximum size of this list to be a small constant number, # so the linear "in" operator is really not a concern. problematic_locations = [] def find_ternary_conditionals(expression): """Visitor function that extracts all enclosed TernaryConditional expressions.""" if isinstance(expression, TernaryConditional): ternary_conditionals.append(expression) return expression def extract_locations_visitor(expression): """Visitor function that extracts all the problematic locations.""" if isinstance(expression, (ContextField, ContextFieldExistence)): # We get the location at the vertex, ignoring property fields. # The vertex-level location is sufficient to work around the OrientDB bug, # and we want as few location as possible overall. location_at_vertex = expression.location.at_vertex() if location_at_vertex not in problematic_locations: problematic_locations.append(location_at_vertex) return expression # We aren't modifying the base predicate itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = base_predicate.visit_and_update(find_ternary_conditionals) if return_value is not base_predicate: raise AssertionError(u'Read-only visitor function "find_ternary_conditionals" ' u'caused state to change: ' u'{} {}'.format(base_predicate, return_value)) for ternary in ternary_conditionals: # We aren't modifying the ternary itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = ternary.visit_and_update(extract_locations_visitor) if return_value is not ternary: raise AssertionError(u'Read-only visitor function "extract_locations_visitor" ' u'caused state to change: ' u'{} {}'.format(ternary, return_value)) tautologies = [ _create_tautological_expression_for_location(query_metadata_table, location) for location in problematic_locations ] if not tautologies: return block final_predicate = base_predicate for tautology in tautologies: final_predicate = BinaryComposition(u'&&', final_predicate, tautology) return Filter(final_predicate)
python
def _process_filter_block(query_metadata_table, block): # For a given Filter block with BinaryComposition predicate expression X, # let L be the set of all Locations referenced in any TernaryConditional # predicate expression enclosed in X. # For each location l in L, we construct a tautological expression that looks like: # ((l IS NULL) OR (l IS NOT NULL)) # and then join the original BinaryComposition X with all such expressions with ANDs. # We set this new BinaryComposition expression as the predicate of the Filter block. base_predicate = block.predicate # These variables are used by the visitor functions below. ternary_conditionals = [] # "problematic_locations" is a list and not a set, # to preserve ordering and generate a deterministic order of added clauses. # We expect the maximum size of this list to be a small constant number, # so the linear "in" operator is really not a concern. problematic_locations = [] def find_ternary_conditionals(expression): """Visitor function that extracts all enclosed TernaryConditional expressions.""" if isinstance(expression, TernaryConditional): ternary_conditionals.append(expression) return expression def extract_locations_visitor(expression): """Visitor function that extracts all the problematic locations.""" if isinstance(expression, (ContextField, ContextFieldExistence)): # We get the location at the vertex, ignoring property fields. # The vertex-level location is sufficient to work around the OrientDB bug, # and we want as few location as possible overall. location_at_vertex = expression.location.at_vertex() if location_at_vertex not in problematic_locations: problematic_locations.append(location_at_vertex) return expression # We aren't modifying the base predicate itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = base_predicate.visit_and_update(find_ternary_conditionals) if return_value is not base_predicate: raise AssertionError(u'Read-only visitor function "find_ternary_conditionals" ' u'caused state to change: ' u'{} {}'.format(base_predicate, return_value)) for ternary in ternary_conditionals: # We aren't modifying the ternary itself, just traversing it. # The returned "updated" value must be the exact same as the original. return_value = ternary.visit_and_update(extract_locations_visitor) if return_value is not ternary: raise AssertionError(u'Read-only visitor function "extract_locations_visitor" ' u'caused state to change: ' u'{} {}'.format(ternary, return_value)) tautologies = [ _create_tautological_expression_for_location(query_metadata_table, location) for location in problematic_locations ] if not tautologies: return block final_predicate = base_predicate for tautology in tautologies: final_predicate = BinaryComposition(u'&&', final_predicate, tautology) return Filter(final_predicate)
[ "def", "_process_filter_block", "(", "query_metadata_table", ",", "block", ")", ":", "# For a given Filter block with BinaryComposition predicate expression X,", "# let L be the set of all Locations referenced in any TernaryConditional", "# predicate expression enclosed in X.", "# For each location l in L, we construct a tautological expression that looks like:", "# ((l IS NULL) OR (l IS NOT NULL))", "# and then join the original BinaryComposition X with all such expressions with ANDs.", "# We set this new BinaryComposition expression as the predicate of the Filter block.", "base_predicate", "=", "block", ".", "predicate", "# These variables are used by the visitor functions below.", "ternary_conditionals", "=", "[", "]", "# \"problematic_locations\" is a list and not a set,", "# to preserve ordering and generate a deterministic order of added clauses.", "# We expect the maximum size of this list to be a small constant number,", "# so the linear \"in\" operator is really not a concern.", "problematic_locations", "=", "[", "]", "def", "find_ternary_conditionals", "(", "expression", ")", ":", "\"\"\"Visitor function that extracts all enclosed TernaryConditional expressions.\"\"\"", "if", "isinstance", "(", "expression", ",", "TernaryConditional", ")", ":", "ternary_conditionals", ".", "append", "(", "expression", ")", "return", "expression", "def", "extract_locations_visitor", "(", "expression", ")", ":", "\"\"\"Visitor function that extracts all the problematic locations.\"\"\"", "if", "isinstance", "(", "expression", ",", "(", "ContextField", ",", "ContextFieldExistence", ")", ")", ":", "# We get the location at the vertex, ignoring property fields.", "# The vertex-level location is sufficient to work around the OrientDB bug,", "# and we want as few location as possible overall.", "location_at_vertex", "=", "expression", ".", "location", ".", "at_vertex", "(", ")", "if", "location_at_vertex", "not", "in", "problematic_locations", ":", "problematic_locations", ".", "append", "(", "location_at_vertex", ")", "return", "expression", "# We aren't modifying the base predicate itself, just traversing it.", "# The returned \"updated\" value must be the exact same as the original.", "return_value", "=", "base_predicate", ".", "visit_and_update", "(", "find_ternary_conditionals", ")", "if", "return_value", "is", "not", "base_predicate", ":", "raise", "AssertionError", "(", "u'Read-only visitor function \"find_ternary_conditionals\" '", "u'caused state to change: '", "u'{} {}'", ".", "format", "(", "base_predicate", ",", "return_value", ")", ")", "for", "ternary", "in", "ternary_conditionals", ":", "# We aren't modifying the ternary itself, just traversing it.", "# The returned \"updated\" value must be the exact same as the original.", "return_value", "=", "ternary", ".", "visit_and_update", "(", "extract_locations_visitor", ")", "if", "return_value", "is", "not", "ternary", ":", "raise", "AssertionError", "(", "u'Read-only visitor function \"extract_locations_visitor\" '", "u'caused state to change: '", "u'{} {}'", ".", "format", "(", "ternary", ",", "return_value", ")", ")", "tautologies", "=", "[", "_create_tautological_expression_for_location", "(", "query_metadata_table", ",", "location", ")", "for", "location", "in", "problematic_locations", "]", "if", "not", "tautologies", ":", "return", "block", "final_predicate", "=", "base_predicate", "for", "tautology", "in", "tautologies", ":", "final_predicate", "=", "BinaryComposition", "(", "u'&&'", ",", "final_predicate", ",", "tautology", ")", 
"return", "Filter", "(", "final_predicate", ")" ]
Rewrite the provided Filter block if necessary.
[ "Rewrite", "the", "provided", "Filter", "block", "if", "necessary", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L33-L98
246,741
kensho-technologies/graphql-compiler
graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py
_create_tautological_expression_for_location
def _create_tautological_expression_for_location(query_metadata_table, location): """For a given location, create a BinaryComposition that always evaluates to 'true'.""" location_type = query_metadata_table.get_location_info(location).type location_exists = BinaryComposition( u'!=', ContextField(location, location_type), NullLiteral) location_does_not_exist = BinaryComposition( u'=', ContextField(location, location_type), NullLiteral) return BinaryComposition(u'||', location_exists, location_does_not_exist)
python
def _create_tautological_expression_for_location(query_metadata_table, location): location_type = query_metadata_table.get_location_info(location).type location_exists = BinaryComposition( u'!=', ContextField(location, location_type), NullLiteral) location_does_not_exist = BinaryComposition( u'=', ContextField(location, location_type), NullLiteral) return BinaryComposition(u'||', location_exists, location_does_not_exist)
[ "def", "_create_tautological_expression_for_location", "(", "query_metadata_table", ",", "location", ")", ":", "location_type", "=", "query_metadata_table", ".", "get_location_info", "(", "location", ")", ".", "type", "location_exists", "=", "BinaryComposition", "(", "u'!='", ",", "ContextField", "(", "location", ",", "location_type", ")", ",", "NullLiteral", ")", "location_does_not_exist", "=", "BinaryComposition", "(", "u'='", ",", "ContextField", "(", "location", ",", "location_type", ")", ",", "NullLiteral", ")", "return", "BinaryComposition", "(", "u'||'", ",", "location_exists", ",", "location_does_not_exist", ")" ]
For a given location, create a BinaryComposition that always evaluates to 'true'.
[ "For", "a", "given", "location", "create", "a", "BinaryComposition", "that", "always", "evaluates", "to", "true", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_eval_scheduling.py#L101-L109
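A namedtuple sketch of the always-true expression this helper assembles, i.e. (location != null) || (location = null); the classes below are hypothetical stand-ins, not the compiler's real expression types:

from collections import namedtuple

BinaryComposition = namedtuple('BinaryComposition', ['operator', 'left', 'right'])

def tautology_for(context_field):
    location_exists = BinaryComposition(u'!=', context_field, None)
    location_missing = BinaryComposition(u'=', context_field, None)
    return BinaryComposition(u'||', location_exists, location_missing)

expr = tautology_for(u'Animal__1')
print(expr.operator, expr.left.operator, expr.right.operator)  # || != =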
246,742
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_only_element_from_collection
def get_only_element_from_collection(one_element_collection): """Assert that the collection has exactly one element, then return that element.""" if len(one_element_collection) != 1: raise AssertionError(u'Expected a collection with exactly one element, but got: {}' .format(one_element_collection)) return funcy.first(one_element_collection)
python
def get_only_element_from_collection(one_element_collection): if len(one_element_collection) != 1: raise AssertionError(u'Expected a collection with exactly one element, but got: {}' .format(one_element_collection)) return funcy.first(one_element_collection)
[ "def", "get_only_element_from_collection", "(", "one_element_collection", ")", ":", "if", "len", "(", "one_element_collection", ")", "!=", "1", ":", "raise", "AssertionError", "(", "u'Expected a collection with exactly one element, but got: {}'", ".", "format", "(", "one_element_collection", ")", ")", "return", "funcy", ".", "first", "(", "one_element_collection", ")" ]
Assert that the collection has exactly one element, then return that element.
[ "Assert", "that", "the", "collection", "has", "exactly", "one", "element", "then", "return", "that", "element", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L37-L42
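A self-contained sketch of the one-element contract; the real helper returns funcy.first(...), but next(iter(...)) behaves identically for the singleton case:

def only_element(one_element_collection):
    if len(one_element_collection) != 1:
        raise AssertionError(u'Expected a collection with exactly one element, '
                             u'but got: {}'.format(one_element_collection))
    return next(iter(one_element_collection))

print(only_element({'Animal'}))  # Animal -- works on sets as well as lists
try:
    only_element([1, 2])
except AssertionError as error:
    print(error)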
246,743
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_ast_field_name
def get_ast_field_name(ast): """Return the normalized field name for the given AST node.""" replacements = { # We always rewrite the following field names into their proper underlying counterparts. TYPENAME_META_FIELD_NAME: '@class' } base_field_name = ast.name.value normalized_name = replacements.get(base_field_name, base_field_name) return normalized_name
python
def get_ast_field_name(ast): replacements = { # We always rewrite the following field names into their proper underlying counterparts. TYPENAME_META_FIELD_NAME: '@class' } base_field_name = ast.name.value normalized_name = replacements.get(base_field_name, base_field_name) return normalized_name
[ "def", "get_ast_field_name", "(", "ast", ")", ":", "replacements", "=", "{", "# We always rewrite the following field names into their proper underlying counterparts.", "TYPENAME_META_FIELD_NAME", ":", "'@class'", "}", "base_field_name", "=", "ast", ".", "name", ".", "value", "normalized_name", "=", "replacements", ".", "get", "(", "base_field_name", ",", "base_field_name", ")", "return", "normalized_name" ]
Return the normalized field name for the given AST node.
[ "Return", "the", "normalized", "field", "name", "for", "the", "given", "AST", "node", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L45-L53
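Assuming TYPENAME_META_FIELD_NAME is GraphQL's standard '__typename' meta field, the normalization is a single dict lookup with a pass-through default. The stub AST nodes below are hypothetical stand-ins:

from collections import namedtuple

Name = namedtuple('Name', ['value'])
FieldNode = namedtuple('FieldNode', ['name'])

REPLACEMENTS = {'__typename': '@class'}  # assumed value of the meta field constant

def normalized_field_name(ast):
    return REPLACEMENTS.get(ast.name.value, ast.name.value)

print(normalized_field_name(FieldNode(Name('__typename'))))  # @class
print(normalized_field_name(FieldNode(Name('name'))))        # name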
246,744
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_field_type_from_schema
def get_field_type_from_schema(schema_type, field_name): """Return the type of the field in the given type, accounting for field name normalization.""" if field_name == '@class': return GraphQLString else: if field_name not in schema_type.fields: raise AssertionError(u'Field {} passed validation but was not present on type ' u'{}'.format(field_name, schema_type)) # Validation guarantees that the field must exist in the schema. return schema_type.fields[field_name].type
python
def get_field_type_from_schema(schema_type, field_name): if field_name == '@class': return GraphQLString else: if field_name not in schema_type.fields: raise AssertionError(u'Field {} passed validation but was not present on type ' u'{}'.format(field_name, schema_type)) # Validation guarantees that the field must exist in the schema. return schema_type.fields[field_name].type
[ "def", "get_field_type_from_schema", "(", "schema_type", ",", "field_name", ")", ":", "if", "field_name", "==", "'@class'", ":", "return", "GraphQLString", "else", ":", "if", "field_name", "not", "in", "schema_type", ".", "fields", ":", "raise", "AssertionError", "(", "u'Field {} passed validation but was not present on type '", "u'{}'", ".", "format", "(", "field_name", ",", "schema_type", ")", ")", "# Validation guarantees that the field must exist in the schema.", "return", "schema_type", ".", "fields", "[", "field_name", "]", ".", "type" ]
Return the type of the field in the given type, accounting for field name normalization.
[ "Return", "the", "type", "of", "the", "field", "in", "the", "given", "type", "accounting", "for", "field", "name", "normalization", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L63-L73
246,745
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_vertex_field_type
def get_vertex_field_type(current_schema_type, vertex_field_name): """Return the type of the vertex within the specified vertex field name of the given type.""" # According to the schema, the vertex field itself is of type GraphQLList, and this is # what get_field_type_from_schema returns. We care about what the type *inside* the list is, # i.e., the type on the other side of the edge (hence .of_type). # Validation guarantees that the field must exist in the schema. if not is_vertex_field_name(vertex_field_name): raise AssertionError(u'Trying to load the vertex field type of a non-vertex field: ' u'{} {}'.format(current_schema_type, vertex_field_name)) raw_field_type = get_field_type_from_schema(current_schema_type, vertex_field_name) if not isinstance(strip_non_null_from_type(raw_field_type), GraphQLList): raise AssertionError(u'Found an edge whose schema type was not GraphQLList: ' u'{} {} {}'.format(current_schema_type, vertex_field_name, raw_field_type)) return raw_field_type.of_type
python
def get_vertex_field_type(current_schema_type, vertex_field_name): # According to the schema, the vertex field itself is of type GraphQLList, and this is # what get_field_type_from_schema returns. We care about what the type *inside* the list is, # i.e., the type on the other side of the edge (hence .of_type). # Validation guarantees that the field must exist in the schema. if not is_vertex_field_name(vertex_field_name): raise AssertionError(u'Trying to load the vertex field type of a non-vertex field: ' u'{} {}'.format(current_schema_type, vertex_field_name)) raw_field_type = get_field_type_from_schema(current_schema_type, vertex_field_name) if not isinstance(strip_non_null_from_type(raw_field_type), GraphQLList): raise AssertionError(u'Found an edge whose schema type was not GraphQLList: ' u'{} {} {}'.format(current_schema_type, vertex_field_name, raw_field_type)) return raw_field_type.of_type
[ "def", "get_vertex_field_type", "(", "current_schema_type", ",", "vertex_field_name", ")", ":", "# According to the schema, the vertex field itself is of type GraphQLList, and this is", "# what get_field_type_from_schema returns. We care about what the type *inside* the list is,", "# i.e., the type on the other side of the edge (hence .of_type).", "# Validation guarantees that the field must exist in the schema.", "if", "not", "is_vertex_field_name", "(", "vertex_field_name", ")", ":", "raise", "AssertionError", "(", "u'Trying to load the vertex field type of a non-vertex field: '", "u'{} {}'", ".", "format", "(", "current_schema_type", ",", "vertex_field_name", ")", ")", "raw_field_type", "=", "get_field_type_from_schema", "(", "current_schema_type", ",", "vertex_field_name", ")", "if", "not", "isinstance", "(", "strip_non_null_from_type", "(", "raw_field_type", ")", ",", "GraphQLList", ")", ":", "raise", "AssertionError", "(", "u'Found an edge whose schema type was not GraphQLList: '", "u'{} {} {}'", ".", "format", "(", "current_schema_type", ",", "vertex_field_name", ",", "raw_field_type", ")", ")", "return", "raw_field_type", ".", "of_type" ]
Return the type of the vertex within the specified vertex field name of the given type.
[ "Return", "the", "type", "of", "the", "vertex", "within", "the", "specified", "vertex", "field", "name", "of", "the", "given", "type", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L76-L91
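A sketch of the unwrapping this helper performs, built directly on graphql-core types (constructor details can vary slightly between graphql-core releases): the vertex field holds a GraphQLList, and .of_type yields the vertex type across the edge.

from graphql import GraphQLField, GraphQLList, GraphQLObjectType, GraphQLString

person = GraphQLObjectType('Person', lambda: {
    'name': GraphQLField(GraphQLString),
    'out_Person_Knows': GraphQLField(GraphQLList(person)),  # edge field is a list
})

raw_field_type = person.fields['out_Person_Knows'].type
print(raw_field_type.of_type)  # Person -- the type on the other side of the edge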
246,746
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_edge_direction_and_name
def get_edge_direction_and_name(vertex_field_name): """Get the edge direction and name from a non-root vertex field name.""" edge_direction = None edge_name = None if vertex_field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX): edge_direction = OUTBOUND_EDGE_DIRECTION edge_name = vertex_field_name[len(OUTBOUND_EDGE_FIELD_PREFIX):] elif vertex_field_name.startswith(INBOUND_EDGE_FIELD_PREFIX): edge_direction = INBOUND_EDGE_DIRECTION edge_name = vertex_field_name[len(INBOUND_EDGE_FIELD_PREFIX):] else: raise AssertionError(u'Unreachable condition reached:', vertex_field_name) validate_safe_string(edge_name) return edge_direction, edge_name
python
def get_edge_direction_and_name(vertex_field_name): edge_direction = None edge_name = None if vertex_field_name.startswith(OUTBOUND_EDGE_FIELD_PREFIX): edge_direction = OUTBOUND_EDGE_DIRECTION edge_name = vertex_field_name[len(OUTBOUND_EDGE_FIELD_PREFIX):] elif vertex_field_name.startswith(INBOUND_EDGE_FIELD_PREFIX): edge_direction = INBOUND_EDGE_DIRECTION edge_name = vertex_field_name[len(INBOUND_EDGE_FIELD_PREFIX):] else: raise AssertionError(u'Unreachable condition reached:', vertex_field_name) validate_safe_string(edge_name) return edge_direction, edge_name
[ "def", "get_edge_direction_and_name", "(", "vertex_field_name", ")", ":", "edge_direction", "=", "None", "edge_name", "=", "None", "if", "vertex_field_name", ".", "startswith", "(", "OUTBOUND_EDGE_FIELD_PREFIX", ")", ":", "edge_direction", "=", "OUTBOUND_EDGE_DIRECTION", "edge_name", "=", "vertex_field_name", "[", "len", "(", "OUTBOUND_EDGE_FIELD_PREFIX", ")", ":", "]", "elif", "vertex_field_name", ".", "startswith", "(", "INBOUND_EDGE_FIELD_PREFIX", ")", ":", "edge_direction", "=", "INBOUND_EDGE_DIRECTION", "edge_name", "=", "vertex_field_name", "[", "len", "(", "INBOUND_EDGE_FIELD_PREFIX", ")", ":", "]", "else", ":", "raise", "AssertionError", "(", "u'Unreachable condition reached:'", ",", "vertex_field_name", ")", "validate_safe_string", "(", "edge_name", ")", "return", "edge_direction", ",", "edge_name" ]
Get the edge direction and name from a non-root vertex field name.
[ "Get", "the", "edge", "direction", "and", "name", "from", "a", "non", "-", "root", "vertex", "field", "name", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L101-L116
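A standalone sketch assuming the conventional 'out_'/'in_' field prefixes; the real OUTBOUND_EDGE_FIELD_PREFIX and INBOUND_EDGE_FIELD_PREFIX constants live alongside this helper:

def split_vertex_field_name(vertex_field_name):
    for prefix, direction in (('out_', 'out'), ('in_', 'in')):
        if vertex_field_name.startswith(prefix):
            return direction, vertex_field_name[len(prefix):]
    raise AssertionError(u'Unreachable condition reached:', vertex_field_name)

print(split_vertex_field_name('out_Animal_ParentOf'))  # ('out', 'Animal_ParentOf')
print(split_vertex_field_name('in_Animal_ParentOf'))   # ('in', 'Animal_ParentOf')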
246,747
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
is_vertex_field_type
def is_vertex_field_type(graphql_type): """Return True if the argument is a vertex field type, and False otherwise.""" # This will need to change if we ever support complex embedded types or edge field types. underlying_type = strip_non_null_from_type(graphql_type) return isinstance(underlying_type, (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType))
python
def is_vertex_field_type(graphql_type): # This will need to change if we ever support complex embedded types or edge field types. underlying_type = strip_non_null_from_type(graphql_type) return isinstance(underlying_type, (GraphQLInterfaceType, GraphQLObjectType, GraphQLUnionType))
[ "def", "is_vertex_field_type", "(", "graphql_type", ")", ":", "# This will need to change if we ever support complex embedded types or edge field types.", "underlying_type", "=", "strip_non_null_from_type", "(", "graphql_type", ")", "return", "isinstance", "(", "underlying_type", ",", "(", "GraphQLInterfaceType", ",", "GraphQLObjectType", ",", "GraphQLUnionType", ")", ")" ]
Return True if the argument is a vertex field type, and False otherwise.
[ "Return", "True", "if", "the", "argument", "is", "a", "vertex", "field", "type", "and", "False", "otherwise", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L127-L131
246,748
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
ensure_unicode_string
def ensure_unicode_string(value): """Ensure the value is a string, and return it as unicode.""" if not isinstance(value, six.string_types): raise TypeError(u'Expected string value, got: {}'.format(value)) return six.text_type(value)
python
def ensure_unicode_string(value): if not isinstance(value, six.string_types): raise TypeError(u'Expected string value, got: {}'.format(value)) return six.text_type(value)
[ "def", "ensure_unicode_string", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "raise", "TypeError", "(", "u'Expected string value, got: {}'", ".", "format", "(", "value", ")", ")", "return", "six", ".", "text_type", "(", "value", ")" ]
Ensure the value is a string, and return it as unicode.
[ "Ensure", "the", "value", "is", "a", "string", "and", "return", "it", "as", "unicode", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L140-L144
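On Python 2 this accepts both str and unicode and always hands back unicode; on Python 3, six.string_types is just (str,). Non-strings raise in both cases:

import six

def ensure_unicode_string(value):
    if not isinstance(value, six.string_types):
        raise TypeError(u'Expected string value, got: {}'.format(value))
    return six.text_type(value)

print(ensure_unicode_string('Animal'))  # u'Animal' on Py2, 'Animal' on Py3
try:
    ensure_unicode_string(42)
except TypeError as error:
    print(error)  # Expected string value, got: 42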
246,749
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
get_uniquely_named_objects_by_name
def get_uniquely_named_objects_by_name(object_list): """Return dict of name -> object pairs from a list of objects with unique names. Args: object_list: list of objects, each X of which has a unique name accessible as X.name.value Returns: dict, { X.name.value: X for x in object_list } If the list is empty or None, returns an empty dict. """ if not object_list: return dict() result = dict() for obj in object_list: name = obj.name.value if name in result: raise GraphQLCompilationError(u'Found duplicate object key: ' u'{} {}'.format(name, object_list)) result[name] = obj return result
python
def get_uniquely_named_objects_by_name(object_list): if not object_list: return dict() result = dict() for obj in object_list: name = obj.name.value if name in result: raise GraphQLCompilationError(u'Found duplicate object key: ' u'{} {}'.format(name, object_list)) result[name] = obj return result
[ "def", "get_uniquely_named_objects_by_name", "(", "object_list", ")", ":", "if", "not", "object_list", ":", "return", "dict", "(", ")", "result", "=", "dict", "(", ")", "for", "obj", "in", "object_list", ":", "name", "=", "obj", ".", "name", ".", "value", "if", "name", "in", "result", ":", "raise", "GraphQLCompilationError", "(", "u'Found duplicate object key: '", "u'{} {}'", ".", "format", "(", "name", ",", "object_list", ")", ")", "result", "[", "name", "]", "=", "obj", "return", "result" ]
Return dict of name -> object pairs from a list of objects with unique names. Args: object_list: list of objects, each X of which has a unique name accessible as X.name.value Returns: dict, { X.name.value: X for x in object_list } If the list is empty or None, returns an empty dict.
[ "Return", "dict", "of", "name", "-", ">", "object", "pairs", "from", "a", "list", "of", "objects", "with", "unique", "names", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L147-L168
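A quick exercise of the name-keyed grouping, with hypothetical namedtuple stand-ins mimicking the '.name.value' shape of GraphQL AST argument nodes:

from collections import namedtuple

Name = namedtuple('Name', ['value'])
Argument = namedtuple('Argument', ['name', 'payload'])

arguments = [Argument(Name('op_name'), '='), Argument(Name('value'), '$animal_name')]
by_name = {arg.name.value: arg for arg in arguments}
print(sorted(by_name))  # ['op_name', 'value']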
246,750
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
validate_safe_string
def validate_safe_string(value): """Ensure the provided string does not have illegal characters.""" # The following strings are explicitly allowed, despite having otherwise-illegal chars. legal_strings_with_special_chars = frozenset({'@rid', '@class', '@this', '%'}) if not isinstance(value, six.string_types): raise TypeError(u'Expected string value, got: {} {}'.format( type(value).__name__, value)) if not value: raise GraphQLCompilationError(u'Empty strings are not allowed!') if value[0] in string.digits: raise GraphQLCompilationError(u'String values cannot start with a digit: {}'.format(value)) if not set(value).issubset(VARIABLE_ALLOWED_CHARS) and \ value not in legal_strings_with_special_chars: raise GraphQLCompilationError(u'Encountered illegal characters in string: {}'.format(value))
python
def validate_safe_string(value): # The following strings are explicitly allowed, despite having otherwise-illegal chars. legal_strings_with_special_chars = frozenset({'@rid', '@class', '@this', '%'}) if not isinstance(value, six.string_types): raise TypeError(u'Expected string value, got: {} {}'.format( type(value).__name__, value)) if not value: raise GraphQLCompilationError(u'Empty strings are not allowed!') if value[0] in string.digits: raise GraphQLCompilationError(u'String values cannot start with a digit: {}'.format(value)) if not set(value).issubset(VARIABLE_ALLOWED_CHARS) and \ value not in legal_strings_with_special_chars: raise GraphQLCompilationError(u'Encountered illegal characters in string: {}'.format(value))
[ "def", "validate_safe_string", "(", "value", ")", ":", "# The following strings are explicitly allowed, despite having otherwise-illegal chars.", "legal_strings_with_special_chars", "=", "frozenset", "(", "{", "'@rid'", ",", "'@class'", ",", "'@this'", ",", "'%'", "}", ")", "if", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "raise", "TypeError", "(", "u'Expected string value, got: {} {}'", ".", "format", "(", "type", "(", "value", ")", ".", "__name__", ",", "value", ")", ")", "if", "not", "value", ":", "raise", "GraphQLCompilationError", "(", "u'Empty strings are not allowed!'", ")", "if", "value", "[", "0", "]", "in", "string", ".", "digits", ":", "raise", "GraphQLCompilationError", "(", "u'String values cannot start with a digit: {}'", ".", "format", "(", "value", ")", ")", "if", "not", "set", "(", "value", ")", ".", "issubset", "(", "VARIABLE_ALLOWED_CHARS", ")", "and", "value", "not", "in", "legal_strings_with_special_chars", ":", "raise", "GraphQLCompilationError", "(", "u'Encountered illegal characters in string: {}'", ".", "format", "(", "value", ")", ")" ]
Ensure the provided string does not have illegal characters.
[ "Ensure", "the", "provided", "string", "does", "not", "have", "illegal", "characters", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L177-L194
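A sketch of the three checks, assuming the allowed alphabet is letters, digits and underscores (the real VARIABLE_ALLOWED_CHARS constant is defined elsewhere in helpers.py):

import string

ALLOWED = frozenset(string.ascii_letters + string.digits + '_')  # assumed alphabet
SPECIAL_OK = frozenset({'@rid', '@class', '@this', '%'})

def looks_safe(value):
    if not value or value[0] in string.digits:
        return False
    return set(value).issubset(ALLOWED) or value in SPECIAL_OK

print(looks_safe('animal_name'))  # True
print(looks_safe('1bad'))         # False: starts with a digit
print(looks_safe('@class'))       # True: explicitly whitelisted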
246,751
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
validate_edge_direction
def validate_edge_direction(edge_direction): """Ensure the provided edge direction is either "in" or "out".""" if not isinstance(edge_direction, six.string_types): raise TypeError(u'Expected string edge_direction, got: {} {}'.format( type(edge_direction), edge_direction)) if edge_direction not in ALLOWED_EDGE_DIRECTIONS: raise ValueError(u'Unrecognized edge direction: {}'.format(edge_direction))
python
def validate_edge_direction(edge_direction): if not isinstance(edge_direction, six.string_types): raise TypeError(u'Expected string edge_direction, got: {} {}'.format( type(edge_direction), edge_direction)) if edge_direction not in ALLOWED_EDGE_DIRECTIONS: raise ValueError(u'Unrecognized edge direction: {}'.format(edge_direction))
[ "def", "validate_edge_direction", "(", "edge_direction", ")", ":", "if", "not", "isinstance", "(", "edge_direction", ",", "six", ".", "string_types", ")", ":", "raise", "TypeError", "(", "u'Expected string edge_direction, got: {} {}'", ".", "format", "(", "type", "(", "edge_direction", ")", ",", "edge_direction", ")", ")", "if", "edge_direction", "not", "in", "ALLOWED_EDGE_DIRECTIONS", ":", "raise", "ValueError", "(", "u'Unrecognized edge direction: {}'", ".", "format", "(", "edge_direction", ")", ")" ]
Ensure the provided edge direction is either "in" or "out".
[ "Ensure", "the", "provided", "edge", "direction", "is", "either", "in", "or", "out", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L205-L212
246,752
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
validate_marked_location
def validate_marked_location(location): """Validate that a Location object is safe for marking, and not at a field.""" if not isinstance(location, (Location, FoldScopeLocation)): raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'.format( type(location).__name__, location)) if location.field is not None: raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location))
python
def validate_marked_location(location): if not isinstance(location, (Location, FoldScopeLocation)): raise TypeError(u'Expected Location or FoldScopeLocation location, got: {} {}'.format( type(location).__name__, location)) if location.field is not None: raise GraphQLCompilationError(u'Cannot mark location at a field: {}'.format(location))
[ "def", "validate_marked_location", "(", "location", ")", ":", "if", "not", "isinstance", "(", "location", ",", "(", "Location", ",", "FoldScopeLocation", ")", ")", ":", "raise", "TypeError", "(", "u'Expected Location or FoldScopeLocation location, got: {} {}'", ".", "format", "(", "type", "(", "location", ")", ".", "__name__", ",", "location", ")", ")", "if", "location", ".", "field", "is", "not", "None", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot mark location at a field: {}'", ".", "format", "(", "location", ")", ")" ]
Validate that a Location object is safe for marking, and not at a field.
[ "Validate", "that", "a", "Location", "object", "is", "safe", "for", "marking", "and", "not", "at", "a", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L215-L222
246,753
kensho-technologies/graphql-compiler
graphql_compiler/compiler/helpers.py
invert_dict
def invert_dict(invertible_dict): """Invert a dict. A dict is invertible if values are unique and hashable.""" inverted = {} for k, v in six.iteritems(invertible_dict): if not isinstance(v, Hashable): raise TypeError(u'Expected an invertible dict, but value at key {} has type {}'.format( k, type(v).__name__)) if v in inverted: raise TypeError(u'Expected an invertible dict, but keys ' u'{} and {} map to the same value'.format( inverted[v], k)) inverted[v] = k return inverted
python
def invert_dict(invertible_dict): inverted = {} for k, v in six.iteritems(invertible_dict): if not isinstance(v, Hashable): raise TypeError(u'Expected an invertible dict, but value at key {} has type {}'.format( k, type(v).__name__)) if v in inverted: raise TypeError(u'Expected an invertible dict, but keys ' u'{} and {} map to the same value'.format( inverted[v], k)) inverted[v] = k return inverted
[ "def", "invert_dict", "(", "invertible_dict", ")", ":", "inverted", "=", "{", "}", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "invertible_dict", ")", ":", "if", "not", "isinstance", "(", "v", ",", "Hashable", ")", ":", "raise", "TypeError", "(", "u'Expected an invertible dict, but value at key {} has type {}'", ".", "format", "(", "k", ",", "type", "(", "v", ")", ".", "__name__", ")", ")", "if", "v", "in", "inverted", ":", "raise", "TypeError", "(", "u'Expected an invertible dict, but keys '", "u'{} and {} map to the same value'", ".", "format", "(", "inverted", "[", "v", "]", ",", "k", ")", ")", "inverted", "[", "v", "]", "=", "k", "return", "inverted" ]
Invert a dict. A dict is invertible if values are unique and hashable.
[ "Invert", "a", "dict", ".", "A", "dict", "is", "invertible", "if", "values", "are", "unique", "and", "hashable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/helpers.py#L230-L242
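The invertibility requirement in action: with unique, hashable values the mapping round-trips, while duplicate values would make the helper raise TypeError:

mapping = {'out_': 'out', 'in_': 'in'}  # values are unique and hashable
inverted = {v: k for k, v in mapping.items()}
print(inverted)                                        # {'out': 'out_', 'in': 'in_'}
assert {v: k for k, v in inverted.items()} == mapping  # round-trip recovers the original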
246,754
kensho-technologies/graphql-compiler
setup.py
read_file
def read_file(filename): """Read package file as text to get name and version""" # intentionally *not* adding an encoding option to open # see here: # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 here = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(here, 'graphql_compiler', filename), 'r') as f: return f.read()
python
def read_file(filename): # intentionally *not* adding an encoding option to open # see here: # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690 here = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(here, 'graphql_compiler', filename), 'r') as f: return f.read()
[ "def", "read_file", "(", "filename", ")", ":", "# intentionally *not* adding an encoding option to open", "# see here:", "# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690", "here", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", "with", "codecs", ".", "open", "(", "os", ".", "path", ".", "join", "(", "here", ",", "'graphql_compiler'", ",", "filename", ")", ",", "'r'", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")" ]
Read package file as text to get name and version
[ "Read", "package", "file", "as", "text", "to", "get", "name", "and", "version" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L13-L20
246,755
kensho-technologies/graphql-compiler
setup.py
find_version
def find_version(): """Only define version in one place""" version_file = read_file('__init__.py') version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']', version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError('Unable to find version string.')
python
def find_version(): version_file = read_file('__init__.py') version_match = re.search(r'^__version__ = ["\']([^"\']*)["\']', version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError('Unable to find version string.')
[ "def", "find_version", "(", ")", ":", "version_file", "=", "read_file", "(", "'__init__.py'", ")", "version_match", "=", "re", ".", "search", "(", "r'^__version__ = [\"\\']([^\"\\']*)[\"\\']'", ",", "version_file", ",", "re", ".", "M", ")", "if", "version_match", ":", "return", "version_match", ".", "group", "(", "1", ")", "raise", "RuntimeError", "(", "'Unable to find version string.'", ")" ]
Only define version in one place
[ "Only", "define", "version", "in", "one", "place" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L23-L30
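A quick demonstration of the version regex against a sample file body (the sample string and version number are hypothetical):

import re

sample = '__package_name__ = "graphql-compiler"\n__version__ = "1.7.0"\n'
# re.M lets ^ match at the start of each line, not just the start of the string.
match = re.search(r'^__version__ = ["\']([^"\']*)["\']', sample, re.M)
assert match is not None and match.group(1) == '1.7.0'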
246,756
kensho-technologies/graphql-compiler
setup.py
find_name
def find_name(): """Only define name in one place""" name_file = read_file('__init__.py') name_match = re.search(r'^__package_name__ = ["\']([^"\']*)["\']', name_file, re.M) if name_match: return name_match.group(1) raise RuntimeError('Unable to find name string.')
python
def find_name(): name_file = read_file('__init__.py') name_match = re.search(r'^__package_name__ = ["\']([^"\']*)["\']', name_file, re.M) if name_match: return name_match.group(1) raise RuntimeError('Unable to find name string.')
[ "def", "find_name", "(", ")", ":", "name_file", "=", "read_file", "(", "'__init__.py'", ")", "name_match", "=", "re", ".", "search", "(", "r'^__package_name__ = [\"\\']([^\"\\']*)[\"\\']'", ",", "name_file", ",", "re", ".", "M", ")", "if", "name_match", ":", "return", "name_match", ".", "group", "(", "1", ")", "raise", "RuntimeError", "(", "'Unable to find name string.'", ")" ]
Only define name in one place
[ "Only", "define", "name", "in", "one", "place" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/setup.py#L33-L40
246,757
kensho-technologies/graphql-compiler
graphql_compiler/compiler/workarounds/orientdb_class_with_while.py
workaround_type_coercions_in_recursions
def workaround_type_coercions_in_recursions(match_query): """Lower CoerceType blocks into Filter blocks within Recurse steps.""" # This step is required to work around an OrientDB bug that causes queries with both # "while:" and "class:" in the same query location to fail to parse correctly. # # This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129 # # Instead of "class:", we use "INSTANCEOF" in the "where:" clause to get correct behavior. # However, we don't want to switch all coercions to this format, since the "class:" clause # provides valuable info to the MATCH query scheduler about how to schedule efficiently. new_match_traversals = [] for current_traversal in match_query.match_traversals: new_traversal = [] for match_step in current_traversal: new_match_step = match_step has_coerce_type = match_step.coerce_type_block is not None has_recurse_root = isinstance(match_step.root_block, Recurse) if has_coerce_type and has_recurse_root: new_where_block = convert_coerce_type_and_add_to_where_block( match_step.coerce_type_block, match_step.where_block) new_match_step = match_step._replace(coerce_type_block=None, where_block=new_where_block) new_traversal.append(new_match_step) new_match_traversals.append(new_traversal) return match_query._replace(match_traversals=new_match_traversals)
python
def workaround_type_coercions_in_recursions(match_query): # This step is required to work around an OrientDB bug that causes queries with both # "while:" and "class:" in the same query location to fail to parse correctly. # # This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129 # # Instead of "class:", we use "INSTANCEOF" in the "where:" clause to get correct behavior. # However, we don't want to switch all coercions to this format, since the "class:" clause # provides valuable info to the MATCH query scheduler about how to schedule efficiently. new_match_traversals = [] for current_traversal in match_query.match_traversals: new_traversal = [] for match_step in current_traversal: new_match_step = match_step has_coerce_type = match_step.coerce_type_block is not None has_recurse_root = isinstance(match_step.root_block, Recurse) if has_coerce_type and has_recurse_root: new_where_block = convert_coerce_type_and_add_to_where_block( match_step.coerce_type_block, match_step.where_block) new_match_step = match_step._replace(coerce_type_block=None, where_block=new_where_block) new_traversal.append(new_match_step) new_match_traversals.append(new_traversal) return match_query._replace(match_traversals=new_match_traversals)
[ "def", "workaround_type_coercions_in_recursions", "(", "match_query", ")", ":", "# This step is required to work around an OrientDB bug that causes queries with both", "# \"while:\" and \"class:\" in the same query location to fail to parse correctly.", "#", "# This bug is reported upstream: https://github.com/orientechnologies/orientdb/issues/8129", "#", "# Instead of \"class:\", we use \"INSTANCEOF\" in the \"where:\" clause to get correct behavior.", "# However, we don't want to switch all coercions to this format, since the \"class:\" clause", "# provides valuable info to the MATCH query scheduler about how to schedule efficiently.", "new_match_traversals", "=", "[", "]", "for", "current_traversal", "in", "match_query", ".", "match_traversals", ":", "new_traversal", "=", "[", "]", "for", "match_step", "in", "current_traversal", ":", "new_match_step", "=", "match_step", "has_coerce_type", "=", "match_step", ".", "coerce_type_block", "is", "not", "None", "has_recurse_root", "=", "isinstance", "(", "match_step", ".", "root_block", ",", "Recurse", ")", "if", "has_coerce_type", "and", "has_recurse_root", ":", "new_where_block", "=", "convert_coerce_type_and_add_to_where_block", "(", "match_step", ".", "coerce_type_block", ",", "match_step", ".", "where_block", ")", "new_match_step", "=", "match_step", ".", "_replace", "(", "coerce_type_block", "=", "None", ",", "where_block", "=", "new_where_block", ")", "new_traversal", ".", "append", "(", "new_match_step", ")", "new_match_traversals", ".", "append", "(", "new_traversal", ")", "return", "match_query", ".", "_replace", "(", "match_traversals", "=", "new_match_traversals", ")" ]
Lower CoerceType blocks into Filter blocks within Recurse steps.
[ "Lower", "CoerceType", "blocks", "into", "Filter", "blocks", "within", "Recurse", "steps", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_class_with_while.py#L11-L42
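The workaround relies on namedtuple._replace to build modified copies of immutable IR steps. A standalone sketch of that pattern (field values are plain strings here, not real compiler blocks):

from collections import namedtuple

MatchStep = namedtuple('MatchStep', ['root_block', 'coerce_type_block', 'where_block'])
step = MatchStep(root_block='Recurse', coerce_type_block='CoerceType', where_block=None)

# _replace returns a new namedtuple; the original step is left untouched.
new_step = step._replace(coerce_type_block=None, where_block='Filter')
assert step.coerce_type_block == 'CoerceType'
assert new_step.coerce_type_block is None and new_step.where_block == 'Filter'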
246,758
kensho-technologies/graphql-compiler
graphql_compiler/tool.py
main
def main(): """Read a GraphQL query from standard input, and output it pretty-printed to standard output.""" query = ' '.join(sys.stdin.readlines()) sys.stdout.write(pretty_print_graphql(query))
python
def main(): query = ' '.join(sys.stdin.readlines()) sys.stdout.write(pretty_print_graphql(query))
[ "def", "main", "(", ")", ":", "query", "=", "' '", ".", "join", "(", "sys", ".", "stdin", ".", "readlines", "(", ")", ")", "sys", ".", "stdout", ".", "write", "(", "pretty_print_graphql", "(", "query", ")", ")" ]
Read a GraphQL query from standard input, and output it pretty-printed to standard output.
[ "Read", "a", "GraphQL", "query", "from", "standard", "input", "and", "output", "it", "pretty", "-", "printed", "to", "standard", "output", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/tool.py#L12-L16
246,759
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/gremlin_formatting.py
_safe_gremlin_string
def _safe_gremlin_string(value): """Sanitize and represent a string argument in Gremlin.""" if not isinstance(value, six.string_types): if isinstance(value, bytes): # should only happen in py3 value = value.decode('utf-8') else: raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: ' u'{}'.format(value)) # Using JSON encoding means that all unicode literals and special chars # (e.g. newlines and backslashes) are replaced by appropriate escape sequences. # However, the quoted result is wrapped in double quotes, and $ signs are not escaped, # so that would allow arbitrary code execution in Gremlin. # We will therefore turn the double-quoted string into a single-quoted one to avoid this risk. escaped_and_quoted = json.dumps(value) # Double-quoted string literals in Gremlin/Groovy allow # arbitrary code execution via string interpolation and closures. # To avoid this, we perform the following steps: # - we strip the wrapping double quotes; # - we un-escape any double-quotes in the string, by replacing \" with "; # - we escape any single-quotes in the string, by replacing ' with \'; # - finally, we wrap the string in single quotes. # http://www.groovy-lang.org/syntax.html#_double_quoted_string if not escaped_and_quoted[0] == escaped_and_quoted[-1] == '"': raise AssertionError(u'Unreachable state reached: {} {}'.format(value, escaped_and_quoted)) no_quotes = escaped_and_quoted[1:-1] re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'') final_escaped_value = '\'' + re_escaped + '\'' return final_escaped_value
python
def _safe_gremlin_string(value): if not isinstance(value, six.string_types): if isinstance(value, bytes): # should only happen in py3 value = value.decode('utf-8') else: raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: ' u'{}'.format(value)) # Using JSON encoding means that all unicode literals and special chars # (e.g. newlines and backslashes) are replaced by appropriate escape sequences. # However, the quoted result is wrapped in double quotes, and $ signs are not escaped, # so that would allow arbitrary code execution in Gremlin. # We will therefore turn the double-quoted string into a single-quoted one to avoid this risk. escaped_and_quoted = json.dumps(value) # Double-quoted string literals in Gremlin/Groovy allow # arbitrary code execution via string interpolation and closures. # To avoid this, we perform the following steps: # - we strip the wrapping double quotes; # - we un-escape any double-quotes in the string, by replacing \" with "; # - we escape any single-quotes in the string, by replacing ' with \'; # - finally, we wrap the string in single quotes. # http://www.groovy-lang.org/syntax.html#_double_quoted_string if not escaped_and_quoted[0] == escaped_and_quoted[-1] == '"': raise AssertionError(u'Unreachable state reached: {} {}'.format(value, escaped_and_quoted)) no_quotes = escaped_and_quoted[1:-1] re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'') final_escaped_value = '\'' + re_escaped + '\'' return final_escaped_value
[ "def", "_safe_gremlin_string", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "# should only happen in py3", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "else", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to convert a non-string into a string: '", "u'{}'", ".", "format", "(", "value", ")", ")", "# Using JSON encoding means that all unicode literals and special chars", "# (e.g. newlines and backslashes) are replaced by appropriate escape sequences.", "# However, the quoted result is wrapped in double quotes, and $ signs are not escaped,", "# so that would allow arbitrary code execution in Gremlin.", "# We will therefore turn the double-quoted string into a single-quoted one to avoid this risk.", "escaped_and_quoted", "=", "json", ".", "dumps", "(", "value", ")", "# Double-quoted string literals in Gremlin/Groovy allow", "# arbitrary code execution via string interpolation and closures.", "# To avoid this, we perform the following steps:", "# - we strip the wrapping double quotes;", "# - we un-escape any double-quotes in the string, by replacing \\\" with \";", "# - we escape any single-quotes in the string, by replacing ' with \\';", "# - finally, we wrap the string in single quotes.", "# http://www.groovy-lang.org/syntax.html#_double_quoted_string", "if", "not", "escaped_and_quoted", "[", "0", "]", "==", "escaped_and_quoted", "[", "-", "1", "]", "==", "'\"'", ":", "raise", "AssertionError", "(", "u'Unreachable state reached: {} {}'", ".", "format", "(", "value", ",", "escaped_and_quoted", ")", ")", "no_quotes", "=", "escaped_and_quoted", "[", "1", ":", "-", "1", "]", "re_escaped", "=", "no_quotes", ".", "replace", "(", "'\\\\\"'", ",", "'\"'", ")", ".", "replace", "(", "'\\''", ",", "'\\\\\\''", ")", "final_escaped_value", "=", "'\\''", "+", "re_escaped", "+", "'\\''", "return", "final_escaped_value" ]
Sanitize and represent a string argument in Gremlin.
[ "Sanitize", "and", "represent", "a", "string", "argument", "in", "Gremlin", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L18-L48
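The double-quote-to-single-quote conversion can be traced step by step on a sample value (the input string is arbitrary):

import json

value = u'it\'s a "test" $var'
escaped_and_quoted = json.dumps(value)   # double-quoted: "it's a \"test\" $var"
no_quotes = escaped_and_quoted[1:-1]
re_escaped = no_quotes.replace('\\"', '"').replace('\'', '\\\'')
# single-quoted, so $ is inert in Groovy: 'it\'s a "test" $var'
print('\'' + re_escaped + '\'')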
246,760
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/gremlin_formatting.py
_safe_gremlin_list
def _safe_gremlin_list(inner_type, argument_value): """Represent the list of "inner_type" objects in Gremlin form.""" if not isinstance(argument_value, list): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: ' u'{}'.format(argument_value)) stripped_type = strip_non_null_from_type(inner_type) components = ( _safe_gremlin_argument(stripped_type, x) for x in argument_value ) return u'[' + u','.join(components) + u']'
python
def _safe_gremlin_list(inner_type, argument_value): if not isinstance(argument_value, list): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: ' u'{}'.format(argument_value)) stripped_type = strip_non_null_from_type(inner_type) components = ( _safe_gremlin_argument(stripped_type, x) for x in argument_value ) return u'[' + u','.join(components) + u']'
[ "def", "_safe_gremlin_list", "(", "inner_type", ",", "argument_value", ")", ":", "if", "not", "isinstance", "(", "argument_value", ",", "list", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to represent a non-list as a list: '", "u'{}'", ".", "format", "(", "argument_value", ")", ")", "stripped_type", "=", "strip_non_null_from_type", "(", "inner_type", ")", "components", "=", "(", "_safe_gremlin_argument", "(", "stripped_type", ",", "x", ")", "for", "x", "in", "argument_value", ")", "return", "u'['", "+", "u','", ".", "join", "(", "components", ")", "+", "u']'" ]
Represent the list of "inner_type" objects in Gremlin form.
[ "Represent", "the", "list", "of", "inner_type", "objects", "in", "Gremlin", "form", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L81-L92
246,761
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/gremlin_formatting.py
_safe_gremlin_argument
def _safe_gremlin_argument(expected_type, argument_value): """Return a Gremlin string representing the given argument value.""" if GraphQLString.is_same_type(expected_type): return _safe_gremlin_string(argument_value) elif GraphQLID.is_same_type(expected_type): # IDs can be strings or numbers, but the GraphQL library coerces them to strings. # We will follow suit and treat them as strings. if not isinstance(argument_value, six.string_types): if isinstance(argument_value, bytes): # should only happen in py3 argument_value = argument_value.decode('utf-8') else: argument_value = six.text_type(argument_value) return _safe_gremlin_string(argument_value) elif GraphQLFloat.is_same_type(expected_type): return represent_float_as_str(argument_value) elif GraphQLInt.is_same_type(expected_type): # Special case: in Python, isinstance(True, int) returns True. # Safeguard against this with an explicit check against bool type. if isinstance(argument_value, bool): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: ' u'{}'.format(argument_value)) return type_check_and_str(int, argument_value) elif GraphQLBoolean.is_same_type(expected_type): return type_check_and_str(bool, argument_value) elif GraphQLDecimal.is_same_type(expected_type): return _safe_gremlin_decimal(argument_value) elif GraphQLDate.is_same_type(expected_type): return _safe_gremlin_date_and_datetime(expected_type, (datetime.date,), argument_value) elif GraphQLDateTime.is_same_type(expected_type): return _safe_gremlin_date_and_datetime(expected_type, (datetime.datetime, arrow.Arrow), argument_value) elif isinstance(expected_type, GraphQLList): return _safe_gremlin_list(expected_type.of_type, argument_value) else: raise AssertionError(u'Could not safely represent the requested GraphQL type: ' u'{} {}'.format(expected_type, argument_value))
python
def _safe_gremlin_argument(expected_type, argument_value): if GraphQLString.is_same_type(expected_type): return _safe_gremlin_string(argument_value) elif GraphQLID.is_same_type(expected_type): # IDs can be strings or numbers, but the GraphQL library coerces them to strings. # We will follow suit and treat them as strings. if not isinstance(argument_value, six.string_types): if isinstance(argument_value, bytes): # should only happen in py3 argument_value = argument_value.decode('utf-8') else: argument_value = six.text_type(argument_value) return _safe_gremlin_string(argument_value) elif GraphQLFloat.is_same_type(expected_type): return represent_float_as_str(argument_value) elif GraphQLInt.is_same_type(expected_type): # Special case: in Python, isinstance(True, int) returns True. # Safeguard against this with an explicit check against bool type. if isinstance(argument_value, bool): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-int as an int: ' u'{}'.format(argument_value)) return type_check_and_str(int, argument_value) elif GraphQLBoolean.is_same_type(expected_type): return type_check_and_str(bool, argument_value) elif GraphQLDecimal.is_same_type(expected_type): return _safe_gremlin_decimal(argument_value) elif GraphQLDate.is_same_type(expected_type): return _safe_gremlin_date_and_datetime(expected_type, (datetime.date,), argument_value) elif GraphQLDateTime.is_same_type(expected_type): return _safe_gremlin_date_and_datetime(expected_type, (datetime.datetime, arrow.Arrow), argument_value) elif isinstance(expected_type, GraphQLList): return _safe_gremlin_list(expected_type.of_type, argument_value) else: raise AssertionError(u'Could not safely represent the requested GraphQL type: ' u'{} {}'.format(expected_type, argument_value))
[ "def", "_safe_gremlin_argument", "(", "expected_type", ",", "argument_value", ")", ":", "if", "GraphQLString", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "_safe_gremlin_string", "(", "argument_value", ")", "elif", "GraphQLID", ".", "is_same_type", "(", "expected_type", ")", ":", "# IDs can be strings or numbers, but the GraphQL library coerces them to strings.", "# We will follow suit and treat them as strings.", "if", "not", "isinstance", "(", "argument_value", ",", "six", ".", "string_types", ")", ":", "if", "isinstance", "(", "argument_value", ",", "bytes", ")", ":", "# should only happen in py3", "argument_value", "=", "argument_value", ".", "decode", "(", "'utf-8'", ")", "else", ":", "argument_value", "=", "six", ".", "text_type", "(", "argument_value", ")", "return", "_safe_gremlin_string", "(", "argument_value", ")", "elif", "GraphQLFloat", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "represent_float_as_str", "(", "argument_value", ")", "elif", "GraphQLInt", ".", "is_same_type", "(", "expected_type", ")", ":", "# Special case: in Python, isinstance(True, int) returns True.", "# Safeguard against this with an explicit check against bool type.", "if", "isinstance", "(", "argument_value", ",", "bool", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to represent a non-int as an int: '", "u'{}'", ".", "format", "(", "argument_value", ")", ")", "return", "type_check_and_str", "(", "int", ",", "argument_value", ")", "elif", "GraphQLBoolean", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "type_check_and_str", "(", "bool", ",", "argument_value", ")", "elif", "GraphQLDecimal", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "_safe_gremlin_decimal", "(", "argument_value", ")", "elif", "GraphQLDate", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "_safe_gremlin_date_and_datetime", "(", "expected_type", ",", "(", "datetime", ".", "date", ",", ")", ",", "argument_value", ")", "elif", "GraphQLDateTime", ".", "is_same_type", "(", "expected_type", ")", ":", "return", "_safe_gremlin_date_and_datetime", "(", "expected_type", ",", "(", "datetime", ".", "datetime", ",", "arrow", ".", "Arrow", ")", ",", "argument_value", ")", "elif", "isinstance", "(", "expected_type", ",", "GraphQLList", ")", ":", "return", "_safe_gremlin_list", "(", "expected_type", ".", "of_type", ",", "argument_value", ")", "else", ":", "raise", "AssertionError", "(", "u'Could not safely represent the requested GraphQL type: '", "u'{} {}'", ".", "format", "(", "expected_type", ",", "argument_value", ")", ")" ]
Return a Gremlin string representing the given argument value.
[ "Return", "a", "Gremlin", "string", "representing", "the", "given", "argument", "value", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L95-L131
246,762
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/gremlin_formatting.py
insert_arguments_into_gremlin_query
def insert_arguments_into_gremlin_query(compilation_result, arguments): """Insert the arguments into the compiled Gremlin query to form a complete query. The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output. Double-quoted strings allow inline interpolation with the $ symbol, see here for details: http://www.groovy-lang.org/syntax.html#all-strings If the compiler needs to emit a literal '$' character as part of the Gremlin query, it must be doubled ('$$') to avoid being interpreted as a query parameter. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a Gremlin query with inserted argument data """ if compilation_result.language != GREMLIN_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query argument_types = compilation_result.input_metadata # The arguments are assumed to have already been validated against the query. sanitized_arguments = { key: _safe_gremlin_argument(argument_types[key], value) for key, value in six.iteritems(arguments) } return Template(base_query).substitute(sanitized_arguments)
python
def insert_arguments_into_gremlin_query(compilation_result, arguments): if compilation_result.language != GREMLIN_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query argument_types = compilation_result.input_metadata # The arguments are assumed to have already been validated against the query. sanitized_arguments = { key: _safe_gremlin_argument(argument_types[key], value) for key, value in six.iteritems(arguments) } return Template(base_query).substitute(sanitized_arguments)
[ "def", "insert_arguments_into_gremlin_query", "(", "compilation_result", ",", "arguments", ")", ":", "if", "compilation_result", ".", "language", "!=", "GREMLIN_LANGUAGE", ":", "raise", "AssertionError", "(", "u'Unexpected query output language: {}'", ".", "format", "(", "compilation_result", ")", ")", "base_query", "=", "compilation_result", ".", "query", "argument_types", "=", "compilation_result", ".", "input_metadata", "# The arguments are assumed to have already been validated against the query.", "sanitized_arguments", "=", "{", "key", ":", "_safe_gremlin_argument", "(", "argument_types", "[", "key", "]", ",", "value", ")", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "arguments", ")", "}", "return", "Template", "(", "base_query", ")", ".", "substitute", "(", "sanitized_arguments", ")" ]
Insert the arguments into the compiled Gremlin query to form a complete query. The GraphQL compiler attempts to use single-quoted string literals ('abc') in Gremlin output. Double-quoted strings allow inline interpolation with the $ symbol, see here for details: http://www.groovy-lang.org/syntax.html#all-strings If the compiler needs to emit a literal '$' character as part of the Gremlin query, it must be doubled ('$$') to avoid being interpreted as a query parameter. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a Gremlin query with inserted argument data
[ "Insert", "the", "arguments", "into", "the", "compiled", "Gremlin", "query", "to", "form", "a", "complete", "query", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/gremlin_formatting.py#L138-L167
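How Template substitution behaves with the $ conventions described in the docstring (the query text and parameter are hypothetical):

from string import Template

base_query = u"g.V('name', $name)"
print(Template(base_query).substitute({'name': u"'Alice'"}))   # g.V('name', 'Alice')

# A literal dollar sign must be doubled in the template: $$ renders as $.
print(Template(u'total: $$$amount').substitute({'amount': u'5'}))  # total: $5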
246,763
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_get_vertex_location_name
def _get_vertex_location_name(location): """Get the location name from a location that is expected to point to a vertex.""" mark_name, field_name = location.get_location_name() if field_name is not None: raise AssertionError(u'Location unexpectedly pointed to a field: {}'.format(location)) return mark_name
python
def _get_vertex_location_name(location): mark_name, field_name = location.get_location_name() if field_name is not None: raise AssertionError(u'Location unexpectedly pointed to a field: {}'.format(location)) return mark_name
[ "def", "_get_vertex_location_name", "(", "location", ")", ":", "mark_name", ",", "field_name", "=", "location", ".", "get_location_name", "(", ")", "if", "field_name", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Location unexpectedly pointed to a field: {}'", ".", "format", "(", "location", ")", ")", "return", "mark_name" ]
Get the location name from a location that is expected to point to a vertex.
[ "Get", "the", "location", "name", "from", "a", "location", "that", "is", "expected", "to", "point", "to", "a", "vertex", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L12-L18
246,764
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_first_step_to_match
def _first_step_to_match(match_step): """Transform the very first MATCH step into a MATCH query string.""" parts = [] if match_step.root_block is not None: if not isinstance(match_step.root_block, QueryRoot): raise AssertionError(u'Expected None or QueryRoot root block, received: ' u'{} {}'.format(match_step.root_block, match_step)) match_step.root_block.validate() start_class = get_only_element_from_collection(match_step.root_block.start_class) parts.append(u'class: %s' % (start_class,)) # MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'. if match_step.coerce_type_block is not None: raise AssertionError(u'Invalid MATCH step: {}'.format(match_step)) if match_step.where_block: match_step.where_block.validate() parts.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),)) if match_step.as_block is None: raise AssertionError(u'Found a MATCH step without a corresponding Location. ' u'This should never happen: {}'.format(match_step)) else: match_step.as_block.validate() parts.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),)) return u'{{ %s }}' % (u', '.join(parts),)
python
def _first_step_to_match(match_step): parts = [] if match_step.root_block is not None: if not isinstance(match_step.root_block, QueryRoot): raise AssertionError(u'Expected None or QueryRoot root block, received: ' u'{} {}'.format(match_step.root_block, match_step)) match_step.root_block.validate() start_class = get_only_element_from_collection(match_step.root_block.start_class) parts.append(u'class: %s' % (start_class,)) # MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'. if match_step.coerce_type_block is not None: raise AssertionError(u'Invalid MATCH step: {}'.format(match_step)) if match_step.where_block: match_step.where_block.validate() parts.append(u'where: (%s)' % (match_step.where_block.predicate.to_match(),)) if match_step.as_block is None: raise AssertionError(u'Found a MATCH step without a corresponding Location. ' u'This should never happen: {}'.format(match_step)) else: match_step.as_block.validate() parts.append(u'as: %s' % (_get_vertex_location_name(match_step.as_block.location),)) return u'{{ %s }}' % (u', '.join(parts),)
[ "def", "_first_step_to_match", "(", "match_step", ")", ":", "parts", "=", "[", "]", "if", "match_step", ".", "root_block", "is", "not", "None", ":", "if", "not", "isinstance", "(", "match_step", ".", "root_block", ",", "QueryRoot", ")", ":", "raise", "AssertionError", "(", "u'Expected None or QueryRoot root block, received: '", "u'{} {}'", ".", "format", "(", "match_step", ".", "root_block", ",", "match_step", ")", ")", "match_step", ".", "root_block", ".", "validate", "(", ")", "start_class", "=", "get_only_element_from_collection", "(", "match_step", ".", "root_block", ".", "start_class", ")", "parts", ".", "append", "(", "u'class: %s'", "%", "(", "start_class", ",", ")", ")", "# MATCH steps with a QueryRoot root block shouldn't have a 'coerce_type_block'.", "if", "match_step", ".", "coerce_type_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Invalid MATCH step: {}'", ".", "format", "(", "match_step", ")", ")", "if", "match_step", ".", "where_block", ":", "match_step", ".", "where_block", ".", "validate", "(", ")", "parts", ".", "append", "(", "u'where: (%s)'", "%", "(", "match_step", ".", "where_block", ".", "predicate", ".", "to_match", "(", ")", ",", ")", ")", "if", "match_step", ".", "as_block", "is", "None", ":", "raise", "AssertionError", "(", "u'Found a MATCH step without a corresponding Location. '", "u'This should never happen: {}'", ".", "format", "(", "match_step", ")", ")", "else", ":", "match_step", ".", "as_block", ".", "validate", "(", ")", "parts", ".", "append", "(", "u'as: %s'", "%", "(", "_get_vertex_location_name", "(", "match_step", ".", "as_block", ".", "location", ")", ",", ")", ")", "return", "u'{{ %s }}'", "%", "(", "u', '", ".", "join", "(", "parts", ")", ",", ")" ]
Transform the very first MATCH step into a MATCH query string.
[ "Transform", "the", "very", "first", "MATCH", "step", "into", "a", "MATCH", "query", "string", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L21-L50
246,765
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_represent_match_traversal
def _represent_match_traversal(match_traversal): """Emit MATCH query code for an entire MATCH traversal sequence.""" output = [] output.append(_first_step_to_match(match_traversal[0])) for step in match_traversal[1:]: output.append(_subsequent_step_to_match(step)) return u''.join(output)
python
def _represent_match_traversal(match_traversal): output = [] output.append(_first_step_to_match(match_traversal[0])) for step in match_traversal[1:]: output.append(_subsequent_step_to_match(step)) return u''.join(output)
[ "def", "_represent_match_traversal", "(", "match_traversal", ")", ":", "output", "=", "[", "]", "output", ".", "append", "(", "_first_step_to_match", "(", "match_traversal", "[", "0", "]", ")", ")", "for", "step", "in", "match_traversal", "[", "1", ":", "]", ":", "output", ".", "append", "(", "_subsequent_step_to_match", "(", "step", ")", ")", "return", "u''", ".", "join", "(", "output", ")" ]
Emit MATCH query code for an entire MATCH traversal sequence.
[ "Emit", "MATCH", "query", "code", "for", "an", "entire", "MATCH", "traversal", "sequence", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L94-L102
246,766
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_represent_fold
def _represent_fold(fold_location, fold_ir_blocks): """Emit a LET clause corresponding to the IR blocks for a @fold scope.""" start_let_template = u'$%(mark_name)s = %(base_location)s' traverse_edge_template = u'.%(direction)s("%(edge_name)s")' base_template = start_let_template + traverse_edge_template edge_direction, edge_name = fold_location.get_first_folded_edge() mark_name, _ = fold_location.get_location_name() base_location_name, _ = fold_location.base_location.get_location_name() validate_safe_string(mark_name) validate_safe_string(base_location_name) validate_safe_string(edge_direction) validate_safe_string(edge_name) template_data = { 'mark_name': mark_name, 'base_location': base_location_name, 'direction': edge_direction, 'edge_name': edge_name, } final_string = base_template % template_data for block in fold_ir_blocks: if isinstance(block, Filter): final_string += u'[' + block.predicate.to_match() + u']' elif isinstance(block, Traverse): template_data = { 'direction': block.direction, 'edge_name': block.edge_name, } final_string += traverse_edge_template % template_data elif isinstance(block, MarkLocation): # MarkLocation blocks inside a fold do not result in any MATCH output. pass else: raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: ' u'{} {} {}'.format(type(block), block, fold_ir_blocks)) # Workaround for OrientDB's inconsistent return type when filtering a list. # https://github.com/orientechnologies/orientdb/issues/7811 final_string += '.asList()' return final_string
python
def _represent_fold(fold_location, fold_ir_blocks): start_let_template = u'$%(mark_name)s = %(base_location)s' traverse_edge_template = u'.%(direction)s("%(edge_name)s")' base_template = start_let_template + traverse_edge_template edge_direction, edge_name = fold_location.get_first_folded_edge() mark_name, _ = fold_location.get_location_name() base_location_name, _ = fold_location.base_location.get_location_name() validate_safe_string(mark_name) validate_safe_string(base_location_name) validate_safe_string(edge_direction) validate_safe_string(edge_name) template_data = { 'mark_name': mark_name, 'base_location': base_location_name, 'direction': edge_direction, 'edge_name': edge_name, } final_string = base_template % template_data for block in fold_ir_blocks: if isinstance(block, Filter): final_string += u'[' + block.predicate.to_match() + u']' elif isinstance(block, Traverse): template_data = { 'direction': block.direction, 'edge_name': block.edge_name, } final_string += traverse_edge_template % template_data elif isinstance(block, MarkLocation): # MarkLocation blocks inside a fold do not result in any MATCH output. pass else: raise AssertionError(u'Found an unexpected IR block in the folded IR blocks: ' u'{} {} {}'.format(type(block), block, fold_ir_blocks)) # Workaround for OrientDB's inconsistent return type when filtering a list. # https://github.com/orientechnologies/orientdb/issues/7811 final_string += '.asList()' return final_string
[ "def", "_represent_fold", "(", "fold_location", ",", "fold_ir_blocks", ")", ":", "start_let_template", "=", "u'$%(mark_name)s = %(base_location)s'", "traverse_edge_template", "=", "u'.%(direction)s(\"%(edge_name)s\")'", "base_template", "=", "start_let_template", "+", "traverse_edge_template", "edge_direction", ",", "edge_name", "=", "fold_location", ".", "get_first_folded_edge", "(", ")", "mark_name", ",", "_", "=", "fold_location", ".", "get_location_name", "(", ")", "base_location_name", ",", "_", "=", "fold_location", ".", "base_location", ".", "get_location_name", "(", ")", "validate_safe_string", "(", "mark_name", ")", "validate_safe_string", "(", "base_location_name", ")", "validate_safe_string", "(", "edge_direction", ")", "validate_safe_string", "(", "edge_name", ")", "template_data", "=", "{", "'mark_name'", ":", "mark_name", ",", "'base_location'", ":", "base_location_name", ",", "'direction'", ":", "edge_direction", ",", "'edge_name'", ":", "edge_name", ",", "}", "final_string", "=", "base_template", "%", "template_data", "for", "block", "in", "fold_ir_blocks", ":", "if", "isinstance", "(", "block", ",", "Filter", ")", ":", "final_string", "+=", "u'['", "+", "block", ".", "predicate", ".", "to_match", "(", ")", "+", "u']'", "elif", "isinstance", "(", "block", ",", "Traverse", ")", ":", "template_data", "=", "{", "'direction'", ":", "block", ".", "direction", ",", "'edge_name'", ":", "block", ".", "edge_name", ",", "}", "final_string", "+=", "traverse_edge_template", "%", "template_data", "elif", "isinstance", "(", "block", ",", "MarkLocation", ")", ":", "# MarkLocation blocks inside a fold do not result in any MATCH output.", "pass", "else", ":", "raise", "AssertionError", "(", "u'Found an unexpected IR block in the folded IR blocks: '", "u'{} {} {}'", ".", "format", "(", "type", "(", "block", ")", ",", "block", ",", "fold_ir_blocks", ")", ")", "# Workaround for OrientDB's inconsistent return type when filtering a list.", "# https://github.com/orientechnologies/orientdb/issues/7811", "final_string", "+=", "'.asList()'", "return", "final_string" ]
Emit a LET clause corresponding to the IR blocks for a @fold scope.
[ "Emit", "a", "LET", "clause", "corresponding", "to", "the", "IR", "blocks", "for", "a" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L105-L147
246,767
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_construct_output_to_match
def _construct_output_to_match(output_block): """Transform a ConstructResult block into a MATCH query string.""" output_block.validate() selections = ( u'%s AS `%s`' % (output_block.fields[key].to_match(), key) for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order. ) return u'SELECT %s FROM' % (u', '.join(selections),)
python
def _construct_output_to_match(output_block): output_block.validate() selections = ( u'%s AS `%s`' % (output_block.fields[key].to_match(), key) for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order. ) return u'SELECT %s FROM' % (u', '.join(selections),)
[ "def", "_construct_output_to_match", "(", "output_block", ")", ":", "output_block", ".", "validate", "(", ")", "selections", "=", "(", "u'%s AS `%s`'", "%", "(", "output_block", ".", "fields", "[", "key", "]", ".", "to_match", "(", ")", ",", "key", ")", "for", "key", "in", "sorted", "(", "output_block", ".", "fields", ".", "keys", "(", ")", ")", "# Sort keys for deterministic output order.", ")", "return", "u'SELECT %s FROM'", "%", "(", "u', '", ".", "join", "(", "selections", ")", ",", ")" ]
Transform a ConstructResult block into a MATCH query string.
[ "Transform", "a", "ConstructResult", "block", "into", "a", "MATCH", "query", "string", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L150-L159
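The sorted() call is what makes the SELECT clause order deterministic; a plain-dict sketch of the same construction (field expressions are stand-in strings rather than real expression objects):

fields = {'zeta_name': 'Animal___1.name', 'alpha_name': 'Animal___2.name'}
selections = (u'%s AS `%s`' % (fields[key], key) for key in sorted(fields.keys()))
print(u'SELECT %s FROM' % (u', '.join(selections),))
# SELECT Animal___2.name AS `alpha_name`, Animal___1.name AS `zeta_name` FROM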
246,768
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
_construct_where_to_match
def _construct_where_to_match(where_block): """Transform a Filter block into a MATCH query string.""" if where_block.predicate == TrueLiteral: raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}' .format(where_block)) return u'WHERE ' + where_block.predicate.to_match()
python
def _construct_where_to_match(where_block): if where_block.predicate == TrueLiteral: raise AssertionError(u'Received WHERE block with TrueLiteral predicate: {}' .format(where_block)) return u'WHERE ' + where_block.predicate.to_match()
[ "def", "_construct_where_to_match", "(", "where_block", ")", ":", "if", "where_block", ".", "predicate", "==", "TrueLiteral", ":", "raise", "AssertionError", "(", "u'Received WHERE block with TrueLiteral predicate: {}'", ".", "format", "(", "where_block", ")", ")", "return", "u'WHERE '", "+", "where_block", ".", "predicate", ".", "to_match", "(", ")" ]
Transform a Filter block into a MATCH query string.
[ "Transform", "a", "Filter", "block", "into", "a", "MATCH", "query", "string", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L162-L167
246,769
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
emit_code_from_multiple_match_queries
def emit_code_from_multiple_match_queries(match_queries): """Return a MATCH query string from a list of MatchQuery namedtuples.""" optional_variable_base_name = '$optional__' union_variable_name = '$result' query_data = deque([u'SELECT EXPAND(', union_variable_name, u')', u' LET ']) optional_variables = [] sub_queries = [emit_code_from_single_match_query(match_query) for match_query in match_queries] for (i, sub_query) in enumerate(sub_queries): variable_name = optional_variable_base_name + str(i) variable_assignment = variable_name + u' = (' sub_query_end = u'),' query_data.append(variable_assignment) query_data.append(sub_query) query_data.append(sub_query_end) optional_variables.append(variable_name) query_data.append(union_variable_name) query_data.append(u' = UNIONALL(') query_data.append(u', '.join(optional_variables)) query_data.append(u')') return u' '.join(query_data)
python
def emit_code_from_multiple_match_queries(match_queries): optional_variable_base_name = '$optional__' union_variable_name = '$result' query_data = deque([u'SELECT EXPAND(', union_variable_name, u')', u' LET ']) optional_variables = [] sub_queries = [emit_code_from_single_match_query(match_query) for match_query in match_queries] for (i, sub_query) in enumerate(sub_queries): variable_name = optional_variable_base_name + str(i) variable_assignment = variable_name + u' = (' sub_query_end = u'),' query_data.append(variable_assignment) query_data.append(sub_query) query_data.append(sub_query_end) optional_variables.append(variable_name) query_data.append(union_variable_name) query_data.append(u' = UNIONALL(') query_data.append(u', '.join(optional_variables)) query_data.append(u')') return u' '.join(query_data)
[ "def", "emit_code_from_multiple_match_queries", "(", "match_queries", ")", ":", "optional_variable_base_name", "=", "'$optional__'", "union_variable_name", "=", "'$result'", "query_data", "=", "deque", "(", "[", "u'SELECT EXPAND('", ",", "union_variable_name", ",", "u')'", ",", "u' LET '", "]", ")", "optional_variables", "=", "[", "]", "sub_queries", "=", "[", "emit_code_from_single_match_query", "(", "match_query", ")", "for", "match_query", "in", "match_queries", "]", "for", "(", "i", ",", "sub_query", ")", "in", "enumerate", "(", "sub_queries", ")", ":", "variable_name", "=", "optional_variable_base_name", "+", "str", "(", "i", ")", "variable_assignment", "=", "variable_name", "+", "u' = ('", "sub_query_end", "=", "u'),'", "query_data", ".", "append", "(", "variable_assignment", ")", "query_data", ".", "append", "(", "sub_query", ")", "query_data", ".", "append", "(", "sub_query_end", ")", "optional_variables", ".", "append", "(", "variable_name", ")", "query_data", ".", "append", "(", "union_variable_name", ")", "query_data", ".", "append", "(", "u' = UNIONALL('", ")", "query_data", ".", "append", "(", "u', '", ".", "join", "(", "optional_variables", ")", ")", "query_data", ".", "append", "(", "u')'", ")", "return", "u' '", ".", "join", "(", "query_data", ")" ]
Return a MATCH query string from a list of MatchQuery namedtuples.
[ "Return", "a", "MATCH", "query", "string", "from", "a", "list", "of", "MatchQuery", "namedtuples", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L218-L241
246,770
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_match.py
emit_code_from_ir
def emit_code_from_ir(compound_match_query, compiler_metadata): """Return a MATCH query string from a CompoundMatchQuery.""" # If the compound match query contains only one match query, # just call `emit_code_from_single_match_query` # If there are multiple match queries, construct the query string for each # individual query and combine them as follows. # # SELECT EXPAND($result) # LET # $optional__0 = ( # <query_string_0> # ), # $optional__1 = ( # <query_string_1> # ), # $optional__2 = ( # <query_string_2> # ), # # . . . # # $result = UNIONALL($optional__0, $optional__1, . . . ) match_queries = compound_match_query.match_queries if len(match_queries) == 1: query_string = emit_code_from_single_match_query(match_queries[0]) elif len(match_queries) > 1: query_string = emit_code_from_multiple_match_queries(match_queries) else: raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQueries: ' u'{}'.format(match_queries)) return query_string
python
def emit_code_from_ir(compound_match_query, compiler_metadata): # If the compound match query contains only one match query, # just call `emit_code_from_single_match_query` # If there are multiple match queries, construct the query string for each # individual query and combine them as follows. # # SELECT EXPAND($result) # LET # $optional__0 = ( # <query_string_0> # ), # $optional__1 = ( # <query_string_1> # ), # $optional__2 = ( # <query_string_2> # ), # # . . . # # $result = UNIONALL($optional__0, $optional__1, . . . ) match_queries = compound_match_query.match_queries if len(match_queries) == 1: query_string = emit_code_from_single_match_query(match_queries[0]) elif len(match_queries) > 1: query_string = emit_code_from_multiple_match_queries(match_queries) else: raise AssertionError(u'Received CompoundMatchQuery with an empty list of MatchQueries: ' u'{}'.format(match_queries)) return query_string
[ "def", "emit_code_from_ir", "(", "compound_match_query", ",", "compiler_metadata", ")", ":", "# If the compound match query contains only one match query,", "# just call `emit_code_from_single_match_query`", "# If there are multiple match queries, construct the query string for each", "# individual query and combine them as follows.", "#", "# SELECT EXPAND($result)", "# LET", "# $optional__0 = (", "# <query_string_0>", "# ),", "# $optional__1 = (", "# <query_string_1>", "# ),", "# $optional__2 = (", "# <query_string_2>", "# ),", "#", "# . . .", "#", "# $result = UNIONALL($optional__0, $optional__1, . . . )", "match_queries", "=", "compound_match_query", ".", "match_queries", "if", "len", "(", "match_queries", ")", "==", "1", ":", "query_string", "=", "emit_code_from_single_match_query", "(", "match_queries", "[", "0", "]", ")", "elif", "len", "(", "match_queries", ")", ">", "1", ":", "query_string", "=", "emit_code_from_multiple_match_queries", "(", "match_queries", ")", "else", ":", "raise", "AssertionError", "(", "u'Received CompoundMatchQuery with an empty list of MatchQueries: '", "u'{}'", ".", "format", "(", "match_queries", ")", ")", "return", "query_string" ]
Return a MATCH query string from a CompoundMatchQuery.
[ "Return", "a", "MATCH", "query", "string", "from", "a", "CompoundMatchQuery", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_match.py#L244-L276
246,771
kensho-technologies/graphql-compiler
graphql_compiler/schema.py
_serialize_date
def _serialize_date(value): """Serialize a Date object to its proper ISO-8601 representation.""" if not isinstance(value, date): raise ValueError(u'The received object was not a date: ' u'{} {}'.format(type(value), value)) return value.isoformat()
python
def _serialize_date(value): if not isinstance(value, date): raise ValueError(u'The received object was not a date: ' u'{} {}'.format(type(value), value)) return value.isoformat()
[ "def", "_serialize_date", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "date", ")", ":", "raise", "ValueError", "(", "u'The received object was not a date: '", "u'{} {}'", ".", "format", "(", "type", "(", "value", ")", ",", "value", ")", ")", "return", "value", ".", "isoformat", "(", ")" ]
Serialize a Date object to its proper ISO-8601 representation.
[ "Serialize", "a", "Date", "object", "to", "its", "proper", "ISO", "-", "8601", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L201-L206
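A usage sketch, assuming _serialize_date is in scope:

import datetime

print(_serialize_date(datetime.date(2024, 1, 31)))  # 2024-01-31

# Note: datetime.datetime subclasses datetime.date, so a datetime would also
# pass this isinstance check; _serialize_datetime below handles datetimes.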
246,772
kensho-technologies/graphql-compiler
graphql_compiler/schema.py
_serialize_datetime
def _serialize_datetime(value): """Serialize a DateTime object to its proper ISO-8601 representation.""" if not isinstance(value, (datetime, arrow.Arrow)): raise ValueError(u'The received object was not a datetime: ' u'{} {}'.format(type(value), value)) return value.isoformat()
python
def _serialize_datetime(value): if not isinstance(value, (datetime, arrow.Arrow)): raise ValueError(u'The received object was not a datetime: ' u'{} {}'.format(type(value), value)) return value.isoformat()
[ "def", "_serialize_datetime", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "(", "datetime", ",", "arrow", ".", "Arrow", ")", ")", ":", "raise", "ValueError", "(", "u'The received object was not a datetime: '", "u'{} {}'", ".", "format", "(", "type", "(", "value", ")", ",", "value", ")", ")", "return", "value", ".", "isoformat", "(", ")" ]
Serialize a DateTime object to its proper ISO-8601 representation.
[ "Serialize", "a", "DateTime", "object", "to", "its", "proper", "ISO", "-", "8601", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L214-L219
246,773
kensho-technologies/graphql-compiler
graphql_compiler/schema.py
_parse_datetime_value
def _parse_datetime_value(value): """Deserialize a DateTime object from its proper ISO-8601 representation.""" if value.endswith('Z'): # Arrow doesn't support the "Z" literal to denote UTC time. # Strip the "Z" and add an explicit time zone instead. value = value[:-1] + '+00:00' return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime
python
def _parse_datetime_value(value): if value.endswith('Z'): # Arrow doesn't support the "Z" literal to denote UTC time. # Strip the "Z" and add an explicit time zone instead. value = value[:-1] + '+00:00' return arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime
[ "def", "_parse_datetime_value", "(", "value", ")", ":", "if", "value", ".", "endswith", "(", "'Z'", ")", ":", "# Arrow doesn't support the \"Z\" literal to denote UTC time.", "# Strip the \"Z\" and add an explicit time zone instead.", "value", "=", "value", "[", ":", "-", "1", "]", "+", "'+00:00'", "return", "arrow", ".", "get", "(", "value", ",", "'YYYY-MM-DDTHH:mm:ssZ'", ")", ".", "datetime" ]
Deserialize a DateTime object from its proper ISO-8601 representation.
[ "Deserialize", "a", "DateTime", "object", "from", "its", "proper", "ISO", "-", "8601", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L222-L229
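The "Z" workaround can be observed without arrow installed; the final comment shows the arrow call the function would then make (the timestamp is a sample value):

value = '2017-03-22T09:00:00Z'
if value.endswith('Z'):
    value = value[:-1] + '+00:00'
print(value)  # 2017-03-22T09:00:00+00:00
# arrow.get(value, 'YYYY-MM-DDTHH:mm:ssZ').datetime then yields an aware UTC datetime.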
246,774
kensho-technologies/graphql-compiler
graphql_compiler/schema.py
insert_meta_fields_into_existing_schema
def insert_meta_fields_into_existing_schema(graphql_schema): """Add compiler-specific meta-fields into all interfaces and types of the specified schema. It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject the meta-fields during the initial process of building the schema, as that approach is more robust. This function does its best to not mutate unexpected definitions, but may break unexpectedly as the GraphQL standard is extended and the underlying GraphQL library is updated. Use this function at your own risk. Don't say you haven't been warned. Properties added include: - "_x_count", which allows filtering folds based on the number of elements they capture. Args: graphql_schema: GraphQLSchema object describing the schema that is going to be used with the compiler. N.B.: MUTATED IN-PLACE in this method. """ root_type_name = graphql_schema.get_query_type().name for type_name, type_obj in six.iteritems(graphql_schema.get_type_map()): if type_name.startswith('__') or type_name == root_type_name: # Ignore the types that are built into GraphQL itself, as well as the root query type. continue if not isinstance(type_obj, (GraphQLObjectType, GraphQLInterfaceType)): # Ignore definitions that are not interfaces or types. continue for meta_field_name, meta_field in six.iteritems(EXTENDED_META_FIELD_DEFINITIONS): if meta_field_name in type_obj.fields: raise AssertionError(u'Unexpectedly encountered an existing field named {} while ' u'attempting to add a meta-field of the same name. Make sure ' u'you are not attempting to add meta-fields twice.' .format(meta_field_name)) type_obj.fields[meta_field_name] = meta_field
python
def insert_meta_fields_into_existing_schema(graphql_schema): root_type_name = graphql_schema.get_query_type().name for type_name, type_obj in six.iteritems(graphql_schema.get_type_map()): if type_name.startswith('__') or type_name == root_type_name: # Ignore the types that are built into GraphQL itself, as well as the root query type. continue if not isinstance(type_obj, (GraphQLObjectType, GraphQLInterfaceType)): # Ignore definitions that are not interfaces or types. continue for meta_field_name, meta_field in six.iteritems(EXTENDED_META_FIELD_DEFINITIONS): if meta_field_name in type_obj.fields: raise AssertionError(u'Unexpectedly encountered an existing field named {} while ' u'attempting to add a meta-field of the same name. Make sure ' u'you are not attempting to add meta-fields twice.' .format(meta_field_name)) type_obj.fields[meta_field_name] = meta_field
[ "def", "insert_meta_fields_into_existing_schema", "(", "graphql_schema", ")", ":", "root_type_name", "=", "graphql_schema", ".", "get_query_type", "(", ")", ".", "name", "for", "type_name", ",", "type_obj", "in", "six", ".", "iteritems", "(", "graphql_schema", ".", "get_type_map", "(", ")", ")", ":", "if", "type_name", ".", "startswith", "(", "'__'", ")", "or", "type_name", "==", "root_type_name", ":", "# Ignore the types that are built into GraphQL itself, as well as the root query type.", "continue", "if", "not", "isinstance", "(", "type_obj", ",", "(", "GraphQLObjectType", ",", "GraphQLInterfaceType", ")", ")", ":", "# Ignore definitions that are not interfaces or types.", "continue", "for", "meta_field_name", ",", "meta_field", "in", "six", ".", "iteritems", "(", "EXTENDED_META_FIELD_DEFINITIONS", ")", ":", "if", "meta_field_name", "in", "type_obj", ".", "fields", ":", "raise", "AssertionError", "(", "u'Unexpectedly encountered an existing field named {} while '", "u'attempting to add a meta-field of the same name. Make sure '", "u'you are not attempting to add meta-fields twice.'", ".", "format", "(", "meta_field_name", ")", ")", "type_obj", ".", "fields", "[", "meta_field_name", "]", "=", "meta_field" ]
Add compiler-specific meta-fields into all interfaces and types of the specified schema. It is preferable to use the EXTENDED_META_FIELD_DEFINITIONS constant above to directly inject the meta-fields during the initial process of building the schema, as that approach is more robust. This function does its best to not mutate unexpected definitions, but may break unexpectedly as the GraphQL standard is extended and the underlying GraphQL library is updated. Use this function at your own risk. Don't say you haven't been warned. Properties added include: - "_x_count", which allows filtering folds based on the number of elements they capture. Args: graphql_schema: GraphQLSchema object describing the schema that is going to be used with the compiler. N.B.: MUTATED IN-PLACE in this method.
[ "Add", "compiler", "-", "specific", "meta", "-", "fields", "into", "all", "interfaces", "and", "types", "of", "the", "specified", "schema", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema.py#L302-L338
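A minimal usage sketch for the record above, assuming the function is importable from the top-level package and using graphql-core's SDL utilities; the schema text is illustrative:

from graphql import build_ast_schema, parse
from graphql_compiler import insert_meta_fields_into_existing_schema  # import path assumed

schema_text = '''
schema {
    query: RootSchemaQuery
}
type RootSchemaQuery {
    Animal: Animal
}
type Animal {
    name: String
}
'''
schema = build_ast_schema(parse(schema_text))
insert_meta_fields_into_existing_schema(schema)  # mutates the schema in-place
# "Animal" now carries the "_x_count" meta-field; the root query type is skipped.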
246,775
kensho-technologies/graphql-compiler
graphql_compiler/compiler/context_helpers.py
validate_context_for_visiting_vertex_field
def validate_context_for_visiting_vertex_field(parent_location, vertex_field_name, context): """Ensure that the current context allows for visiting a vertex field.""" if is_in_fold_innermost_scope(context): raise GraphQLCompilationError( u'Traversing inside a @fold block after filtering on {} or outputting fields ' u'is not supported! Parent location: {}, vertex field name: {}' .format(COUNT_META_FIELD_NAME, parent_location, vertex_field_name))
python
def validate_context_for_visiting_vertex_field(parent_location, vertex_field_name, context): if is_in_fold_innermost_scope(context): raise GraphQLCompilationError( u'Traversing inside a @fold block after filtering on {} or outputting fields ' u'is not supported! Parent location: {}, vertex field name: {}' .format(COUNT_META_FIELD_NAME, parent_location, vertex_field_name))
[ "def", "validate_context_for_visiting_vertex_field", "(", "parent_location", ",", "vertex_field_name", ",", "context", ")", ":", "if", "is_in_fold_innermost_scope", "(", "context", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Traversing inside a @fold block after filtering on {} or outputting fields '", "u'is not supported! Parent location: {}, vertex field name: {}'", ".", "format", "(", "COUNT_META_FIELD_NAME", ",", "parent_location", ",", "vertex_field_name", ")", ")" ]
Ensure that the current context allows for visiting a vertex field.
[ "Ensure", "that", "the", "current", "context", "allows", "for", "visiting", "a", "vertex", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/context_helpers.py#L95-L101
246,776
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/graphql_formatting.py
pretty_print_graphql
def pretty_print_graphql(query, use_four_spaces=True): """Take a GraphQL query, pretty print it, and return it.""" # Use our custom visitor, which fixes directive argument order # to get the canonical representation output = visit(parse(query), CustomPrintingVisitor()) # Using four spaces for indentation makes it easier to edit in # Python source files. if use_four_spaces: return fix_indentation_depth(output) return output
python
def pretty_print_graphql(query, use_four_spaces=True): # Use our custom visitor, which fixes directive argument order # to get the canonical representation output = visit(parse(query), CustomPrintingVisitor()) # Using four spaces for indentation makes it easier to edit in # Python source files. if use_four_spaces: return fix_indentation_depth(output) return output
[ "def", "pretty_print_graphql", "(", "query", ",", "use_four_spaces", "=", "True", ")", ":", "# Use our custom visitor, which fixes directive argument order", "# to get the canonical representation", "output", "=", "visit", "(", "parse", "(", "query", ")", ",", "CustomPrintingVisitor", "(", ")", ")", "# Using four spaces for indentation makes it easier to edit in", "# Python source files.", "if", "use_four_spaces", ":", "return", "fix_indentation_depth", "(", "output", ")", "return", "output" ]
Take a GraphQL query, pretty print it, and return it.
[ "Take", "a", "GraphQL", "query", "pretty", "print", "it", "and", "return", "it", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L10-L20
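A usage sketch with an illustrative query; the import path follows the record's path field, and the @output directive is assumed to be one the compiler's printing visitor knows about:

from graphql_compiler.query_formatting.graphql_formatting import pretty_print_graphql

query = '''{
    Animal {
        name @output(out_name: "animal_name")
    }
}'''
printed = pretty_print_graphql(query)                                  # four-space indents
printed_two = pretty_print_graphql(query, use_four_spaces=False)       # GraphQL's two spaces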
246,777
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/graphql_formatting.py
fix_indentation_depth
def fix_indentation_depth(query): """Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses.""" lines = query.split('\n') final_lines = [] for line in lines: consecutive_spaces = 0 for char in line: if char == ' ': consecutive_spaces += 1 else: break if consecutive_spaces % 2 != 0: raise AssertionError(u'Indentation was not a multiple of two: ' u'{}'.format(consecutive_spaces)) final_lines.append((' ' * consecutive_spaces) + line[consecutive_spaces:]) return '\n'.join(final_lines)
python
def fix_indentation_depth(query): lines = query.split('\n') final_lines = [] for line in lines: consecutive_spaces = 0 for char in line: if char == ' ': consecutive_spaces += 1 else: break if consecutive_spaces % 2 != 0: raise AssertionError(u'Indentation was not a multiple of two: ' u'{}'.format(consecutive_spaces)) final_lines.append((' ' * consecutive_spaces) + line[consecutive_spaces:]) return '\n'.join(final_lines)
[ "def", "fix_indentation_depth", "(", "query", ")", ":", "lines", "=", "query", ".", "split", "(", "'\\n'", ")", "final_lines", "=", "[", "]", "for", "line", "in", "lines", ":", "consecutive_spaces", "=", "0", "for", "char", "in", "line", ":", "if", "char", "==", "' '", ":", "consecutive_spaces", "+=", "1", "else", ":", "break", "if", "consecutive_spaces", "%", "2", "!=", "0", ":", "raise", "AssertionError", "(", "u'Indentation was not a multiple of two: '", "u'{}'", ".", "format", "(", "consecutive_spaces", ")", ")", "final_lines", ".", "append", "(", "(", "' '", "*", "consecutive_spaces", ")", "+", "line", "[", "consecutive_spaces", ":", "]", ")", "return", "'\\n'", ".", "join", "(", "final_lines", ")" ]
Make indentation use 4 spaces, rather than the 2 spaces GraphQL normally uses.
[ "Make", "indentation", "use", "4", "spaces", "rather", "than", "the", "2", "spaces", "GraphQL", "normally", "uses", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L67-L86
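A small worked example; input indentation must come in multiples of two spaces or the AssertionError above fires:

from graphql_compiler.query_formatting.graphql_formatting import fix_indentation_depth

two_space = '{\n  Animal {\n    name\n  }\n}'
print(fix_indentation_depth(two_space))
# {
#     Animal {
#         name
#     }
# }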
246,778
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/graphql_formatting.py
CustomPrintingVisitor.leave_Directive
def leave_Directive(self, node, *args): """Call when exiting a directive node in the ast.""" name_to_arg_value = { # Taking [0] is ok here because the GraphQL parser checks for the # existence of ':' in directive arguments. arg.split(':', 1)[0]: arg for arg in node.arguments } ordered_args = node.arguments directive = DIRECTIVES_BY_NAME.get(node.name) if directive: sorted_args = [] encountered_argument_names = set() # Iterate through all defined arguments in the directive schema. for defined_arg_name in six.iterkeys(directive.args): if defined_arg_name in name_to_arg_value: # The argument was present in the query, print it in the correct order. encountered_argument_names.add(defined_arg_name) sorted_args.append(name_to_arg_value[defined_arg_name]) # Get all the arguments that weren't defined in the directive schema. # They will be printed after all the arguments that were in the schema. unsorted_args = [ value for name, value in six.iteritems(name_to_arg_value) if name not in encountered_argument_names ] ordered_args = sorted_args + unsorted_args return '@' + node.name + wrap('(', join(ordered_args, ', '), ')')
python
def leave_Directive(self, node, *args): name_to_arg_value = { # Taking [0] is ok here because the GraphQL parser checks for the # existence of ':' in directive arguments. arg.split(':', 1)[0]: arg for arg in node.arguments } ordered_args = node.arguments directive = DIRECTIVES_BY_NAME.get(node.name) if directive: sorted_args = [] encountered_argument_names = set() # Iterate through all defined arguments in the directive schema. for defined_arg_name in six.iterkeys(directive.args): if defined_arg_name in name_to_arg_value: # The argument was present in the query, print it in the correct order. encountered_argument_names.add(defined_arg_name) sorted_args.append(name_to_arg_value[defined_arg_name]) # Get all the arguments that weren't defined in the directive schema. # They will be printed after all the arguments that were in the schema. unsorted_args = [ value for name, value in six.iteritems(name_to_arg_value) if name not in encountered_argument_names ] ordered_args = sorted_args + unsorted_args return '@' + node.name + wrap('(', join(ordered_args, ', '), ')')
[ "def", "leave_Directive", "(", "self", ",", "node", ",", "*", "args", ")", ":", "name_to_arg_value", "=", "{", "# Taking [0] is ok here because the GraphQL parser checks for the", "# existence of ':' in directive arguments.", "arg", ".", "split", "(", "':'", ",", "1", ")", "[", "0", "]", ":", "arg", "for", "arg", "in", "node", ".", "arguments", "}", "ordered_args", "=", "node", ".", "arguments", "directive", "=", "DIRECTIVES_BY_NAME", ".", "get", "(", "node", ".", "name", ")", "if", "directive", ":", "sorted_args", "=", "[", "]", "encountered_argument_names", "=", "set", "(", ")", "# Iterate through all defined arguments in the directive schema.", "for", "defined_arg_name", "in", "six", ".", "iterkeys", "(", "directive", ".", "args", ")", ":", "if", "defined_arg_name", "in", "name_to_arg_value", ":", "# The argument was present in the query, print it in the correct order.", "encountered_argument_names", ".", "add", "(", "defined_arg_name", ")", "sorted_args", ".", "append", "(", "name_to_arg_value", "[", "defined_arg_name", "]", ")", "# Get all the arguments that weren't defined in the directive schema.", "# They will be printed after all the arguments that were in the schema.", "unsorted_args", "=", "[", "value", "for", "name", ",", "value", "in", "six", ".", "iteritems", "(", "name_to_arg_value", ")", "if", "name", "not", "in", "encountered_argument_names", "]", "ordered_args", "=", "sorted_args", "+", "unsorted_args", "return", "'@'", "+", "node", ".", "name", "+", "wrap", "(", "'('", ",", "join", "(", "ordered_args", ",", "', '", ")", ",", "')'", ")" ]
Call when exiting a directive node in the ast.
[ "Call", "when", "exiting", "a", "directive", "node", "in", "the", "ast", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/graphql_formatting.py#L32-L64
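A standalone sketch of the reordering idea implemented above: arguments defined in the directive's schema print first, in schema order, followed by any remaining arguments. The helper name and inputs here are illustrative, not part of the compiler:

def order_directive_args(schema_arg_order, printed_args):
    # printed_args are "name: value" strings, as in the visitor above.
    name_to_arg = {arg.split(':', 1)[0]: arg for arg in printed_args}
    in_schema = [name_to_arg[name] for name in schema_arg_order if name in name_to_arg]
    leftovers = [arg for arg in printed_args
                 if arg.split(':', 1)[0] not in set(schema_arg_order)]
    return in_schema + leftovers

print(order_directive_args(['op_name', 'value'], ['value: ["$v"]', 'op_name: "="']))
# ['op_name: "="', 'value: ["$v"]']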
246,779
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/__init__.py
lower_ir
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None): """Lower the IR into an IR form that can be represented in MATCH queries. Args: ir_blocks: list of IR blocks to lower into MATCH-compatible form query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: MatchQuery object containing the IR blocks organized in a MATCH-like structure """ sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table) # Construct the mapping of each location to its corresponding GraphQL type. location_types = { location: location_info.type for location, location_info in query_metadata_table.registered_locations } # Compute the set of all locations that have associated type coercions. coerced_locations = { location for location, location_info in query_metadata_table.registered_locations if location_info.coerced_from_type is not None } # Extract information for both simple and complex @optional traverses location_to_optional_results = extract_optional_location_root_info(ir_blocks) complex_optional_roots, location_to_optional_roots = location_to_optional_results simple_optional_root_info = extract_simple_optional_location_info( ir_blocks, complex_optional_roots, location_to_optional_roots) ir_blocks = remove_end_optionals(ir_blocks) # Append global operation block(s) to filter out incorrect results # from simple optional match traverses (using a WHERE statement) if len(simple_optional_root_info) > 0: where_filter_predicate = construct_where_filter_predicate( query_metadata_table, simple_optional_root_info) ir_blocks.insert(-1, GlobalOperationsStart()) ir_blocks.insert(-1, Filter(where_filter_predicate)) # These lowering / optimization passes work on IR blocks. ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table) ir_blocks = optimize_boolean_expression_comparisons(ir_blocks) ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks) ir_blocks = merge_consecutive_filter_clauses(ir_blocks) ir_blocks = lower_has_substring_binary_compositions(ir_blocks) ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table) # Here, we lower from raw IR blocks into a MatchQuery object. # From this point on, the lowering / optimization passes work on the MatchQuery representation. 
match_query = convert_to_match_query(ir_blocks) match_query = lower_comparisons_to_between(match_query) match_query = lower_backtrack_blocks(match_query, location_types) match_query = truncate_repeated_single_step_traversals(match_query) match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query) # Optimize and lower the IR blocks inside @fold scopes. new_folds = { key: merge_consecutive_filter_clauses( remove_backtrack_blocks_from_fold( lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks) ) ) for key, folded_ir_blocks in six.iteritems(match_query.folds) } match_query = match_query._replace(folds=new_folds) compound_match_query = convert_optional_traversals_to_compound_match_query( match_query, complex_optional_roots, location_to_optional_roots) compound_match_query = prune_non_existent_outputs(compound_match_query) compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query) compound_match_query = lower_context_field_expressions(compound_match_query) compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries( compound_match_query) compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points( compound_match_query, location_types, coerced_locations) return compound_match_query
python
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None): sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table) # Construct the mapping of each location to its corresponding GraphQL type. location_types = { location: location_info.type for location, location_info in query_metadata_table.registered_locations } # Compute the set of all locations that have associated type coercions. coerced_locations = { location for location, location_info in query_metadata_table.registered_locations if location_info.coerced_from_type is not None } # Extract information for both simple and complex @optional traverses location_to_optional_results = extract_optional_location_root_info(ir_blocks) complex_optional_roots, location_to_optional_roots = location_to_optional_results simple_optional_root_info = extract_simple_optional_location_info( ir_blocks, complex_optional_roots, location_to_optional_roots) ir_blocks = remove_end_optionals(ir_blocks) # Append global operation block(s) to filter out incorrect results # from simple optional match traverses (using a WHERE statement) if len(simple_optional_root_info) > 0: where_filter_predicate = construct_where_filter_predicate( query_metadata_table, simple_optional_root_info) ir_blocks.insert(-1, GlobalOperationsStart()) ir_blocks.insert(-1, Filter(where_filter_predicate)) # These lowering / optimization passes work on IR blocks. ir_blocks = lower_context_field_existence(ir_blocks, query_metadata_table) ir_blocks = optimize_boolean_expression_comparisons(ir_blocks) ir_blocks = rewrite_binary_composition_inside_ternary_conditional(ir_blocks) ir_blocks = merge_consecutive_filter_clauses(ir_blocks) ir_blocks = lower_has_substring_binary_compositions(ir_blocks) ir_blocks = orientdb_eval_scheduling.workaround_lowering_pass(ir_blocks, query_metadata_table) # Here, we lower from raw IR blocks into a MatchQuery object. # From this point on, the lowering / optimization passes work on the MatchQuery representation. match_query = convert_to_match_query(ir_blocks) match_query = lower_comparisons_to_between(match_query) match_query = lower_backtrack_blocks(match_query, location_types) match_query = truncate_repeated_single_step_traversals(match_query) match_query = orientdb_class_with_while.workaround_type_coercions_in_recursions(match_query) # Optimize and lower the IR blocks inside @fold scopes. new_folds = { key: merge_consecutive_filter_clauses( remove_backtrack_blocks_from_fold( lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks) ) ) for key, folded_ir_blocks in six.iteritems(match_query.folds) } match_query = match_query._replace(folds=new_folds) compound_match_query = convert_optional_traversals_to_compound_match_query( match_query, complex_optional_roots, location_to_optional_roots) compound_match_query = prune_non_existent_outputs(compound_match_query) compound_match_query = collect_filters_to_first_location_occurrence(compound_match_query) compound_match_query = lower_context_field_expressions(compound_match_query) compound_match_query = truncate_repeated_single_step_traversals_in_sub_queries( compound_match_query) compound_match_query = orientdb_query_execution.expose_ideal_query_execution_start_points( compound_match_query, location_types, coerced_locations) return compound_match_query
[ "def", "lower_ir", "(", "ir_blocks", ",", "query_metadata_table", ",", "type_equivalence_hints", "=", "None", ")", ":", "sanity_check_ir_blocks_from_frontend", "(", "ir_blocks", ",", "query_metadata_table", ")", "# Construct the mapping of each location to its corresponding GraphQL type.", "location_types", "=", "{", "location", ":", "location_info", ".", "type", "for", "location", ",", "location_info", "in", "query_metadata_table", ".", "registered_locations", "}", "# Compute the set of all locations that have associated type coercions.", "coerced_locations", "=", "{", "location", "for", "location", ",", "location_info", "in", "query_metadata_table", ".", "registered_locations", "if", "location_info", ".", "coerced_from_type", "is", "not", "None", "}", "# Extract information for both simple and complex @optional traverses", "location_to_optional_results", "=", "extract_optional_location_root_info", "(", "ir_blocks", ")", "complex_optional_roots", ",", "location_to_optional_roots", "=", "location_to_optional_results", "simple_optional_root_info", "=", "extract_simple_optional_location_info", "(", "ir_blocks", ",", "complex_optional_roots", ",", "location_to_optional_roots", ")", "ir_blocks", "=", "remove_end_optionals", "(", "ir_blocks", ")", "# Append global operation block(s) to filter out incorrect results", "# from simple optional match traverses (using a WHERE statement)", "if", "len", "(", "simple_optional_root_info", ")", ">", "0", ":", "where_filter_predicate", "=", "construct_where_filter_predicate", "(", "query_metadata_table", ",", "simple_optional_root_info", ")", "ir_blocks", ".", "insert", "(", "-", "1", ",", "GlobalOperationsStart", "(", ")", ")", "ir_blocks", ".", "insert", "(", "-", "1", ",", "Filter", "(", "where_filter_predicate", ")", ")", "# These lowering / optimization passes work on IR blocks.", "ir_blocks", "=", "lower_context_field_existence", "(", "ir_blocks", ",", "query_metadata_table", ")", "ir_blocks", "=", "optimize_boolean_expression_comparisons", "(", "ir_blocks", ")", "ir_blocks", "=", "rewrite_binary_composition_inside_ternary_conditional", "(", "ir_blocks", ")", "ir_blocks", "=", "merge_consecutive_filter_clauses", "(", "ir_blocks", ")", "ir_blocks", "=", "lower_has_substring_binary_compositions", "(", "ir_blocks", ")", "ir_blocks", "=", "orientdb_eval_scheduling", ".", "workaround_lowering_pass", "(", "ir_blocks", ",", "query_metadata_table", ")", "# Here, we lower from raw IR blocks into a MatchQuery object.", "# From this point on, the lowering / optimization passes work on the MatchQuery representation.", "match_query", "=", "convert_to_match_query", "(", "ir_blocks", ")", "match_query", "=", "lower_comparisons_to_between", "(", "match_query", ")", "match_query", "=", "lower_backtrack_blocks", "(", "match_query", ",", "location_types", ")", "match_query", "=", "truncate_repeated_single_step_traversals", "(", "match_query", ")", "match_query", "=", "orientdb_class_with_while", ".", "workaround_type_coercions_in_recursions", "(", "match_query", ")", "# Optimize and lower the IR blocks inside @fold scopes.", "new_folds", "=", "{", "key", ":", "merge_consecutive_filter_clauses", "(", "remove_backtrack_blocks_from_fold", "(", "lower_folded_coerce_types_into_filter_blocks", "(", "folded_ir_blocks", ")", ")", ")", "for", "key", ",", "folded_ir_blocks", "in", "six", ".", "iteritems", "(", "match_query", ".", "folds", ")", "}", "match_query", "=", "match_query", ".", "_replace", "(", "folds", "=", "new_folds", ")", "compound_match_query", "=", 
"convert_optional_traversals_to_compound_match_query", "(", "match_query", ",", "complex_optional_roots", ",", "location_to_optional_roots", ")", "compound_match_query", "=", "prune_non_existent_outputs", "(", "compound_match_query", ")", "compound_match_query", "=", "collect_filters_to_first_location_occurrence", "(", "compound_match_query", ")", "compound_match_query", "=", "lower_context_field_expressions", "(", "compound_match_query", ")", "compound_match_query", "=", "truncate_repeated_single_step_traversals_in_sub_queries", "(", "compound_match_query", ")", "compound_match_query", "=", "orientdb_query_execution", ".", "expose_ideal_query_execution_start_points", "(", "compound_match_query", ",", "location_types", ",", "coerced_locations", ")", "return", "compound_match_query" ]
Lower the IR into an IR form that can be represented in MATCH queries. Args: ir_blocks: list of IR blocks to lower into MATCH-compatible form query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: MatchQuery object containing the IR blocks organized in a MATCH-like structure
[ "Lower", "the", "IR", "into", "an", "IR", "form", "that", "can", "be", "represented", "in", "MATCH", "queries", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/__init__.py#L31-L128
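An illustrative call shape for the record above. The ir_blocks and query_metadata_table values would come from the compiler frontend pass, and the type names in the hints dict are assumptions, not names from the source:

# Hypothetical: "Entity" is an interface whose subtypes "Animal" and "Species"
# were gathered into a pre-declared union type in the schema.
type_equivalence_hints = {
    schema.get_type('Entity'): schema.get_type('Union__Animal__Species'),
}
compound_match_query = lower_ir(ir_blocks, query_metadata_table,
                                type_equivalence_hints=type_equivalence_hints)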
246,780
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/utils.py
toposort_classes
def toposort_classes(classes): """Sort class metadatas so that a superclass is always before the subclass""" def get_class_topolist(class_name, name_to_class, processed_classes, current_trace): """Return a topologically sorted list of this class's dependencies and class itself Args: class_name: string, name of the class to process name_to_class: dict, class_name -> descriptor processed_classes: set of strings, a set of classes that have already been processed current_trace: list of strings, list of classes traversed during the recursion Returns: list of dicts, list of classes sorted in topological order """ # Check if this class has already been handled if class_name in processed_classes: return [] if class_name in current_trace: raise AssertionError( 'Encountered self-reference in dependency chain of {}'.format(class_name)) cls = name_to_class[class_name] # Collect the dependency classes # These are bases and classes from linked properties dependencies = _list_superclasses(cls) # Recursively process linked edges properties = cls['properties'] if 'properties' in cls else [] for prop in properties: if 'linkedClass' in prop: dependencies.append(prop['linkedClass']) class_list = [] # Recursively process superclasses current_trace.add(class_name) for dependency in dependencies: class_list.extend(get_class_topolist( dependency, name_to_class, processed_classes, current_trace)) current_trace.remove(class_name) # Do the bookkeeping class_list.append(name_to_class[class_name]) processed_classes.add(class_name) return class_list # Map names to classes class_map = {c['name']: c for c in classes} seen_classes = set() toposorted = [] for name in class_map.keys(): toposorted.extend(get_class_topolist(name, class_map, seen_classes, set())) return toposorted
python
def toposort_classes(classes): def get_class_topolist(class_name, name_to_class, processed_classes, current_trace): """Return a topologically sorted list of this class's dependencies and class itself Args: class_name: string, name of the class to process name_to_class: dict, class_name -> descriptor processed_classes: set of strings, a set of classes that have already been processed current_trace: list of strings, list of classes traversed during the recursion Returns: list of dicts, list of classes sorted in topological order """ # Check if this class has already been handled if class_name in processed_classes: return [] if class_name in current_trace: raise AssertionError( 'Encountered self-reference in dependency chain of {}'.format(class_name)) cls = name_to_class[class_name] # Collect the dependency classes # These are bases and classes from linked properties dependencies = _list_superclasses(cls) # Recursively process linked edges properties = cls['properties'] if 'properties' in cls else [] for prop in properties: if 'linkedClass' in prop: dependencies.append(prop['linkedClass']) class_list = [] # Recursively process superclasses current_trace.add(class_name) for dependency in dependencies: class_list.extend(get_class_topolist( dependency, name_to_class, processed_classes, current_trace)) current_trace.remove(class_name) # Do the bookkeeping class_list.append(name_to_class[class_name]) processed_classes.add(class_name) return class_list # Map names to classes class_map = {c['name']: c for c in classes} seen_classes = set() toposorted = [] for name in class_map.keys(): toposorted.extend(get_class_topolist(name, class_map, seen_classes, set())) return toposorted
[ "def", "toposort_classes", "(", "classes", ")", ":", "def", "get_class_topolist", "(", "class_name", ",", "name_to_class", ",", "processed_classes", ",", "current_trace", ")", ":", "\"\"\"Return a topologically sorted list of this class's dependencies and class itself\n\n Args:\n class_name: string, name of the class to process\n name_to_class: dict, class_name -> descriptor\n processed_classes: set of strings, a set of classes that have already been processed\n current_trace: list of strings, list of classes traversed during the recursion\n\n Returns:\n list of dicts, list of classes sorted in topological order\n \"\"\"", "# Check if this class has already been handled", "if", "class_name", "in", "processed_classes", ":", "return", "[", "]", "if", "class_name", "in", "current_trace", ":", "raise", "AssertionError", "(", "'Encountered self-reference in dependency chain of {}'", ".", "format", "(", "class_name", ")", ")", "cls", "=", "name_to_class", "[", "class_name", "]", "# Collect the dependency classes", "# These are bases and classes from linked properties", "dependencies", "=", "_list_superclasses", "(", "cls", ")", "# Recursively process linked edges", "properties", "=", "cls", "[", "'properties'", "]", "if", "'properties'", "in", "cls", "else", "[", "]", "for", "prop", "in", "properties", ":", "if", "'linkedClass'", "in", "prop", ":", "dependencies", ".", "append", "(", "prop", "[", "'linkedClass'", "]", ")", "class_list", "=", "[", "]", "# Recursively process superclasses", "current_trace", ".", "add", "(", "class_name", ")", "for", "dependency", "in", "dependencies", ":", "class_list", ".", "extend", "(", "get_class_topolist", "(", "dependency", ",", "name_to_class", ",", "processed_classes", ",", "current_trace", ")", ")", "current_trace", ".", "remove", "(", "class_name", ")", "# Do the bookkeeping", "class_list", ".", "append", "(", "name_to_class", "[", "class_name", "]", ")", "processed_classes", ".", "add", "(", "class_name", ")", "return", "class_list", "# Map names to classes", "class_map", "=", "{", "c", "[", "'name'", "]", ":", "c", "for", "c", "in", "classes", "}", "seen_classes", "=", "set", "(", ")", "toposorted", "=", "[", "]", "for", "name", "in", "class_map", ".", "keys", "(", ")", ":", "toposorted", ".", "extend", "(", "get_class_topolist", "(", "name", ",", "class_map", ",", "seen_classes", ",", "set", "(", ")", ")", ")", "return", "toposorted" ]
Sort class metadatas so that a superclass is always before the subclass
[ "Sort", "class", "metadatas", "so", "that", "a", "superclass", "is", "always", "before", "the", "subclass" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/utils.py#L11-L63
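A worked example with minimal class descriptors; superclasses and linked classes both count as dependencies, so they sort out first:

from graphql_compiler.schema_generation.utils import toposort_classes

classes = [
    {'name': 'Cat', 'superClasses': ['Animal']},
    {'name': 'Animal', 'properties': [{'name': 'home', 'linkedClass': 'Location'}]},
    {'name': 'Location'},
]
ordered = [cls['name'] for cls in toposort_classes(classes)]
# 'Location' precedes 'Animal', which precedes 'Cat':
# ['Location', 'Animal', 'Cat']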
246,781
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/utils.py
_list_superclasses
def _list_superclasses(class_def): """Return a list of the superclasses of the given class""" superclasses = class_def.get('superClasses', []) if superclasses: # Make sure to duplicate the list return list(superclasses) sup = class_def.get('superClass', None) if sup: return [sup] else: return []
python
def _list_superclasses(class_def): superclasses = class_def.get('superClasses', []) if superclasses: # Make sure to duplicate the list return list(superclasses) sup = class_def.get('superClass', None) if sup: return [sup] else: return []
[ "def", "_list_superclasses", "(", "class_def", ")", ":", "superclasses", "=", "class_def", ".", "get", "(", "'superClasses'", ",", "[", "]", ")", "if", "superclasses", ":", "# Make sure to duplicate the list", "return", "list", "(", "superclasses", ")", "sup", "=", "class_def", ".", "get", "(", "'superClass'", ",", "None", ")", "if", "sup", ":", "return", "[", "sup", "]", "else", ":", "return", "[", "]" ]
Return a list of the superclasses of the given class
[ "Return", "a", "list", "of", "the", "superclasses", "of", "the", "given", "class" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/utils.py#L66-L77
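Illustrative inputs covering the three branches above; the plural spelling wins when present, and the returned list is always a fresh copy:

_list_superclasses({'superClasses': ['Animal', 'Entity']})   # ['Animal', 'Entity'] (copied)
_list_superclasses({'superClass': 'Animal'})                 # ['Animal']
_list_superclasses({'name': 'Location'})                     # []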
246,782
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_construct_location_stack_entry
def _construct_location_stack_entry(location, num_traverses): """Return a LocationStackEntry namedtuple with the specified parameters.""" if not isinstance(num_traverses, int) or num_traverses < 0: raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "num_traverses" {}. This is not allowed.' .format(num_traverses)) if not isinstance(location, Location): raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "location" {}. This is not allowed.' .format(location)) return LocationStackEntry(location=location, num_traverses=num_traverses)
python
def _construct_location_stack_entry(location, num_traverses): if not isinstance(num_traverses, int) or num_traverses < 0: raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "num_traverses" {}. This is not allowed.' .format(num_traverses)) if not isinstance(location, Location): raise AssertionError(u'Attempted to create a LocationStackEntry namedtuple with an invalid ' u'value for "location" {}. This is not allowed.' .format(location)) return LocationStackEntry(location=location, num_traverses=num_traverses)
[ "def", "_construct_location_stack_entry", "(", "location", ",", "num_traverses", ")", ":", "if", "not", "isinstance", "(", "num_traverses", ",", "int", ")", "or", "num_traverses", "<", "0", ":", "raise", "AssertionError", "(", "u'Attempted to create a LocationStackEntry namedtuple with an invalid '", "u'value for \"num_traverses\" {}. This is not allowed.'", ".", "format", "(", "num_traverses", ")", ")", "if", "not", "isinstance", "(", "location", ",", "Location", ")", ":", "raise", "AssertionError", "(", "u'Attempted to create a LocationStackEntry namedtuple with an invalid '", "u'value for \"location\" {}. This is not allowed.'", ".", "format", "(", "location", ")", ")", "return", "LocationStackEntry", "(", "location", "=", "location", ",", "num_traverses", "=", "num_traverses", ")" ]
Return a LocationStackEntry namedtuple with the specified parameters.
[ "Return", "a", "LocationStackEntry", "namedtuple", "with", "the", "specified", "parameters", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L103-L113
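A sketch of the guard, assuming the Location helper takes a query-path tuple as in the compiler's helpers module:

from graphql_compiler.compiler.helpers import Location  # import path assumed

entry = _construct_location_stack_entry(Location(('Animal',)), 0)
_construct_location_stack_entry(Location(('Animal',)), -1)   # AssertionError: bad num_traverses
_construct_location_stack_entry(('Animal',), 0)              # AssertionError: not a Location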
246,783
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_get_fields
def _get_fields(ast): """Return a list of vertex fields, and a list of property fields, for the given AST node. Also verifies that all property fields for the AST node appear before all vertex fields, raising GraphQLCompilationError if that is not the case. Args: ast: GraphQL AST node, obtained from the graphql library Returns: tuple of two lists - the first list contains ASTs for vertex fields - the second list contains ASTs for property fields """ if not ast.selection_set: # There are no child fields. return [], [] property_fields = [] vertex_fields = [] seen_field_names = set() switched_to_vertices = False # Ensures that all property fields are before all vertex fields. for field_ast in ast.selection_set.selections: if not isinstance(field_ast, Field): # We are getting Fields only, ignore everything else. continue name = get_ast_field_name(field_ast) if name in seen_field_names: # If we ever allow repeated field names, # then we have to change the Location naming scheme to reflect the repetitions # and disambiguate between Recurse and Traverse visits to a Location. raise GraphQLCompilationError(u'Encountered repeated field name: {}'.format(name)) seen_field_names.add(name) # Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex. if is_vertex_field_name(name): switched_to_vertices = True vertex_fields.append(field_ast) else: if switched_to_vertices: raise GraphQLCompilationError(u'Encountered property field {} ' u'after vertex fields!'.format(name)) property_fields.append(field_ast) return vertex_fields, property_fields
python
def _get_fields(ast): if not ast.selection_set: # There are no child fields. return [], [] property_fields = [] vertex_fields = [] seen_field_names = set() switched_to_vertices = False # Ensures that all property fields are before all vertex fields. for field_ast in ast.selection_set.selections: if not isinstance(field_ast, Field): # We are getting Fields only, ignore everything else. continue name = get_ast_field_name(field_ast) if name in seen_field_names: # If we ever allow repeated field names, # then we have to change the Location naming scheme to reflect the repetitions # and disambiguate between Recurse and Traverse visits to a Location. raise GraphQLCompilationError(u'Encountered repeated field name: {}'.format(name)) seen_field_names.add(name) # Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex. if is_vertex_field_name(name): switched_to_vertices = True vertex_fields.append(field_ast) else: if switched_to_vertices: raise GraphQLCompilationError(u'Encountered property field {} ' u'after vertex fields!'.format(name)) property_fields.append(field_ast) return vertex_fields, property_fields
[ "def", "_get_fields", "(", "ast", ")", ":", "if", "not", "ast", ".", "selection_set", ":", "# There are no child fields.", "return", "[", "]", ",", "[", "]", "property_fields", "=", "[", "]", "vertex_fields", "=", "[", "]", "seen_field_names", "=", "set", "(", ")", "switched_to_vertices", "=", "False", "# Ensures that all property fields are before all vertex fields.", "for", "field_ast", "in", "ast", ".", "selection_set", ".", "selections", ":", "if", "not", "isinstance", "(", "field_ast", ",", "Field", ")", ":", "# We are getting Fields only, ignore everything else.", "continue", "name", "=", "get_ast_field_name", "(", "field_ast", ")", "if", "name", "in", "seen_field_names", ":", "# If we ever allow repeated field names,", "# then we have to change the Location naming scheme to reflect the repetitions", "# and disambiguate between Recurse and Traverse visits to a Location.", "raise", "GraphQLCompilationError", "(", "u'Encountered repeated field name: {}'", ".", "format", "(", "name", ")", ")", "seen_field_names", ".", "add", "(", "name", ")", "# Vertex fields start with 'out_' or 'in_', denoting the edge direction to that vertex.", "if", "is_vertex_field_name", "(", "name", ")", ":", "switched_to_vertices", "=", "True", "vertex_fields", ".", "append", "(", "field_ast", ")", "else", ":", "if", "switched_to_vertices", ":", "raise", "GraphQLCompilationError", "(", "u'Encountered property field {} '", "u'after vertex fields!'", ".", "format", "(", "name", ")", ")", "property_fields", ".", "append", "(", "field_ast", ")", "return", "vertex_fields", ",", "property_fields" ]
Return a list of vertex fields, and a list of property fields, for the given AST node. Also verifies that all property fields for the AST node appear before all vertex fields, raising GraphQLCompilationError if that is not the case. Args: ast: GraphQL AST node, obtained from the graphql library Returns: tuple of two lists - the first list contains ASTs for vertex fields - the second list contains ASTs for property fields
[ "Return", "a", "list", "of", "vertex", "fields", "and", "a", "list", "of", "property", "fields", "for", "the", "given", "AST", "node", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L142-L187
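A sketch of the split, using graphql-core's parser; the query text and edge name are illustrative:

from graphql import parse

document = parse('{ Animal { name out_Animal_ParentOf { name } } }')
animal_ast = document.definitions[0].selection_set.selections[0]
vertex_fields, property_fields = _get_fields(animal_ast)
# property_fields holds the "name" field; vertex_fields holds
# "out_Animal_ParentOf". Listing a property field after a vertex field
# would raise GraphQLCompilationError instead.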
246,784
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_get_inline_fragment
def _get_inline_fragment(ast): """Return the inline fragment at the current AST node, or None if no fragment exists.""" if not ast.selection_set: # There is nothing selected here, so no fragment. return None fragments = [ ast_node for ast_node in ast.selection_set.selections if isinstance(ast_node, InlineFragment) ] if not fragments: return None if len(fragments) > 1: raise GraphQLCompilationError(u'Cannot compile GraphQL with more than one fragment in ' u'a given selection set.') return fragments[0]
python
def _get_inline_fragment(ast): if not ast.selection_set: # There is nothing selected here, so no fragment. return None fragments = [ ast_node for ast_node in ast.selection_set.selections if isinstance(ast_node, InlineFragment) ] if not fragments: return None if len(fragments) > 1: raise GraphQLCompilationError(u'Cannot compile GraphQL with more than one fragment in ' u'a given selection set.') return fragments[0]
[ "def", "_get_inline_fragment", "(", "ast", ")", ":", "if", "not", "ast", ".", "selection_set", ":", "# There is nothing selected here, so no fragment.", "return", "None", "fragments", "=", "[", "ast_node", "for", "ast_node", "in", "ast", ".", "selection_set", ".", "selections", "if", "isinstance", "(", "ast_node", ",", "InlineFragment", ")", "]", "if", "not", "fragments", ":", "return", "None", "if", "len", "(", "fragments", ")", ">", "1", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot compile GraphQL with more than one fragment in '", "u'a given selection set.'", ")", "return", "fragments", "[", "0", "]" ]
Return the inline fragment at the current AST node, or None if no fragment exists.
[ "Return", "the", "inline", "fragment", "at", "the", "current", "AST", "node", "or", "None", "if", "no", "fragment", "exists", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L190-L209
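A sketch with a parsed query; the type coercion below is the lone inline fragment the helper returns:

from graphql import parse

document = parse('{ Species { out_Entity_Related { ... on Animal { name } } } }')
species_ast = document.definitions[0].selection_set.selections[0]
related_ast = species_ast.selection_set.selections[0]
_get_inline_fragment(related_ast)   # the "... on Animal" InlineFragment
_get_inline_fragment(species_ast)   # None -- no fragment at this level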
246,785
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_process_output_source_directive
def _process_output_source_directive(schema, current_schema_type, ast, location, context, local_unique_directives): """Process the output_source directive, modifying the context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! local_unique_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* Returns: an OutputSource block, if one should be emitted, or None otherwise """ # The 'ast' variable is only for function signature uniformity, and is currently not used. output_source_directive = local_unique_directives.get('output_source', None) if output_source_directive: if has_encountered_output_source(context): raise GraphQLCompilationError(u'Cannot have more than one output source!') if is_in_optional_scope(context): raise GraphQLCompilationError(u'Cannot have the output source in an optional block!') set_output_source_data(context, location) return blocks.OutputSource() else: return None
python
def _process_output_source_directive(schema, current_schema_type, ast, location, context, local_unique_directives): # The 'ast' variable is only for function signature uniformity, and is currently not used. output_source_directive = local_unique_directives.get('output_source', None) if output_source_directive: if has_encountered_output_source(context): raise GraphQLCompilationError(u'Cannot have more than one output source!') if is_in_optional_scope(context): raise GraphQLCompilationError(u'Cannot have the output source in an optional block!') set_output_source_data(context, location) return blocks.OutputSource() else: return None
[ "def", "_process_output_source_directive", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ",", "local_unique_directives", ")", ":", "# The 'ast' variable is only for function signature uniformity, and is currently not used.", "output_source_directive", "=", "local_unique_directives", ".", "get", "(", "'output_source'", ",", "None", ")", "if", "output_source_directive", ":", "if", "has_encountered_output_source", "(", "context", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot have more than one output source!'", ")", "if", "is_in_optional_scope", "(", "context", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot have the output source in an optional block!'", ")", "set_output_source_data", "(", "context", ",", "location", ")", "return", "blocks", ".", "OutputSource", "(", ")", "else", ":", "return", "None" ]
Process the output_source directive, modifying the context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! local_unique_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* Returns: an OutputSource block, if one should be emitted, or None otherwise
[ "Process", "the", "output_source", "directive", "modifying", "the", "context", "as", "appropriate", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L217-L244
246,786
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_compile_property_ast
def _compile_property_ast(schema, current_schema_type, ast, location, context, unique_local_directives): """Process property directives at this AST node, updating the query context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. Only for function signature uniformity at the moment -- it is currently not used. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! unique_local_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only* """ validate_property_directives(unique_local_directives) if location.field == COUNT_META_FIELD_NAME: # Verify that uses of this field are within a @fold scope. if not is_in_fold_scope(context): raise GraphQLCompilationError(u'Cannot use the "{}" meta field when not within a @fold ' u'vertex field, as counting elements only makes sense ' u'in a fold. Location: {}' .format(COUNT_META_FIELD_NAME, location)) # step P-2: process property-only directives tag_directive = unique_local_directives.get('tag', None) if tag_directive: if is_in_fold_scope(context): raise GraphQLCompilationError(u'Tagging values within a @fold vertex field is ' u'not allowed! Location: {}'.format(location)) if location.field == COUNT_META_FIELD_NAME: raise AssertionError(u'Tags are prohibited within @fold, but unexpectedly found use of ' u'a tag on the {} meta field that is only allowed within a @fold!' u'Location: {}' .format(COUNT_META_FIELD_NAME, location)) # Schema validation has ensured that the fields below exist. tag_name = tag_directive.arguments[0].value.value if tag_name in context['tags']: raise GraphQLCompilationError(u'Cannot reuse tag name: {}'.format(tag_name)) validate_safe_string(tag_name) context['tags'][tag_name] = { 'location': location, 'optional': is_in_optional_scope(context), 'type': strip_non_null_from_type(current_schema_type), } context['metadata'].record_tag_info(tag_name, TagInfo(location=location)) output_directive = unique_local_directives.get('output', None) if output_directive: # Schema validation has ensured that the fields below exist. output_name = output_directive.arguments[0].value.value if output_name in context['outputs']: raise GraphQLCompilationError(u'Cannot reuse output name: ' u'{}, {}'.format(output_name, context)) validate_safe_string(output_name) validate_output_name(output_name) graphql_type = strip_non_null_from_type(current_schema_type) if is_in_fold_scope(context): # Fold outputs are only allowed at the last level of traversal. set_fold_innermost_scope(context) if location.field != COUNT_META_FIELD_NAME: graphql_type = GraphQLList(graphql_type) context['outputs'][output_name] = { 'location': location, 'optional': is_in_optional_scope(context), 'type': graphql_type, 'fold': context.get('fold', None), }
python
def _compile_property_ast(schema, current_schema_type, ast, location, context, unique_local_directives): validate_property_directives(unique_local_directives) if location.field == COUNT_META_FIELD_NAME: # Verify that uses of this field are within a @fold scope. if not is_in_fold_scope(context): raise GraphQLCompilationError(u'Cannot use the "{}" meta field when not within a @fold ' u'vertex field, as counting elements only makes sense ' u'in a fold. Location: {}' .format(COUNT_META_FIELD_NAME, location)) # step P-2: process property-only directives tag_directive = unique_local_directives.get('tag', None) if tag_directive: if is_in_fold_scope(context): raise GraphQLCompilationError(u'Tagging values within a @fold vertex field is ' u'not allowed! Location: {}'.format(location)) if location.field == COUNT_META_FIELD_NAME: raise AssertionError(u'Tags are prohibited within @fold, but unexpectedly found use of ' u'a tag on the {} meta field that is only allowed within a @fold!' u'Location: {}' .format(COUNT_META_FIELD_NAME, location)) # Schema validation has ensured that the fields below exist. tag_name = tag_directive.arguments[0].value.value if tag_name in context['tags']: raise GraphQLCompilationError(u'Cannot reuse tag name: {}'.format(tag_name)) validate_safe_string(tag_name) context['tags'][tag_name] = { 'location': location, 'optional': is_in_optional_scope(context), 'type': strip_non_null_from_type(current_schema_type), } context['metadata'].record_tag_info(tag_name, TagInfo(location=location)) output_directive = unique_local_directives.get('output', None) if output_directive: # Schema validation has ensured that the fields below exist. output_name = output_directive.arguments[0].value.value if output_name in context['outputs']: raise GraphQLCompilationError(u'Cannot reuse output name: ' u'{}, {}'.format(output_name, context)) validate_safe_string(output_name) validate_output_name(output_name) graphql_type = strip_non_null_from_type(current_schema_type) if is_in_fold_scope(context): # Fold outputs are only allowed at the last level of traversal. set_fold_innermost_scope(context) if location.field != COUNT_META_FIELD_NAME: graphql_type = GraphQLList(graphql_type) context['outputs'][output_name] = { 'location': location, 'optional': is_in_optional_scope(context), 'type': graphql_type, 'fold': context.get('fold', None), }
[ "def", "_compile_property_ast", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ",", "unique_local_directives", ")", ":", "validate_property_directives", "(", "unique_local_directives", ")", "if", "location", ".", "field", "==", "COUNT_META_FIELD_NAME", ":", "# Verify that uses of this field are within a @fold scope.", "if", "not", "is_in_fold_scope", "(", "context", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot use the \"{}\" meta field when not within a @fold '", "u'vertex field, as counting elements only makes sense '", "u'in a fold. Location: {}'", ".", "format", "(", "COUNT_META_FIELD_NAME", ",", "location", ")", ")", "# step P-2: process property-only directives", "tag_directive", "=", "unique_local_directives", ".", "get", "(", "'tag'", ",", "None", ")", "if", "tag_directive", ":", "if", "is_in_fold_scope", "(", "context", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Tagging values within a @fold vertex field is '", "u'not allowed! Location: {}'", ".", "format", "(", "location", ")", ")", "if", "location", ".", "field", "==", "COUNT_META_FIELD_NAME", ":", "raise", "AssertionError", "(", "u'Tags are prohibited within @fold, but unexpectedly found use of '", "u'a tag on the {} meta field that is only allowed within a @fold!'", "u'Location: {}'", ".", "format", "(", "COUNT_META_FIELD_NAME", ",", "location", ")", ")", "# Schema validation has ensured that the fields below exist.", "tag_name", "=", "tag_directive", ".", "arguments", "[", "0", "]", ".", "value", ".", "value", "if", "tag_name", "in", "context", "[", "'tags'", "]", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot reuse tag name: {}'", ".", "format", "(", "tag_name", ")", ")", "validate_safe_string", "(", "tag_name", ")", "context", "[", "'tags'", "]", "[", "tag_name", "]", "=", "{", "'location'", ":", "location", ",", "'optional'", ":", "is_in_optional_scope", "(", "context", ")", ",", "'type'", ":", "strip_non_null_from_type", "(", "current_schema_type", ")", ",", "}", "context", "[", "'metadata'", "]", ".", "record_tag_info", "(", "tag_name", ",", "TagInfo", "(", "location", "=", "location", ")", ")", "output_directive", "=", "unique_local_directives", ".", "get", "(", "'output'", ",", "None", ")", "if", "output_directive", ":", "# Schema validation has ensured that the fields below exist.", "output_name", "=", "output_directive", ".", "arguments", "[", "0", "]", ".", "value", ".", "value", "if", "output_name", "in", "context", "[", "'outputs'", "]", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot reuse output name: '", "u'{}, {}'", ".", "format", "(", "output_name", ",", "context", ")", ")", "validate_safe_string", "(", "output_name", ")", "validate_output_name", "(", "output_name", ")", "graphql_type", "=", "strip_non_null_from_type", "(", "current_schema_type", ")", "if", "is_in_fold_scope", "(", "context", ")", ":", "# Fold outputs are only allowed at the last level of traversal.", "set_fold_innermost_scope", "(", "context", ")", "if", "location", ".", "field", "!=", "COUNT_META_FIELD_NAME", ":", "graphql_type", "=", "GraphQLList", "(", "graphql_type", ")", "context", "[", "'outputs'", "]", "[", "output_name", "]", "=", "{", "'location'", ":", "location", ",", "'optional'", ":", "is_in_optional_scope", "(", "context", ")", ",", "'type'", ":", "graphql_type", ",", "'fold'", ":", "context", ".", "get", "(", "'fold'", ",", "None", ")", ",", "}" ]
Process property directives at this AST node, updating the query context as appropriate. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. Only for function signature uniformity at the moment -- it is currently not used. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! unique_local_directives: dict, directive name string -> directive object, containing unique directives present on the current AST node *only*
[ "Process", "property", "directives", "at", "this", "AST", "node", "updating", "the", "query", "context", "as", "appropriate", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L247-L320
246,787
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_get_recurse_directive_depth
def _get_recurse_directive_depth(field_name, field_directives): """Validate and return the depth parameter of the recurse directive.""" recurse_directive = field_directives['recurse'] optional_directive = field_directives.get('optional', None) if optional_directive: raise GraphQLCompilationError(u'Found both @optional and @recurse on ' u'the same vertex field: {}'.format(field_name)) recurse_args = get_uniquely_named_objects_by_name(recurse_directive.arguments) recurse_depth = int(recurse_args['depth'].value.value) if recurse_depth < 1: raise GraphQLCompilationError(u'Found recurse directive with disallowed depth: ' u'{}'.format(recurse_depth)) return recurse_depth
python
def _get_recurse_directive_depth(field_name, field_directives): recurse_directive = field_directives['recurse'] optional_directive = field_directives.get('optional', None) if optional_directive: raise GraphQLCompilationError(u'Found both @optional and @recurse on ' u'the same vertex field: {}'.format(field_name)) recurse_args = get_uniquely_named_objects_by_name(recurse_directive.arguments) recurse_depth = int(recurse_args['depth'].value.value) if recurse_depth < 1: raise GraphQLCompilationError(u'Found recurse directive with disallowed depth: ' u'{}'.format(recurse_depth)) return recurse_depth
[ "def", "_get_recurse_directive_depth", "(", "field_name", ",", "field_directives", ")", ":", "recurse_directive", "=", "field_directives", "[", "'recurse'", "]", "optional_directive", "=", "field_directives", ".", "get", "(", "'optional'", ",", "None", ")", "if", "optional_directive", ":", "raise", "GraphQLCompilationError", "(", "u'Found both @optional and @recurse on '", "u'the same vertex field: {}'", ".", "format", "(", "field_name", ")", ")", "recurse_args", "=", "get_uniquely_named_objects_by_name", "(", "recurse_directive", ".", "arguments", ")", "recurse_depth", "=", "int", "(", "recurse_args", "[", "'depth'", "]", ".", "value", ".", "value", ")", "if", "recurse_depth", "<", "1", ":", "raise", "GraphQLCompilationError", "(", "u'Found recurse directive with disallowed depth: '", "u'{}'", ".", "format", "(", "recurse_depth", ")", ")", "return", "recurse_depth" ]
Validate and return the depth parameter of the recurse directive.
[ "Validate", "and", "return", "the", "depth", "parameter", "of", "the", "recurse", "directive", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L323-L338
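The depth check in _get_recurse_directive_depth can be mimicked in isolation. A minimal sketch, assuming a plain dict stands in for the parsed directive arguments (it is not the library's real AST type):

def check_recurse_depth(field_name, directives):
    # @optional and @recurse are mutually exclusive on a vertex field.
    if 'optional' in directives and 'recurse' in directives:
        raise ValueError(u'Found both @optional and @recurse on '
                         u'the same vertex field: {}'.format(field_name))
    # The depth argument must be a positive integer.
    depth = int(directives['recurse']['depth'])
    if depth < 1:
        raise ValueError(u'Found recurse directive with disallowed '
                         u'depth: {}'.format(depth))
    return depth

assert check_recurse_depth('out_Animal_ParentOf', {'recurse': {'depth': 2}}) == 2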
246,788
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_validate_recurse_directive_types
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context): """Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! """ # Get the set of all allowed types in the current scope. type_hints = context['type_equivalence_hints'].get(field_schema_type) type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type) allowed_current_types = {field_schema_type} if type_hints and isinstance(type_hints, GraphQLUnionType): allowed_current_types.update(type_hints.types) if type_hints_inverse and isinstance(type_hints_inverse, GraphQLUnionType): allowed_current_types.update(type_hints_inverse.types) # The current scope must be of the same type as the field scope, or an acceptable subtype. current_scope_is_allowed = current_schema_type in allowed_current_types is_implemented_interface = ( isinstance(field_schema_type, GraphQLInterfaceType) and isinstance(current_schema_type, GraphQLObjectType) and field_schema_type in current_schema_type.interfaces ) if not any((current_scope_is_allowed, is_implemented_interface)): raise GraphQLCompilationError(u'Edges expanded with a @recurse directive must either ' u'be of the same type as their enclosing scope, a supertype ' u'of the enclosing scope, or be of an interface type that is ' u'implemented by the type of their enclosing scope. ' u'Enclosing scope type: {}, edge type: ' u'{}'.format(current_schema_type, field_schema_type))
python
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context): # Get the set of all allowed types in the current scope. type_hints = context['type_equivalence_hints'].get(field_schema_type) type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type) allowed_current_types = {field_schema_type} if type_hints and isinstance(type_hints, GraphQLUnionType): allowed_current_types.update(type_hints.types) if type_hints_inverse and isinstance(type_hints_inverse, GraphQLUnionType): allowed_current_types.update(type_hints_inverse.types) # The current scope must be of the same type as the field scope, or an acceptable subtype. current_scope_is_allowed = current_schema_type in allowed_current_types is_implemented_interface = ( isinstance(field_schema_type, GraphQLInterfaceType) and isinstance(current_schema_type, GraphQLObjectType) and field_schema_type in current_schema_type.interfaces ) if not any((current_scope_is_allowed, is_implemented_interface)): raise GraphQLCompilationError(u'Edges expanded with a @recurse directive must either ' u'be of the same type as their enclosing scope, a supertype ' u'of the enclosing scope, or be of an interface type that is ' u'implemented by the type of their enclosing scope. ' u'Enclosing scope type: {}, edge type: ' u'{}'.format(current_schema_type, field_schema_type))
[ "def", "_validate_recurse_directive_types", "(", "current_schema_type", ",", "field_schema_type", ",", "context", ")", ":", "# Get the set of all allowed types in the current scope.", "type_hints", "=", "context", "[", "'type_equivalence_hints'", "]", ".", "get", "(", "field_schema_type", ")", "type_hints_inverse", "=", "context", "[", "'type_equivalence_hints_inverse'", "]", ".", "get", "(", "field_schema_type", ")", "allowed_current_types", "=", "{", "field_schema_type", "}", "if", "type_hints", "and", "isinstance", "(", "type_hints", ",", "GraphQLUnionType", ")", ":", "allowed_current_types", ".", "update", "(", "type_hints", ".", "types", ")", "if", "type_hints_inverse", "and", "isinstance", "(", "type_hints_inverse", ",", "GraphQLUnionType", ")", ":", "allowed_current_types", ".", "update", "(", "type_hints_inverse", ".", "types", ")", "# The current scope must be of the same type as the field scope, or an acceptable subtype.", "current_scope_is_allowed", "=", "current_schema_type", "in", "allowed_current_types", "is_implemented_interface", "=", "(", "isinstance", "(", "field_schema_type", ",", "GraphQLInterfaceType", ")", "and", "isinstance", "(", "current_schema_type", ",", "GraphQLObjectType", ")", "and", "field_schema_type", "in", "current_schema_type", ".", "interfaces", ")", "if", "not", "any", "(", "(", "current_scope_is_allowed", ",", "is_implemented_interface", ")", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Edges expanded with a @recurse directive must either '", "u'be of the same type as their enclosing scope, a supertype '", "u'of the enclosing scope, or be of an interface type that is '", "u'implemented by the type of their enclosing scope. '", "u'Enclosing scope type: {}, edge type: '", "u'{}'", ".", "format", "(", "current_schema_type", ",", "field_schema_type", ")", ")" ]
Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function!
[ "Perform", "type", "checks", "on", "the", "enclosing", "type", "and", "the", "recursed", "type", "for", "a", "recurse", "directive", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L341-L376
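A standalone sketch of the allowed-types computation in _validate_recurse_directive_types, with strings standing in for GraphQL type objects and plain dicts for the equivalence hints (all names are assumptions):

def allowed_recursion_scope_types(field_type, hints, hints_inverse):
    # The edge's own type is always allowed; any union it is declared
    # equivalent to (in either direction) contributes its member types too.
    allowed = {field_type}
    allowed.update(hints.get(field_type, ()))
    allowed.update(hints_inverse.get(field_type, ()))
    return allowed

hints = {'Species': {'Species', 'Subspecies'}}  # hypothetical union membership
print(allowed_recursion_scope_types('Species', hints, {}))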
246,789
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_compile_fragment_ast
def _compile_fragment_ast(schema, current_schema_type, ast, location, context): """Return a list of basic blocks corresponding to the inline fragment at this AST node. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks, the compiled output of the vertex AST node """ query_metadata_table = context['metadata'] # step F-2. Emit a type coercion block if appropriate, # then recurse into the fragment's selection. coerces_to_type_name = ast.type_condition.name.value coerces_to_type_obj = schema.get_type(coerces_to_type_name) basic_blocks = [] # Check if the coercion is necessary. # No coercion is necessary if coercing to the current type of the scope, # or if the scope is of union type, to the base type of the union as defined by # the type_equivalence_hints compilation parameter. is_same_type_as_scope = current_schema_type.is_same_type(coerces_to_type_obj) equivalent_union_type = context['type_equivalence_hints'].get(coerces_to_type_obj, None) is_base_type_of_union = ( isinstance(current_schema_type, GraphQLUnionType) and current_schema_type.is_same_type(equivalent_union_type) ) if not (is_same_type_as_scope or is_base_type_of_union): # Coercion is required. query_metadata_table.record_coercion_at_location(location, coerces_to_type_obj) basic_blocks.append(blocks.CoerceType({coerces_to_type_name})) inner_basic_blocks = _compile_ast_node_to_ir( schema, coerces_to_type_obj, ast, location, context) basic_blocks.extend(inner_basic_blocks) return basic_blocks
python
def _compile_fragment_ast(schema, current_schema_type, ast, location, context): query_metadata_table = context['metadata'] # step F-2. Emit a type coercion block if appropriate, # then recurse into the fragment's selection. coerces_to_type_name = ast.type_condition.name.value coerces_to_type_obj = schema.get_type(coerces_to_type_name) basic_blocks = [] # Check if the coercion is necessary. # No coercion is necessary if coercing to the current type of the scope, # or if the scope is of union type, to the base type of the union as defined by # the type_equivalence_hints compilation parameter. is_same_type_as_scope = current_schema_type.is_same_type(coerces_to_type_obj) equivalent_union_type = context['type_equivalence_hints'].get(coerces_to_type_obj, None) is_base_type_of_union = ( isinstance(current_schema_type, GraphQLUnionType) and current_schema_type.is_same_type(equivalent_union_type) ) if not (is_same_type_as_scope or is_base_type_of_union): # Coercion is required. query_metadata_table.record_coercion_at_location(location, coerces_to_type_obj) basic_blocks.append(blocks.CoerceType({coerces_to_type_name})) inner_basic_blocks = _compile_ast_node_to_ir( schema, coerces_to_type_obj, ast, location, context) basic_blocks.extend(inner_basic_blocks) return basic_blocks
[ "def", "_compile_fragment_ast", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ")", ":", "query_metadata_table", "=", "context", "[", "'metadata'", "]", "# step F-2. Emit a type coercion block if appropriate,", "# then recurse into the fragment's selection.", "coerces_to_type_name", "=", "ast", ".", "type_condition", ".", "name", ".", "value", "coerces_to_type_obj", "=", "schema", ".", "get_type", "(", "coerces_to_type_name", ")", "basic_blocks", "=", "[", "]", "# Check if the coercion is necessary.", "# No coercion is necessary if coercing to the current type of the scope,", "# or if the scope is of union type, to the base type of the union as defined by", "# the type_equivalence_hints compilation parameter.", "is_same_type_as_scope", "=", "current_schema_type", ".", "is_same_type", "(", "coerces_to_type_obj", ")", "equivalent_union_type", "=", "context", "[", "'type_equivalence_hints'", "]", ".", "get", "(", "coerces_to_type_obj", ",", "None", ")", "is_base_type_of_union", "=", "(", "isinstance", "(", "current_schema_type", ",", "GraphQLUnionType", ")", "and", "current_schema_type", ".", "is_same_type", "(", "equivalent_union_type", ")", ")", "if", "not", "(", "is_same_type_as_scope", "or", "is_base_type_of_union", ")", ":", "# Coercion is required.", "query_metadata_table", ".", "record_coercion_at_location", "(", "location", ",", "coerces_to_type_obj", ")", "basic_blocks", ".", "append", "(", "blocks", ".", "CoerceType", "(", "{", "coerces_to_type_name", "}", ")", ")", "inner_basic_blocks", "=", "_compile_ast_node_to_ir", "(", "schema", ",", "coerces_to_type_obj", ",", "ast", ",", "location", ",", "context", ")", "basic_blocks", ".", "extend", "(", "inner_basic_blocks", ")", "return", "basic_blocks" ]
Return a list of basic blocks corresponding to the inline fragment at this AST node. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: GraphQL AST node, obtained from the graphql library. location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks, the compiled output of the vertex AST node
[ "Return", "a", "list", "of", "basic", "blocks", "corresponding", "to", "the", "inline", "fragment", "at", "this", "AST", "node", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L583-L626
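The coercion decision in _compile_fragment_ast reduces to two checks. A minimal sketch with strings standing in for GraphQL type objects (assumed names):

def needs_type_coercion(scope_type, fragment_type, union_base_of):
    # No coercion if the fragment targets the scope's own type, or if a
    # union-typed scope coerces to that union's designated base type
    # (per the type_equivalence_hints parameter).
    same_type = scope_type == fragment_type
    base_of_union = union_base_of.get(fragment_type) == scope_type
    return not (same_type or base_of_union)

print(needs_type_coercion('Animal', 'Animal', {}))           # False
print(needs_type_coercion('Union__Animal', 'Animal',
                          {'Animal': 'Union__Animal'}))      # False
print(needs_type_coercion('Entity', 'Animal', {}))           # True -> emit CoerceType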
246,790
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_compile_ast_node_to_ir
def _compile_ast_node_to_ir(schema, current_schema_type, ast, location, context): """Compile the given GraphQL AST node into a list of basic blocks. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: the current GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks corresponding to this GraphQL AST node """ basic_blocks = [] # step 0: preprocessing local_unique_directives = get_unique_directives(ast) fields = _get_fields(ast) vertex_fields, property_fields = fields fragment = _get_inline_fragment(ast) filter_operations = get_local_filter_directives( ast, current_schema_type, vertex_fields) # We don't support type coercion while at the same time selecting fields. # Either there are no fields, or there is no fragment, otherwise we raise a compilation error. fragment_exists = fragment is not None fields_exist = vertex_fields or property_fields if fragment_exists and fields_exist: raise GraphQLCompilationError(u'Cannot compile GraphQL that has inline fragment and ' u'selected fields in the same selection. Please move the ' u'selected fields inside the inline fragment.') if location.field is not None: # we're at a property field # sanity-check: cannot have an inline fragment at a property field if fragment_exists: raise AssertionError(u'Found inline fragment at a property field: ' u'{} {}'.format(location, fragment)) # sanity-check: locations at properties don't have their own property locations if len(property_fields) > 0: raise AssertionError(u'Found property fields on a property field: ' u'{} {}'.format(location, property_fields)) # step 1: apply local filter, if any for filter_operation_info in filter_operations: filter_block = process_filter_directive(filter_operation_info, location, context) if isinstance(location, FoldScopeLocation) and location.field == COUNT_META_FIELD_NAME: # Filtering on the fold count field is only allowed at the innermost scope of a fold. set_fold_innermost_scope(context) # This Filter is going in the global operations section of the query, so it cannot # use LocalField expressions since there is no "local" location to use. # Rewrite it so that all references of data at a location instead use ContextFields. expected_field = expressions.LocalField(COUNT_META_FIELD_NAME) replacement_field = expressions.FoldedContextField(location, GraphQLInt) visitor_fn = expressions.make_replacement_visitor(expected_field, replacement_field) filter_block = filter_block.visit_and_update_expressions(visitor_fn) visitor_fn = expressions.make_type_replacement_visitor( expressions.ContextField, lambda context_field: expressions.GlobalContextField( context_field.location, context_field.field_type)) filter_block = filter_block.visit_and_update_expressions(visitor_fn) set_fold_count_filter(context) context['global_filters'].append(filter_block) else: basic_blocks.append(filter_block) if location.field is not None: # The location is at a property, compile the property data following P-steps. _compile_property_ast(schema, current_schema_type, ast, location, context, local_unique_directives) else: # The location is at a vertex. if fragment_exists: # Compile the fragment data following F-steps. # N.B.: Note that the "fragment" variable is the fragment's AST. Since we've asserted # that the fragment is the only part of the selection set at the current AST node, # we pass the "fragment" in the AST parameter of the _compile_fragment_ast() # function, rather than the current AST node as in the other compilation steps. basic_blocks.extend( _compile_fragment_ast(schema, current_schema_type, fragment, location, context)) else: # Compile the vertex data following V-steps. basic_blocks.extend( _compile_vertex_ast(schema, current_schema_type, ast, location, context, local_unique_directives, fields)) return basic_blocks
python
def _compile_ast_node_to_ir(schema, current_schema_type, ast, location, context): basic_blocks = [] # step 0: preprocessing local_unique_directives = get_unique_directives(ast) fields = _get_fields(ast) vertex_fields, property_fields = fields fragment = _get_inline_fragment(ast) filter_operations = get_local_filter_directives( ast, current_schema_type, vertex_fields) # We don't support type coercion while at the same time selecting fields. # Either there are no fields, or there is no fragment, otherwise we raise a compilation error. fragment_exists = fragment is not None fields_exist = vertex_fields or property_fields if fragment_exists and fields_exist: raise GraphQLCompilationError(u'Cannot compile GraphQL that has inline fragment and ' u'selected fields in the same selection. Please move the ' u'selected fields inside the inline fragment.') if location.field is not None: # we're at a property field # sanity-check: cannot have an inline fragment at a property field if fragment_exists: raise AssertionError(u'Found inline fragment at a property field: ' u'{} {}'.format(location, fragment)) # sanity-check: locations at properties don't have their own property locations if len(property_fields) > 0: raise AssertionError(u'Found property fields on a property field: ' u'{} {}'.format(location, property_fields)) # step 1: apply local filter, if any for filter_operation_info in filter_operations: filter_block = process_filter_directive(filter_operation_info, location, context) if isinstance(location, FoldScopeLocation) and location.field == COUNT_META_FIELD_NAME: # Filtering on the fold count field is only allowed at the innermost scope of a fold. set_fold_innermost_scope(context) # This Filter is going in the global operations section of the query, so it cannot # use LocalField expressions since there is no "local" location to use. # Rewrite it so that all references of data at a location instead use ContextFields. expected_field = expressions.LocalField(COUNT_META_FIELD_NAME) replacement_field = expressions.FoldedContextField(location, GraphQLInt) visitor_fn = expressions.make_replacement_visitor(expected_field, replacement_field) filter_block = filter_block.visit_and_update_expressions(visitor_fn) visitor_fn = expressions.make_type_replacement_visitor( expressions.ContextField, lambda context_field: expressions.GlobalContextField( context_field.location, context_field.field_type)) filter_block = filter_block.visit_and_update_expressions(visitor_fn) set_fold_count_filter(context) context['global_filters'].append(filter_block) else: basic_blocks.append(filter_block) if location.field is not None: # The location is at a property, compile the property data following P-steps. _compile_property_ast(schema, current_schema_type, ast, location, context, local_unique_directives) else: # The location is at a vertex. if fragment_exists: # Compile the fragment data following F-steps. # N.B.: Note that the "fragment" variable is the fragment's AST. Since we've asserted # that the fragment is the only part of the selection set at the current AST node, # we pass the "fragment" in the AST parameter of the _compile_fragment_ast() # function, rather than the current AST node as in the other compilation steps. basic_blocks.extend( _compile_fragment_ast(schema, current_schema_type, fragment, location, context)) else: # Compile the vertex data following V-steps. basic_blocks.extend( _compile_vertex_ast(schema, current_schema_type, ast, location, context, local_unique_directives, fields)) return basic_blocks
[ "def", "_compile_ast_node_to_ir", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ")", ":", "basic_blocks", "=", "[", "]", "# step 0: preprocessing", "local_unique_directives", "=", "get_unique_directives", "(", "ast", ")", "fields", "=", "_get_fields", "(", "ast", ")", "vertex_fields", ",", "property_fields", "=", "fields", "fragment", "=", "_get_inline_fragment", "(", "ast", ")", "filter_operations", "=", "get_local_filter_directives", "(", "ast", ",", "current_schema_type", ",", "vertex_fields", ")", "# We don't support type coercion while at the same time selecting fields.", "# Either there are no fields, or there is no fragment, otherwise we raise a compilation error.", "fragment_exists", "=", "fragment", "is", "not", "None", "fields_exist", "=", "vertex_fields", "or", "property_fields", "if", "fragment_exists", "and", "fields_exist", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot compile GraphQL that has inline fragment and '", "u'selected fields in the same selection. Please move the '", "u'selected fields inside the inline fragment.'", ")", "if", "location", ".", "field", "is", "not", "None", ":", "# we're at a property field", "# sanity-check: cannot have an inline fragment at a property field", "if", "fragment_exists", ":", "raise", "AssertionError", "(", "u'Found inline fragment at a property field: '", "u'{} {}'", ".", "format", "(", "location", ",", "fragment", ")", ")", "# sanity-check: locations at properties don't have their own property locations", "if", "len", "(", "property_fields", ")", ">", "0", ":", "raise", "AssertionError", "(", "u'Found property fields on a property field: '", "u'{} {}'", ".", "format", "(", "location", ",", "property_fields", ")", ")", "# step 1: apply local filter, if any", "for", "filter_operation_info", "in", "filter_operations", ":", "filter_block", "=", "process_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ")", "if", "isinstance", "(", "location", ",", "FoldScopeLocation", ")", "and", "location", ".", "field", "==", "COUNT_META_FIELD_NAME", ":", "# Filtering on the fold count field is only allowed at the innermost scope of a fold.", "set_fold_innermost_scope", "(", "context", ")", "# This Filter is going in the global operations section of the query, so it cannot", "# use LocalField expressions since there is no \"local\" location to use.", "# Rewrite it so that all references of data at a location instead use ContextFields.", "expected_field", "=", "expressions", ".", "LocalField", "(", "COUNT_META_FIELD_NAME", ")", "replacement_field", "=", "expressions", ".", "FoldedContextField", "(", "location", ",", "GraphQLInt", ")", "visitor_fn", "=", "expressions", ".", "make_replacement_visitor", "(", "expected_field", ",", "replacement_field", ")", "filter_block", "=", "filter_block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "visitor_fn", "=", "expressions", ".", "make_type_replacement_visitor", "(", "expressions", ".", "ContextField", ",", "lambda", "context_field", ":", "expressions", ".", "GlobalContextField", "(", "context_field", ".", "location", ",", "context_field", ".", "field_type", ")", ")", "filter_block", "=", "filter_block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "set_fold_count_filter", "(", "context", ")", "context", "[", "'global_filters'", "]", ".", "append", "(", "filter_block", ")", "else", ":", "basic_blocks", ".", "append", "(", "filter_block", ")", "if", "location", ".", "field", "is", "not", 
"None", ":", "# The location is at a property, compile the property data following P-steps.", "_compile_property_ast", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ",", "local_unique_directives", ")", "else", ":", "# The location is at a vertex.", "if", "fragment_exists", ":", "# Compile the fragment data following F-steps.", "# N.B.: Note that the \"fragment\" variable is the fragment's AST. Since we've asserted", "# that the fragment is the only part of the selection set at the current AST node,", "# we pass the \"fragment\" in the AST parameter of the _compile_fragment_ast()", "# function, rather than the current AST node as in the other compilation steps.", "basic_blocks", ".", "extend", "(", "_compile_fragment_ast", "(", "schema", ",", "current_schema_type", ",", "fragment", ",", "location", ",", "context", ")", ")", "else", ":", "# Compile the vertex data following V-steps.", "basic_blocks", ".", "extend", "(", "_compile_vertex_ast", "(", "schema", ",", "current_schema_type", ",", "ast", ",", "location", ",", "context", ",", "local_unique_directives", ",", "fields", ")", ")", "return", "basic_blocks" ]
Compile the given GraphQL AST node into a list of basic blocks. Args: schema: GraphQL schema object, obtained from the graphql library current_schema_type: GraphQLType, the schema type at the current location ast: the current GraphQL AST node, obtained from the graphql library location: Location object representing the current location in the query context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: list of basic blocks corresponding to this GraphQL AST node
[ "Compile", "the", "given", "GraphQL", "AST", "node", "into", "a", "list", "of", "basic", "blocks", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L629-L720
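The control flow of _compile_ast_node_to_ir follows a fixed order: local filters first, then exactly one of the property (P), fragment (F) or vertex (V) compilation paths. A toy sketch of that dispatch, using a plain dict in place of the real AST and context:

def compile_node_sketch(node):
    blocks = list(node.get('filter_blocks', []))  # step 1: local filters
    if node.get('at_property_field'):
        blocks.append('P-steps')  # property directives such as @tag and @output
    elif node.get('inline_fragment'):
        blocks.append('F-steps')  # possible type coercion, then recurse
    else:
        blocks.append('V-steps')  # traversals into vertex fields
    return blocks

print(compile_node_sketch({'filter_blocks': ['Filter(...)'], 'inline_fragment': True}))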
246,791
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_validate_all_tags_are_used
def _validate_all_tags_are_used(metadata): """Ensure all tags are used in some filter.""" tag_names = set([tag_name for tag_name, _ in metadata.tags]) filter_arg_names = set() for location, _ in metadata.registered_locations: for filter_info in metadata.get_filter_infos(location): for filter_arg in filter_info.args: if is_tag_argument(filter_arg): filter_arg_names.add(get_directive_argument_name(filter_arg)) unused_tags = tag_names - filter_arg_names if unused_tags: raise GraphQLCompilationError(u'This GraphQL query contains @tag directives whose values ' u'are not used: {}. This is not allowed. Please either use ' u'them in a filter or remove them entirely.' .format(unused_tags))
python
def _validate_all_tags_are_used(metadata): tag_names = set([tag_name for tag_name, _ in metadata.tags]) filter_arg_names = set() for location, _ in metadata.registered_locations: for filter_info in metadata.get_filter_infos(location): for filter_arg in filter_info.args: if is_tag_argument(filter_arg): filter_arg_names.add(get_directive_argument_name(filter_arg)) unused_tags = tag_names - filter_arg_names if unused_tags: raise GraphQLCompilationError(u'This GraphQL query contains @tag directives whose values ' u'are not used: {}. This is not allowed. Please either use ' u'them in a filter or remove them entirely.' .format(unused_tags))
[ "def", "_validate_all_tags_are_used", "(", "metadata", ")", ":", "tag_names", "=", "set", "(", "[", "tag_name", "for", "tag_name", ",", "_", "in", "metadata", ".", "tags", "]", ")", "filter_arg_names", "=", "set", "(", ")", "for", "location", ",", "_", "in", "metadata", ".", "registered_locations", ":", "for", "filter_info", "in", "metadata", ".", "get_filter_infos", "(", "location", ")", ":", "for", "filter_arg", "in", "filter_info", ".", "args", ":", "if", "is_tag_argument", "(", "filter_arg", ")", ":", "filter_arg_names", ".", "add", "(", "get_directive_argument_name", "(", "filter_arg", ")", ")", "unused_tags", "=", "tag_names", "-", "filter_arg_names", "if", "unused_tags", ":", "raise", "GraphQLCompilationError", "(", "u'This GraphQL query contains @tag directives whose values '", "u'are not used: {}. This is not allowed. Please either use '", "u'them in a filter or remove them entirely.'", ".", "format", "(", "unused_tags", ")", ")" ]
Ensure all tags are used in some filter.
[ "Ensure", "all", "tags", "are", "used", "in", "some", "filter", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L723-L738
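A standalone demonstration of the unused-tag check in _validate_all_tags_are_used: the declared tag names minus the tag names referenced by filter arguments must be an empty set. The sets below are assumed inputs, not the library's real metadata table:

declared_tags = {'parent_name', 'color_tag'}
tags_consumed_by_filters = {'parent_name'}

unused_tags = declared_tags - tags_consumed_by_filters
if unused_tags:
    print(u'Query would be rejected; unused tags: {}'.format(unused_tags))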
246,792
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_compile_output_step
def _compile_output_step(outputs): """Construct the final ConstructResult basic block that defines the output format of the query. Args: outputs: dict, output name (string) -> output data dict, specifying the location from where to get the data, and whether the data is optional (and therefore may be missing); missing optional data is replaced with 'null' Returns: a ConstructResult basic block that constructs appropriate outputs for the query """ if not outputs: raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least ' u'one field with the @output directive.') output_fields = {} for output_name, output_context in six.iteritems(outputs): location = output_context['location'] optional = output_context['optional'] graphql_type = output_context['type'] expression = None existence_check = None # pylint: disable=redefined-variable-type if isinstance(location, FoldScopeLocation): if optional: raise AssertionError(u'Unreachable state reached, optional in fold: ' u'{}'.format(output_context)) if location.field == COUNT_META_FIELD_NAME: expression = expressions.FoldCountContextField(location) else: expression = expressions.FoldedContextField(location, graphql_type) else: expression = expressions.OutputContextField(location, graphql_type) if optional: existence_check = expressions.ContextFieldExistence(location.at_vertex()) if existence_check: expression = expressions.TernaryConditional( existence_check, expression, expressions.NullLiteral) # pylint: enable=redefined-variable-type output_fields[output_name] = expression return blocks.ConstructResult(output_fields)
python
def _compile_output_step(outputs): if not outputs: raise GraphQLCompilationError(u'No fields were selected for output! Please mark at least ' u'one field with the @output directive.') output_fields = {} for output_name, output_context in six.iteritems(outputs): location = output_context['location'] optional = output_context['optional'] graphql_type = output_context['type'] expression = None existence_check = None # pylint: disable=redefined-variable-type if isinstance(location, FoldScopeLocation): if optional: raise AssertionError(u'Unreachable state reached, optional in fold: ' u'{}'.format(output_context)) if location.field == COUNT_META_FIELD_NAME: expression = expressions.FoldCountContextField(location) else: expression = expressions.FoldedContextField(location, graphql_type) else: expression = expressions.OutputContextField(location, graphql_type) if optional: existence_check = expressions.ContextFieldExistence(location.at_vertex()) if existence_check: expression = expressions.TernaryConditional( existence_check, expression, expressions.NullLiteral) # pylint: enable=redefined-variable-type output_fields[output_name] = expression return blocks.ConstructResult(output_fields)
[ "def", "_compile_output_step", "(", "outputs", ")", ":", "if", "not", "outputs", ":", "raise", "GraphQLCompilationError", "(", "u'No fields were selected for output! Please mark at least '", "u'one field with the @output directive.'", ")", "output_fields", "=", "{", "}", "for", "output_name", ",", "output_context", "in", "six", ".", "iteritems", "(", "outputs", ")", ":", "location", "=", "output_context", "[", "'location'", "]", "optional", "=", "output_context", "[", "'optional'", "]", "graphql_type", "=", "output_context", "[", "'type'", "]", "expression", "=", "None", "existence_check", "=", "None", "# pylint: disable=redefined-variable-type", "if", "isinstance", "(", "location", ",", "FoldScopeLocation", ")", ":", "if", "optional", ":", "raise", "AssertionError", "(", "u'Unreachable state reached, optional in fold: '", "u'{}'", ".", "format", "(", "output_context", ")", ")", "if", "location", ".", "field", "==", "COUNT_META_FIELD_NAME", ":", "expression", "=", "expressions", ".", "FoldCountContextField", "(", "location", ")", "else", ":", "expression", "=", "expressions", ".", "FoldedContextField", "(", "location", ",", "graphql_type", ")", "else", ":", "expression", "=", "expressions", ".", "OutputContextField", "(", "location", ",", "graphql_type", ")", "if", "optional", ":", "existence_check", "=", "expressions", ".", "ContextFieldExistence", "(", "location", ".", "at_vertex", "(", ")", ")", "if", "existence_check", ":", "expression", "=", "expressions", ".", "TernaryConditional", "(", "existence_check", ",", "expression", ",", "expressions", ".", "NullLiteral", ")", "# pylint: enable=redefined-variable-type", "output_fields", "[", "output_name", "]", "=", "expression", "return", "blocks", ".", "ConstructResult", "(", "output_fields", ")" ]
Construct the final ConstructResult basic block that defines the output format of the query. Args: outputs: dict, output name (string) -> output data dict, specifying the location from where to get the data, and whether the data is optional (and therefore may be missing); missing optional data is replaced with 'null' Returns: a ConstructResult basic block that constructs appropriate outputs for the query
[ "Construct", "the", "final", "ConstructResult", "basic", "block", "that", "defines", "the", "output", "format", "of", "the", "query", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L866-L912
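A sketch of how _compile_output_step picks the output expression, with strings standing in for the compiler's expression classes (class names taken from the code above; for optional outputs, the ternary wrapper is what substitutes null for missing data):

def describe_output_expression(optional, in_fold, is_count_field):
    if in_fold:
        return 'FoldCountContextField' if is_count_field else 'FoldedContextField'
    expression = 'OutputContextField'
    if optional:
        expression = ('TernaryConditional(ContextFieldExistence, '
                      '{}, NullLiteral)'.format(expression))
    return expression

print(describe_output_expression(optional=True, in_fold=False, is_count_field=False))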
246,793
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
_validate_schema_and_ast
def _validate_schema_and_ast(schema, ast): """Validate the supplied graphql schema and ast. This method wraps around graphql-core's validation to enforce a stricter requirement of the schema -- all directives supported by the compiler must be declared by the schema, regardless of whether each directive is used in the query or not. Args: schema: GraphQL schema object, created using the GraphQL library ast: abstract syntax tree representation of a graphql query Returns: list containing schema and/or query validation errors """ core_graphql_errors = validate(schema, ast) # The following directives appear in the core-graphql library, but are not supported by the # graphql compiler. unsupported_default_directives = frozenset([ frozenset([ 'include', frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']), frozenset(['if']) ]), frozenset([ 'skip', frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']), frozenset(['if']) ]), frozenset([ 'deprecated', frozenset(['ENUM_VALUE', 'FIELD_DEFINITION']), frozenset(['reason']) ]) ]) # Directives expected by the graphql compiler. expected_directives = { frozenset([ directive.name, frozenset(directive.locations), frozenset(six.viewkeys(directive.args)) ]) for directive in DIRECTIVES } # Directives provided in the parsed graphql schema. actual_directives = { frozenset([ directive.name, frozenset(directive.locations), frozenset(six.viewkeys(directive.args)) ]) for directive in schema.get_directives() } # Directives missing from the actual directives provided. missing_directives = expected_directives - actual_directives if missing_directives: missing_message = (u'The following directives were missing from the ' u'provided schema: {}'.format(missing_directives)) core_graphql_errors.append(missing_message) # Directives that are not specified by the core graphql library. Note that Graphql-core # automatically injects default directives into the schema, regardless of whether # the schema supports said directives. Hence, while the directives contained in # unsupported_default_directives are incompatible with the graphql-compiler, we allow them to # be present in the parsed schema string. extra_directives = actual_directives - expected_directives - unsupported_default_directives if extra_directives: extra_message = (u'The following directives were supplied in the given schema, but are not ' u'supported by the GraphQL compiler: {}'.format(extra_directives)) core_graphql_errors.append(extra_message) return core_graphql_errors
python
def _validate_schema_and_ast(schema, ast): core_graphql_errors = validate(schema, ast) # The following directives appear in the core-graphql library, but are not supported by the # graphql compiler. unsupported_default_directives = frozenset([ frozenset([ 'include', frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']), frozenset(['if']) ]), frozenset([ 'skip', frozenset(['FIELD', 'FRAGMENT_SPREAD', 'INLINE_FRAGMENT']), frozenset(['if']) ]), frozenset([ 'deprecated', frozenset(['ENUM_VALUE', 'FIELD_DEFINITION']), frozenset(['reason']) ]) ]) # Directives expected by the graphql compiler. expected_directives = { frozenset([ directive.name, frozenset(directive.locations), frozenset(six.viewkeys(directive.args)) ]) for directive in DIRECTIVES } # Directives provided in the parsed graphql schema. actual_directives = { frozenset([ directive.name, frozenset(directive.locations), frozenset(six.viewkeys(directive.args)) ]) for directive in schema.get_directives() } # Directives missing from the actual directives provided. missing_directives = expected_directives - actual_directives if missing_directives: missing_message = (u'The following directives were missing from the ' u'provided schema: {}'.format(missing_directives)) core_graphql_errors.append(missing_message) # Directives that are not specified by the core graphql library. Note that Graphql-core # automatically injects default directives into the schema, regardless of whether # the schema supports said directives. Hence, while the directives contained in # unsupported_default_directives are incompatible with the graphql-compiler, we allow them to # be present in the parsed schema string. extra_directives = actual_directives - expected_directives - unsupported_default_directives if extra_directives: extra_message = (u'The following directives were supplied in the given schema, but are not ' u'supported by the GraphQL compiler: {}'.format(extra_directives)) core_graphql_errors.append(extra_message) return core_graphql_errors
[ "def", "_validate_schema_and_ast", "(", "schema", ",", "ast", ")", ":", "core_graphql_errors", "=", "validate", "(", "schema", ",", "ast", ")", "# The following directives appear in the core-graphql library, but are not supported by the", "# graphql compiler.", "unsupported_default_directives", "=", "frozenset", "(", "[", "frozenset", "(", "[", "'include'", ",", "frozenset", "(", "[", "'FIELD'", ",", "'FRAGMENT_SPREAD'", ",", "'INLINE_FRAGMENT'", "]", ")", ",", "frozenset", "(", "[", "'if'", "]", ")", "]", ")", ",", "frozenset", "(", "[", "'skip'", ",", "frozenset", "(", "[", "'FIELD'", ",", "'FRAGMENT_SPREAD'", ",", "'INLINE_FRAGMENT'", "]", ")", ",", "frozenset", "(", "[", "'if'", "]", ")", "]", ")", ",", "frozenset", "(", "[", "'deprecated'", ",", "frozenset", "(", "[", "'ENUM_VALUE'", ",", "'FIELD_DEFINITION'", "]", ")", ",", "frozenset", "(", "[", "'reason'", "]", ")", "]", ")", "]", ")", "# Directives expected by the graphql compiler.", "expected_directives", "=", "{", "frozenset", "(", "[", "directive", ".", "name", ",", "frozenset", "(", "directive", ".", "locations", ")", ",", "frozenset", "(", "six", ".", "viewkeys", "(", "directive", ".", "args", ")", ")", "]", ")", "for", "directive", "in", "DIRECTIVES", "}", "# Directives provided in the parsed graphql schema.", "actual_directives", "=", "{", "frozenset", "(", "[", "directive", ".", "name", ",", "frozenset", "(", "directive", ".", "locations", ")", ",", "frozenset", "(", "six", ".", "viewkeys", "(", "directive", ".", "args", ")", ")", "]", ")", "for", "directive", "in", "schema", ".", "get_directives", "(", ")", "}", "# Directives missing from the actual directives provided.", "missing_directives", "=", "expected_directives", "-", "actual_directives", "if", "missing_directives", ":", "missing_message", "=", "(", "u'The following directives were missing from the '", "u'provided schema: {}'", ".", "format", "(", "missing_directives", ")", ")", "core_graphql_errors", ".", "append", "(", "missing_message", ")", "# Directives that are not specified by the core graphql library. Note that Graphql-core", "# automatically injects default directives into the schema, regardless of whether", "# the schema supports said directives. Hence, while the directives contained in", "# unsupported_default_directives are incompatible with the graphql-compiler, we allow them to", "# be present in the parsed schema string.", "extra_directives", "=", "actual_directives", "-", "expected_directives", "-", "unsupported_default_directives", "if", "extra_directives", ":", "extra_message", "=", "(", "u'The following directives were supplied in the given schema, but are not '", "u'not supported by the GraphQL compiler: {}'", ".", "format", "(", "extra_directives", ")", ")", "core_graphql_errors", ".", "append", "(", "extra_message", ")", "return", "core_graphql_errors" ]
Validate the supplied graphql schema and ast. This method wraps around graphql-core's validation to enforce a stricter requirement of the schema -- all directives supported by the compiler must be declared by the schema, regardless of whether each directive is used in the query or not. Args: schema: GraphQL schema object, created using the GraphQL library ast: abstract syntax tree representation of a graphql query Returns: list containing schema and/or query validation errors
[ "Validate", "the", "supplied", "graphql", "schema", "and", "ast", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L922-L996
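The directive comparison in _validate_schema_and_ast reduces each directive to a hashable frozenset "fingerprint" of its name, locations and argument names, so whole collections can be diffed with set arithmetic. A sketch with plain values standing in for graphql-core directive objects:

def directive_fingerprint(name, locations, arg_names):
    return frozenset([name, frozenset(locations), frozenset(arg_names)])

expected = {directive_fingerprint('output', ['FIELD'], ['out_name'])}
actual = {directive_fingerprint('output', ['FIELD'], ['out_name']),
          directive_fingerprint('skip', ['FIELD'], ['if'])}

print('missing:', expected - actual)  # empty -> schema declares every expected directive
print('extra:', actual - expected)    # the @skip fingerprint, flagged unless whitelisted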
246,794
kensho-technologies/graphql-compiler
graphql_compiler/compiler/compiler_frontend.py
graphql_to_ir
def graphql_to_ir(schema, graphql_string, type_equivalence_hints=None): """Convert the given GraphQL string into compiler IR, using the given schema object. Args: schema: GraphQL schema object, created using the GraphQL library graphql_string: string containing the GraphQL to compile to compiler IR type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: IrAndMetadata named tuple, containing fields: - ir_blocks: a list of IR basic block objects - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type - output_metadata: a dict of output name (string) -> OutputMetadata object - query_metadata_table: a QueryMetadataTable object containing location metadata Raises flavors of GraphQLError in the following cases: - if the query is invalid GraphQL (GraphQLParsingError); - if the query doesn't match the schema (GraphQLValidationError); - if the query has more than one definition block (GraphQLValidationError); - if the query has more than one selection in the root object (GraphQLCompilationError); - if the query does not obey directive usage rules (GraphQLCompilationError); - if the query provides invalid / disallowed / wrong number of arguments for a directive (GraphQLCompilationError). In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError. """ graphql_string = _preprocess_graphql_string(graphql_string) try: ast = parse(graphql_string) except GraphQLSyntaxError as e: raise GraphQLParsingError(e) validation_errors = _validate_schema_and_ast(schema, ast) if validation_errors: raise GraphQLValidationError(u'String does not validate: {}'.format(validation_errors)) if len(ast.definitions) != 1: raise AssertionError(u'Unsupported graphql string with multiple definitions, should have ' u'been caught in validation: \n{}\n{}'.format(graphql_string, ast)) base_ast = ast.definitions[0] return _compile_root_ast_to_ir(schema, base_ast, type_equivalence_hints=type_equivalence_hints)
python
def graphql_to_ir(schema, graphql_string, type_equivalence_hints=None): graphql_string = _preprocess_graphql_string(graphql_string) try: ast = parse(graphql_string) except GraphQLSyntaxError as e: raise GraphQLParsingError(e) validation_errors = _validate_schema_and_ast(schema, ast) if validation_errors: raise GraphQLValidationError(u'String does not validate: {}'.format(validation_errors)) if len(ast.definitions) != 1: raise AssertionError(u'Unsupported graphql string with multiple definitions, should have ' u'been caught in validation: \n{}\n{}'.format(graphql_string, ast)) base_ast = ast.definitions[0] return _compile_root_ast_to_ir(schema, base_ast, type_equivalence_hints=type_equivalence_hints)
[ "def", "graphql_to_ir", "(", "schema", ",", "graphql_string", ",", "type_equivalence_hints", "=", "None", ")", ":", "graphql_string", "=", "_preprocess_graphql_string", "(", "graphql_string", ")", "try", ":", "ast", "=", "parse", "(", "graphql_string", ")", "except", "GraphQLSyntaxError", "as", "e", ":", "raise", "GraphQLParsingError", "(", "e", ")", "validation_errors", "=", "_validate_schema_and_ast", "(", "schema", ",", "ast", ")", "if", "validation_errors", ":", "raise", "GraphQLValidationError", "(", "u'String does not validate: {}'", ".", "format", "(", "validation_errors", ")", ")", "if", "len", "(", "ast", ".", "definitions", ")", "!=", "1", ":", "raise", "AssertionError", "(", "u'Unsupported graphql string with multiple definitions, should have '", "u'been caught in validation: \\n{}\\n{}'", ".", "format", "(", "graphql_string", ",", "ast", ")", ")", "base_ast", "=", "ast", ".", "definitions", "[", "0", "]", "return", "_compile_root_ast_to_ir", "(", "schema", ",", "base_ast", ",", "type_equivalence_hints", "=", "type_equivalence_hints", ")" ]
Convert the given GraphQL string into compiler IR, using the given schema object. Args: schema: GraphQL schema object, created using the GraphQL library graphql_string: string containing the GraphQL to compile to compiler IR type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. Used as a workaround for GraphQL's lack of support for inheritance across "types" (i.e. non-interfaces), as well as a workaround for Gremlin's total lack of inheritance-awareness. The key-value pairs in the dict specify that the "key" type is equivalent to the "value" type, i.e. that the GraphQL type or interface in the key is the most-derived common supertype of every GraphQL type in the "value" GraphQL union. Recursive expansion of type equivalence hints is not performed, and only type-level correctness of this argument is enforced. See README.md for more details on everything this parameter does. ***** Be very careful with this option, as bad input here will lead to incorrect output queries being generated. ***** Returns: IrAndMetadata named tuple, containing fields: - ir_blocks: a list of IR basic block objects - input_metadata: a dict of expected input parameters (string) -> inferred GraphQL type - output_metadata: a dict of output name (string) -> OutputMetadata object - query_metadata_table: a QueryMetadataTable object containing location metadata Raises flavors of GraphQLError in the following cases: - if the query is invalid GraphQL (GraphQLParsingError); - if the query doesn't match the schema (GraphQLValidationError); - if the query has more than one definition block (GraphQLValidationError); - if the query has more than one selection in the root object (GraphQLCompilationError); - if the query does not obey directive usage rules (GraphQLCompilationError); - if the query provides invalid / disallowed / wrong number of arguments for a directive (GraphQLCompilationError). In the case of implementation bugs, could also raise ValueError, TypeError, or AssertionError.
[ "Convert", "the", "given", "GraphQL", "string", "into", "compiler", "IR", "using", "the", "given", "schema", "object", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/compiler_frontend.py#L1003-L1059
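A hedged usage sketch for graphql_to_ir. The import path mirrors this record's 'path' field but may differ between package versions, and the schema object is assumed to be built elsewhere:

from graphql_compiler.compiler.compiler_frontend import graphql_to_ir

query = '''{
    Animal {
        name @output(out_name: "animal_name")
    }
}'''

ir_and_metadata = graphql_to_ir(schema, query)  # 'schema' assumed in scope
for block in ir_and_metadata.ir_blocks:
    print(block)
print(ir_and_metadata.output_metadata)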
246,795
kensho-technologies/graphql-compiler
graphql_compiler/debugging_utils.py
pretty_print_gremlin
def pretty_print_gremlin(gremlin): """Return a human-readable representation of a gremlin command string.""" gremlin = remove_custom_formatting(gremlin) too_many_parts = re.split(r'([)}]|scatter)[ ]?\.', gremlin) # Put the ) and } back on. parts = [ too_many_parts[i] + too_many_parts[i + 1] for i in six.moves.xrange(0, len(too_many_parts) - 1, 2) ] parts.append(too_many_parts[-1]) # Put the . back on. for i in six.moves.xrange(1, len(parts)): parts[i] = '.' + parts[i] indentation = 0 indentation_increment = 4 output = [] for current_part in parts: if any([current_part.startswith('.out'), current_part.startswith('.in'), current_part.startswith('.ifThenElse')]): indentation += indentation_increment elif current_part.startswith('.back') or current_part.startswith('.optional'): indentation -= indentation_increment if indentation < 0: raise AssertionError(u'Indentation became negative: {}'.format(indentation)) output.append((' ' * indentation) + current_part) return '\n'.join(output).strip()
python
def pretty_print_gremlin(gremlin): gremlin = remove_custom_formatting(gremlin) too_many_parts = re.split(r'([)}]|scatter)[ ]?\.', gremlin) # Put the ) and } back on. parts = [ too_many_parts[i] + too_many_parts[i + 1] for i in six.moves.xrange(0, len(too_many_parts) - 1, 2) ] parts.append(too_many_parts[-1]) # Put the . back on. for i in six.moves.xrange(1, len(parts)): parts[i] = '.' + parts[i] indentation = 0 indentation_increment = 4 output = [] for current_part in parts: if any([current_part.startswith('.out'), current_part.startswith('.in'), current_part.startswith('.ifThenElse')]): indentation += indentation_increment elif current_part.startswith('.back') or current_part.startswith('.optional'): indentation -= indentation_increment if indentation < 0: raise AssertionError(u'Indentation became negative: {}'.format(indentation)) output.append((' ' * indentation) + current_part) return '\n'.join(output).strip()
[ "def", "pretty_print_gremlin", "(", "gremlin", ")", ":", "gremlin", "=", "remove_custom_formatting", "(", "gremlin", ")", "too_many_parts", "=", "re", ".", "split", "(", "r'([)}]|scatter)[ ]?\\.'", ",", "gremlin", ")", "# Put the ) and } back on.", "parts", "=", "[", "too_many_parts", "[", "i", "]", "+", "too_many_parts", "[", "i", "+", "1", "]", "for", "i", "in", "six", ".", "moves", ".", "xrange", "(", "0", ",", "len", "(", "too_many_parts", ")", "-", "1", ",", "2", ")", "]", "parts", ".", "append", "(", "too_many_parts", "[", "-", "1", "]", ")", "# Put the . back on.", "for", "i", "in", "six", ".", "moves", ".", "xrange", "(", "1", ",", "len", "(", "parts", ")", ")", ":", "parts", "[", "i", "]", "=", "'.'", "+", "parts", "[", "i", "]", "indentation", "=", "0", "indentation_increment", "=", "4", "output", "=", "[", "]", "for", "current_part", "in", "parts", ":", "if", "any", "(", "[", "current_part", ".", "startswith", "(", "'.out'", ")", ",", "current_part", ".", "startswith", "(", "'.in'", ")", ",", "current_part", ".", "startswith", "(", "'.ifThenElse'", ")", "]", ")", ":", "indentation", "+=", "indentation_increment", "elif", "current_part", ".", "startswith", "(", "'.back'", ")", "or", "current_part", ".", "startswith", "(", "'.optional'", ")", ":", "indentation", "-=", "indentation_increment", "if", "indentation", "<", "0", ":", "raise", "AssertionError", "(", "u'Indentation became negative: {}'", ".", "format", "(", "indentation", ")", ")", "output", ".", "append", "(", "(", "' '", "*", "indentation", ")", "+", "current_part", ")", "return", "'\\n'", ".", "join", "(", "output", ")", ".", "strip", "(", ")" ]
Return a human-readable representation of a gremlin command string.
[ "Return", "a", "human", "-", "readable", "representation", "of", "a", "gremlin", "command", "string", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/debugging_utils.py#L13-L44
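A small demo of pretty_print_gremlin (hedged: the import path follows this record's 'path' field, and the gremlin string is hand-written rather than compiler output). Steps beginning with .out indent by four spaces and the matching .back dedents again:

from graphql_compiler.debugging_utils import pretty_print_gremlin

gremlin = ("g.V('@class', 'Animal').as('Animal___1')"
           ".out('Animal_ParentOf').as('Animal__out_Animal_ParentOf___1')"
           ".back('Animal___1')")
print(pretty_print_gremlin(gremlin))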
246,796
kensho-technologies/graphql-compiler
graphql_compiler/debugging_utils.py
pretty_print_match
def pretty_print_match(match, parameterized=True): """Return a human-readable representation of a parameterized MATCH query string.""" left_curly = '{{' if parameterized else '{' right_curly = '}}' if parameterized else '}' match = remove_custom_formatting(match) parts = re.split('({}|{})'.format(left_curly, right_curly), match) inside_braces = False indent_size = 4 indent = ' ' * indent_size output = [parts[0]] for current_index, current_part in enumerate(parts[1:]): if current_part == left_curly: if inside_braces: raise AssertionError(u'Found open-braces pair while already inside braces: ' u'{} {} {}'.format(current_index, parts, match)) inside_braces = True output.append(current_part + '\n') elif current_part == right_curly: if not inside_braces: raise AssertionError(u'Found close-braces pair while not inside braces: ' u'{} {} {}'.format(current_index, parts, match)) inside_braces = False output.append(current_part) else: if not inside_braces: stripped_part = current_part.lstrip() if stripped_part.startswith('.'): # Strip whitespace before traversal steps. output.append(stripped_part) else: # Do not strip whitespace before e.g. the RETURN keyword. output.append(current_part) else: # Split out the keywords, initially getting rid of commas. separate_keywords = re.split(', ([a-z]+:)', current_part) # The first item in the separated list is the full first "keyword: value" pair. # For every subsequent item, the keyword and value are separated; join them # back together, outputting the comma, newline and indentation before them. output.append(indent + separate_keywords[0].lstrip()) for i in six.moves.xrange(1, len(separate_keywords) - 1, 2): output.append(',\n{indent}{keyword} {value}'.format( keyword=separate_keywords[i].strip(), value=separate_keywords[i + 1].strip(), indent=indent)) output.append('\n') return ''.join(output).strip()
python
def pretty_print_match(match, parameterized=True): left_curly = '{{' if parameterized else '{' right_curly = '}}' if parameterized else '}' match = remove_custom_formatting(match) parts = re.split('({}|{})'.format(left_curly, right_curly), match) inside_braces = False indent_size = 4 indent = ' ' * indent_size output = [parts[0]] for current_index, current_part in enumerate(parts[1:]): if current_part == left_curly: if inside_braces: raise AssertionError(u'Found open-braces pair while already inside braces: ' u'{} {} {}'.format(current_index, parts, match)) inside_braces = True output.append(current_part + '\n') elif current_part == right_curly: if not inside_braces: raise AssertionError(u'Found close-braces pair while not inside braces: ' u'{} {} {}'.format(current_index, parts, match)) inside_braces = False output.append(current_part) else: if not inside_braces: stripped_part = current_part.lstrip() if stripped_part.startswith('.'): # Strip whitespace before traversal steps. output.append(stripped_part) else: # Do not strip whitespace before e.g. the RETURN keyword. output.append(current_part) else: # Split out the keywords, initially getting rid of commas. separate_keywords = re.split(', ([a-z]+:)', current_part) # The first item in the separated list is the full first "keyword: value" pair. # For every subsequent item, the keyword and value are separated; join them # back together, outputting the comma, newline and indentation before them. output.append(indent + separate_keywords[0].lstrip()) for i in six.moves.xrange(1, len(separate_keywords) - 1, 2): output.append(',\n{indent}{keyword} {value}'.format( keyword=separate_keywords[i].strip(), value=separate_keywords[i + 1].strip(), indent=indent)) output.append('\n') return ''.join(output).strip()
[ "def", "pretty_print_match", "(", "match", ",", "parameterized", "=", "True", ")", ":", "left_curly", "=", "'{{'", "if", "parameterized", "else", "'{'", "right_curly", "=", "'}}'", "if", "parameterized", "else", "'}'", "match", "=", "remove_custom_formatting", "(", "match", ")", "parts", "=", "re", ".", "split", "(", "'({}|{})'", ".", "format", "(", "left_curly", ",", "right_curly", ")", ",", "match", ")", "inside_braces", "=", "False", "indent_size", "=", "4", "indent", "=", "' '", "*", "indent_size", "output", "=", "[", "parts", "[", "0", "]", "]", "for", "current_index", ",", "current_part", "in", "enumerate", "(", "parts", "[", "1", ":", "]", ")", ":", "if", "current_part", "==", "left_curly", ":", "if", "inside_braces", ":", "raise", "AssertionError", "(", "u'Found open-braces pair while already inside braces: '", "u'{} {} {}'", ".", "format", "(", "current_index", ",", "parts", ",", "match", ")", ")", "inside_braces", "=", "True", "output", ".", "append", "(", "current_part", "+", "'\\n'", ")", "elif", "current_part", "==", "right_curly", ":", "if", "not", "inside_braces", ":", "raise", "AssertionError", "(", "u'Found close-braces pair while not inside braces: '", "u'{} {} {}'", ".", "format", "(", "current_index", ",", "parts", ",", "match", ")", ")", "inside_braces", "=", "False", "output", ".", "append", "(", "current_part", ")", "else", ":", "if", "not", "inside_braces", ":", "stripped_part", "=", "current_part", ".", "lstrip", "(", ")", "if", "stripped_part", ".", "startswith", "(", "'.'", ")", ":", "# Strip whitespace before traversal steps.", "output", ".", "append", "(", "stripped_part", ")", "else", ":", "# Do not strip whitespace before e.g. the RETURN keyword.", "output", ".", "append", "(", "current_part", ")", "else", ":", "# Split out the keywords, initially getting rid of commas.", "separate_keywords", "=", "re", ".", "split", "(", "', ([a-z]+:)'", ",", "current_part", ")", "# The first item in the separated list is the full first \"keyword: value\" pair.", "# For every subsequent item, the keyword and value are separated; join them", "# back together, outputting the comma, newline and indentation before them.", "output", ".", "append", "(", "indent", "+", "separate_keywords", "[", "0", "]", ".", "lstrip", "(", ")", ")", "for", "i", "in", "six", ".", "moves", ".", "xrange", "(", "1", ",", "len", "(", "separate_keywords", ")", "-", "1", ",", "2", ")", ":", "output", ".", "append", "(", "',\\n{indent}{keyword} {value}'", ".", "format", "(", "keyword", "=", "separate_keywords", "[", "i", "]", ".", "strip", "(", ")", ",", "value", "=", "separate_keywords", "[", "i", "+", "1", "]", ".", "strip", "(", ")", ",", "indent", "=", "indent", ")", ")", "output", ".", "append", "(", "'\\n'", ")", "return", "''", ".", "join", "(", "output", ")", ".", "strip", "(", ")" ]
Return a human-readable representation of a parameterized MATCH query string.
[ "Return", "a", "human", "-", "readable", "representation", "of", "a", "parameterized", "MATCH", "query", "string", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/debugging_utils.py#L47-L96
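A minimal usage sketch for pretty_print_match, assuming the function is importable from graphql_compiler.debugging_utils (as the url above suggests) and that remove_custom_formatting leaves a plain query string unchanged; the sample MATCH text below is invented for illustration and is not taken from the compiler's real output.

# Hypothetical usage sketch; the sample query is illustrative only.
from graphql_compiler.debugging_utils import pretty_print_match

match_query = ('SELECT Animal___1.name AS `animal_name` FROM ( MATCH '
               '{{ class: Animal, as: Animal___1 }} RETURN $matches)')
print(pretty_print_match(match_query, parameterized=True))
# Each open-brace pair starts a new line, and every "keyword: value" pair
# inside the braces is placed on its own indented, comma-separated line.

Since this query uses the doubled {{ }} escaping of a parameterized template, parameterized=True is the right setting; for an already-substituted query with single braces, parameterized=False would be passed instead.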
246,797
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/representations.py
represent_float_as_str
def represent_float_as_str(value): """Represent a float as a string without losing precision.""" # In Python 2, calling str() on a float object loses precision: # # In [1]: 1.23456789012345678 # Out[1]: 1.2345678901234567 # # In [2]: 1.2345678901234567 # Out[2]: 1.2345678901234567 # # In [3]: str(1.2345678901234567) # Out[3]: '1.23456789012' # # The best way to ensure precision is not lost is to convert to string via Decimal: # https://github.com/mogui/pyorient/pull/226/files if not isinstance(value, float): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: ' u'{}'.format(value)) with decimal.localcontext() as ctx: ctx.prec = 20 # floats are max 80-bits wide = 20 significant digits return u'{:f}'.format(decimal.Decimal(value))
python
def represent_float_as_str(value): # In Python 2, calling str() on a float object loses precision: # # In [1]: 1.23456789012345678 # Out[1]: 1.2345678901234567 # # In [2]: 1.2345678901234567 # Out[2]: 1.2345678901234567 # # In [3]: str(1.2345678901234567) # Out[3]: '1.23456789012' # # The best way to ensure precision is not lost is to convert to string via Decimal: # https://github.com/mogui/pyorient/pull/226/files if not isinstance(value, float): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-float as a float: ' u'{}'.format(value)) with decimal.localcontext() as ctx: ctx.prec = 20 # floats are max 80-bits wide = 20 significant digits return u'{:f}'.format(decimal.Decimal(value))
[ "def", "represent_float_as_str", "(", "value", ")", ":", "# In Python 2, calling str() on a float object loses precision:", "#", "# In [1]: 1.23456789012345678", "# Out[1]: 1.2345678901234567", "#", "# In [2]: 1.2345678901234567", "# Out[2]: 1.2345678901234567", "#", "# In [3]: str(1.2345678901234567)", "# Out[3]: '1.23456789012'", "#", "# The best way to ensure precision is not lost is to convert to string via Decimal:", "# https://github.com/mogui/pyorient/pull/226/files", "if", "not", "isinstance", "(", "value", ",", "float", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to represent a non-float as a float: '", "u'{}'", ".", "format", "(", "value", ")", ")", "with", "decimal", ".", "localcontext", "(", ")", "as", "ctx", ":", "ctx", ".", "prec", "=", "20", "# floats are max 80-bits wide = 20 significant digits", "return", "u'{:f}'", ".", "format", "(", "decimal", ".", "Decimal", "(", "value", ")", ")" ]
Represent a float as a string without losing precision.
[ "Represent", "a", "float", "as", "a", "string", "without", "losing", "precision", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/representations.py#L8-L29
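A short behavior sketch for represent_float_as_str, assuming the import path shown in the record above; 1.25 is used because it is exactly representable in binary floating point, so the expected output is unambiguous.

# Hypothetical usage sketch; import path taken from the record above.
from graphql_compiler.query_formatting.representations import represent_float_as_str

print(represent_float_as_str(1.25))   # -> '1.25' (exactly representable, no expansion)
try:
    represent_float_as_str('1.25')    # non-float input is rejected
except Exception as error:            # GraphQLInvalidArgumentError in the real module
    print(type(error).__name__, error)

Worth noting: decimal.Decimal(value) built from a float is exact, and the constructor ignores the local context precision, so a value like 0.1 prints its full binary expansion rather than '0.1'. Precision is preserved either way, which is the property the function promises.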
246,798
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/representations.py
coerce_to_decimal
def coerce_to_decimal(value): """Attempt to coerce the value to a Decimal, or raise an error if unable to do so.""" if isinstance(value, decimal.Decimal): return value else: try: return decimal.Decimal(value) except decimal.InvalidOperation as e: raise GraphQLInvalidArgumentError(e)
python
def coerce_to_decimal(value): if isinstance(value, decimal.Decimal): return value else: try: return decimal.Decimal(value) except decimal.InvalidOperation as e: raise GraphQLInvalidArgumentError(e)
[ "def", "coerce_to_decimal", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "decimal", ".", "Decimal", ")", ":", "return", "value", "else", ":", "try", ":", "return", "decimal", ".", "Decimal", "(", "value", ")", "except", "decimal", ".", "InvalidOperation", "as", "e", ":", "raise", "GraphQLInvalidArgumentError", "(", "e", ")" ]
Attempt to coerce the value to a Decimal, or raise an error if unable to do so.
[ "Attempt", "to", "coerce", "the", "value", "to", "a", "Decimal", "or", "raise", "an", "error", "if", "unable", "to", "do", "so", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/representations.py#L41-L49
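A brief sketch of coerce_to_decimal in use, again assuming the import path from the record above; the inputs are arbitrary examples.

# Hypothetical usage sketch.
import decimal

from graphql_compiler.query_formatting.representations import coerce_to_decimal

print(coerce_to_decimal('3.14'))       # -> Decimal('3.14')
d = decimal.Decimal('1.0')
assert coerce_to_decimal(d) is d       # existing Decimals pass through unchanged
try:
    coerce_to_decimal('not-a-number')  # invalid strings raise decimal.InvalidOperation...
except Exception as error:             # ...re-raised as GraphQLInvalidArgumentError
    print(type(error).__name__, error)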
246,799
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
make_replacement_visitor
def make_replacement_visitor(find_expression, replace_expression): """Return a visitor function that replaces every instance of one expression with another one.""" def visitor_fn(expression): """Return the replacement if this expression matches the expression we're looking for.""" if expression == find_expression: return replace_expression else: return expression return visitor_fn
python
def make_replacement_visitor(find_expression, replace_expression): def visitor_fn(expression): """Return the replacement if this expression matches the expression we're looking for.""" if expression == find_expression: return replace_expression else: return expression return visitor_fn
[ "def", "make_replacement_visitor", "(", "find_expression", ",", "replace_expression", ")", ":", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Return the replacement if this expression matches the expression we're looking for.\"\"\"", "if", "expression", "==", "find_expression", ":", "return", "replace_expression", "else", ":", "return", "expression", "return", "visitor_fn" ]
Return a visitor function that replaces every instance of one expression with another one.
[ "Return", "a", "visitor", "function", "that", "replaces", "every", "instance", "of", "one", "expression", "with", "another", "one", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L29-L38
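Finally, a small sketch of make_replacement_visitor. The comparison is plain equality, so ordinary strings are enough to illustrate the behavior, though in the compiler the arguments would be Expression objects; the traversal mentioned in the last comment is an assumption, not shown in the record above.

# Hypothetical usage sketch; strings stand in for Expression objects.
from graphql_compiler.compiler.expressions import make_replacement_visitor

visitor_fn = make_replacement_visitor('old_expr', 'new_expr')
assert visitor_fn('old_expr') == 'new_expr'    # the matching expression is swapped out
assert visitor_fn('unrelated') == 'unrelated'  # everything else passes through untouched
# In practice the visitor would be handed to an expression-tree traversal
# (e.g. a visit-and-update pass) that applies it at every node.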