id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
sequence
docstring
stringlengths
3
17.3k
docstring_tokens
sequence
sha
stringlengths
40
40
url
stringlengths
87
242
246,800
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
make_type_replacement_visitor
def make_type_replacement_visitor(find_types, replacement_func):
    """Return a visitor function that replaces expressions of a given type with new expressions."""
    def visitor_fn(expression):
        """Return a replacement expression if the original expression is of the correct type."""
        matched = isinstance(expression, find_types)
        return replacement_func(expression) if matched else expression

    return visitor_fn
python
def make_type_replacement_visitor(find_types, replacement_func): def visitor_fn(expression): """Return a replacement expression if the original expression is of the correct type.""" if isinstance(expression, find_types): return replacement_func(expression) else: return expression return visitor_fn
[ "def", "make_type_replacement_visitor", "(", "find_types", ",", "replacement_func", ")", ":", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Return a replacement expression if the original expression is of the correct type.\"\"\"", "if", "isinstance", "(", "expression", ",", "find_types", ")", ":", "return", "replacement_func", "(", "expression", ")", "else", ":", "return", "expression", "return", "visitor_fn" ]
Return a visitor function that replaces expressions of a given type with new expressions.
[ "Return", "a", "visitor", "function", "that", "replaces", "expressions", "of", "a", "given", "type", "with", "new", "expressions", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L41-L50
246,801
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
_validate_operator_name
def _validate_operator_name(operator, supported_operators):
    """Ensure the named operator is valid and supported."""
    # The operator must be a unicode string before any membership test is meaningful.
    is_unicode = isinstance(operator, six.text_type)
    if not is_unicode:
        operator_type_name = type(operator).__name__
        raise TypeError(u'Expected operator as unicode string, got: {} {}'.format(
            operator_type_name, operator))

    # Reject anything outside the caller-provided set of supported operators.
    if operator not in supported_operators:
        raise GraphQLCompilationError(u'Unrecognized operator: {}'.format(operator))
python
def _validate_operator_name(operator, supported_operators): if not isinstance(operator, six.text_type): raise TypeError(u'Expected operator as unicode string, got: {} {}'.format( type(operator).__name__, operator)) if operator not in supported_operators: raise GraphQLCompilationError(u'Unrecognized operator: {}'.format(operator))
[ "def", "_validate_operator_name", "(", "operator", ",", "supported_operators", ")", ":", "if", "not", "isinstance", "(", "operator", ",", "six", ".", "text_type", ")", ":", "raise", "TypeError", "(", "u'Expected operator as unicode string, got: {} {}'", ".", "format", "(", "type", "(", "operator", ")", ".", "__name__", ",", "operator", ")", ")", "if", "operator", "not", "in", "supported_operators", ":", "raise", "GraphQLCompilationError", "(", "u'Unrecognized operator: {}'", ".", "format", "(", "operator", ")", ")" ]
Ensure the named operator is valid and supported.
[ "Ensure", "the", "named", "operator", "is", "valid", "and", "supported", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L665-L672
246,802
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
Literal.validate
def validate(self):
    """Validate that the Literal is correctly representable."""
    value = self.value

    # Booleans and None are always representable and supported.
    if value is None or value is True or value is False:
        return

    # Strings are representable once checked for safety.
    if isinstance(value, six.string_types):
        validate_safe_string(value)
        return

    # Integers are always representable and supported.
    if isinstance(value, int):
        return

    # Lists are representable if empty, or if every element is a safe string.
    if isinstance(value, list):
        if len(value) > 0:
            for element in value:
                validate_safe_string(element)
        return

    raise GraphQLCompilationError(u'Cannot represent literal: {}'.format(value))
python
def validate(self): # Literals representing boolean values or None are correctly representable and supported. if self.value is None or self.value is True or self.value is False: return # Literal safe strings are correctly representable and supported. if isinstance(self.value, six.string_types): validate_safe_string(self.value) return # Literal ints are correctly representable and supported. if isinstance(self.value, int): return # Literal empty lists, and non-empty lists of safe strings, are # correctly representable and supported. if isinstance(self.value, list): if len(self.value) > 0: for x in self.value: validate_safe_string(x) return raise GraphQLCompilationError(u'Cannot represent literal: {}'.format(self.value))
[ "def", "validate", "(", "self", ")", ":", "# Literals representing boolean values or None are correctly representable and supported.", "if", "self", ".", "value", "is", "None", "or", "self", ".", "value", "is", "True", "or", "self", ".", "value", "is", "False", ":", "return", "# Literal safe strings are correctly representable and supported.", "if", "isinstance", "(", "self", ".", "value", ",", "six", ".", "string_types", ")", ":", "validate_safe_string", "(", "self", ".", "value", ")", "return", "# Literal ints are correctly representable and supported.", "if", "isinstance", "(", "self", ".", "value", ",", "int", ")", ":", "return", "# Literal empty lists, and non-empty lists of safe strings, are", "# correctly representable and supported.", "if", "isinstance", "(", "self", ".", "value", ",", "list", ")", ":", "if", "len", "(", "self", ".", "value", ")", ">", "0", ":", "for", "x", "in", "self", ".", "value", ":", "validate_safe_string", "(", "x", ")", "return", "raise", "GraphQLCompilationError", "(", "u'Cannot represent literal: {}'", ".", "format", "(", "self", ".", "value", ")", ")" ]
Validate that the Literal is correctly representable.
[ "Validate", "that", "the", "Literal", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L72-L95
246,803
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
Variable.validate
def validate(self):
    """Validate that the Variable is correctly representable."""
    # Get the first letter, or empty string if it doesn't exist.
    name = self.variable_name
    if not name.startswith(u'$'):
        raise GraphQLCompilationError(u'Expected variable name to start with $, but was: '
                                      u'{}'.format(name))

    if name in RESERVED_MATCH_KEYWORDS:
        raise GraphQLCompilationError(u'Cannot use reserved MATCH keyword {} as variable '
                                      u'name!'.format(name))

    # The leading dollar sign is not part of the name being checked for safety.
    validate_safe_string(name[1:])

    inferred_type = self.inferred_type
    if not is_graphql_type(inferred_type):
        raise ValueError(u'Invalid value of "inferred_type": {}'.format(inferred_type))

    if isinstance(inferred_type, GraphQLNonNull):
        raise ValueError(u'GraphQL non-null types are not supported as "inferred_type": '
                         u'{}'.format(inferred_type))

    if isinstance(inferred_type, GraphQLList):
        inner_type = strip_non_null_from_type(inferred_type.of_type)
        if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
            # This is a compilation error rather than a ValueError as
            # it can be caused by an invalid GraphQL query on an otherwise valid schema.
            # In other words, it's an error in writing the GraphQL query, rather than
            # a programming error within the library.
            raise GraphQLCompilationError(
                u'Lists of Date or DateTime cannot currently be represented as '
                u'Variable objects: {}'.format(inferred_type))
python
def validate(self): # Get the first letter, or empty string if it doesn't exist. if not self.variable_name.startswith(u'$'): raise GraphQLCompilationError(u'Expected variable name to start with $, but was: ' u'{}'.format(self.variable_name)) if self.variable_name in RESERVED_MATCH_KEYWORDS: raise GraphQLCompilationError(u'Cannot use reserved MATCH keyword {} as variable ' u'name!'.format(self.variable_name)) validate_safe_string(self.variable_name[1:]) if not is_graphql_type(self.inferred_type): raise ValueError(u'Invalid value of "inferred_type": {}'.format(self.inferred_type)) if isinstance(self.inferred_type, GraphQLNonNull): raise ValueError(u'GraphQL non-null types are not supported as "inferred_type": ' u'{}'.format(self.inferred_type)) if isinstance(self.inferred_type, GraphQLList): inner_type = strip_non_null_from_type(self.inferred_type.of_type) if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type): # This is a compilation error rather than a ValueError as # it can be caused by an invalid GraphQL query on an otherwise valid schema. # In other words, it's an error in writing the GraphQL query, rather than # a programming error within the library. raise GraphQLCompilationError( u'Lists of Date or DateTime cannot currently be represented as ' u'Variable objects: {}'.format(self.inferred_type))
[ "def", "validate", "(", "self", ")", ":", "# Get the first letter, or empty string if it doesn't exist.", "if", "not", "self", ".", "variable_name", ".", "startswith", "(", "u'$'", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Expected variable name to start with $, but was: '", "u'{}'", ".", "format", "(", "self", ".", "variable_name", ")", ")", "if", "self", ".", "variable_name", "in", "RESERVED_MATCH_KEYWORDS", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot use reserved MATCH keyword {} as variable '", "u'name!'", ".", "format", "(", "self", ".", "variable_name", ")", ")", "validate_safe_string", "(", "self", ".", "variable_name", "[", "1", ":", "]", ")", "if", "not", "is_graphql_type", "(", "self", ".", "inferred_type", ")", ":", "raise", "ValueError", "(", "u'Invalid value of \"inferred_type\": {}'", ".", "format", "(", "self", ".", "inferred_type", ")", ")", "if", "isinstance", "(", "self", ".", "inferred_type", ",", "GraphQLNonNull", ")", ":", "raise", "ValueError", "(", "u'GraphQL non-null types are not supported as \"inferred_type\": '", "u'{}'", ".", "format", "(", "self", ".", "inferred_type", ")", ")", "if", "isinstance", "(", "self", ".", "inferred_type", ",", "GraphQLList", ")", ":", "inner_type", "=", "strip_non_null_from_type", "(", "self", ".", "inferred_type", ".", "of_type", ")", "if", "GraphQLDate", ".", "is_same_type", "(", "inner_type", ")", "or", "GraphQLDateTime", ".", "is_same_type", "(", "inner_type", ")", ":", "# This is a compilation error rather than a ValueError as", "# it can be caused by an invalid GraphQL query on an otherwise valid schema.", "# In other words, it's an error in writing the GraphQL query, rather than", "# a programming error within the library.", "raise", "GraphQLCompilationError", "(", "u'Lists of Date or DateTime cannot currently be represented as '", "u'Variable objects: {}'", ".", "format", "(", "self", ".", "inferred_type", ")", ")" ]
Validate that the Variable is correctly representable.
[ "Validate", "that", "the", "Variable", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L154-L183
246,804
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
Variable.to_match
def to_match(self):
    """Return a unicode object with the MATCH representation of this Variable."""
    self.validate()

    # We don't want the dollar sign as part of the variable name.
    variable_with_no_dollar_sign = self.variable_name[1:]
    match_variable_name = '{%s}' % (six.text_type(variable_with_no_dollar_sign),)

    # We can't directly pass a Date or DateTime object, so we have to pass it as a string
    # and then parse it inline. For date format parameter meanings, see:
    # http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html
    # For the semantics of the date() OrientDB SQL function, see:
    # http://orientdb.com/docs/last/SQL-Functions.html#date
    inferred_type = self.inferred_type
    if GraphQLDate.is_same_type(inferred_type):
        date_format = STANDARD_DATE_FORMAT
    elif GraphQLDateTime.is_same_type(inferred_type):
        date_format = STANDARD_DATETIME_FORMAT
    else:
        date_format = None

    if date_format is None:
        return match_variable_name
    return u'date(%s, "%s")' % (match_variable_name, date_format)
python
def to_match(self): self.validate() # We don't want the dollar sign as part of the variable name. variable_with_no_dollar_sign = self.variable_name[1:] match_variable_name = '{%s}' % (six.text_type(variable_with_no_dollar_sign),) # We can't directly pass a Date or DateTime object, so we have to pass it as a string # and then parse it inline. For date format parameter meanings, see: # http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html # For the semantics of the date() OrientDB SQL function, see: # http://orientdb.com/docs/last/SQL-Functions.html#date if GraphQLDate.is_same_type(self.inferred_type): return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATE_FORMAT) elif GraphQLDateTime.is_same_type(self.inferred_type): return u'date(%s, "%s")' % (match_variable_name, STANDARD_DATETIME_FORMAT) else: return match_variable_name
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "# We don't want the dollar sign as part of the variable name.", "variable_with_no_dollar_sign", "=", "self", ".", "variable_name", "[", "1", ":", "]", "match_variable_name", "=", "'{%s}'", "%", "(", "six", ".", "text_type", "(", "variable_with_no_dollar_sign", ")", ",", ")", "# We can't directly pass a Date or DateTime object, so we have to pass it as a string", "# and then parse it inline. For date format parameter meanings, see:", "# http://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html", "# For the semantics of the date() OrientDB SQL function, see:", "# http://orientdb.com/docs/last/SQL-Functions.html#date", "if", "GraphQLDate", ".", "is_same_type", "(", "self", ".", "inferred_type", ")", ":", "return", "u'date(%s, \"%s\")'", "%", "(", "match_variable_name", ",", "STANDARD_DATE_FORMAT", ")", "elif", "GraphQLDateTime", ".", "is_same_type", "(", "self", ".", "inferred_type", ")", ":", "return", "u'date(%s, \"%s\")'", "%", "(", "match_variable_name", ",", "STANDARD_DATETIME_FORMAT", ")", "else", ":", "return", "match_variable_name" ]
Return a unicode object with the MATCH representation of this Variable.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "Variable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L185-L204
246,805
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
GlobalContextField.validate
def validate(self):
    """Validate that the GlobalContextField is correctly representable."""
    location = self.location
    if not isinstance(location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'
                        .format(type(location).__name__, location))

    # A global context field must point at a specific field, never a bare vertex.
    if location.field is None:
        raise AssertionError(u'Received Location without a field: {}'
                             .format(location))

    if not is_graphql_type(self.field_type):
        raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))
python
def validate(self): if not isinstance(self.location, Location): raise TypeError(u'Expected Location location, got: {} {}' .format(type(self.location).__name__, self.location)) if self.location.field is None: raise AssertionError(u'Received Location without a field: {}' .format(self.location)) if not is_graphql_type(self.field_type): raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "location", ",", "Location", ")", ":", "raise", "TypeError", "(", "u'Expected Location location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "location", ")", ".", "__name__", ",", "self", ".", "location", ")", ")", "if", "self", ".", "location", ".", "field", "is", "None", ":", "raise", "AssertionError", "(", "u'Received Location without a field: {}'", ".", "format", "(", "self", ".", "location", ")", ")", "if", "not", "is_graphql_type", "(", "self", ".", "field_type", ")", ":", "raise", "ValueError", "(", "u'Invalid value of \"field_type\": {}'", ".", "format", "(", "self", ".", "field_type", ")", ")" ]
Validate that the GlobalContextField is correctly representable.
[ "Validate", "that", "the", "GlobalContextField", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L289-L300
246,806
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
GlobalContextField.to_match
def to_match(self):
    """Return a unicode object with the MATCH representation of this GlobalContextField."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    # Both components end up in the emitted query text, so both must be safe.
    for component in (mark_name, field_name):
        validate_safe_string(component)

    return u'%s.%s' % (mark_name, field_name)
python
def to_match(self): self.validate() mark_name, field_name = self.location.get_location_name() validate_safe_string(mark_name) validate_safe_string(field_name) return u'%s.%s' % (mark_name, field_name)
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "mark_name", ",", "field_name", "=", "self", ".", "location", ".", "get_location_name", "(", ")", "validate_safe_string", "(", "mark_name", ")", "validate_safe_string", "(", "field_name", ")", "return", "u'%s.%s'", "%", "(", "mark_name", ",", "field_name", ")" ]
Return a unicode object with the MATCH representation of this GlobalContextField.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "GlobalContextField", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L302-L310
246,807
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
ContextField.to_match
def to_match(self):
    """Return a unicode object with the MATCH representation of this ContextField."""
    self.validate()

    mark_name, field_name = self.location.get_location_name()
    validate_safe_string(mark_name)

    if field_name is None:
        # Vertex-level context: reference the marked vertex itself.
        return u'$matched.%s' % (mark_name,)

    # Field-level context: reference a specific field of the marked vertex.
    validate_safe_string(field_name)
    return u'$matched.%s.%s' % (mark_name, field_name)
python
def to_match(self): self.validate() mark_name, field_name = self.location.get_location_name() validate_safe_string(mark_name) if field_name is None: return u'$matched.%s' % (mark_name,) else: validate_safe_string(field_name) return u'$matched.%s.%s' % (mark_name, field_name)
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "mark_name", ",", "field_name", "=", "self", ".", "location", ".", "get_location_name", "(", ")", "validate_safe_string", "(", "mark_name", ")", "if", "field_name", "is", "None", ":", "return", "u'$matched.%s'", "%", "(", "mark_name", ",", ")", "else", ":", "validate_safe_string", "(", "field_name", ")", "return", "u'$matched.%s.%s'", "%", "(", "mark_name", ",", "field_name", ")" ]
Return a unicode object with the MATCH representation of this ContextField.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "ContextField", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L350-L361
246,808
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
OutputContextField.validate
def validate(self):
    """Validate that the OutputContextField is correctly representable."""
    location = self.location
    field_type = self.field_type

    if not isinstance(location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'.format(
            type(location).__name__, location))
    if not location.field:
        raise ValueError(u'Expected Location object that points to a field, got: '
                         u'{}'.format(location))

    if not is_graphql_type(field_type):
        raise ValueError(u'Invalid value of "field_type": {}'.format(field_type))

    stripped_field_type = strip_non_null_from_type(field_type)
    if isinstance(stripped_field_type, GraphQLList):
        inner_type = strip_non_null_from_type(stripped_field_type.of_type)
        if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type):
            # This is a compilation error rather than a ValueError as
            # it can be caused by an invalid GraphQL query on an otherwise valid schema.
            # In other words, it's an error in writing the GraphQL query, rather than
            # a programming error within the library.
            raise GraphQLCompilationError(
                u'Lists of Date or DateTime cannot currently be represented as '
                u'OutputContextField objects: {}'.format(field_type))
python
def validate(self): if not isinstance(self.location, Location): raise TypeError(u'Expected Location location, got: {} {}'.format( type(self.location).__name__, self.location)) if not self.location.field: raise ValueError(u'Expected Location object that points to a field, got: ' u'{}'.format(self.location)) if not is_graphql_type(self.field_type): raise ValueError(u'Invalid value of "field_type": {}'.format(self.field_type)) stripped_field_type = strip_non_null_from_type(self.field_type) if isinstance(stripped_field_type, GraphQLList): inner_type = strip_non_null_from_type(stripped_field_type.of_type) if GraphQLDate.is_same_type(inner_type) or GraphQLDateTime.is_same_type(inner_type): # This is a compilation error rather than a ValueError as # it can be caused by an invalid GraphQL query on an otherwise valid schema. # In other words, it's an error in writing the GraphQL query, rather than # a programming error within the library. raise GraphQLCompilationError( u'Lists of Date or DateTime cannot currently be represented as ' u'OutputContextField objects: {}'.format(self.field_type))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "location", ",", "Location", ")", ":", "raise", "TypeError", "(", "u'Expected Location location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "location", ")", ".", "__name__", ",", "self", ".", "location", ")", ")", "if", "not", "self", ".", "location", ".", "field", ":", "raise", "ValueError", "(", "u'Expected Location object that points to a field, got: '", "u'{}'", ".", "format", "(", "self", ".", "location", ")", ")", "if", "not", "is_graphql_type", "(", "self", ".", "field_type", ")", ":", "raise", "ValueError", "(", "u'Invalid value of \"field_type\": {}'", ".", "format", "(", "self", ".", "field_type", ")", ")", "stripped_field_type", "=", "strip_non_null_from_type", "(", "self", ".", "field_type", ")", "if", "isinstance", "(", "stripped_field_type", ",", "GraphQLList", ")", ":", "inner_type", "=", "strip_non_null_from_type", "(", "stripped_field_type", ".", "of_type", ")", "if", "GraphQLDate", ".", "is_same_type", "(", "inner_type", ")", "or", "GraphQLDateTime", ".", "is_same_type", "(", "inner_type", ")", ":", "# This is a compilation error rather than a ValueError as", "# it can be caused by an invalid GraphQL query on an otherwise valid schema.", "# In other words, it's an error in writing the GraphQL query, rather than", "# a programming error within the library.", "raise", "GraphQLCompilationError", "(", "u'Lists of Date or DateTime cannot currently be represented as '", "u'OutputContextField objects: {}'", ".", "format", "(", "self", ".", "field_type", ")", ")" ]
Validate that the OutputContextField is correctly representable.
[ "Validate", "that", "the", "OutputContextField", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L404-L427
246,809
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
FoldedContextField.validate
def validate(self):
    """Validate that the FoldedContextField is correctly representable."""
    fold_location = self.fold_scope_location
    if not isinstance(fold_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(fold_location), fold_location))
    if fold_location.field is None:
        raise ValueError(u'Expected FoldScopeLocation at a field, but got: {}'
                         .format(fold_location))

    if fold_location.field == COUNT_META_FIELD_NAME:
        # The _x_count meta-field always carries an integer count.
        if not GraphQLInt.is_same_type(self.field_type):
            raise TypeError(u'Expected the _x_count meta-field to be of GraphQLInt type, but '
                            u'encountered type {} instead: {}'
                            .format(self.field_type, fold_location))
    else:
        # Every non-meta folded field produces a list of values.
        if not isinstance(self.field_type, GraphQLList):
            raise ValueError(u'Invalid value of "field_type" for a field that is not '
                             u'a meta-field, expected a list type but got: {} {}'
                             .format(self.field_type, fold_location))

        inner_type = strip_non_null_from_type(self.field_type.of_type)
        if isinstance(inner_type, GraphQLList):
            raise GraphQLCompilationError(
                u'Outputting list-valued fields in a @fold context is currently not supported: '
                u'{} {}'.format(fold_location, self.field_type.of_type))
python
def validate(self): if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format( type(self.fold_scope_location), self.fold_scope_location)) if self.fold_scope_location.field is None: raise ValueError(u'Expected FoldScopeLocation at a field, but got: {}' .format(self.fold_scope_location)) if self.fold_scope_location.field == COUNT_META_FIELD_NAME: if not GraphQLInt.is_same_type(self.field_type): raise TypeError(u'Expected the _x_count meta-field to be of GraphQLInt type, but ' u'encountered type {} instead: {}' .format(self.field_type, self.fold_scope_location)) else: if not isinstance(self.field_type, GraphQLList): raise ValueError(u'Invalid value of "field_type" for a field that is not ' u'a meta-field, expected a list type but got: {} {}' .format(self.field_type, self.fold_scope_location)) inner_type = strip_non_null_from_type(self.field_type.of_type) if isinstance(inner_type, GraphQLList): raise GraphQLCompilationError( u'Outputting list-valued fields in a @fold context is currently not supported: ' u'{} {}'.format(self.fold_scope_location, self.field_type.of_type))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "fold_scope_location", ",", "FoldScopeLocation", ")", ":", "raise", "TypeError", "(", "u'Expected FoldScopeLocation fold_scope_location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "fold_scope_location", ")", ",", "self", ".", "fold_scope_location", ")", ")", "if", "self", ".", "fold_scope_location", ".", "field", "is", "None", ":", "raise", "ValueError", "(", "u'Expected FoldScopeLocation at a field, but got: {}'", ".", "format", "(", "self", ".", "fold_scope_location", ")", ")", "if", "self", ".", "fold_scope_location", ".", "field", "==", "COUNT_META_FIELD_NAME", ":", "if", "not", "GraphQLInt", ".", "is_same_type", "(", "self", ".", "field_type", ")", ":", "raise", "TypeError", "(", "u'Expected the _x_count meta-field to be of GraphQLInt type, but '", "u'encountered type {} instead: {}'", ".", "format", "(", "self", ".", "field_type", ",", "self", ".", "fold_scope_location", ")", ")", "else", ":", "if", "not", "isinstance", "(", "self", ".", "field_type", ",", "GraphQLList", ")", ":", "raise", "ValueError", "(", "u'Invalid value of \"field_type\" for a field that is not '", "u'a meta-field, expected a list type but got: {} {}'", ".", "format", "(", "self", ".", "field_type", ",", "self", ".", "fold_scope_location", ")", ")", "inner_type", "=", "strip_non_null_from_type", "(", "self", ".", "field_type", ".", "of_type", ")", "if", "isinstance", "(", "inner_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Outputting list-valued fields in a @fold context is currently not supported: '", "u'{} {}'", ".", "format", "(", "self", ".", "fold_scope_location", ",", "self", ".", "field_type", ".", "of_type", ")", ")" ]
Validate that the FoldedContextField is correctly representable.
[ "Validate", "that", "the", "FoldedContextField", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L505-L530
246,810
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
FoldCountContextField.validate
def validate(self):
    """Validate that the FoldCountContextField is correctly representable."""
    fold_location = self.fold_scope_location
    if not isinstance(fold_location, FoldScopeLocation):
        raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format(
            type(fold_location), fold_location))

    # This expression only ever represents the fold's _x_count meta-field.
    if fold_location.field != COUNT_META_FIELD_NAME:
        raise AssertionError(u'Unexpected field in the FoldScopeLocation of this '
                             u'FoldCountContextField object: {} {}'
                             .format(fold_location, self))
python
def validate(self): if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected FoldScopeLocation fold_scope_location, got: {} {}'.format( type(self.fold_scope_location), self.fold_scope_location)) if self.fold_scope_location.field != COUNT_META_FIELD_NAME: raise AssertionError(u'Unexpected field in the FoldScopeLocation of this ' u'FoldCountContextField object: {} {}' .format(self.fold_scope_location, self))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "fold_scope_location", ",", "FoldScopeLocation", ")", ":", "raise", "TypeError", "(", "u'Expected FoldScopeLocation fold_scope_location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "fold_scope_location", ")", ",", "self", ".", "fold_scope_location", ")", ")", "if", "self", ".", "fold_scope_location", ".", "field", "!=", "COUNT_META_FIELD_NAME", ":", "raise", "AssertionError", "(", "u'Unexpected field in the FoldScopeLocation of this '", "u'FoldCountContextField object: {} {}'", ".", "format", "(", "self", ".", "fold_scope_location", ",", "self", ")", ")" ]
Validate that the FoldCountContextField is correctly representable.
[ "Validate", "that", "the", "FoldCountContextField", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L596-L605
246,811
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
ContextFieldExistence.validate
def validate(self):
    """Validate that the ContextFieldExistence is correctly representable."""
    location = self.location
    if not isinstance(location, Location):
        raise TypeError(u'Expected Location location, got: {} {}'.format(
            type(location).__name__, location))

    # Existence checks apply to vertices, so a field-level location is invalid here.
    if location.field:
        raise ValueError(u'Expected location to point to a vertex, '
                         u'but found a field: {}'.format(location))
python
def validate(self): if not isinstance(self.location, Location): raise TypeError(u'Expected Location location, got: {} {}'.format( type(self.location).__name__, self.location)) if self.location.field: raise ValueError(u'Expected location to point to a vertex, ' u'but found a field: {}'.format(self.location))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "location", ",", "Location", ")", ":", "raise", "TypeError", "(", "u'Expected Location location, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "location", ")", ".", "__name__", ",", "self", ".", "location", ")", ")", "if", "self", ".", "location", ".", "field", ":", "raise", "ValueError", "(", "u'Expected location to point to a vertex, '", "u'but found a field: {}'", ".", "format", "(", "self", ".", "location", ")", ")" ]
Validate that the ContextFieldExistence is correctly representable.
[ "Validate", "that", "the", "ContextFieldExistence", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L646-L654
246,812
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
UnaryTransformation.validate
def validate(self): """Validate that the UnaryTransformation is correctly representable.""" _validate_operator_name(self.operator, UnaryTransformation.SUPPORTED_OPERATORS) if not isinstance(self.inner_expression, Expression): raise TypeError(u'Expected Expression inner_expression, got {} {}'.format( type(self.inner_expression).__name__, self.inner_expression))
python
def validate(self): _validate_operator_name(self.operator, UnaryTransformation.SUPPORTED_OPERATORS) if not isinstance(self.inner_expression, Expression): raise TypeError(u'Expected Expression inner_expression, got {} {}'.format( type(self.inner_expression).__name__, self.inner_expression))
[ "def", "validate", "(", "self", ")", ":", "_validate_operator_name", "(", "self", ".", "operator", ",", "UnaryTransformation", ".", "SUPPORTED_OPERATORS", ")", "if", "not", "isinstance", "(", "self", ".", "inner_expression", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression inner_expression, got {} {}'", ".", "format", "(", "type", "(", "self", ".", "inner_expression", ")", ".", "__name__", ",", "self", ".", "inner_expression", ")", ")" ]
Validate that the UnaryTransformation is correctly representable.
[ "Validate", "that", "the", "UnaryTransformation", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L688-L694
246,813
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
UnaryTransformation.to_match
def to_match(self): """Return a unicode object with the MATCH representation of this UnaryTransformation.""" self.validate() translation_table = { u'size': u'size()', } match_operator = translation_table.get(self.operator) if not match_operator: raise AssertionError(u'Unrecognized operator used: ' u'{} {}'.format(self.operator, self)) template = u'%(inner)s.%(operator)s' args = { 'inner': self.inner_expression.to_match(), 'operator': match_operator, } return template % args
python
def to_match(self): self.validate() translation_table = { u'size': u'size()', } match_operator = translation_table.get(self.operator) if not match_operator: raise AssertionError(u'Unrecognized operator used: ' u'{} {}'.format(self.operator, self)) template = u'%(inner)s.%(operator)s' args = { 'inner': self.inner_expression.to_match(), 'operator': match_operator, } return template % args
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "translation_table", "=", "{", "u'size'", ":", "u'size()'", ",", "}", "match_operator", "=", "translation_table", ".", "get", "(", "self", ".", "operator", ")", "if", "not", "match_operator", ":", "raise", "AssertionError", "(", "u'Unrecognized operator used: '", "u'{} {}'", ".", "format", "(", "self", ".", "operator", ",", "self", ")", ")", "template", "=", "u'%(inner)s.%(operator)s'", "args", "=", "{", "'inner'", ":", "self", ".", "inner_expression", ".", "to_match", "(", ")", ",", "'operator'", ":", "match_operator", ",", "}", "return", "template", "%", "args" ]
Return a unicode object with the MATCH representation of this UnaryTransformation.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "UnaryTransformation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L705-L722
246,814
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
BinaryComposition.validate
def validate(self): """Validate that the BinaryComposition is correctly representable.""" _validate_operator_name(self.operator, BinaryComposition.SUPPORTED_OPERATORS) if not isinstance(self.left, Expression): raise TypeError(u'Expected Expression left, got: {} {} {}'.format( type(self.left).__name__, self.left, self)) if not isinstance(self.right, Expression): raise TypeError(u'Expected Expression right, got: {} {}'.format( type(self.right).__name__, self.right))
python
def validate(self): _validate_operator_name(self.operator, BinaryComposition.SUPPORTED_OPERATORS) if not isinstance(self.left, Expression): raise TypeError(u'Expected Expression left, got: {} {} {}'.format( type(self.left).__name__, self.left, self)) if not isinstance(self.right, Expression): raise TypeError(u'Expected Expression right, got: {} {}'.format( type(self.right).__name__, self.right))
[ "def", "validate", "(", "self", ")", ":", "_validate_operator_name", "(", "self", ".", "operator", ",", "BinaryComposition", ".", "SUPPORTED_OPERATORS", ")", "if", "not", "isinstance", "(", "self", ".", "left", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression left, got: {} {} {}'", ".", "format", "(", "type", "(", "self", ".", "left", ")", ".", "__name__", ",", "self", ".", "left", ",", "self", ")", ")", "if", "not", "isinstance", "(", "self", ".", "right", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression right, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "right", ")", ".", "__name__", ",", "self", ".", "right", ")", ")" ]
Validate that the BinaryComposition is correctly representable.
[ "Validate", "that", "the", "BinaryComposition", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L769-L779
246,815
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
BinaryComposition.to_match
def to_match(self): """Return a unicode object with the MATCH representation of this BinaryComposition.""" self.validate() # The MATCH versions of some operators require an inverted order of arguments. # pylint: disable=unused-variable regular_operator_format = '(%(left)s %(operator)s %(right)s)' inverted_operator_format = '(%(right)s %(operator)s %(left)s)' # noqa intersects_operator_format = '(%(operator)s(%(left)s, %(right)s).asList().size() > 0)' # pylint: enable=unused-variable # Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'. if any((isinstance(self.left, Literal) and self.left.value is None, isinstance(self.right, Literal) and self.right.value is None)): translation_table = { u'=': (u'IS', regular_operator_format), u'!=': (u'IS NOT', regular_operator_format), } else: translation_table = { u'=': (u'=', regular_operator_format), u'!=': (u'<>', regular_operator_format), u'>=': (u'>=', regular_operator_format), u'<=': (u'<=', regular_operator_format), u'>': (u'>', regular_operator_format), u'<': (u'<', regular_operator_format), u'+': (u'+', regular_operator_format), u'||': (u'OR', regular_operator_format), u'&&': (u'AND', regular_operator_format), u'contains': (u'CONTAINS', regular_operator_format), u'intersects': (u'intersect', intersects_operator_format), u'has_substring': (None, None), # must be lowered into compatible form using LIKE # MATCH-specific operators u'LIKE': (u'LIKE', regular_operator_format), u'INSTANCEOF': (u'INSTANCEOF', regular_operator_format), } match_operator, format_spec = translation_table.get(self.operator, (None, None)) if not match_operator: raise AssertionError(u'Unrecognized operator used: ' u'{} {}'.format(self.operator, self)) return format_spec % dict(operator=match_operator, left=self.left.to_match(), right=self.right.to_match())
python
def to_match(self): self.validate() # The MATCH versions of some operators require an inverted order of arguments. # pylint: disable=unused-variable regular_operator_format = '(%(left)s %(operator)s %(right)s)' inverted_operator_format = '(%(right)s %(operator)s %(left)s)' # noqa intersects_operator_format = '(%(operator)s(%(left)s, %(right)s).asList().size() > 0)' # pylint: enable=unused-variable # Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'. if any((isinstance(self.left, Literal) and self.left.value is None, isinstance(self.right, Literal) and self.right.value is None)): translation_table = { u'=': (u'IS', regular_operator_format), u'!=': (u'IS NOT', regular_operator_format), } else: translation_table = { u'=': (u'=', regular_operator_format), u'!=': (u'<>', regular_operator_format), u'>=': (u'>=', regular_operator_format), u'<=': (u'<=', regular_operator_format), u'>': (u'>', regular_operator_format), u'<': (u'<', regular_operator_format), u'+': (u'+', regular_operator_format), u'||': (u'OR', regular_operator_format), u'&&': (u'AND', regular_operator_format), u'contains': (u'CONTAINS', regular_operator_format), u'intersects': (u'intersect', intersects_operator_format), u'has_substring': (None, None), # must be lowered into compatible form using LIKE # MATCH-specific operators u'LIKE': (u'LIKE', regular_operator_format), u'INSTANCEOF': (u'INSTANCEOF', regular_operator_format), } match_operator, format_spec = translation_table.get(self.operator, (None, None)) if not match_operator: raise AssertionError(u'Unrecognized operator used: ' u'{} {}'.format(self.operator, self)) return format_spec % dict(operator=match_operator, left=self.left.to_match(), right=self.right.to_match())
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "# The MATCH versions of some operators require an inverted order of arguments.", "# pylint: disable=unused-variable", "regular_operator_format", "=", "'(%(left)s %(operator)s %(right)s)'", "inverted_operator_format", "=", "'(%(right)s %(operator)s %(left)s)'", "# noqa", "intersects_operator_format", "=", "'(%(operator)s(%(left)s, %(right)s).asList().size() > 0)'", "# pylint: enable=unused-variable", "# Null literals use 'is/is not' as (in)equality operators, while other values use '=/<>'.", "if", "any", "(", "(", "isinstance", "(", "self", ".", "left", ",", "Literal", ")", "and", "self", ".", "left", ".", "value", "is", "None", ",", "isinstance", "(", "self", ".", "right", ",", "Literal", ")", "and", "self", ".", "right", ".", "value", "is", "None", ")", ")", ":", "translation_table", "=", "{", "u'='", ":", "(", "u'IS'", ",", "regular_operator_format", ")", ",", "u'!='", ":", "(", "u'IS NOT'", ",", "regular_operator_format", ")", ",", "}", "else", ":", "translation_table", "=", "{", "u'='", ":", "(", "u'='", ",", "regular_operator_format", ")", ",", "u'!='", ":", "(", "u'<>'", ",", "regular_operator_format", ")", ",", "u'>='", ":", "(", "u'>='", ",", "regular_operator_format", ")", ",", "u'<='", ":", "(", "u'<='", ",", "regular_operator_format", ")", ",", "u'>'", ":", "(", "u'>'", ",", "regular_operator_format", ")", ",", "u'<'", ":", "(", "u'<'", ",", "regular_operator_format", ")", ",", "u'+'", ":", "(", "u'+'", ",", "regular_operator_format", ")", ",", "u'||'", ":", "(", "u'OR'", ",", "regular_operator_format", ")", ",", "u'&&'", ":", "(", "u'AND'", ",", "regular_operator_format", ")", ",", "u'contains'", ":", "(", "u'CONTAINS'", ",", "regular_operator_format", ")", ",", "u'intersects'", ":", "(", "u'intersect'", ",", "intersects_operator_format", ")", ",", "u'has_substring'", ":", "(", "None", ",", "None", ")", ",", "# must be lowered into compatible form using LIKE", "# 
MATCH-specific operators", "u'LIKE'", ":", "(", "u'LIKE'", ",", "regular_operator_format", ")", ",", "u'INSTANCEOF'", ":", "(", "u'INSTANCEOF'", ",", "regular_operator_format", ")", ",", "}", "match_operator", ",", "format_spec", "=", "translation_table", ".", "get", "(", "self", ".", "operator", ",", "(", "None", ",", "None", ")", ")", "if", "not", "match_operator", ":", "raise", "AssertionError", "(", "u'Unrecognized operator used: '", "u'{} {}'", ".", "format", "(", "self", ".", "operator", ",", "self", ")", ")", "return", "format_spec", "%", "dict", "(", "operator", "=", "match_operator", ",", "left", "=", "self", ".", "left", ".", "to_match", "(", ")", ",", "right", "=", "self", ".", "right", ".", "to_match", "(", ")", ")" ]
Return a unicode object with the MATCH representation of this BinaryComposition.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "BinaryComposition", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L791-L836
246,816
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
TernaryConditional.validate
def validate(self): """Validate that the TernaryConditional is correctly representable.""" if not isinstance(self.predicate, Expression): raise TypeError(u'Expected Expression predicate, got: {} {}'.format( type(self.predicate).__name__, self.predicate)) if not isinstance(self.if_true, Expression): raise TypeError(u'Expected Expression if_true, got: {} {}'.format( type(self.if_true).__name__, self.if_true)) if not isinstance(self.if_false, Expression): raise TypeError(u'Expected Expression if_false, got: {} {}'.format( type(self.if_false).__name__, self.if_false))
python
def validate(self): if not isinstance(self.predicate, Expression): raise TypeError(u'Expected Expression predicate, got: {} {}'.format( type(self.predicate).__name__, self.predicate)) if not isinstance(self.if_true, Expression): raise TypeError(u'Expected Expression if_true, got: {} {}'.format( type(self.if_true).__name__, self.if_true)) if not isinstance(self.if_false, Expression): raise TypeError(u'Expected Expression if_false, got: {} {}'.format( type(self.if_false).__name__, self.if_false))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "predicate", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression predicate, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "predicate", ")", ".", "__name__", ",", "self", ".", "predicate", ")", ")", "if", "not", "isinstance", "(", "self", ".", "if_true", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression if_true, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "if_true", ")", ".", "__name__", ",", "self", ".", "if_true", ")", ")", "if", "not", "isinstance", "(", "self", ".", "if_false", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression if_false, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "if_false", ")", ".", "__name__", ",", "self", ".", "if_false", ")", ")" ]
Validate that the TernaryConditional is correctly representable.
[ "Validate", "that", "the", "TernaryConditional", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L893-L903
246,817
kensho-technologies/graphql-compiler
graphql_compiler/compiler/expressions.py
TernaryConditional.to_match
def to_match(self): """Return a unicode object with the MATCH representation of this TernaryConditional.""" self.validate() # For MATCH, an additional validation step is needed -- we currently do not support # emitting MATCH code for TernaryConditional that contains another TernaryConditional # anywhere within the predicate expression. This is because the predicate expression # must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work. def visitor_fn(expression): """Visitor function that ensures the predicate does not contain TernaryConditionals.""" if isinstance(expression, TernaryConditional): raise ValueError(u'Cannot emit MATCH code for TernaryConditional that contains ' u'in its predicate another TernaryConditional: ' u'{} {}'.format(expression, self)) return expression self.predicate.visit_and_update(visitor_fn) format_spec = u'if(eval("%(predicate)s"), %(if_true)s, %(if_false)s)' predicate_string = self.predicate.to_match() if u'"' in predicate_string: raise AssertionError(u'Found a double-quote within the predicate string, this would ' u'have terminated the if(eval()) early and should be fixed: ' u'{} {}'.format(predicate_string, self)) return format_spec % dict(predicate=predicate_string, if_true=self.if_true.to_match(), if_false=self.if_false.to_match())
python
def to_match(self): self.validate() # For MATCH, an additional validation step is needed -- we currently do not support # emitting MATCH code for TernaryConditional that contains another TernaryConditional # anywhere within the predicate expression. This is because the predicate expression # must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work. def visitor_fn(expression): """Visitor function that ensures the predicate does not contain TernaryConditionals.""" if isinstance(expression, TernaryConditional): raise ValueError(u'Cannot emit MATCH code for TernaryConditional that contains ' u'in its predicate another TernaryConditional: ' u'{} {}'.format(expression, self)) return expression self.predicate.visit_and_update(visitor_fn) format_spec = u'if(eval("%(predicate)s"), %(if_true)s, %(if_false)s)' predicate_string = self.predicate.to_match() if u'"' in predicate_string: raise AssertionError(u'Found a double-quote within the predicate string, this would ' u'have terminated the if(eval()) early and should be fixed: ' u'{} {}'.format(predicate_string, self)) return format_spec % dict(predicate=predicate_string, if_true=self.if_true.to_match(), if_false=self.if_false.to_match())
[ "def", "to_match", "(", "self", ")", ":", "self", ".", "validate", "(", ")", "# For MATCH, an additional validation step is needed -- we currently do not support", "# emitting MATCH code for TernaryConditional that contains another TernaryConditional", "# anywhere within the predicate expression. This is because the predicate expression", "# must be surrounded in quotes, and it is unclear whether nested/escaped quotes would work.", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Visitor function that ensures the predicate does not contain TernaryConditionals.\"\"\"", "if", "isinstance", "(", "expression", ",", "TernaryConditional", ")", ":", "raise", "ValueError", "(", "u'Cannot emit MATCH code for TernaryConditional that contains '", "u'in its predicate another TernaryConditional: '", "u'{} {}'", ".", "format", "(", "expression", ",", "self", ")", ")", "return", "expression", "self", ".", "predicate", ".", "visit_and_update", "(", "visitor_fn", ")", "format_spec", "=", "u'if(eval(\"%(predicate)s\"), %(if_true)s, %(if_false)s)'", "predicate_string", "=", "self", ".", "predicate", ".", "to_match", "(", ")", "if", "u'\"'", "in", "predicate_string", ":", "raise", "AssertionError", "(", "u'Found a double-quote within the predicate string, this would '", "u'have terminated the if(eval()) early and should be fixed: '", "u'{} {}'", ".", "format", "(", "predicate_string", ",", "self", ")", ")", "return", "format_spec", "%", "dict", "(", "predicate", "=", "predicate_string", ",", "if_true", "=", "self", ".", "if_true", ".", "to_match", "(", ")", ",", "if_false", "=", "self", ".", "if_false", ".", "to_match", "(", ")", ")" ]
Return a unicode object with the MATCH representation of this TernaryConditional.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "TernaryConditional", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/expressions.py#L918-L945
246,818
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
sanity_check_ir_blocks_from_frontend
def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table): """Assert that IR blocks originating from the frontend do not have nonsensical structure. Args: ir_blocks: list of BasicBlocks representing the IR to sanity-check Raises: AssertionError, if the IR has unexpected structure. If the IR produced by the front-end cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug, this is the method that should catch the problem. """ if not ir_blocks: raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks)) _sanity_check_fold_scope_locations_are_unique(ir_blocks) _sanity_check_no_nested_folds(ir_blocks) _sanity_check_query_root_block(ir_blocks) _sanity_check_output_source_follower_blocks(ir_blocks) _sanity_check_block_pairwise_constraints(ir_blocks) _sanity_check_mark_location_preceding_optional_traverse(ir_blocks) _sanity_check_every_location_is_marked(ir_blocks) _sanity_check_coerce_type_outside_of_fold(ir_blocks) _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table) _sanity_check_registered_locations_parent_locations(query_metadata_table)
python
def sanity_check_ir_blocks_from_frontend(ir_blocks, query_metadata_table): if not ir_blocks: raise AssertionError(u'Received no ir_blocks: {}'.format(ir_blocks)) _sanity_check_fold_scope_locations_are_unique(ir_blocks) _sanity_check_no_nested_folds(ir_blocks) _sanity_check_query_root_block(ir_blocks) _sanity_check_output_source_follower_blocks(ir_blocks) _sanity_check_block_pairwise_constraints(ir_blocks) _sanity_check_mark_location_preceding_optional_traverse(ir_blocks) _sanity_check_every_location_is_marked(ir_blocks) _sanity_check_coerce_type_outside_of_fold(ir_blocks) _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table) _sanity_check_registered_locations_parent_locations(query_metadata_table)
[ "def", "sanity_check_ir_blocks_from_frontend", "(", "ir_blocks", ",", "query_metadata_table", ")", ":", "if", "not", "ir_blocks", ":", "raise", "AssertionError", "(", "u'Received no ir_blocks: {}'", ".", "format", "(", "ir_blocks", ")", ")", "_sanity_check_fold_scope_locations_are_unique", "(", "ir_blocks", ")", "_sanity_check_no_nested_folds", "(", "ir_blocks", ")", "_sanity_check_query_root_block", "(", "ir_blocks", ")", "_sanity_check_output_source_follower_blocks", "(", "ir_blocks", ")", "_sanity_check_block_pairwise_constraints", "(", "ir_blocks", ")", "_sanity_check_mark_location_preceding_optional_traverse", "(", "ir_blocks", ")", "_sanity_check_every_location_is_marked", "(", "ir_blocks", ")", "_sanity_check_coerce_type_outside_of_fold", "(", "ir_blocks", ")", "_sanity_check_all_marked_locations_are_registered", "(", "ir_blocks", ",", "query_metadata_table", ")", "_sanity_check_registered_locations_parent_locations", "(", "query_metadata_table", ")" ]
Assert that IR blocks originating from the frontend do not have nonsensical structure. Args: ir_blocks: list of BasicBlocks representing the IR to sanity-check Raises: AssertionError, if the IR has unexpected structure. If the IR produced by the front-end cannot be successfully and correctly used to generate MATCH or Gremlin due to a bug, this is the method that should catch the problem.
[ "Assert", "that", "IR", "blocks", "originating", "from", "the", "frontend", "do", "not", "have", "nonsensical", "structure", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L13-L36
246,819
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_registered_locations_parent_locations
def _sanity_check_registered_locations_parent_locations(query_metadata_table): """Assert that all registered locations' parent locations are also registered.""" for location, location_info in query_metadata_table.registered_locations: if (location != query_metadata_table.root_location and not query_metadata_table.root_location.is_revisited_at(location)): # If the location is not the root location and is not a revisit of the root, # then it must have a parent location. if location_info.parent_location is None: raise AssertionError(u'Found a location that is not the root location of the query ' u'or a revisit of the root, but does not have a parent: ' u'{} {}'.format(location, location_info)) if location_info.parent_location is not None: # Make sure the parent_location is also registered. # If the location is not registered, the following line will raise an error. query_metadata_table.get_location_info(location_info.parent_location)
python
def _sanity_check_registered_locations_parent_locations(query_metadata_table): for location, location_info in query_metadata_table.registered_locations: if (location != query_metadata_table.root_location and not query_metadata_table.root_location.is_revisited_at(location)): # If the location is not the root location and is not a revisit of the root, # then it must have a parent location. if location_info.parent_location is None: raise AssertionError(u'Found a location that is not the root location of the query ' u'or a revisit of the root, but does not have a parent: ' u'{} {}'.format(location, location_info)) if location_info.parent_location is not None: # Make sure the parent_location is also registered. # If the location is not registered, the following line will raise an error. query_metadata_table.get_location_info(location_info.parent_location)
[ "def", "_sanity_check_registered_locations_parent_locations", "(", "query_metadata_table", ")", ":", "for", "location", ",", "location_info", "in", "query_metadata_table", ".", "registered_locations", ":", "if", "(", "location", "!=", "query_metadata_table", ".", "root_location", "and", "not", "query_metadata_table", ".", "root_location", ".", "is_revisited_at", "(", "location", ")", ")", ":", "# If the location is not the root location and is not a revisit of the root,", "# then it must have a parent location.", "if", "location_info", ".", "parent_location", "is", "None", ":", "raise", "AssertionError", "(", "u'Found a location that is not the root location of the query '", "u'or a revisit of the root, but does not have a parent: '", "u'{} {}'", ".", "format", "(", "location", ",", "location_info", ")", ")", "if", "location_info", ".", "parent_location", "is", "not", "None", ":", "# Make sure the parent_location is also registered.", "# If the location is not registered, the following line will raise an error.", "query_metadata_table", ".", "get_location_info", "(", "location_info", ".", "parent_location", ")" ]
Assert that all registered locations' parent locations are also registered.
[ "Assert", "that", "all", "registered", "locations", "parent", "locations", "are", "also", "registered", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L39-L54
246,820
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_all_marked_locations_are_registered
def _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table): """Assert that all locations in MarkLocation blocks have registered and valid metadata.""" # Grab all the registered locations, then make sure that: # - Any location that appears in a MarkLocation block is also registered. # - There are no registered locations that do not appear in a MarkLocation block. registered_locations = { location for location, _ in query_metadata_table.registered_locations } ir_encountered_locations = { block.location for block in ir_blocks if isinstance(block, MarkLocation) } unregistered_locations = ir_encountered_locations - registered_locations unencountered_locations = registered_locations - ir_encountered_locations if unregistered_locations: raise AssertionError(u'IR blocks unexpectedly contain locations not registered in the ' u'QueryMetadataTable: {}'.format(unregistered_locations)) if unencountered_locations: raise AssertionError(u'QueryMetadataTable unexpectedly contains registered locations that ' u'never appear in the IR blocks: {}'.format(unencountered_locations))
python
def _sanity_check_all_marked_locations_are_registered(ir_blocks, query_metadata_table): # Grab all the registered locations, then make sure that: # - Any location that appears in a MarkLocation block is also registered. # - There are no registered locations that do not appear in a MarkLocation block. registered_locations = { location for location, _ in query_metadata_table.registered_locations } ir_encountered_locations = { block.location for block in ir_blocks if isinstance(block, MarkLocation) } unregistered_locations = ir_encountered_locations - registered_locations unencountered_locations = registered_locations - ir_encountered_locations if unregistered_locations: raise AssertionError(u'IR blocks unexpectedly contain locations not registered in the ' u'QueryMetadataTable: {}'.format(unregistered_locations)) if unencountered_locations: raise AssertionError(u'QueryMetadataTable unexpectedly contains registered locations that ' u'never appear in the IR blocks: {}'.format(unencountered_locations))
[ "def", "_sanity_check_all_marked_locations_are_registered", "(", "ir_blocks", ",", "query_metadata_table", ")", ":", "# Grab all the registered locations, then make sure that:", "# - Any location that appears in a MarkLocation block is also registered.", "# - There are no registered locations that do not appear in a MarkLocation block.", "registered_locations", "=", "{", "location", "for", "location", ",", "_", "in", "query_metadata_table", ".", "registered_locations", "}", "ir_encountered_locations", "=", "{", "block", ".", "location", "for", "block", "in", "ir_blocks", "if", "isinstance", "(", "block", ",", "MarkLocation", ")", "}", "unregistered_locations", "=", "ir_encountered_locations", "-", "registered_locations", "unencountered_locations", "=", "registered_locations", "-", "ir_encountered_locations", "if", "unregistered_locations", ":", "raise", "AssertionError", "(", "u'IR blocks unexpectedly contain locations not registered in the '", "u'QueryMetadataTable: {}'", ".", "format", "(", "unregistered_locations", ")", ")", "if", "unencountered_locations", ":", "raise", "AssertionError", "(", "u'QueryMetadataTable unexpectedly contains registered locations that '", "u'never appear in the IR blocks: {}'", ".", "format", "(", "unencountered_locations", ")", ")" ]
Assert that all locations in MarkLocation blocks have registered and valid metadata.
[ "Assert", "that", "all", "locations", "in", "MarkLocation", "blocks", "have", "registered", "and", "valid", "metadata", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L57-L80
246,821
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_fold_scope_locations_are_unique
def _sanity_check_fold_scope_locations_are_unique(ir_blocks): """Assert that every FoldScopeLocation that exists on a Fold block is unique.""" observed_locations = dict() for block in ir_blocks: if isinstance(block, Fold): alternate = observed_locations.get(block.fold_scope_location, None) if alternate is not None: raise AssertionError(u'Found two Fold blocks with identical FoldScopeLocations: ' u'{} {} {}'.format(alternate, block, ir_blocks)) observed_locations[block.fold_scope_location] = block
python
def _sanity_check_fold_scope_locations_are_unique(ir_blocks): observed_locations = dict() for block in ir_blocks: if isinstance(block, Fold): alternate = observed_locations.get(block.fold_scope_location, None) if alternate is not None: raise AssertionError(u'Found two Fold blocks with identical FoldScopeLocations: ' u'{} {} {}'.format(alternate, block, ir_blocks)) observed_locations[block.fold_scope_location] = block
[ "def", "_sanity_check_fold_scope_locations_are_unique", "(", "ir_blocks", ")", ":", "observed_locations", "=", "dict", "(", ")", "for", "block", "in", "ir_blocks", ":", "if", "isinstance", "(", "block", ",", "Fold", ")", ":", "alternate", "=", "observed_locations", ".", "get", "(", "block", ".", "fold_scope_location", ",", "None", ")", "if", "alternate", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Found two Fold blocks with identical FoldScopeLocations: '", "u'{} {} {}'", ".", "format", "(", "alternate", ",", "block", ",", "ir_blocks", ")", ")", "observed_locations", "[", "block", ".", "fold_scope_location", "]", "=", "block" ]
Assert that every FoldScopeLocation that exists on a Fold block is unique.
[ "Assert", "that", "every", "FoldScopeLocation", "that", "exists", "on", "a", "Fold", "block", "is", "unique", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L83-L92
246,822
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_no_nested_folds
def _sanity_check_no_nested_folds(ir_blocks): """Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold.""" fold_seen = False for block in ir_blocks: if isinstance(block, Fold): if fold_seen: raise AssertionError(u'Found a nested Fold contexts: {}'.format(ir_blocks)) else: fold_seen = True elif isinstance(block, Unfold): if not fold_seen: raise AssertionError(u'Found an Unfold block without a matching Fold: ' u'{}'.format(ir_blocks)) else: fold_seen = False
python
def _sanity_check_no_nested_folds(ir_blocks): fold_seen = False for block in ir_blocks: if isinstance(block, Fold): if fold_seen: raise AssertionError(u'Found a nested Fold contexts: {}'.format(ir_blocks)) else: fold_seen = True elif isinstance(block, Unfold): if not fold_seen: raise AssertionError(u'Found an Unfold block without a matching Fold: ' u'{}'.format(ir_blocks)) else: fold_seen = False
[ "def", "_sanity_check_no_nested_folds", "(", "ir_blocks", ")", ":", "fold_seen", "=", "False", "for", "block", "in", "ir_blocks", ":", "if", "isinstance", "(", "block", ",", "Fold", ")", ":", "if", "fold_seen", ":", "raise", "AssertionError", "(", "u'Found a nested Fold contexts: {}'", ".", "format", "(", "ir_blocks", ")", ")", "else", ":", "fold_seen", "=", "True", "elif", "isinstance", "(", "block", ",", "Unfold", ")", ":", "if", "not", "fold_seen", ":", "raise", "AssertionError", "(", "u'Found an Unfold block without a matching Fold: '", "u'{}'", ".", "format", "(", "ir_blocks", ")", ")", "else", ":", "fold_seen", "=", "False" ]
Assert that there are no nested Fold contexts, and that every Fold has a matching Unfold.
[ "Assert", "that", "there", "are", "no", "nested", "Fold", "contexts", "and", "that", "every", "Fold", "has", "a", "matching", "Unfold", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L95-L109
246,823
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_query_root_block
def _sanity_check_query_root_block(ir_blocks): """Assert that QueryRoot is always the first block, and only the first block.""" if not isinstance(ir_blocks[0], QueryRoot): raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks)) for block in ir_blocks[1:]: if isinstance(block, QueryRoot): raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks))
python
def _sanity_check_query_root_block(ir_blocks): if not isinstance(ir_blocks[0], QueryRoot): raise AssertionError(u'The first block was not QueryRoot: {}'.format(ir_blocks)) for block in ir_blocks[1:]: if isinstance(block, QueryRoot): raise AssertionError(u'Found QueryRoot after the first block: {}'.format(ir_blocks))
[ "def", "_sanity_check_query_root_block", "(", "ir_blocks", ")", ":", "if", "not", "isinstance", "(", "ir_blocks", "[", "0", "]", ",", "QueryRoot", ")", ":", "raise", "AssertionError", "(", "u'The first block was not QueryRoot: {}'", ".", "format", "(", "ir_blocks", ")", ")", "for", "block", "in", "ir_blocks", "[", "1", ":", "]", ":", "if", "isinstance", "(", "block", ",", "QueryRoot", ")", ":", "raise", "AssertionError", "(", "u'Found QueryRoot after the first block: {}'", ".", "format", "(", "ir_blocks", ")", ")" ]
Assert that QueryRoot is always the first block, and only the first block.
[ "Assert", "that", "QueryRoot", "is", "always", "the", "first", "block", "and", "only", "the", "first", "block", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L112-L118
246,824
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_construct_result_block
def _sanity_check_construct_result_block(ir_blocks): """Assert that ConstructResult is always the last block, and only the last block.""" if not isinstance(ir_blocks[-1], ConstructResult): raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks)) for block in ir_blocks[:-1]: if isinstance(block, ConstructResult): raise AssertionError(u'Found ConstructResult before the last block: ' u'{}'.format(ir_blocks))
python
def _sanity_check_construct_result_block(ir_blocks): if not isinstance(ir_blocks[-1], ConstructResult): raise AssertionError(u'The last block was not ConstructResult: {}'.format(ir_blocks)) for block in ir_blocks[:-1]: if isinstance(block, ConstructResult): raise AssertionError(u'Found ConstructResult before the last block: ' u'{}'.format(ir_blocks))
[ "def", "_sanity_check_construct_result_block", "(", "ir_blocks", ")", ":", "if", "not", "isinstance", "(", "ir_blocks", "[", "-", "1", "]", ",", "ConstructResult", ")", ":", "raise", "AssertionError", "(", "u'The last block was not ConstructResult: {}'", ".", "format", "(", "ir_blocks", ")", ")", "for", "block", "in", "ir_blocks", "[", ":", "-", "1", "]", ":", "if", "isinstance", "(", "block", ",", "ConstructResult", ")", ":", "raise", "AssertionError", "(", "u'Found ConstructResult before the last block: '", "u'{}'", ".", "format", "(", "ir_blocks", ")", ")" ]
Assert that ConstructResult is always the last block, and only the last block.
[ "Assert", "that", "ConstructResult", "is", "always", "the", "last", "block", "and", "only", "the", "last", "block", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L121-L128
246,825
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_block_pairwise_constraints
def _sanity_check_block_pairwise_constraints(ir_blocks): """Assert that adjacent blocks obey all invariants.""" for first_block, second_block in pairwise(ir_blocks): # Always Filter before MarkLocation, never after. if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter): raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks)) # There's no point in marking the same location twice in a row. if isinstance(first_block, MarkLocation) and isinstance(second_block, MarkLocation): raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks)) # Traverse blocks with optional=True are immediately followed # by a MarkLocation, CoerceType or Filter block. if isinstance(first_block, Traverse) and first_block.optional: if not isinstance(second_block, (MarkLocation, CoerceType, Filter)): raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse ' u'with optional=True. Found: {}'.format(ir_blocks)) # Backtrack blocks with optional=True are immediately followed by a MarkLocation block. if isinstance(first_block, Backtrack) and first_block.optional: if not isinstance(second_block, MarkLocation): raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, ' u'but none was found: {}'.format(ir_blocks)) # Recurse blocks are immediately preceded by a MarkLocation or Backtrack block. if isinstance(second_block, Recurse): if not (isinstance(first_block, MarkLocation) or isinstance(first_block, Backtrack)): raise AssertionError(u'Expected MarkLocation or Backtrack before Recurse, but none ' u'was found: {}'.format(ir_blocks))
python
def _sanity_check_block_pairwise_constraints(ir_blocks): for first_block, second_block in pairwise(ir_blocks): # Always Filter before MarkLocation, never after. if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter): raise AssertionError(u'Found Filter after MarkLocation block: {}'.format(ir_blocks)) # There's no point in marking the same location twice in a row. if isinstance(first_block, MarkLocation) and isinstance(second_block, MarkLocation): raise AssertionError(u'Found consecutive MarkLocation blocks: {}'.format(ir_blocks)) # Traverse blocks with optional=True are immediately followed # by a MarkLocation, CoerceType or Filter block. if isinstance(first_block, Traverse) and first_block.optional: if not isinstance(second_block, (MarkLocation, CoerceType, Filter)): raise AssertionError(u'Expected MarkLocation, CoerceType or Filter after Traverse ' u'with optional=True. Found: {}'.format(ir_blocks)) # Backtrack blocks with optional=True are immediately followed by a MarkLocation block. if isinstance(first_block, Backtrack) and first_block.optional: if not isinstance(second_block, MarkLocation): raise AssertionError(u'Expected MarkLocation after Backtrack with optional=True, ' u'but none was found: {}'.format(ir_blocks)) # Recurse blocks are immediately preceded by a MarkLocation or Backtrack block. if isinstance(second_block, Recurse): if not (isinstance(first_block, MarkLocation) or isinstance(first_block, Backtrack)): raise AssertionError(u'Expected MarkLocation or Backtrack before Recurse, but none ' u'was found: {}'.format(ir_blocks))
[ "def", "_sanity_check_block_pairwise_constraints", "(", "ir_blocks", ")", ":", "for", "first_block", ",", "second_block", "in", "pairwise", "(", "ir_blocks", ")", ":", "# Always Filter before MarkLocation, never after.", "if", "isinstance", "(", "first_block", ",", "MarkLocation", ")", "and", "isinstance", "(", "second_block", ",", "Filter", ")", ":", "raise", "AssertionError", "(", "u'Found Filter after MarkLocation block: {}'", ".", "format", "(", "ir_blocks", ")", ")", "# There's no point in marking the same location twice in a row.", "if", "isinstance", "(", "first_block", ",", "MarkLocation", ")", "and", "isinstance", "(", "second_block", ",", "MarkLocation", ")", ":", "raise", "AssertionError", "(", "u'Found consecutive MarkLocation blocks: {}'", ".", "format", "(", "ir_blocks", ")", ")", "# Traverse blocks with optional=True are immediately followed", "# by a MarkLocation, CoerceType or Filter block.", "if", "isinstance", "(", "first_block", ",", "Traverse", ")", "and", "first_block", ".", "optional", ":", "if", "not", "isinstance", "(", "second_block", ",", "(", "MarkLocation", ",", "CoerceType", ",", "Filter", ")", ")", ":", "raise", "AssertionError", "(", "u'Expected MarkLocation, CoerceType or Filter after Traverse '", "u'with optional=True. 
Found: {}'", ".", "format", "(", "ir_blocks", ")", ")", "# Backtrack blocks with optional=True are immediately followed by a MarkLocation block.", "if", "isinstance", "(", "first_block", ",", "Backtrack", ")", "and", "first_block", ".", "optional", ":", "if", "not", "isinstance", "(", "second_block", ",", "MarkLocation", ")", ":", "raise", "AssertionError", "(", "u'Expected MarkLocation after Backtrack with optional=True, '", "u'but none was found: {}'", ".", "format", "(", "ir_blocks", ")", ")", "# Recurse blocks are immediately preceded by a MarkLocation or Backtrack block.", "if", "isinstance", "(", "second_block", ",", "Recurse", ")", ":", "if", "not", "(", "isinstance", "(", "first_block", ",", "MarkLocation", ")", "or", "isinstance", "(", "first_block", ",", "Backtrack", ")", ")", ":", "raise", "AssertionError", "(", "u'Expected MarkLocation or Backtrack before Recurse, but none '", "u'was found: {}'", ".", "format", "(", "ir_blocks", ")", ")" ]
Assert that adjacent blocks obey all invariants.
[ "Assert", "that", "adjacent", "blocks", "obey", "all", "invariants", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L144-L172
246,826
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_mark_location_preceding_optional_traverse
def _sanity_check_mark_location_preceding_optional_traverse(ir_blocks): """Assert that optional Traverse blocks are preceded by a MarkLocation.""" # Once all fold blocks are removed, each optional Traverse must have # a MarkLocation block immediately before it. _, new_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) for first_block, second_block in pairwise(new_ir_blocks): # Traverse blocks with optional=True are immediately preceded by a MarkLocation block. if isinstance(second_block, Traverse) and second_block.optional: if not isinstance(first_block, MarkLocation): raise AssertionError(u'Expected MarkLocation before Traverse with optional=True, ' u'but none was found: {}'.format(ir_blocks))
python
def _sanity_check_mark_location_preceding_optional_traverse(ir_blocks): # Once all fold blocks are removed, each optional Traverse must have # a MarkLocation block immediately before it. _, new_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) for first_block, second_block in pairwise(new_ir_blocks): # Traverse blocks with optional=True are immediately preceded by a MarkLocation block. if isinstance(second_block, Traverse) and second_block.optional: if not isinstance(first_block, MarkLocation): raise AssertionError(u'Expected MarkLocation before Traverse with optional=True, ' u'but none was found: {}'.format(ir_blocks))
[ "def", "_sanity_check_mark_location_preceding_optional_traverse", "(", "ir_blocks", ")", ":", "# Once all fold blocks are removed, each optional Traverse must have", "# a MarkLocation block immediately before it.", "_", ",", "new_ir_blocks", "=", "extract_folds_from_ir_blocks", "(", "ir_blocks", ")", "for", "first_block", ",", "second_block", "in", "pairwise", "(", "new_ir_blocks", ")", ":", "# Traverse blocks with optional=True are immediately preceded by a MarkLocation block.", "if", "isinstance", "(", "second_block", ",", "Traverse", ")", "and", "second_block", ".", "optional", ":", "if", "not", "isinstance", "(", "first_block", ",", "MarkLocation", ")", ":", "raise", "AssertionError", "(", "u'Expected MarkLocation before Traverse with optional=True, '", "u'but none was found: {}'", ".", "format", "(", "ir_blocks", ")", ")" ]
Assert that optional Traverse blocks are preceded by a MarkLocation.
[ "Assert", "that", "optional", "Traverse", "blocks", "are", "preceded", "by", "a", "MarkLocation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L175-L185
246,827
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_every_location_is_marked
def _sanity_check_every_location_is_marked(ir_blocks): """Ensure that every new location is marked with a MarkLocation block.""" # Exactly one MarkLocation block is found between any block that starts an interval of blocks # that all affect the same query position, and the first subsequent block that affects a # different position in the query. Such intervals include the following examples: # - from Fold to Unfold # - from QueryRoot to Traverse/Recurse # - from one Traverse to the next Traverse # - from Traverse to Backtrack found_start_block = False mark_location_blocks_count = 0 start_interval_types = (QueryRoot, Traverse, Recurse, Fold) end_interval_types = (Backtrack, ConstructResult, Recurse, Traverse, Unfold) for block in ir_blocks: # Terminate started intervals before opening new ones. if isinstance(block, end_interval_types) and found_start_block: found_start_block = False if mark_location_blocks_count != 1: raise AssertionError(u'Expected 1 MarkLocation block between traversals, found: ' u'{} {}'.format(mark_location_blocks_count, ir_blocks)) # Now consider opening new intervals or processing MarkLocation blocks. if isinstance(block, MarkLocation): mark_location_blocks_count += 1 elif isinstance(block, start_interval_types): found_start_block = True mark_location_blocks_count = 0
python
def _sanity_check_every_location_is_marked(ir_blocks): # Exactly one MarkLocation block is found between any block that starts an interval of blocks # that all affect the same query position, and the first subsequent block that affects a # different position in the query. Such intervals include the following examples: # - from Fold to Unfold # - from QueryRoot to Traverse/Recurse # - from one Traverse to the next Traverse # - from Traverse to Backtrack found_start_block = False mark_location_blocks_count = 0 start_interval_types = (QueryRoot, Traverse, Recurse, Fold) end_interval_types = (Backtrack, ConstructResult, Recurse, Traverse, Unfold) for block in ir_blocks: # Terminate started intervals before opening new ones. if isinstance(block, end_interval_types) and found_start_block: found_start_block = False if mark_location_blocks_count != 1: raise AssertionError(u'Expected 1 MarkLocation block between traversals, found: ' u'{} {}'.format(mark_location_blocks_count, ir_blocks)) # Now consider opening new intervals or processing MarkLocation blocks. if isinstance(block, MarkLocation): mark_location_blocks_count += 1 elif isinstance(block, start_interval_types): found_start_block = True mark_location_blocks_count = 0
[ "def", "_sanity_check_every_location_is_marked", "(", "ir_blocks", ")", ":", "# Exactly one MarkLocation block is found between any block that starts an interval of blocks", "# that all affect the same query position, and the first subsequent block that affects a", "# different position in the query. Such intervals include the following examples:", "# - from Fold to Unfold", "# - from QueryRoot to Traverse/Recurse", "# - from one Traverse to the next Traverse", "# - from Traverse to Backtrack", "found_start_block", "=", "False", "mark_location_blocks_count", "=", "0", "start_interval_types", "=", "(", "QueryRoot", ",", "Traverse", ",", "Recurse", ",", "Fold", ")", "end_interval_types", "=", "(", "Backtrack", ",", "ConstructResult", ",", "Recurse", ",", "Traverse", ",", "Unfold", ")", "for", "block", "in", "ir_blocks", ":", "# Terminate started intervals before opening new ones.", "if", "isinstance", "(", "block", ",", "end_interval_types", ")", "and", "found_start_block", ":", "found_start_block", "=", "False", "if", "mark_location_blocks_count", "!=", "1", ":", "raise", "AssertionError", "(", "u'Expected 1 MarkLocation block between traversals, found: '", "u'{} {}'", ".", "format", "(", "mark_location_blocks_count", ",", "ir_blocks", ")", ")", "# Now consider opening new intervals or processing MarkLocation blocks.", "if", "isinstance", "(", "block", ",", "MarkLocation", ")", ":", "mark_location_blocks_count", "+=", "1", "elif", "isinstance", "(", "block", ",", "start_interval_types", ")", ":", "found_start_block", "=", "True", "mark_location_blocks_count", "=", "0" ]
Ensure that every new location is marked with a MarkLocation block.
[ "Ensure", "that", "every", "new", "location", "is", "marked", "with", "a", "MarkLocation", "block", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L188-L216
246,828
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_sanity_checks.py
_sanity_check_coerce_type_outside_of_fold
def _sanity_check_coerce_type_outside_of_fold(ir_blocks): """Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block.""" is_in_fold = False for first_block, second_block in pairwise(ir_blocks): if isinstance(first_block, Fold): is_in_fold = True if not is_in_fold and isinstance(first_block, CoerceType): if not isinstance(second_block, (MarkLocation, Filter)): raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, ' u'but none was found: {}'.format(ir_blocks)) if isinstance(second_block, Unfold): is_in_fold = False
python
def _sanity_check_coerce_type_outside_of_fold(ir_blocks): is_in_fold = False for first_block, second_block in pairwise(ir_blocks): if isinstance(first_block, Fold): is_in_fold = True if not is_in_fold and isinstance(first_block, CoerceType): if not isinstance(second_block, (MarkLocation, Filter)): raise AssertionError(u'Expected MarkLocation or Filter after CoerceType, ' u'but none was found: {}'.format(ir_blocks)) if isinstance(second_block, Unfold): is_in_fold = False
[ "def", "_sanity_check_coerce_type_outside_of_fold", "(", "ir_blocks", ")", ":", "is_in_fold", "=", "False", "for", "first_block", ",", "second_block", "in", "pairwise", "(", "ir_blocks", ")", ":", "if", "isinstance", "(", "first_block", ",", "Fold", ")", ":", "is_in_fold", "=", "True", "if", "not", "is_in_fold", "and", "isinstance", "(", "first_block", ",", "CoerceType", ")", ":", "if", "not", "isinstance", "(", "second_block", ",", "(", "MarkLocation", ",", "Filter", ")", ")", ":", "raise", "AssertionError", "(", "u'Expected MarkLocation or Filter after CoerceType, '", "u'but none was found: {}'", ".", "format", "(", "ir_blocks", ")", ")", "if", "isinstance", "(", "second_block", ",", "Unfold", ")", ":", "is_in_fold", "=", "False" ]
Ensure that CoerceType not in a @fold are followed by a MarkLocation or Filter block.
[ "Ensure", "that", "CoerceType", "not", "in", "a" ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_sanity_checks.py#L219-L232
246,829
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/schema_properties.py
validate_supported_property_type_id
def validate_supported_property_type_id(property_name, property_type_id): """Ensure that the given property type_id is supported by the graph.""" if property_type_id not in PROPERTY_TYPE_ID_TO_NAME: raise AssertionError(u'Property "{}" has unsupported property type id: ' u'{}'.format(property_name, property_type_id))
python
def validate_supported_property_type_id(property_name, property_type_id): if property_type_id not in PROPERTY_TYPE_ID_TO_NAME: raise AssertionError(u'Property "{}" has unsupported property type id: ' u'{}'.format(property_name, property_type_id))
[ "def", "validate_supported_property_type_id", "(", "property_name", ",", "property_type_id", ")", ":", "if", "property_type_id", "not", "in", "PROPERTY_TYPE_ID_TO_NAME", ":", "raise", "AssertionError", "(", "u'Property \"{}\" has unsupported property type id: '", "u'{}'", ".", "format", "(", "property_name", ",", "property_type_id", ")", ")" ]
Ensure that the given property type_id is supported by the graph.
[ "Ensure", "that", "the", "given", "property", "type_id", "is", "supported", "by", "the", "graph", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L96-L100
246,830
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/schema_properties.py
_parse_bool_default_value
def _parse_bool_default_value(property_name, default_value_string): """Parse and return the default value for a boolean property.""" lowercased_value_string = default_value_string.lower() if lowercased_value_string in {'0', 'false'}: return False elif lowercased_value_string in {'1', 'true'}: return True else: raise AssertionError(u'Unsupported default value for boolean property "{}": ' u'{}'.format(property_name, default_value_string))
python
def _parse_bool_default_value(property_name, default_value_string): lowercased_value_string = default_value_string.lower() if lowercased_value_string in {'0', 'false'}: return False elif lowercased_value_string in {'1', 'true'}: return True else: raise AssertionError(u'Unsupported default value for boolean property "{}": ' u'{}'.format(property_name, default_value_string))
[ "def", "_parse_bool_default_value", "(", "property_name", ",", "default_value_string", ")", ":", "lowercased_value_string", "=", "default_value_string", ".", "lower", "(", ")", "if", "lowercased_value_string", "in", "{", "'0'", ",", "'false'", "}", ":", "return", "False", "elif", "lowercased_value_string", "in", "{", "'1'", ",", "'true'", "}", ":", "return", "True", "else", ":", "raise", "AssertionError", "(", "u'Unsupported default value for boolean property \"{}\": '", "u'{}'", ".", "format", "(", "property_name", ",", "default_value_string", ")", ")" ]
Parse and return the default value for a boolean property.
[ "Parse", "and", "return", "the", "default", "value", "for", "a", "boolean", "property", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L103-L112
246,831
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/schema_properties.py
_parse_datetime_default_value
def _parse_datetime_default_value(property_name, default_value_string): """Parse and return the default value for a datetime property.""" # OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually # and then turn it into a python datetime object. strptime() will raise an exception # if the provided value cannot be parsed correctly. parsed_value = time.strptime(default_value_string, ORIENTDB_DATETIME_FORMAT) return datetime.datetime( parsed_value.tm_year, parsed_value.tm_mon, parsed_value.tm_mday, parsed_value.tm_hour, parsed_value.tm_min, parsed_value.tm_sec, 0, None)
python
def _parse_datetime_default_value(property_name, default_value_string): # OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually # and then turn it into a python datetime object. strptime() will raise an exception # if the provided value cannot be parsed correctly. parsed_value = time.strptime(default_value_string, ORIENTDB_DATETIME_FORMAT) return datetime.datetime( parsed_value.tm_year, parsed_value.tm_mon, parsed_value.tm_mday, parsed_value.tm_hour, parsed_value.tm_min, parsed_value.tm_sec, 0, None)
[ "def", "_parse_datetime_default_value", "(", "property_name", ",", "default_value_string", ")", ":", "# OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually", "# and then turn it into a python datetime object. strptime() will raise an exception", "# if the provided value cannot be parsed correctly.", "parsed_value", "=", "time", ".", "strptime", "(", "default_value_string", ",", "ORIENTDB_DATETIME_FORMAT", ")", "return", "datetime", ".", "datetime", "(", "parsed_value", ".", "tm_year", ",", "parsed_value", ".", "tm_mon", ",", "parsed_value", ".", "tm_mday", ",", "parsed_value", ".", "tm_hour", ",", "parsed_value", ".", "tm_min", ",", "parsed_value", ".", "tm_sec", ",", "0", ",", "None", ")" ]
Parse and return the default value for a datetime property.
[ "Parse", "and", "return", "the", "default", "value", "for", "a", "datetime", "property", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L115-L123
246,832
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/schema_properties.py
_parse_date_default_value
def _parse_date_default_value(property_name, default_value_string): """Parse and return the default value for a date property.""" # OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually # and then turn it into a python datetime object. strptime() will raise an exception # if the provided value cannot be parsed correctly. parsed_value = time.strptime(default_value_string, ORIENTDB_DATE_FORMAT) return datetime.date(parsed_value.tm_year, parsed_value.tm_mon, parsed_value.tm_mday)
python
def _parse_date_default_value(property_name, default_value_string): # OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually # and then turn it into a python datetime object. strptime() will raise an exception # if the provided value cannot be parsed correctly. parsed_value = time.strptime(default_value_string, ORIENTDB_DATE_FORMAT) return datetime.date(parsed_value.tm_year, parsed_value.tm_mon, parsed_value.tm_mday)
[ "def", "_parse_date_default_value", "(", "property_name", ",", "default_value_string", ")", ":", "# OrientDB doesn't use ISO-8601 datetime format, so we have to parse it manually", "# and then turn it into a python datetime object. strptime() will raise an exception", "# if the provided value cannot be parsed correctly.", "parsed_value", "=", "time", ".", "strptime", "(", "default_value_string", ",", "ORIENTDB_DATE_FORMAT", ")", "return", "datetime", ".", "date", "(", "parsed_value", ".", "tm_year", ",", "parsed_value", ".", "tm_mon", ",", "parsed_value", ".", "tm_mday", ")" ]
Parse and return the default value for a date property.
[ "Parse", "and", "return", "the", "default", "value", "for", "a", "date", "property", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L126-L132
246,833
kensho-technologies/graphql-compiler
graphql_compiler/schema_generation/schema_properties.py
parse_default_property_value
def parse_default_property_value(property_name, property_type_id, default_value_string): """Parse the default value string into its proper form given the property type ID. Args: property_name: string, the name of the property whose default value is being parsed. Used primarily to construct meaningful error messages, should the default value prove invalid. property_type_id: int, one of the property type ID constants defined in this file that OrientDB uses to designate the native type of a given property. default_value_string: string, the textual representation of the default value for for the property, as returned by OrientDB's schema introspection code. Returns: an object of type matching the property that can be used as the property's default value. For example, if the property is of string type, the return type will be a string, and if the property is of list type, the return type will be a list. Raises: AssertionError, if the default value is not supported or does not match the property's declared type (e.g. if a default of "[]" is set on an integer property). """ if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}': return set() elif property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]': return list() elif (property_type_id == PROPERTY_TYPE_STRING_ID and isinstance(default_value_string, six.string_types)): return default_value_string elif property_type_id == PROPERTY_TYPE_BOOLEAN_ID: return _parse_bool_default_value(property_name, default_value_string) elif property_type_id == PROPERTY_TYPE_DATETIME_ID: return _parse_datetime_default_value(property_name, default_value_string) elif property_type_id == PROPERTY_TYPE_DATE_ID: return _parse_date_default_value(property_name, default_value_string) else: raise AssertionError(u'Unsupported default value for property "{}" with type id {}: ' u'{}'.format(property_name, property_type_id, default_value_string))
python
def parse_default_property_value(property_name, property_type_id, default_value_string): if property_type_id == PROPERTY_TYPE_EMBEDDED_SET_ID and default_value_string == '{}': return set() elif property_type_id == PROPERTY_TYPE_EMBEDDED_LIST_ID and default_value_string == '[]': return list() elif (property_type_id == PROPERTY_TYPE_STRING_ID and isinstance(default_value_string, six.string_types)): return default_value_string elif property_type_id == PROPERTY_TYPE_BOOLEAN_ID: return _parse_bool_default_value(property_name, default_value_string) elif property_type_id == PROPERTY_TYPE_DATETIME_ID: return _parse_datetime_default_value(property_name, default_value_string) elif property_type_id == PROPERTY_TYPE_DATE_ID: return _parse_date_default_value(property_name, default_value_string) else: raise AssertionError(u'Unsupported default value for property "{}" with type id {}: ' u'{}'.format(property_name, property_type_id, default_value_string))
[ "def", "parse_default_property_value", "(", "property_name", ",", "property_type_id", ",", "default_value_string", ")", ":", "if", "property_type_id", "==", "PROPERTY_TYPE_EMBEDDED_SET_ID", "and", "default_value_string", "==", "'{}'", ":", "return", "set", "(", ")", "elif", "property_type_id", "==", "PROPERTY_TYPE_EMBEDDED_LIST_ID", "and", "default_value_string", "==", "'[]'", ":", "return", "list", "(", ")", "elif", "(", "property_type_id", "==", "PROPERTY_TYPE_STRING_ID", "and", "isinstance", "(", "default_value_string", ",", "six", ".", "string_types", ")", ")", ":", "return", "default_value_string", "elif", "property_type_id", "==", "PROPERTY_TYPE_BOOLEAN_ID", ":", "return", "_parse_bool_default_value", "(", "property_name", ",", "default_value_string", ")", "elif", "property_type_id", "==", "PROPERTY_TYPE_DATETIME_ID", ":", "return", "_parse_datetime_default_value", "(", "property_name", ",", "default_value_string", ")", "elif", "property_type_id", "==", "PROPERTY_TYPE_DATE_ID", ":", "return", "_parse_date_default_value", "(", "property_name", ",", "default_value_string", ")", "else", ":", "raise", "AssertionError", "(", "u'Unsupported default value for property \"{}\" with type id {}: '", "u'{}'", ".", "format", "(", "property_name", ",", "property_type_id", ",", "default_value_string", ")", ")" ]
Parse the default value string into its proper form given the property type ID. Args: property_name: string, the name of the property whose default value is being parsed. Used primarily to construct meaningful error messages, should the default value prove invalid. property_type_id: int, one of the property type ID constants defined in this file that OrientDB uses to designate the native type of a given property. default_value_string: string, the textual representation of the default value for for the property, as returned by OrientDB's schema introspection code. Returns: an object of type matching the property that can be used as the property's default value. For example, if the property is of string type, the return type will be a string, and if the property is of list type, the return type will be a list. Raises: AssertionError, if the default value is not supported or does not match the property's declared type (e.g. if a default of "[]" is set on an integer property).
[ "Parse", "the", "default", "value", "string", "into", "its", "proper", "form", "given", "the", "property", "type", "ID", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_properties.py#L135-L171
246,834
kensho-technologies/graphql-compiler
graphql_compiler/compiler/common.py
_compile_graphql_generic
def _compile_graphql_generic(language, lowering_func, query_emitter_func, schema, graphql_string, type_equivalence_hints, compiler_metadata): """Compile the GraphQL input, lowering and emitting the query using the given functions. Args: language: string indicating the target language to compile to. lowering_func: Function to lower the compiler IR into a compatible form for the target language backend. query_emitter_func: Function that emits a query in the target language from the lowered IR. schema: GraphQL schema object describing the schema of the graph to be queried. graphql_string: the GraphQL query to compile to the target language, as a string. type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. compiler_metadata: optional target specific metadata for usage by the query_emitter_func. Returns: a CompilationResult object """ ir_and_metadata = graphql_to_ir( schema, graphql_string, type_equivalence_hints=type_equivalence_hints) lowered_ir_blocks = lowering_func( ir_and_metadata.ir_blocks, ir_and_metadata.query_metadata_table, type_equivalence_hints=type_equivalence_hints) query = query_emitter_func(lowered_ir_blocks, compiler_metadata) return CompilationResult( query=query, language=language, output_metadata=ir_and_metadata.output_metadata, input_metadata=ir_and_metadata.input_metadata)
python
def _compile_graphql_generic(language, lowering_func, query_emitter_func, schema, graphql_string, type_equivalence_hints, compiler_metadata): ir_and_metadata = graphql_to_ir( schema, graphql_string, type_equivalence_hints=type_equivalence_hints) lowered_ir_blocks = lowering_func( ir_and_metadata.ir_blocks, ir_and_metadata.query_metadata_table, type_equivalence_hints=type_equivalence_hints) query = query_emitter_func(lowered_ir_blocks, compiler_metadata) return CompilationResult( query=query, language=language, output_metadata=ir_and_metadata.output_metadata, input_metadata=ir_and_metadata.input_metadata)
[ "def", "_compile_graphql_generic", "(", "language", ",", "lowering_func", ",", "query_emitter_func", ",", "schema", ",", "graphql_string", ",", "type_equivalence_hints", ",", "compiler_metadata", ")", ":", "ir_and_metadata", "=", "graphql_to_ir", "(", "schema", ",", "graphql_string", ",", "type_equivalence_hints", "=", "type_equivalence_hints", ")", "lowered_ir_blocks", "=", "lowering_func", "(", "ir_and_metadata", ".", "ir_blocks", ",", "ir_and_metadata", ".", "query_metadata_table", ",", "type_equivalence_hints", "=", "type_equivalence_hints", ")", "query", "=", "query_emitter_func", "(", "lowered_ir_blocks", ",", "compiler_metadata", ")", "return", "CompilationResult", "(", "query", "=", "query", ",", "language", "=", "language", ",", "output_metadata", "=", "ir_and_metadata", ".", "output_metadata", ",", "input_metadata", "=", "ir_and_metadata", ".", "input_metadata", ")" ]
Compile the GraphQL input, lowering and emitting the query using the given functions. Args: language: string indicating the target language to compile to. lowering_func: Function to lower the compiler IR into a compatible form for the target language backend. query_emitter_func: Function that emits a query in the target language from the lowered IR. schema: GraphQL schema object describing the schema of the graph to be queried. graphql_string: the GraphQL query to compile to the target language, as a string. type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union. compiler_metadata: optional target specific metadata for usage by the query_emitter_func. Returns: a CompilationResult object
[ "Compile", "the", "GraphQL", "input", "lowering", "and", "emitting", "the", "query", "using", "the", "given", "functions", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/common.py#L122-L152
246,835
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
scalar_leaf_only
def scalar_leaf_only(operator): """Ensure the filter function is only applied to scalar leaf types.""" def decorator(f): """Decorate the supplied function with the "scalar_leaf_only" logic.""" @wraps(f) def wrapper(filter_operation_info, context, parameters, *args, **kwargs): """Check that the type on which the operator operates is a scalar leaf type.""" if 'operator' in kwargs: current_operator = kwargs['operator'] else: # Because "operator" is from an enclosing scope, it is immutable in Python 2.x. current_operator = operator if not is_leaf_type(filter_operation_info.field_type): raise GraphQLCompilationError(u'Cannot apply "{}" filter to non-leaf type' u'{}'.format(current_operator, filter_operation_info)) return f(filter_operation_info, context, parameters, *args, **kwargs) return wrapper return decorator
python
def scalar_leaf_only(operator): def decorator(f): """Decorate the supplied function with the "scalar_leaf_only" logic.""" @wraps(f) def wrapper(filter_operation_info, context, parameters, *args, **kwargs): """Check that the type on which the operator operates is a scalar leaf type.""" if 'operator' in kwargs: current_operator = kwargs['operator'] else: # Because "operator" is from an enclosing scope, it is immutable in Python 2.x. current_operator = operator if not is_leaf_type(filter_operation_info.field_type): raise GraphQLCompilationError(u'Cannot apply "{}" filter to non-leaf type' u'{}'.format(current_operator, filter_operation_info)) return f(filter_operation_info, context, parameters, *args, **kwargs) return wrapper return decorator
[ "def", "scalar_leaf_only", "(", "operator", ")", ":", "def", "decorator", "(", "f", ")", ":", "\"\"\"Decorate the supplied function with the \"scalar_leaf_only\" logic.\"\"\"", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "filter_operation_info", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Check that the type on which the operator operates is a scalar leaf type.\"\"\"", "if", "'operator'", "in", "kwargs", ":", "current_operator", "=", "kwargs", "[", "'operator'", "]", "else", ":", "# Because \"operator\" is from an enclosing scope, it is immutable in Python 2.x.", "current_operator", "=", "operator", "if", "not", "is_leaf_type", "(", "filter_operation_info", ".", "field_type", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"{}\" filter to non-leaf type'", "u'{}'", ".", "format", "(", "current_operator", ",", "filter_operation_info", ")", ")", "return", "f", "(", "filter_operation_info", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Ensure the filter function is only applied to scalar leaf types.
[ "Ensure", "the", "filter", "function", "is", "only", "applied", "to", "scalar", "leaf", "types", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L17-L37
246,836
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
vertex_field_only
def vertex_field_only(operator): """Ensure the filter function is only applied to vertex field types.""" def decorator(f): """Decorate the supplied function with the "vertex_field_only" logic.""" @wraps(f) def wrapper(filter_operation_info, context, parameters, *args, **kwargs): """Check that the type on which the operator operates is a vertex field type.""" if 'operator' in kwargs: current_operator = kwargs['operator'] else: # Because "operator" is from an enclosing scope, it is immutable in Python 2.x. current_operator = operator if not is_vertex_field_type(filter_operation_info.field_type): raise GraphQLCompilationError( u'Cannot apply "{}" filter to non-vertex field: ' u'{}'.format(current_operator, filter_operation_info.field_name)) return f(filter_operation_info, context, parameters, *args, **kwargs) return wrapper return decorator
python
def vertex_field_only(operator): def decorator(f): """Decorate the supplied function with the "vertex_field_only" logic.""" @wraps(f) def wrapper(filter_operation_info, context, parameters, *args, **kwargs): """Check that the type on which the operator operates is a vertex field type.""" if 'operator' in kwargs: current_operator = kwargs['operator'] else: # Because "operator" is from an enclosing scope, it is immutable in Python 2.x. current_operator = operator if not is_vertex_field_type(filter_operation_info.field_type): raise GraphQLCompilationError( u'Cannot apply "{}" filter to non-vertex field: ' u'{}'.format(current_operator, filter_operation_info.field_name)) return f(filter_operation_info, context, parameters, *args, **kwargs) return wrapper return decorator
[ "def", "vertex_field_only", "(", "operator", ")", ":", "def", "decorator", "(", "f", ")", ":", "\"\"\"Decorate the supplied function with the \"vertex_field_only\" logic.\"\"\"", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "filter_operation_info", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Check that the type on which the operator operates is a vertex field type.\"\"\"", "if", "'operator'", "in", "kwargs", ":", "current_operator", "=", "kwargs", "[", "'operator'", "]", "else", ":", "# Because \"operator\" is from an enclosing scope, it is immutable in Python 2.x.", "current_operator", "=", "operator", "if", "not", "is_vertex_field_type", "(", "filter_operation_info", ".", "field_type", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"{}\" filter to non-vertex field: '", "u'{}'", ".", "format", "(", "current_operator", ",", "filter_operation_info", ".", "field_name", ")", ")", "return", "f", "(", "filter_operation_info", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Ensure the filter function is only applied to vertex field types.
[ "Ensure", "the", "filter", "function", "is", "only", "applied", "to", "vertex", "field", "types", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L40-L61
246,837
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
takes_parameters
def takes_parameters(count): """Ensure the filter function has "count" parameters specified.""" def decorator(f): """Decorate the supplied function with the "takes_parameters" logic.""" @wraps(f) def wrapper(filter_operation_info, location, context, parameters, *args, **kwargs): """Check that the supplied number of parameters equals the expected number.""" if len(parameters) != count: raise GraphQLCompilationError(u'Incorrect number of parameters, expected {} got ' u'{}: {}'.format(count, len(parameters), parameters)) return f(filter_operation_info, location, context, parameters, *args, **kwargs) return wrapper return decorator
python
def takes_parameters(count): def decorator(f): """Decorate the supplied function with the "takes_parameters" logic.""" @wraps(f) def wrapper(filter_operation_info, location, context, parameters, *args, **kwargs): """Check that the supplied number of parameters equals the expected number.""" if len(parameters) != count: raise GraphQLCompilationError(u'Incorrect number of parameters, expected {} got ' u'{}: {}'.format(count, len(parameters), parameters)) return f(filter_operation_info, location, context, parameters, *args, **kwargs) return wrapper return decorator
[ "def", "takes_parameters", "(", "count", ")", ":", "def", "decorator", "(", "f", ")", ":", "\"\"\"Decorate the supplied function with the \"takes_parameters\" logic.\"\"\"", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Check that the supplied number of parameters equals the expected number.\"\"\"", "if", "len", "(", "parameters", ")", "!=", "count", ":", "raise", "GraphQLCompilationError", "(", "u'Incorrect number of parameters, expected {} got '", "u'{}: {}'", ".", "format", "(", "count", ",", "len", "(", "parameters", ")", ",", "parameters", ")", ")", "return", "f", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "return", "decorator" ]
Ensure the filter function has "count" parameters specified.
[ "Ensure", "the", "filter", "function", "has", "count", "parameters", "specified", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L64-L79
246,838
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_represent_argument
def _represent_argument(directive_location, context, argument, inferred_type): """Return a two-element tuple that represents the argument to the directive being processed. Args: directive_location: Location where the directive is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! argument: string, the name of the argument to the directive inferred_type: GraphQL type object specifying the inferred type of the argument Returns: (argument_expression, non_existence_expression) - argument_expression: an Expression object that captures the semantics of the argument - non_existence_expression: None or Expression object; If the current block is not optional, this is set to None. Otherwise, it is an expression that will evaluate to True if the argument is skipped as optional and therefore not present, and False otherwise. """ # Regardless of what kind of variable we are dealing with, # we want to ensure its name is valid. argument_name = argument[1:] validate_safe_string(argument_name) if is_variable_argument(argument): existing_type = context['inputs'].get(argument_name, inferred_type) if not inferred_type.is_same_type(existing_type): raise GraphQLCompilationError(u'Incompatible types inferred for argument {}. 
' u'The argument cannot simultaneously be ' u'{} and {}.'.format(argument, existing_type, inferred_type)) context['inputs'][argument_name] = inferred_type return (expressions.Variable(argument, inferred_type), None) elif is_tag_argument(argument): argument_context = context['tags'].get(argument_name, None) if argument_context is None: raise GraphQLCompilationError(u'Undeclared argument used: {}'.format(argument)) location = argument_context['location'] optional = argument_context['optional'] tag_inferred_type = argument_context['type'] if location is None: raise AssertionError(u'Argument declared without location: {}'.format(argument_name)) if location.field is None: raise AssertionError(u'Argument location is not a property field: {}'.format(location)) if not inferred_type.is_same_type(tag_inferred_type): raise GraphQLCompilationError(u'The inferred type of the matching @tag directive does ' u'not match the inferred required type for this filter: ' u'{} vs {}'.format(tag_inferred_type, inferred_type)) # Check whether the argument is a field on the vertex on which the directive is applied. field_is_local = directive_location.at_vertex() == location.at_vertex() non_existence_expression = None if optional: if field_is_local: non_existence_expression = expressions.FalseLiteral else: non_existence_expression = expressions.BinaryComposition( u'=', expressions.ContextFieldExistence(location.at_vertex()), expressions.FalseLiteral) if field_is_local: representation = expressions.LocalField(argument_name) else: representation = expressions.ContextField(location, tag_inferred_type) return (representation, non_existence_expression) else: # If we want to support literal arguments, add them here. raise GraphQLCompilationError(u'Non-argument type found: {}'.format(argument))
python
def _represent_argument(directive_location, context, argument, inferred_type): # Regardless of what kind of variable we are dealing with, # we want to ensure its name is valid. argument_name = argument[1:] validate_safe_string(argument_name) if is_variable_argument(argument): existing_type = context['inputs'].get(argument_name, inferred_type) if not inferred_type.is_same_type(existing_type): raise GraphQLCompilationError(u'Incompatible types inferred for argument {}. ' u'The argument cannot simultaneously be ' u'{} and {}.'.format(argument, existing_type, inferred_type)) context['inputs'][argument_name] = inferred_type return (expressions.Variable(argument, inferred_type), None) elif is_tag_argument(argument): argument_context = context['tags'].get(argument_name, None) if argument_context is None: raise GraphQLCompilationError(u'Undeclared argument used: {}'.format(argument)) location = argument_context['location'] optional = argument_context['optional'] tag_inferred_type = argument_context['type'] if location is None: raise AssertionError(u'Argument declared without location: {}'.format(argument_name)) if location.field is None: raise AssertionError(u'Argument location is not a property field: {}'.format(location)) if not inferred_type.is_same_type(tag_inferred_type): raise GraphQLCompilationError(u'The inferred type of the matching @tag directive does ' u'not match the inferred required type for this filter: ' u'{} vs {}'.format(tag_inferred_type, inferred_type)) # Check whether the argument is a field on the vertex on which the directive is applied. 
field_is_local = directive_location.at_vertex() == location.at_vertex() non_existence_expression = None if optional: if field_is_local: non_existence_expression = expressions.FalseLiteral else: non_existence_expression = expressions.BinaryComposition( u'=', expressions.ContextFieldExistence(location.at_vertex()), expressions.FalseLiteral) if field_is_local: representation = expressions.LocalField(argument_name) else: representation = expressions.ContextField(location, tag_inferred_type) return (representation, non_existence_expression) else: # If we want to support literal arguments, add them here. raise GraphQLCompilationError(u'Non-argument type found: {}'.format(argument))
[ "def", "_represent_argument", "(", "directive_location", ",", "context", ",", "argument", ",", "inferred_type", ")", ":", "# Regardless of what kind of variable we are dealing with,", "# we want to ensure its name is valid.", "argument_name", "=", "argument", "[", "1", ":", "]", "validate_safe_string", "(", "argument_name", ")", "if", "is_variable_argument", "(", "argument", ")", ":", "existing_type", "=", "context", "[", "'inputs'", "]", ".", "get", "(", "argument_name", ",", "inferred_type", ")", "if", "not", "inferred_type", ".", "is_same_type", "(", "existing_type", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Incompatible types inferred for argument {}. '", "u'The argument cannot simultaneously be '", "u'{} and {}.'", ".", "format", "(", "argument", ",", "existing_type", ",", "inferred_type", ")", ")", "context", "[", "'inputs'", "]", "[", "argument_name", "]", "=", "inferred_type", "return", "(", "expressions", ".", "Variable", "(", "argument", ",", "inferred_type", ")", ",", "None", ")", "elif", "is_tag_argument", "(", "argument", ")", ":", "argument_context", "=", "context", "[", "'tags'", "]", ".", "get", "(", "argument_name", ",", "None", ")", "if", "argument_context", "is", "None", ":", "raise", "GraphQLCompilationError", "(", "u'Undeclared argument used: {}'", ".", "format", "(", "argument", ")", ")", "location", "=", "argument_context", "[", "'location'", "]", "optional", "=", "argument_context", "[", "'optional'", "]", "tag_inferred_type", "=", "argument_context", "[", "'type'", "]", "if", "location", "is", "None", ":", "raise", "AssertionError", "(", "u'Argument declared without location: {}'", ".", "format", "(", "argument_name", ")", ")", "if", "location", ".", "field", "is", "None", ":", "raise", "AssertionError", "(", "u'Argument location is not a property field: {}'", ".", "format", "(", "location", ")", ")", "if", "not", "inferred_type", ".", "is_same_type", "(", "tag_inferred_type", ")", ":", "raise", "GraphQLCompilationError", "(", 
"u'The inferred type of the matching @tag directive does '", "u'not match the inferred required type for this filter: '", "u'{} vs {}'", ".", "format", "(", "tag_inferred_type", ",", "inferred_type", ")", ")", "# Check whether the argument is a field on the vertex on which the directive is applied.", "field_is_local", "=", "directive_location", ".", "at_vertex", "(", ")", "==", "location", ".", "at_vertex", "(", ")", "non_existence_expression", "=", "None", "if", "optional", ":", "if", "field_is_local", ":", "non_existence_expression", "=", "expressions", ".", "FalseLiteral", "else", ":", "non_existence_expression", "=", "expressions", ".", "BinaryComposition", "(", "u'='", ",", "expressions", ".", "ContextFieldExistence", "(", "location", ".", "at_vertex", "(", ")", ")", ",", "expressions", ".", "FalseLiteral", ")", "if", "field_is_local", ":", "representation", "=", "expressions", ".", "LocalField", "(", "argument_name", ")", "else", ":", "representation", "=", "expressions", ".", "ContextField", "(", "location", ",", "tag_inferred_type", ")", "return", "(", "representation", ",", "non_existence_expression", ")", "else", ":", "# If we want to support literal arguments, add them here.", "raise", "GraphQLCompilationError", "(", "u'Non-argument type found: {}'", ".", "format", "(", "argument", ")", ")" ]
Return a two-element tuple that represents the argument to the directive being processed. Args: directive_location: Location where the directive is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! argument: string, the name of the argument to the directive inferred_type: GraphQL type object specifying the inferred type of the argument Returns: (argument_expression, non_existence_expression) - argument_expression: an Expression object that captures the semantics of the argument - non_existence_expression: None or Expression object; If the current block is not optional, this is set to None. Otherwise, it is an expression that will evaluate to True if the argument is skipped as optional and therefore not present, and False otherwise.
[ "Return", "a", "two", "-", "element", "tuple", "that", "represents", "the", "argument", "to", "the", "directive", "being", "processed", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L82-L156
246,839
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_comparison_filter_directive
def _process_comparison_filter_directive(filter_operation_info, location, context, parameters, operator=None): """Return a Filter basic block that performs the given comparison against the property field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to perform the comparison against; if the parameter is optional and missing, the check will return True operator: unicode, a comparison operator, like '=', '!=', '>=' etc. This is a kwarg only to preserve the same positional arguments in the function signature, to ease validation. Returns: a Filter basic block that performs the requested comparison """ comparison_operators = {u'=', u'!=', u'>', u'<', u'>=', u'<='} if operator not in comparison_operators: raise AssertionError(u'Expected a valid comparison operator ({}), but got ' u'{}'.format(comparison_operators, operator)) filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) comparison_expression = expressions.BinaryComposition( operator, expressions.LocalField(filtered_field_name), argument_expression) final_expression = None if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. 
final_expression = expressions.BinaryComposition( u'||', non_existence_expression, comparison_expression) else: final_expression = comparison_expression return blocks.Filter(final_expression)
python
def _process_comparison_filter_directive(filter_operation_info, location, context, parameters, operator=None): comparison_operators = {u'=', u'!=', u'>', u'<', u'>=', u'<='} if operator not in comparison_operators: raise AssertionError(u'Expected a valid comparison operator ({}), but got ' u'{}'.format(comparison_operators, operator)) filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) comparison_expression = expressions.BinaryComposition( operator, expressions.LocalField(filtered_field_name), argument_expression) final_expression = None if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. final_expression = expressions.BinaryComposition( u'||', non_existence_expression, comparison_expression) else: final_expression = comparison_expression return blocks.Filter(final_expression)
[ "def", "_process_comparison_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ",", "operator", "=", "None", ")", ":", "comparison_operators", "=", "{", "u'='", ",", "u'!='", ",", "u'>'", ",", "u'<'", ",", "u'>='", ",", "u'<='", "}", "if", "operator", "not", "in", "comparison_operators", ":", "raise", "AssertionError", "(", "u'Expected a valid comparison operator ({}), but got '", "u'{}'", ".", "format", "(", "comparison_operators", ",", "operator", ")", ")", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "argument_inferred_type", "=", "strip_non_null_from_type", "(", "filtered_field_type", ")", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "comparison_expression", "=", "expressions", ".", "BinaryComposition", "(", "operator", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "argument_expression", ")", "final_expression", "=", "None", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "final_expression", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "comparison_expression", ")", "else", ":", "final_expression", "=", "comparison_expression", "return", "blocks", ".", "Filter", "(", "final_expression", ")" ]
Return a Filter basic block that performs the given comparison against the property field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to perform the comparison against; if the parameter is optional and missing, the check will return True operator: unicode, a comparison operator, like '=', '!=', '>=' etc. This is a kwarg only to preserve the same positional arguments in the function signature, to ease validation. Returns: a Filter basic block that performs the requested comparison
[ "Return", "a", "Filter", "basic", "block", "that", "performs", "the", "given", "comparison", "against", "the", "property", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L161-L204
246,840
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_has_edge_degree_filter_directive
def _process_has_edge_degree_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks the degree of the edge to the given vertex field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the edge degree against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check """ if isinstance(filter_operation_info.field_ast, InlineFragment): raise AssertionError(u'Received InlineFragment AST node in "has_edge_degree" filter ' u'handler. This should have been caught earlier: ' u'{}'.format(filter_operation_info.field_ast)) filtered_field_name = filter_operation_info.field_name if filtered_field_name is None or not is_vertex_field_name(filtered_field_name): raise AssertionError(u'Invalid value for "filtered_field_name" in "has_edge_degree" ' u'filter: {}'.format(filtered_field_name)) if not is_vertex_field_type(filter_operation_info.field_type): raise AssertionError(u'Invalid value for "filter_operation_info.field_type" in ' u'"has_edge_degree" filter: {}'.format(filter_operation_info)) argument = parameters[0] if not is_variable_argument(argument): raise GraphQLCompilationError(u'The "has_edge_degree" filter only supports runtime ' u'variable arguments. Tagged values are not supported.' 
u'Argument name: {}'.format(argument)) argument_inferred_type = GraphQLInt argument_expression, non_existence_expression = _represent_argument( location, context, argument, argument_inferred_type) if non_existence_expression is not None: raise AssertionError(u'Since we do not support tagged values, non_existence_expression ' u'should have been None. However, it was: ' u'{}'.format(non_existence_expression)) # If no edges to the vertex field exist, the edges' field in the database may be "null". # We also don't know ahead of time whether the supplied argument is zero or not. # We have to accommodate these facts in our generated comparison code. # We construct the following expression to check if the edge degree is zero: # ({argument} == 0) && (edge_field == null) argument_is_zero = expressions.BinaryComposition( u'=', argument_expression, expressions.ZeroLiteral) edge_field_is_null = expressions.BinaryComposition( u'=', expressions.LocalField(filtered_field_name), expressions.NullLiteral) edge_degree_is_zero = expressions.BinaryComposition( u'&&', argument_is_zero, edge_field_is_null) # The following expression will check for a non-zero edge degree equal to the argument. # (edge_field != null) && (edge_field.size() == {argument}) edge_field_is_not_null = expressions.BinaryComposition( u'!=', expressions.LocalField(filtered_field_name), expressions.NullLiteral) edge_degree = expressions.UnaryTransformation( u'size', expressions.LocalField(filtered_field_name)) edge_degree_matches_argument = expressions.BinaryComposition( u'=', edge_degree, argument_expression) edge_degree_is_non_zero = expressions.BinaryComposition( u'&&', edge_field_is_not_null, edge_degree_matches_argument) # We combine the two cases with a logical-or to handle both situations: filter_predicate = expressions.BinaryComposition( u'||', edge_degree_is_zero, edge_degree_is_non_zero) return blocks.Filter(filter_predicate)
python
def _process_has_edge_degree_filter_directive(filter_operation_info, location, context, parameters): if isinstance(filter_operation_info.field_ast, InlineFragment): raise AssertionError(u'Received InlineFragment AST node in "has_edge_degree" filter ' u'handler. This should have been caught earlier: ' u'{}'.format(filter_operation_info.field_ast)) filtered_field_name = filter_operation_info.field_name if filtered_field_name is None or not is_vertex_field_name(filtered_field_name): raise AssertionError(u'Invalid value for "filtered_field_name" in "has_edge_degree" ' u'filter: {}'.format(filtered_field_name)) if not is_vertex_field_type(filter_operation_info.field_type): raise AssertionError(u'Invalid value for "filter_operation_info.field_type" in ' u'"has_edge_degree" filter: {}'.format(filter_operation_info)) argument = parameters[0] if not is_variable_argument(argument): raise GraphQLCompilationError(u'The "has_edge_degree" filter only supports runtime ' u'variable arguments. Tagged values are not supported.' u'Argument name: {}'.format(argument)) argument_inferred_type = GraphQLInt argument_expression, non_existence_expression = _represent_argument( location, context, argument, argument_inferred_type) if non_existence_expression is not None: raise AssertionError(u'Since we do not support tagged values, non_existence_expression ' u'should have been None. However, it was: ' u'{}'.format(non_existence_expression)) # If no edges to the vertex field exist, the edges' field in the database may be "null". # We also don't know ahead of time whether the supplied argument is zero or not. # We have to accommodate these facts in our generated comparison code. 
# We construct the following expression to check if the edge degree is zero: # ({argument} == 0) && (edge_field == null) argument_is_zero = expressions.BinaryComposition( u'=', argument_expression, expressions.ZeroLiteral) edge_field_is_null = expressions.BinaryComposition( u'=', expressions.LocalField(filtered_field_name), expressions.NullLiteral) edge_degree_is_zero = expressions.BinaryComposition( u'&&', argument_is_zero, edge_field_is_null) # The following expression will check for a non-zero edge degree equal to the argument. # (edge_field != null) && (edge_field.size() == {argument}) edge_field_is_not_null = expressions.BinaryComposition( u'!=', expressions.LocalField(filtered_field_name), expressions.NullLiteral) edge_degree = expressions.UnaryTransformation( u'size', expressions.LocalField(filtered_field_name)) edge_degree_matches_argument = expressions.BinaryComposition( u'=', edge_degree, argument_expression) edge_degree_is_non_zero = expressions.BinaryComposition( u'&&', edge_field_is_not_null, edge_degree_matches_argument) # We combine the two cases with a logical-or to handle both situations: filter_predicate = expressions.BinaryComposition( u'||', edge_degree_is_zero, edge_degree_is_non_zero) return blocks.Filter(filter_predicate)
[ "def", "_process_has_edge_degree_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "if", "isinstance", "(", "filter_operation_info", ".", "field_ast", ",", "InlineFragment", ")", ":", "raise", "AssertionError", "(", "u'Received InlineFragment AST node in \"has_edge_degree\" filter '", "u'handler. This should have been caught earlier: '", "u'{}'", ".", "format", "(", "filter_operation_info", ".", "field_ast", ")", ")", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "if", "filtered_field_name", "is", "None", "or", "not", "is_vertex_field_name", "(", "filtered_field_name", ")", ":", "raise", "AssertionError", "(", "u'Invalid value for \"filtered_field_name\" in \"has_edge_degree\" '", "u'filter: {}'", ".", "format", "(", "filtered_field_name", ")", ")", "if", "not", "is_vertex_field_type", "(", "filter_operation_info", ".", "field_type", ")", ":", "raise", "AssertionError", "(", "u'Invalid value for \"filter_operation_info.field_type\" in '", "u'\"has_edge_degree\" filter: {}'", ".", "format", "(", "filter_operation_info", ")", ")", "argument", "=", "parameters", "[", "0", "]", "if", "not", "is_variable_argument", "(", "argument", ")", ":", "raise", "GraphQLCompilationError", "(", "u'The \"has_edge_degree\" filter only supports runtime '", "u'variable arguments. Tagged values are not supported.'", "u'Argument name: {}'", ".", "format", "(", "argument", ")", ")", "argument_inferred_type", "=", "GraphQLInt", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "argument", ",", "argument_inferred_type", ")", "if", "non_existence_expression", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Since we do not support tagged values, non_existence_expression '", "u'should have been None. 
However, it was: '", "u'{}'", ".", "format", "(", "non_existence_expression", ")", ")", "# If no edges to the vertex field exist, the edges' field in the database may be \"null\".", "# We also don't know ahead of time whether the supplied argument is zero or not.", "# We have to accommodate these facts in our generated comparison code.", "# We construct the following expression to check if the edge degree is zero:", "# ({argument} == 0) && (edge_field == null)", "argument_is_zero", "=", "expressions", ".", "BinaryComposition", "(", "u'='", ",", "argument_expression", ",", "expressions", ".", "ZeroLiteral", ")", "edge_field_is_null", "=", "expressions", ".", "BinaryComposition", "(", "u'='", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "expressions", ".", "NullLiteral", ")", "edge_degree_is_zero", "=", "expressions", ".", "BinaryComposition", "(", "u'&&'", ",", "argument_is_zero", ",", "edge_field_is_null", ")", "# The following expression will check for a non-zero edge degree equal to the argument.", "# (edge_field != null) && (edge_field.size() == {argument})", "edge_field_is_not_null", "=", "expressions", ".", "BinaryComposition", "(", "u'!='", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "expressions", ".", "NullLiteral", ")", "edge_degree", "=", "expressions", ".", "UnaryTransformation", "(", "u'size'", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ")", "edge_degree_matches_argument", "=", "expressions", ".", "BinaryComposition", "(", "u'='", ",", "edge_degree", ",", "argument_expression", ")", "edge_degree_is_non_zero", "=", "expressions", ".", "BinaryComposition", "(", "u'&&'", ",", "edge_field_is_not_null", ",", "edge_degree_matches_argument", ")", "# We combine the two cases with a logical-or to handle both situations:", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "edge_degree_is_zero", ",", "edge_degree_is_non_zero", ")", 
"return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks the degree of the edge to the given vertex field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the edge degree against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "the", "degree", "of", "the", "edge", "to", "the", "given", "vertex", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L209-L279
246,841
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_name_or_alias_filter_directive
def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks for a match against an Entity's name or alias. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the name or alias against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check against the name or alias """ filtered_field_type = filter_operation_info.field_type if isinstance(filtered_field_type, GraphQLUnionType): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type ' u'{}'.format(filtered_field_type)) current_type_fields = filtered_field_type.fields name_field = current_type_fields.get('name', None) alias_field = current_type_fields.get('alias', None) if not name_field or not alias_field: raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a ' u'"name" or "alias" field.'.format(filtered_field_type)) name_field_type = strip_non_null_from_type(name_field.type) alias_field_type = strip_non_null_from_type(alias_field.type) if not isinstance(name_field_type, GraphQLScalarType): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" ' u'field is not a scalar.'.format(filtered_field_type)) if not isinstance(alias_field_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its ' u'"alias" field is not a list.'.format(filtered_field_type)) alias_field_inner_type = strip_non_null_from_type(alias_field_type.of_type) if alias_field_inner_type != name_field_type: raise 
GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the ' u'"name" field and the inner type of the "alias" field ' u'do not match: {} vs {}'.format(filtered_field_type, name_field_type, alias_field_inner_type)) argument_inferred_type = name_field_type argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) check_against_name = expressions.BinaryComposition( u'=', expressions.LocalField('name'), argument_expression) check_against_alias = expressions.BinaryComposition( u'contains', expressions.LocalField('alias'), argument_expression) filter_predicate = expressions.BinaryComposition( u'||', check_against_name, check_against_alias) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
python
def _process_name_or_alias_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type if isinstance(filtered_field_type, GraphQLUnionType): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to union type ' u'{}'.format(filtered_field_type)) current_type_fields = filtered_field_type.fields name_field = current_type_fields.get('name', None) alias_field = current_type_fields.get('alias', None) if not name_field or not alias_field: raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because it lacks a ' u'"name" or "alias" field.'.format(filtered_field_type)) name_field_type = strip_non_null_from_type(name_field.type) alias_field_type = strip_non_null_from_type(alias_field.type) if not isinstance(name_field_type, GraphQLScalarType): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its "name" ' u'field is not a scalar.'.format(filtered_field_type)) if not isinstance(alias_field_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because its ' u'"alias" field is not a list.'.format(filtered_field_type)) alias_field_inner_type = strip_non_null_from_type(alias_field_type.of_type) if alias_field_inner_type != name_field_type: raise GraphQLCompilationError(u'Cannot apply "name_or_alias" to type {} because the ' u'"name" field and the inner type of the "alias" field ' u'do not match: {} vs {}'.format(filtered_field_type, name_field_type, alias_field_inner_type)) argument_inferred_type = name_field_type argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) check_against_name = expressions.BinaryComposition( u'=', expressions.LocalField('name'), argument_expression) check_against_alias = expressions.BinaryComposition( u'contains', expressions.LocalField('alias'), argument_expression) filter_predicate = expressions.BinaryComposition( u'||', 
check_against_name, check_against_alias) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
[ "def", "_process_name_or_alias_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "if", "isinstance", "(", "filtered_field_type", ",", "GraphQLUnionType", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"name_or_alias\" to union type '", "u'{}'", ".", "format", "(", "filtered_field_type", ")", ")", "current_type_fields", "=", "filtered_field_type", ".", "fields", "name_field", "=", "current_type_fields", ".", "get", "(", "'name'", ",", "None", ")", "alias_field", "=", "current_type_fields", ".", "get", "(", "'alias'", ",", "None", ")", "if", "not", "name_field", "or", "not", "alias_field", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"name_or_alias\" to type {} because it lacks a '", "u'\"name\" or \"alias\" field.'", ".", "format", "(", "filtered_field_type", ")", ")", "name_field_type", "=", "strip_non_null_from_type", "(", "name_field", ".", "type", ")", "alias_field_type", "=", "strip_non_null_from_type", "(", "alias_field", ".", "type", ")", "if", "not", "isinstance", "(", "name_field_type", ",", "GraphQLScalarType", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"name_or_alias\" to type {} because its \"name\" '", "u'field is not a scalar.'", ".", "format", "(", "filtered_field_type", ")", ")", "if", "not", "isinstance", "(", "alias_field_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"name_or_alias\" to type {} because its '", "u'\"alias\" field is not a list.'", ".", "format", "(", "filtered_field_type", ")", ")", "alias_field_inner_type", "=", "strip_non_null_from_type", "(", "alias_field_type", ".", "of_type", ")", "if", "alias_field_inner_type", "!=", "name_field_type", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"name_or_alias\" to type {} because the '", "u'\"name\" field and the inner type of the 
\"alias\" field '", "u'do not match: {} vs {}'", ".", "format", "(", "filtered_field_type", ",", "name_field_type", ",", "alias_field_inner_type", ")", ")", "argument_inferred_type", "=", "name_field_type", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "check_against_name", "=", "expressions", ".", "BinaryComposition", "(", "u'='", ",", "expressions", ".", "LocalField", "(", "'name'", ")", ",", "argument_expression", ")", "check_against_alias", "=", "expressions", ".", "BinaryComposition", "(", "u'contains'", ",", "expressions", ".", "LocalField", "(", "'alias'", ")", ",", "argument_expression", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "check_against_name", ",", "check_against_alias", ")", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "filter_predicate", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks for a match against an Entity's name or alias. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, containing the value to check the name or alias against; if the parameter is optional and missing, the check will return True Returns: a Filter basic block that performs the check against the name or alias
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "for", "a", "match", "against", "an", "Entity", "s", "name", "or", "alias", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L284-L346
246,842
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_between_filter_directive
def _process_between_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks that a field is between two values, inclusive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 2 elements, specifying the time range in which the data must lie; if either of the elements is optional and missing, their side of the check is assumed to be True Returns: a Filter basic block that performs the range check """ filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) arg1_expression, arg1_non_existence = _represent_argument( location, context, parameters[0], argument_inferred_type) arg2_expression, arg2_non_existence = _represent_argument( location, context, parameters[1], argument_inferred_type) lower_bound_clause = expressions.BinaryComposition( u'>=', expressions.LocalField(filtered_field_name), arg1_expression) if arg1_non_existence is not None: # The argument is optional, and if it doesn't exist, this side of the check should pass. lower_bound_clause = expressions.BinaryComposition( u'||', arg1_non_existence, lower_bound_clause) upper_bound_clause = expressions.BinaryComposition( u'<=', expressions.LocalField(filtered_field_name), arg2_expression) if arg2_non_existence is not None: # The argument is optional, and if it doesn't exist, this side of the check should pass. 
upper_bound_clause = expressions.BinaryComposition( u'||', arg2_non_existence, upper_bound_clause) filter_predicate = expressions.BinaryComposition( u'&&', lower_bound_clause, upper_bound_clause) return blocks.Filter(filter_predicate)
python
def _process_between_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) arg1_expression, arg1_non_existence = _represent_argument( location, context, parameters[0], argument_inferred_type) arg2_expression, arg2_non_existence = _represent_argument( location, context, parameters[1], argument_inferred_type) lower_bound_clause = expressions.BinaryComposition( u'>=', expressions.LocalField(filtered_field_name), arg1_expression) if arg1_non_existence is not None: # The argument is optional, and if it doesn't exist, this side of the check should pass. lower_bound_clause = expressions.BinaryComposition( u'||', arg1_non_existence, lower_bound_clause) upper_bound_clause = expressions.BinaryComposition( u'<=', expressions.LocalField(filtered_field_name), arg2_expression) if arg2_non_existence is not None: # The argument is optional, and if it doesn't exist, this side of the check should pass. upper_bound_clause = expressions.BinaryComposition( u'||', arg2_non_existence, upper_bound_clause) filter_predicate = expressions.BinaryComposition( u'&&', lower_bound_clause, upper_bound_clause) return blocks.Filter(filter_predicate)
[ "def", "_process_between_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "argument_inferred_type", "=", "strip_non_null_from_type", "(", "filtered_field_type", ")", "arg1_expression", ",", "arg1_non_existence", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "arg2_expression", ",", "arg2_non_existence", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "1", "]", ",", "argument_inferred_type", ")", "lower_bound_clause", "=", "expressions", ".", "BinaryComposition", "(", "u'>='", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "arg1_expression", ")", "if", "arg1_non_existence", "is", "not", "None", ":", "# The argument is optional, and if it doesn't exist, this side of the check should pass.", "lower_bound_clause", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "arg1_non_existence", ",", "lower_bound_clause", ")", "upper_bound_clause", "=", "expressions", ".", "BinaryComposition", "(", "u'<='", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "arg2_expression", ")", "if", "arg2_non_existence", "is", "not", "None", ":", "# The argument is optional, and if it doesn't exist, this side of the check should pass.", "upper_bound_clause", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "arg2_non_existence", ",", "upper_bound_clause", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'&&'", ",", "lower_bound_clause", ",", "upper_bound_clause", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks that a field is between two values, inclusive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 2 elements, specifying the time range in which the data must lie; if either of the elements is optional and missing, their side of the check is assumed to be True Returns: a Filter basic block that performs the range check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "that", "a", "field", "is", "between", "two", "values", "inclusive", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L351-L392
246,843
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_in_collection_filter_directive
def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks for a value's existence in a collection. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the collection existence check """ filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = GraphQLList(strip_non_null_from_type(filtered_field_type)) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'contains', argument_expression, expressions.LocalField(filtered_field_name)) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
python
def _process_in_collection_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = GraphQLList(strip_non_null_from_type(filtered_field_type)) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'contains', argument_expression, expressions.LocalField(filtered_field_name)) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
[ "def", "_process_in_collection_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "argument_inferred_type", "=", "GraphQLList", "(", "strip_non_null_from_type", "(", "filtered_field_type", ")", ")", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'contains'", ",", "argument_expression", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ")", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "filter_predicate", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks for a value's existence in a collection. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the collection existence check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "for", "a", "value", "s", "existence", "in", "a", "collection", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L397-L427
246,844
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_has_substring_filter_directive
def _process_has_substring_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks if the directive arg is a substring of the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the substring check """ filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name if not strip_non_null_from_type(filtered_field_type).is_same_type(GraphQLString): raise GraphQLCompilationError(u'Cannot apply "has_substring" to non-string ' u'type {}'.format(filtered_field_type)) argument_inferred_type = GraphQLString argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'has_substring', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
python
def _process_has_substring_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name if not strip_non_null_from_type(filtered_field_type).is_same_type(GraphQLString): raise GraphQLCompilationError(u'Cannot apply "has_substring" to non-string ' u'type {}'.format(filtered_field_type)) argument_inferred_type = GraphQLString argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'has_substring', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
[ "def", "_process_has_substring_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "if", "not", "strip_non_null_from_type", "(", "filtered_field_type", ")", ".", "is_same_type", "(", "GraphQLString", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"has_substring\" to non-string '", "u'type {}'", ".", "format", "(", "filtered_field_type", ")", ")", "argument_inferred_type", "=", "GraphQLString", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'has_substring'", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "argument_expression", ")", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "filter_predicate", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks if the directive arg is a substring of the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the substring check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "if", "the", "directive", "arg", "is", "a", "substring", "of", "the", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L432-L466
246,845
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_contains_filter_directive
def _process_contains_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks if the directive arg is contained in the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the contains check """ filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name base_field_type = strip_non_null_from_type(filtered_field_type) if not isinstance(base_field_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "contains" to non-list ' u'type {}'.format(filtered_field_type)) argument_inferred_type = strip_non_null_from_type(base_field_type.of_type) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'contains', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
python
def _process_contains_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name base_field_type = strip_non_null_from_type(filtered_field_type) if not isinstance(base_field_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "contains" to non-list ' u'type {}'.format(filtered_field_type)) argument_inferred_type = strip_non_null_from_type(base_field_type.of_type) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'contains', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
[ "def", "_process_contains_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "base_field_type", "=", "strip_non_null_from_type", "(", "filtered_field_type", ")", "if", "not", "isinstance", "(", "base_field_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"contains\" to non-list '", "u'type {}'", ".", "format", "(", "filtered_field_type", ")", ")", "argument_inferred_type", "=", "strip_non_null_from_type", "(", "base_field_type", ".", "of_type", ")", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'contains'", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "argument_expression", ")", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "filter_predicate", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks if the directive arg is contained in the field. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the contains check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "if", "the", "directive", "arg", "is", "contained", "in", "the", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L470-L505
246,846
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
_process_intersects_filter_directive
def _process_intersects_filter_directive(filter_operation_info, location, context, parameters): """Return a Filter basic block that checks if the directive arg and the field intersect. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the intersects check """ filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) if not isinstance(argument_inferred_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list ' u'type {}'.format(filtered_field_type)) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'intersects', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
python
def _process_intersects_filter_directive(filter_operation_info, location, context, parameters): filtered_field_type = filter_operation_info.field_type filtered_field_name = filter_operation_info.field_name argument_inferred_type = strip_non_null_from_type(filtered_field_type) if not isinstance(argument_inferred_type, GraphQLList): raise GraphQLCompilationError(u'Cannot apply "intersects" to non-list ' u'type {}'.format(filtered_field_type)) argument_expression, non_existence_expression = _represent_argument( location, context, parameters[0], argument_inferred_type) filter_predicate = expressions.BinaryComposition( u'intersects', expressions.LocalField(filtered_field_name), argument_expression) if non_existence_expression is not None: # The argument comes from an optional block and might not exist, # in which case the filter expression should evaluate to True. filter_predicate = expressions.BinaryComposition( u'||', non_existence_expression, filter_predicate) return blocks.Filter(filter_predicate)
[ "def", "_process_intersects_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ",", "parameters", ")", ":", "filtered_field_type", "=", "filter_operation_info", ".", "field_type", "filtered_field_name", "=", "filter_operation_info", ".", "field_name", "argument_inferred_type", "=", "strip_non_null_from_type", "(", "filtered_field_type", ")", "if", "not", "isinstance", "(", "argument_inferred_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Cannot apply \"intersects\" to non-list '", "u'type {}'", ".", "format", "(", "filtered_field_type", ")", ")", "argument_expression", ",", "non_existence_expression", "=", "_represent_argument", "(", "location", ",", "context", ",", "parameters", "[", "0", "]", ",", "argument_inferred_type", ")", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'intersects'", ",", "expressions", ".", "LocalField", "(", "filtered_field_name", ")", ",", "argument_expression", ")", "if", "non_existence_expression", "is", "not", "None", ":", "# The argument comes from an optional block and might not exist,", "# in which case the filter expression should evaluate to True.", "filter_predicate", "=", "expressions", ".", "BinaryComposition", "(", "u'||'", ",", "non_existence_expression", ",", "filter_predicate", ")", "return", "blocks", ".", "Filter", "(", "filter_predicate", ")" ]
Return a Filter basic block that checks if the directive arg and the field intersect. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! parameters: list of 1 element, specifying the collection in which the value must exist; if the collection is optional and missing, the check will return True Returns: a Filter basic block that performs the intersects check
[ "Return", "a", "Filter", "basic", "block", "that", "checks", "if", "the", "directive", "arg", "and", "the", "field", "intersect", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L509-L543
246,847
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
is_filter_with_outer_scope_vertex_field_operator
def is_filter_with_outer_scope_vertex_field_operator(directive): """Return True if we have a filter directive whose operator applies to the outer scope.""" if directive.name.value != 'filter': return False op_name, _ = _get_filter_op_name_and_values(directive) return op_name in OUTER_SCOPE_VERTEX_FIELD_OPERATORS
python
def is_filter_with_outer_scope_vertex_field_operator(directive): if directive.name.value != 'filter': return False op_name, _ = _get_filter_op_name_and_values(directive) return op_name in OUTER_SCOPE_VERTEX_FIELD_OPERATORS
[ "def", "is_filter_with_outer_scope_vertex_field_operator", "(", "directive", ")", ":", "if", "directive", ".", "name", ".", "value", "!=", "'filter'", ":", "return", "False", "op_name", ",", "_", "=", "_get_filter_op_name_and_values", "(", "directive", ")", "return", "op_name", "in", "OUTER_SCOPE_VERTEX_FIELD_OPERATORS" ]
Return True if we have a filter directive whose operator applies to the outer scope.
[ "Return", "True", "if", "we", "have", "a", "filter", "directive", "whose", "operator", "applies", "to", "the", "outer", "scope", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L598-L604
246,848
kensho-technologies/graphql-compiler
graphql_compiler/compiler/filters.py
process_filter_directive
def process_filter_directive(filter_operation_info, location, context): """Return a Filter basic block that corresponds to the filter operation in the directive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: a Filter basic block that performs the requested filtering operation """ op_name, operator_params = _get_filter_op_name_and_values(filter_operation_info.directive) non_comparison_filters = { u'name_or_alias': _process_name_or_alias_filter_directive, u'between': _process_between_filter_directive, u'in_collection': _process_in_collection_filter_directive, u'has_substring': _process_has_substring_filter_directive, u'contains': _process_contains_filter_directive, u'intersects': _process_intersects_filter_directive, u'has_edge_degree': _process_has_edge_degree_filter_directive, } all_recognized_filters = frozenset(non_comparison_filters.keys()) | COMPARISON_OPERATORS if all_recognized_filters != ALL_OPERATORS: unrecognized_filters = ALL_OPERATORS - all_recognized_filters raise AssertionError(u'Some filtering operators are defined but do not have an associated ' u'processing function. This is a bug: {}'.format(unrecognized_filters)) if op_name in COMPARISON_OPERATORS: process_func = partial(_process_comparison_filter_directive, operator=op_name) else: process_func = non_comparison_filters.get(op_name, None) if process_func is None: raise GraphQLCompilationError(u'Unknown op_name for filter directive: {}'.format(op_name)) # Operators that do not affect the inner scope require a field name to which they apply. # There is no field name on InlineFragment ASTs, which is why only operators that affect # the inner scope make semantic sense when applied to InlineFragments. 
# Here, we ensure that we either have a field name to which the filter applies, # or that the operator affects the inner scope. if (filter_operation_info.field_name is None and op_name not in INNER_SCOPE_VERTEX_FIELD_OPERATORS): raise GraphQLCompilationError(u'The filter with op_name "{}" must be applied on a field. ' u'It may not be applied on a type coercion.'.format(op_name)) fields = ((filter_operation_info.field_name,) if op_name != 'name_or_alias' else ('name', 'alias')) context['metadata'].record_filter_info( location, FilterInfo(fields=fields, op_name=op_name, args=tuple(operator_params)) ) return process_func(filter_operation_info, location, context, operator_params)
python
def process_filter_directive(filter_operation_info, location, context): op_name, operator_params = _get_filter_op_name_and_values(filter_operation_info.directive) non_comparison_filters = { u'name_or_alias': _process_name_or_alias_filter_directive, u'between': _process_between_filter_directive, u'in_collection': _process_in_collection_filter_directive, u'has_substring': _process_has_substring_filter_directive, u'contains': _process_contains_filter_directive, u'intersects': _process_intersects_filter_directive, u'has_edge_degree': _process_has_edge_degree_filter_directive, } all_recognized_filters = frozenset(non_comparison_filters.keys()) | COMPARISON_OPERATORS if all_recognized_filters != ALL_OPERATORS: unrecognized_filters = ALL_OPERATORS - all_recognized_filters raise AssertionError(u'Some filtering operators are defined but do not have an associated ' u'processing function. This is a bug: {}'.format(unrecognized_filters)) if op_name in COMPARISON_OPERATORS: process_func = partial(_process_comparison_filter_directive, operator=op_name) else: process_func = non_comparison_filters.get(op_name, None) if process_func is None: raise GraphQLCompilationError(u'Unknown op_name for filter directive: {}'.format(op_name)) # Operators that do not affect the inner scope require a field name to which they apply. # There is no field name on InlineFragment ASTs, which is why only operators that affect # the inner scope make semantic sense when applied to InlineFragments. # Here, we ensure that we either have a field name to which the filter applies, # or that the operator affects the inner scope. if (filter_operation_info.field_name is None and op_name not in INNER_SCOPE_VERTEX_FIELD_OPERATORS): raise GraphQLCompilationError(u'The filter with op_name "{}" must be applied on a field. 
' u'It may not be applied on a type coercion.'.format(op_name)) fields = ((filter_operation_info.field_name,) if op_name != 'name_or_alias' else ('name', 'alias')) context['metadata'].record_filter_info( location, FilterInfo(fields=fields, op_name=op_name, args=tuple(operator_params)) ) return process_func(filter_operation_info, location, context, operator_params)
[ "def", "process_filter_directive", "(", "filter_operation_info", ",", "location", ",", "context", ")", ":", "op_name", ",", "operator_params", "=", "_get_filter_op_name_and_values", "(", "filter_operation_info", ".", "directive", ")", "non_comparison_filters", "=", "{", "u'name_or_alias'", ":", "_process_name_or_alias_filter_directive", ",", "u'between'", ":", "_process_between_filter_directive", ",", "u'in_collection'", ":", "_process_in_collection_filter_directive", ",", "u'has_substring'", ":", "_process_has_substring_filter_directive", ",", "u'contains'", ":", "_process_contains_filter_directive", ",", "u'intersects'", ":", "_process_intersects_filter_directive", ",", "u'has_edge_degree'", ":", "_process_has_edge_degree_filter_directive", ",", "}", "all_recognized_filters", "=", "frozenset", "(", "non_comparison_filters", ".", "keys", "(", ")", ")", "|", "COMPARISON_OPERATORS", "if", "all_recognized_filters", "!=", "ALL_OPERATORS", ":", "unrecognized_filters", "=", "ALL_OPERATORS", "-", "all_recognized_filters", "raise", "AssertionError", "(", "u'Some filtering operators are defined but do not have an associated '", "u'processing function. 
This is a bug: {}'", ".", "format", "(", "unrecognized_filters", ")", ")", "if", "op_name", "in", "COMPARISON_OPERATORS", ":", "process_func", "=", "partial", "(", "_process_comparison_filter_directive", ",", "operator", "=", "op_name", ")", "else", ":", "process_func", "=", "non_comparison_filters", ".", "get", "(", "op_name", ",", "None", ")", "if", "process_func", "is", "None", ":", "raise", "GraphQLCompilationError", "(", "u'Unknown op_name for filter directive: {}'", ".", "format", "(", "op_name", ")", ")", "# Operators that do not affect the inner scope require a field name to which they apply.", "# There is no field name on InlineFragment ASTs, which is why only operators that affect", "# the inner scope make semantic sense when applied to InlineFragments.", "# Here, we ensure that we either have a field name to which the filter applies,", "# or that the operator affects the inner scope.", "if", "(", "filter_operation_info", ".", "field_name", "is", "None", "and", "op_name", "not", "in", "INNER_SCOPE_VERTEX_FIELD_OPERATORS", ")", ":", "raise", "GraphQLCompilationError", "(", "u'The filter with op_name \"{}\" must be applied on a field. '", "u'It may not be applied on a type coercion.'", ".", "format", "(", "op_name", ")", ")", "fields", "=", "(", "(", "filter_operation_info", ".", "field_name", ",", ")", "if", "op_name", "!=", "'name_or_alias'", "else", "(", "'name'", ",", "'alias'", ")", ")", "context", "[", "'metadata'", "]", ".", "record_filter_info", "(", "location", ",", "FilterInfo", "(", "fields", "=", "fields", ",", "op_name", "=", "op_name", ",", "args", "=", "tuple", "(", "operator_params", ")", ")", ")", "return", "process_func", "(", "filter_operation_info", ",", "location", ",", "context", ",", "operator_params", ")" ]
Return a Filter basic block that corresponds to the filter operation in the directive. Args: filter_operation_info: FilterOperationInfo object, containing the directive and field info of the field where the filter is to be applied. location: Location where this filter is used. context: dict, various per-compilation data (e.g. declared tags, whether the current block is optional, etc.). May be mutated in-place in this function! Returns: a Filter basic block that performs the requested filtering operation
[ "Return", "a", "Filter", "basic", "block", "that", "corresponds", "to", "the", "filter", "operation", "in", "the", "directive", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/filters.py#L607-L663
246,849
kensho-technologies/graphql-compiler
graphql_compiler/compiler/sql_context_helpers.py
get_schema_type_name
def get_schema_type_name(node, context): """Return the GraphQL type name of a node.""" query_path = node.query_path if query_path not in context.query_path_to_location_info: raise AssertionError( u'Unable to find type name for query path {} with context {}.'.format( query_path, context)) location_info = context.query_path_to_location_info[query_path] return location_info.type.name
python
def get_schema_type_name(node, context): query_path = node.query_path if query_path not in context.query_path_to_location_info: raise AssertionError( u'Unable to find type name for query path {} with context {}.'.format( query_path, context)) location_info = context.query_path_to_location_info[query_path] return location_info.type.name
[ "def", "get_schema_type_name", "(", "node", ",", "context", ")", ":", "query_path", "=", "node", ".", "query_path", "if", "query_path", "not", "in", "context", ".", "query_path_to_location_info", ":", "raise", "AssertionError", "(", "u'Unable to find type name for query path {} with context {}.'", ".", "format", "(", "query_path", ",", "context", ")", ")", "location_info", "=", "context", ".", "query_path_to_location_info", "[", "query_path", "]", "return", "location_info", ".", "type", ".", "name" ]
Return the GraphQL type name of a node.
[ "Return", "the", "GraphQL", "type", "name", "of", "a", "node", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L5-L13
246,850
kensho-technologies/graphql-compiler
graphql_compiler/compiler/sql_context_helpers.py
get_node_at_path
def get_node_at_path(query_path, context): """Return the SqlNode associated with the query path.""" if query_path not in context.query_path_to_node: raise AssertionError( u'Unable to find SqlNode for query path {} with context {}.'.format( query_path, context)) node = context.query_path_to_node[query_path] return node
python
def get_node_at_path(query_path, context): if query_path not in context.query_path_to_node: raise AssertionError( u'Unable to find SqlNode for query path {} with context {}.'.format( query_path, context)) node = context.query_path_to_node[query_path] return node
[ "def", "get_node_at_path", "(", "query_path", ",", "context", ")", ":", "if", "query_path", "not", "in", "context", ".", "query_path_to_node", ":", "raise", "AssertionError", "(", "u'Unable to find SqlNode for query path {} with context {}.'", ".", "format", "(", "query_path", ",", "context", ")", ")", "node", "=", "context", ".", "query_path_to_node", "[", "query_path", "]", "return", "node" ]
Return the SqlNode associated with the query path.
[ "Return", "the", "SqlNode", "associated", "with", "the", "query", "path", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L27-L34
246,851
kensho-technologies/graphql-compiler
graphql_compiler/compiler/sql_context_helpers.py
try_get_column
def try_get_column(column_name, node, context): """Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: Optional[column], the SQLAlchemy column if found, None otherwise. """ selectable = get_node_selectable(node, context) if not hasattr(selectable, 'c'): raise AssertionError( u'Selectable "{}" does not have a column collection. Context is {}.'.format( selectable, context)) return selectable.c.get(column_name, None)
python
def try_get_column(column_name, node, context): selectable = get_node_selectable(node, context) if not hasattr(selectable, 'c'): raise AssertionError( u'Selectable "{}" does not have a column collection. Context is {}.'.format( selectable, context)) return selectable.c.get(column_name, None)
[ "def", "try_get_column", "(", "column_name", ",", "node", ",", "context", ")", ":", "selectable", "=", "get_node_selectable", "(", "node", ",", "context", ")", "if", "not", "hasattr", "(", "selectable", ",", "'c'", ")", ":", "raise", "AssertionError", "(", "u'Selectable \"{}\" does not have a column collection. Context is {}.'", ".", "format", "(", "selectable", ",", "context", ")", ")", "return", "selectable", ".", "c", ".", "get", "(", "column_name", ",", "None", ")" ]
Attempt to get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: Optional[column], the SQLAlchemy column if found, None otherwise.
[ "Attempt", "to", "get", "a", "column", "by", "name", "from", "the", "selectable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L37-L53
246,852
kensho-technologies/graphql-compiler
graphql_compiler/compiler/sql_context_helpers.py
get_column
def get_column(column_name, node, context): """Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an AssertionError otherwise. """ column = try_get_column(column_name, node, context) if column is None: selectable = get_node_selectable(node, context) raise AssertionError( u'Column "{}" not found in selectable "{}". Columns present are {}. ' u'Context is {}.'.format(column_name, selectable.original, [col.name for col in selectable.c], context)) return column
python
def get_column(column_name, node, context): column = try_get_column(column_name, node, context) if column is None: selectable = get_node_selectable(node, context) raise AssertionError( u'Column "{}" not found in selectable "{}". Columns present are {}. ' u'Context is {}.'.format(column_name, selectable.original, [col.name for col in selectable.c], context)) return column
[ "def", "get_column", "(", "column_name", ",", "node", ",", "context", ")", ":", "column", "=", "try_get_column", "(", "column_name", ",", "node", ",", "context", ")", "if", "column", "is", "None", ":", "selectable", "=", "get_node_selectable", "(", "node", ",", "context", ")", "raise", "AssertionError", "(", "u'Column \"{}\" not found in selectable \"{}\". Columns present are {}. '", "u'Context is {}.'", ".", "format", "(", "column_name", ",", "selectable", ".", "original", ",", "[", "col", ".", "name", "for", "col", "in", "selectable", ".", "c", "]", ",", "context", ")", ")", "return", "column" ]
Get a column by name from the selectable. Args: column_name: str, name of the column to retrieve. node: SqlNode, the node the column is being retrieved for. context: CompilationContext, compilation specific metadata. Returns: column, the SQLAlchemy column if found. Raises an AssertionError otherwise.
[ "Get", "a", "column", "by", "name", "from", "the", "selectable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/sql_context_helpers.py#L56-L74
246,853
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
get_unique_directives
def get_unique_directives(ast): """Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCompilationError in case we find them more than once on the AST node. Args: ast: GraphQL AST node, obtained from the graphql library Returns: dict of string to directive object """ if not ast.directives: return dict() result = dict() for directive_obj in ast.directives: directive_name = directive_obj.name.value if directive_name in ALLOWED_DUPLICATED_DIRECTIVES: pass # We don't return these. elif directive_name in result: raise GraphQLCompilationError(u'Directive was unexpectedly applied twice in the same ' u'location: {} {}'.format(directive_name, ast.directives)) else: result[directive_name] = directive_obj return result
python
def get_unique_directives(ast): if not ast.directives: return dict() result = dict() for directive_obj in ast.directives: directive_name = directive_obj.name.value if directive_name in ALLOWED_DUPLICATED_DIRECTIVES: pass # We don't return these. elif directive_name in result: raise GraphQLCompilationError(u'Directive was unexpectedly applied twice in the same ' u'location: {} {}'.format(directive_name, ast.directives)) else: result[directive_name] = directive_obj return result
[ "def", "get_unique_directives", "(", "ast", ")", ":", "if", "not", "ast", ".", "directives", ":", "return", "dict", "(", ")", "result", "=", "dict", "(", ")", "for", "directive_obj", "in", "ast", ".", "directives", ":", "directive_name", "=", "directive_obj", ".", "name", ".", "value", "if", "directive_name", "in", "ALLOWED_DUPLICATED_DIRECTIVES", ":", "pass", "# We don't return these.", "elif", "directive_name", "in", "result", ":", "raise", "GraphQLCompilationError", "(", "u'Directive was unexpectedly applied twice in the same '", "u'location: {} {}'", ".", "format", "(", "directive_name", ",", "ast", ".", "directives", ")", ")", "else", ":", "result", "[", "directive_name", "]", "=", "directive_obj", "return", "result" ]
Return a dict of directive name to directive object for the given AST node. Any directives that are allowed to exist more than once on any AST node are ignored. For any directives that can only exist up to once, we verify that they are not duplicated raising GraphQLCompilationError in case we find them more than once on the AST node. Args: ast: GraphQL AST node, obtained from the graphql library Returns: dict of string to directive object
[ "Return", "a", "dict", "of", "directive", "name", "to", "directive", "object", "for", "the", "given", "AST", "node", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L27-L54
246,854
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
get_local_filter_directives
def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields): """Get all filter directives that apply to the current field. This helper abstracts away the fact that some vertex field filtering operators apply on the inner scope (the scope of the inner vertex field on which they are applied), whereas some apply on the outer scope (the scope that contains the inner vertex field). See filters.py for more information. Args: ast: a GraphQL AST object for which to load local filters, from the graphql library current_schema_type: GraphQLType, the schema type at the current AST location inner_vertex_fields: a list of inner AST objects representing vertex fields that are within the current field. If currently processing a property field (i.e. there are no inner vertex fields), this argument may be set to None. Returns: list of FilterOperationInfo objects. If the field_ast field is of type InlineFragment, the field_name field is set to None. """ result = [] if ast.directives: # it'll be None if the AST has no directives at that node for directive_obj in ast.directives: # Of all filters that appear *on the field itself*, only the ones that apply # to the outer scope are not considered "local" and are not to be returned. if directive_obj.name.value == 'filter': filtered_field_name = get_ast_field_name_or_none(ast) if is_filter_with_outer_scope_vertex_field_operator(directive_obj): # We found a filter that affects the outer scope vertex. Let's make sure # we are at a vertex field. If we are actually at a property field, # that is a compilation error. 
if not is_vertex_field_type(current_schema_type): raise GraphQLCompilationError( u'Found disallowed filter on a property field: {} {} ' u'{}'.format(directive_obj, current_schema_type, filtered_field_name)) elif isinstance(ast, InlineFragment): raise GraphQLCompilationError( u'Found disallowed filter on a type coercion: {} ' u'{}'.format(directive_obj, current_schema_type)) else: # The filter is valid and non-local, since it is applied at this AST node # but affects the outer scope vertex field. Skip over it. pass else: operation = FilterOperationInfo( directive=directive_obj, field_name=filtered_field_name, field_type=current_schema_type, field_ast=ast) result.append(operation) if inner_vertex_fields: # allow the argument to be None for inner_ast in inner_vertex_fields: for directive_obj in inner_ast.directives: # Of all filters that appear on an inner vertex field, only the ones that apply # to the outer scope are "local" to the outer field and therefore to be returned. if is_filter_with_outer_scope_vertex_field_operator(directive_obj): # The inner AST must not be an InlineFragment, so it must have a field name. filtered_field_name = get_ast_field_name(inner_ast) filtered_field_type = get_vertex_field_type( current_schema_type, filtered_field_name) operation = FilterOperationInfo( directive=directive_obj, field_name=filtered_field_name, field_type=filtered_field_type, field_ast=inner_ast) result.append(operation) return result
python
def get_local_filter_directives(ast, current_schema_type, inner_vertex_fields): result = [] if ast.directives: # it'll be None if the AST has no directives at that node for directive_obj in ast.directives: # Of all filters that appear *on the field itself*, only the ones that apply # to the outer scope are not considered "local" and are not to be returned. if directive_obj.name.value == 'filter': filtered_field_name = get_ast_field_name_or_none(ast) if is_filter_with_outer_scope_vertex_field_operator(directive_obj): # We found a filter that affects the outer scope vertex. Let's make sure # we are at a vertex field. If we are actually at a property field, # that is a compilation error. if not is_vertex_field_type(current_schema_type): raise GraphQLCompilationError( u'Found disallowed filter on a property field: {} {} ' u'{}'.format(directive_obj, current_schema_type, filtered_field_name)) elif isinstance(ast, InlineFragment): raise GraphQLCompilationError( u'Found disallowed filter on a type coercion: {} ' u'{}'.format(directive_obj, current_schema_type)) else: # The filter is valid and non-local, since it is applied at this AST node # but affects the outer scope vertex field. Skip over it. pass else: operation = FilterOperationInfo( directive=directive_obj, field_name=filtered_field_name, field_type=current_schema_type, field_ast=ast) result.append(operation) if inner_vertex_fields: # allow the argument to be None for inner_ast in inner_vertex_fields: for directive_obj in inner_ast.directives: # Of all filters that appear on an inner vertex field, only the ones that apply # to the outer scope are "local" to the outer field and therefore to be returned. if is_filter_with_outer_scope_vertex_field_operator(directive_obj): # The inner AST must not be an InlineFragment, so it must have a field name. 
filtered_field_name = get_ast_field_name(inner_ast) filtered_field_type = get_vertex_field_type( current_schema_type, filtered_field_name) operation = FilterOperationInfo( directive=directive_obj, field_name=filtered_field_name, field_type=filtered_field_type, field_ast=inner_ast) result.append(operation) return result
[ "def", "get_local_filter_directives", "(", "ast", ",", "current_schema_type", ",", "inner_vertex_fields", ")", ":", "result", "=", "[", "]", "if", "ast", ".", "directives", ":", "# it'll be None if the AST has no directives at that node", "for", "directive_obj", "in", "ast", ".", "directives", ":", "# Of all filters that appear *on the field itself*, only the ones that apply", "# to the outer scope are not considered \"local\" and are not to be returned.", "if", "directive_obj", ".", "name", ".", "value", "==", "'filter'", ":", "filtered_field_name", "=", "get_ast_field_name_or_none", "(", "ast", ")", "if", "is_filter_with_outer_scope_vertex_field_operator", "(", "directive_obj", ")", ":", "# We found a filter that affects the outer scope vertex. Let's make sure", "# we are at a vertex field. If we are actually at a property field,", "# that is a compilation error.", "if", "not", "is_vertex_field_type", "(", "current_schema_type", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Found disallowed filter on a property field: {} {} '", "u'{}'", ".", "format", "(", "directive_obj", ",", "current_schema_type", ",", "filtered_field_name", ")", ")", "elif", "isinstance", "(", "ast", ",", "InlineFragment", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Found disallowed filter on a type coercion: {} '", "u'{}'", ".", "format", "(", "directive_obj", ",", "current_schema_type", ")", ")", "else", ":", "# The filter is valid and non-local, since it is applied at this AST node", "# but affects the outer scope vertex field. 
Skip over it.", "pass", "else", ":", "operation", "=", "FilterOperationInfo", "(", "directive", "=", "directive_obj", ",", "field_name", "=", "filtered_field_name", ",", "field_type", "=", "current_schema_type", ",", "field_ast", "=", "ast", ")", "result", ".", "append", "(", "operation", ")", "if", "inner_vertex_fields", ":", "# allow the argument to be None", "for", "inner_ast", "in", "inner_vertex_fields", ":", "for", "directive_obj", "in", "inner_ast", ".", "directives", ":", "# Of all filters that appear on an inner vertex field, only the ones that apply", "# to the outer scope are \"local\" to the outer field and therefore to be returned.", "if", "is_filter_with_outer_scope_vertex_field_operator", "(", "directive_obj", ")", ":", "# The inner AST must not be an InlineFragment, so it must have a field name.", "filtered_field_name", "=", "get_ast_field_name", "(", "inner_ast", ")", "filtered_field_type", "=", "get_vertex_field_type", "(", "current_schema_type", ",", "filtered_field_name", ")", "operation", "=", "FilterOperationInfo", "(", "directive", "=", "directive_obj", ",", "field_name", "=", "filtered_field_name", ",", "field_type", "=", "filtered_field_type", ",", "field_ast", "=", "inner_ast", ")", "result", ".", "append", "(", "operation", ")", "return", "result" ]
Get all filter directives that apply to the current field. This helper abstracts away the fact that some vertex field filtering operators apply on the inner scope (the scope of the inner vertex field on which they are applied), whereas some apply on the outer scope (the scope that contains the inner vertex field). See filters.py for more information. Args: ast: a GraphQL AST object for which to load local filters, from the graphql library current_schema_type: GraphQLType, the schema type at the current AST location inner_vertex_fields: a list of inner AST objects representing vertex fields that are within the current field. If currently processing a property field (i.e. there are no inner vertex fields), this argument may be set to None. Returns: list of FilterOperationInfo objects. If the field_ast field is of type InlineFragment, the field_name field is set to None.
[ "Get", "all", "filter", "directives", "that", "apply", "to", "the", "current", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L57-L121
246,855
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
validate_property_directives
def validate_property_directives(directives): """Validate the directives that appear at a property field.""" for directive_name in six.iterkeys(directives): if directive_name in VERTEX_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found vertex-only directive {} set on property.'.format(directive_name))
python
def validate_property_directives(directives): for directive_name in six.iterkeys(directives): if directive_name in VERTEX_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found vertex-only directive {} set on property.'.format(directive_name))
[ "def", "validate_property_directives", "(", "directives", ")", ":", "for", "directive_name", "in", "six", ".", "iterkeys", "(", "directives", ")", ":", "if", "directive_name", "in", "VERTEX_ONLY_DIRECTIVES", ":", "raise", "GraphQLCompilationError", "(", "u'Found vertex-only directive {} set on property.'", ".", "format", "(", "directive_name", ")", ")" ]
Validate the directives that appear at a property field.
[ "Validate", "the", "directives", "that", "appear", "at", "a", "property", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L124-L129
246,856
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
validate_vertex_directives
def validate_vertex_directives(directives): """Validate the directives that appear at a vertex field.""" for directive_name in six.iterkeys(directives): if directive_name in PROPERTY_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found property-only directive {} set on vertex.'.format(directive_name))
python
def validate_vertex_directives(directives): for directive_name in six.iterkeys(directives): if directive_name in PROPERTY_ONLY_DIRECTIVES: raise GraphQLCompilationError( u'Found property-only directive {} set on vertex.'.format(directive_name))
[ "def", "validate_vertex_directives", "(", "directives", ")", ":", "for", "directive_name", "in", "six", ".", "iterkeys", "(", "directives", ")", ":", "if", "directive_name", "in", "PROPERTY_ONLY_DIRECTIVES", ":", "raise", "GraphQLCompilationError", "(", "u'Found property-only directive {} set on vertex.'", ".", "format", "(", "directive_name", ")", ")" ]
Validate the directives that appear at a vertex field.
[ "Validate", "the", "directives", "that", "appear", "at", "a", "vertex", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L132-L137
246,857
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
validate_root_vertex_directives
def validate_root_vertex_directives(root_ast): """Validate the directives that appear at the root vertex field.""" directives_present_at_root = set() for directive_obj in root_ast.directives: directive_name = directive_obj.name.value if is_filter_with_outer_scope_vertex_field_operator(directive_obj): raise GraphQLCompilationError(u'Found a filter directive with an operator that is not' u'allowed on the root vertex: {}'.format(directive_obj)) directives_present_at_root.add(directive_name) disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT if disallowed_directives: raise GraphQLCompilationError(u'Found prohibited directives on root vertex: ' u'{}'.format(disallowed_directives))
python
def validate_root_vertex_directives(root_ast): directives_present_at_root = set() for directive_obj in root_ast.directives: directive_name = directive_obj.name.value if is_filter_with_outer_scope_vertex_field_operator(directive_obj): raise GraphQLCompilationError(u'Found a filter directive with an operator that is not' u'allowed on the root vertex: {}'.format(directive_obj)) directives_present_at_root.add(directive_name) disallowed_directives = directives_present_at_root & VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT if disallowed_directives: raise GraphQLCompilationError(u'Found prohibited directives on root vertex: ' u'{}'.format(disallowed_directives))
[ "def", "validate_root_vertex_directives", "(", "root_ast", ")", ":", "directives_present_at_root", "=", "set", "(", ")", "for", "directive_obj", "in", "root_ast", ".", "directives", ":", "directive_name", "=", "directive_obj", ".", "name", ".", "value", "if", "is_filter_with_outer_scope_vertex_field_operator", "(", "directive_obj", ")", ":", "raise", "GraphQLCompilationError", "(", "u'Found a filter directive with an operator that is not'", "u'allowed on the root vertex: {}'", ".", "format", "(", "directive_obj", ")", ")", "directives_present_at_root", ".", "add", "(", "directive_name", ")", "disallowed_directives", "=", "directives_present_at_root", "&", "VERTEX_DIRECTIVES_PROHIBITED_ON_ROOT", "if", "disallowed_directives", ":", "raise", "GraphQLCompilationError", "(", "u'Found prohibited directives on root vertex: '", "u'{}'", ".", "format", "(", "disallowed_directives", ")", ")" ]
Validate the directives that appear at the root vertex field.
[ "Validate", "the", "directives", "that", "appear", "at", "the", "root", "vertex", "field", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L140-L155
246,858
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
validate_vertex_field_directive_interactions
def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives): """Ensure that the specified vertex field directives are not mutually disallowed.""" fold_directive = directives.get('fold', None) optional_directive = directives.get('optional', None) output_source_directive = directives.get('output_source', None) recurse_directive = directives.get('recurse', None) if fold_directive and optional_directive: raise GraphQLCompilationError(u'@fold and @optional may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if fold_directive and output_source_directive: raise GraphQLCompilationError(u'@fold and @output_source may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if fold_directive and recurse_directive: raise GraphQLCompilationError(u'@fold and @recurse may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and output_source_directive: raise GraphQLCompilationError(u'@optional and @output_source may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and recurse_directive: raise GraphQLCompilationError(u'@optional and @recurse may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name))
python
def validate_vertex_field_directive_interactions(parent_location, vertex_field_name, directives): fold_directive = directives.get('fold', None) optional_directive = directives.get('optional', None) output_source_directive = directives.get('output_source', None) recurse_directive = directives.get('recurse', None) if fold_directive and optional_directive: raise GraphQLCompilationError(u'@fold and @optional may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if fold_directive and output_source_directive: raise GraphQLCompilationError(u'@fold and @output_source may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if fold_directive and recurse_directive: raise GraphQLCompilationError(u'@fold and @recurse may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and output_source_directive: raise GraphQLCompilationError(u'@optional and @output_source may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and recurse_directive: raise GraphQLCompilationError(u'@optional and @recurse may not appear at the same ' u'vertex field! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name))
[ "def", "validate_vertex_field_directive_interactions", "(", "parent_location", ",", "vertex_field_name", ",", "directives", ")", ":", "fold_directive", "=", "directives", ".", "get", "(", "'fold'", ",", "None", ")", "optional_directive", "=", "directives", ".", "get", "(", "'optional'", ",", "None", ")", "output_source_directive", "=", "directives", ".", "get", "(", "'output_source'", ",", "None", ")", "recurse_directive", "=", "directives", ".", "get", "(", "'recurse'", ",", "None", ")", "if", "fold_directive", "and", "optional_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@fold and @optional may not appear at the same '", "u'vertex field! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "fold_directive", "and", "output_source_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@fold and @output_source may not appear at the same '", "u'vertex field! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "fold_directive", "and", "recurse_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@fold and @recurse may not appear at the same '", "u'vertex field! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "optional_directive", "and", "output_source_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@optional and @output_source may not appear at the same '", "u'vertex field! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "optional_directive", "and", "recurse_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@optional and @recurse may not appear at the same '", "u'vertex field! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")" ]
Ensure that the specified vertex field directives are not mutually disallowed.
[ "Ensure", "that", "the", "specified", "vertex", "field", "directives", "are", "not", "mutually", "disallowed", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L158-L188
246,859
kensho-technologies/graphql-compiler
graphql_compiler/compiler/directive_helpers.py
validate_vertex_field_directive_in_context
def validate_vertex_field_directive_in_context(parent_location, vertex_field_name, directives, context): """Ensure that the specified vertex field directives are allowed in the current context.""" fold_directive = directives.get('fold', None) optional_directive = directives.get('optional', None) recurse_directive = directives.get('recurse', None) output_source_directive = directives.get('output_source', None) fold_context = 'fold' in context optional_context = 'optional' in context output_source_context = 'output_source' in context if fold_directive and fold_context: raise GraphQLCompilationError(u'@fold is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and fold_context: raise GraphQLCompilationError(u'@optional is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if output_source_directive and fold_context: raise GraphQLCompilationError(u'@output_source is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if recurse_directive and fold_context: raise GraphQLCompilationError(u'@recurse is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if output_source_context and not fold_directive: raise GraphQLCompilationError(u'Found non-fold vertex field after the vertex marked ' u'output source! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_context and fold_directive: raise GraphQLCompilationError(u'@fold is not allowed within a @optional traversal! 
' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_context and output_source_directive: raise GraphQLCompilationError(u'@output_source is not allowed within a @optional ' u'traversal! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name))
python
def validate_vertex_field_directive_in_context(parent_location, vertex_field_name, directives, context): fold_directive = directives.get('fold', None) optional_directive = directives.get('optional', None) recurse_directive = directives.get('recurse', None) output_source_directive = directives.get('output_source', None) fold_context = 'fold' in context optional_context = 'optional' in context output_source_context = 'output_source' in context if fold_directive and fold_context: raise GraphQLCompilationError(u'@fold is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_directive and fold_context: raise GraphQLCompilationError(u'@optional is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if output_source_directive and fold_context: raise GraphQLCompilationError(u'@output_source is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if recurse_directive and fold_context: raise GraphQLCompilationError(u'@recurse is not allowed within a @fold traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if output_source_context and not fold_directive: raise GraphQLCompilationError(u'Found non-fold vertex field after the vertex marked ' u'output source! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_context and fold_directive: raise GraphQLCompilationError(u'@fold is not allowed within a @optional traversal! ' u'Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name)) if optional_context and output_source_directive: raise GraphQLCompilationError(u'@output_source is not allowed within a @optional ' u'traversal! Parent location: {}, vertex field name: {}' .format(parent_location, vertex_field_name))
[ "def", "validate_vertex_field_directive_in_context", "(", "parent_location", ",", "vertex_field_name", ",", "directives", ",", "context", ")", ":", "fold_directive", "=", "directives", ".", "get", "(", "'fold'", ",", "None", ")", "optional_directive", "=", "directives", ".", "get", "(", "'optional'", ",", "None", ")", "recurse_directive", "=", "directives", ".", "get", "(", "'recurse'", ",", "None", ")", "output_source_directive", "=", "directives", ".", "get", "(", "'output_source'", ",", "None", ")", "fold_context", "=", "'fold'", "in", "context", "optional_context", "=", "'optional'", "in", "context", "output_source_context", "=", "'output_source'", "in", "context", "if", "fold_directive", "and", "fold_context", ":", "raise", "GraphQLCompilationError", "(", "u'@fold is not allowed within a @fold traversal! '", "u'Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "optional_directive", "and", "fold_context", ":", "raise", "GraphQLCompilationError", "(", "u'@optional is not allowed within a @fold traversal! '", "u'Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "output_source_directive", "and", "fold_context", ":", "raise", "GraphQLCompilationError", "(", "u'@output_source is not allowed within a @fold traversal! '", "u'Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "recurse_directive", "and", "fold_context", ":", "raise", "GraphQLCompilationError", "(", "u'@recurse is not allowed within a @fold traversal! '", "u'Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "output_source_context", "and", "not", "fold_directive", ":", "raise", "GraphQLCompilationError", "(", "u'Found non-fold vertex field after the vertex marked '", "u'output source! 
Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "optional_context", "and", "fold_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@fold is not allowed within a @optional traversal! '", "u'Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")", "if", "optional_context", "and", "output_source_directive", ":", "raise", "GraphQLCompilationError", "(", "u'@output_source is not allowed within a @optional '", "u'traversal! Parent location: {}, vertex field name: {}'", ".", "format", "(", "parent_location", ",", "vertex_field_name", ")", ")" ]
Ensure that the specified vertex field directives are allowed in the current context.
[ "Ensure", "that", "the", "specified", "vertex", "field", "directives", "are", "allowed", "in", "the", "current", "context", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/directive_helpers.py#L191-L231
246,860
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/match_formatting.py
_safe_match_string
def _safe_match_string(value): """Sanitize and represent a string argument in MATCH.""" if not isinstance(value, six.string_types): if isinstance(value, bytes): # should only happen in py3 value = value.decode('utf-8') else: raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: ' u'{}'.format(value)) # Using JSON encoding means that all unicode literals and special chars # (e.g. newlines and backslashes) are replaced by appropriate escape sequences. # JSON has the same escaping rules as MATCH / SQL, so no further escaping is necessary. return json.dumps(value)
python
def _safe_match_string(value): if not isinstance(value, six.string_types): if isinstance(value, bytes): # should only happen in py3 value = value.decode('utf-8') else: raise GraphQLInvalidArgumentError(u'Attempting to convert a non-string into a string: ' u'{}'.format(value)) # Using JSON encoding means that all unicode literals and special chars # (e.g. newlines and backslashes) are replaced by appropriate escape sequences. # JSON has the same escaping rules as MATCH / SQL, so no further escaping is necessary. return json.dumps(value)
[ "def", "_safe_match_string", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "six", ".", "string_types", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "# should only happen in py3", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "else", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to convert a non-string into a string: '", "u'{}'", ".", "format", "(", "value", ")", ")", "# Using JSON encoding means that all unicode literals and special chars", "# (e.g. newlines and backslashes) are replaced by appropriate escape sequences.", "# JSON has the same escaping rules as MATCH / SQL, so no further escaping is necessary.", "return", "json", ".", "dumps", "(", "value", ")" ]
Sanitize and represent a string argument in MATCH.
[ "Sanitize", "and", "represent", "a", "string", "argument", "in", "MATCH", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L17-L29
246,861
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/match_formatting.py
_safe_match_date_and_datetime
def _safe_match_date_and_datetime(graphql_type, expected_python_types, value): """Represent date and datetime objects as MATCH strings.""" # Python datetime.datetime is a subclass of datetime.date, # but in this case, the two are not interchangeable. # Rather than using isinstance, we will therefore check for exact type equality. value_type = type(value) if not any(value_type == x for x in expected_python_types): raise GraphQLInvalidArgumentError(u'Expected value to be exactly one of ' u'python types {}, but was {}: ' u'{}'.format(expected_python_types, value_type, value)) # The serialize() method of GraphQLDate and GraphQLDateTime produces the correct # ISO-8601 format that MATCH expects. We then simply represent it as a regular string. try: serialized_value = graphql_type.serialize(value) except ValueError as e: raise GraphQLInvalidArgumentError(e) return _safe_match_string(serialized_value)
python
def _safe_match_date_and_datetime(graphql_type, expected_python_types, value): # Python datetime.datetime is a subclass of datetime.date, # but in this case, the two are not interchangeable. # Rather than using isinstance, we will therefore check for exact type equality. value_type = type(value) if not any(value_type == x for x in expected_python_types): raise GraphQLInvalidArgumentError(u'Expected value to be exactly one of ' u'python types {}, but was {}: ' u'{}'.format(expected_python_types, value_type, value)) # The serialize() method of GraphQLDate and GraphQLDateTime produces the correct # ISO-8601 format that MATCH expects. We then simply represent it as a regular string. try: serialized_value = graphql_type.serialize(value) except ValueError as e: raise GraphQLInvalidArgumentError(e) return _safe_match_string(serialized_value)
[ "def", "_safe_match_date_and_datetime", "(", "graphql_type", ",", "expected_python_types", ",", "value", ")", ":", "# Python datetime.datetime is a subclass of datetime.date,", "# but in this case, the two are not interchangeable.", "# Rather than using isinstance, we will therefore check for exact type equality.", "value_type", "=", "type", "(", "value", ")", "if", "not", "any", "(", "value_type", "==", "x", "for", "x", "in", "expected_python_types", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Expected value to be exactly one of '", "u'python types {}, but was {}: '", "u'{}'", ".", "format", "(", "expected_python_types", ",", "value_type", ",", "value", ")", ")", "# The serialize() method of GraphQLDate and GraphQLDateTime produces the correct", "# ISO-8601 format that MATCH expects. We then simply represent it as a regular string.", "try", ":", "serialized_value", "=", "graphql_type", ".", "serialize", "(", "value", ")", "except", "ValueError", "as", "e", ":", "raise", "GraphQLInvalidArgumentError", "(", "e", ")", "return", "_safe_match_string", "(", "serialized_value", ")" ]
Represent date and datetime objects as MATCH strings.
[ "Represent", "date", "and", "datetime", "objects", "as", "MATCH", "strings", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L32-L50
246,862
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/match_formatting.py
_safe_match_list
def _safe_match_list(inner_type, argument_value): """Represent the list of "inner_type" objects in MATCH form.""" stripped_type = strip_non_null_from_type(inner_type) if isinstance(stripped_type, GraphQLList): raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, ' u'but inner type was {}: ' u'{}'.format(inner_type, argument_value)) if not isinstance(argument_value, list): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: ' u'{}'.format(argument_value)) components = ( _safe_match_argument(stripped_type, x) for x in argument_value ) return u'[' + u','.join(components) + u']'
python
def _safe_match_list(inner_type, argument_value): stripped_type = strip_non_null_from_type(inner_type) if isinstance(stripped_type, GraphQLList): raise GraphQLInvalidArgumentError(u'MATCH does not currently support nested lists, ' u'but inner type was {}: ' u'{}'.format(inner_type, argument_value)) if not isinstance(argument_value, list): raise GraphQLInvalidArgumentError(u'Attempting to represent a non-list as a list: ' u'{}'.format(argument_value)) components = ( _safe_match_argument(stripped_type, x) for x in argument_value ) return u'[' + u','.join(components) + u']'
[ "def", "_safe_match_list", "(", "inner_type", ",", "argument_value", ")", ":", "stripped_type", "=", "strip_non_null_from_type", "(", "inner_type", ")", "if", "isinstance", "(", "stripped_type", ",", "GraphQLList", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'MATCH does not currently support nested lists, '", "u'but inner type was {}: '", "u'{}'", ".", "format", "(", "inner_type", ",", "argument_value", ")", ")", "if", "not", "isinstance", "(", "argument_value", ",", "list", ")", ":", "raise", "GraphQLInvalidArgumentError", "(", "u'Attempting to represent a non-list as a list: '", "u'{}'", ".", "format", "(", "argument_value", ")", ")", "components", "=", "(", "_safe_match_argument", "(", "stripped_type", ",", "x", ")", "for", "x", "in", "argument_value", ")", "return", "u'['", "+", "u','", ".", "join", "(", "components", ")", "+", "u']'" ]
Represent the list of "inner_type" objects in MATCH form.
[ "Represent", "the", "list", "of", "inner_type", "objects", "in", "MATCH", "form", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L59-L75
246,863
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/match_formatting.py
insert_arguments_into_match_query
def insert_arguments_into_match_query(compilation_result, arguments): """Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a MATCH query with inserted argument data """ if compilation_result.language != MATCH_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query argument_types = compilation_result.input_metadata # The arguments are assumed to have already been validated against the query. sanitized_arguments = { key: _safe_match_argument(argument_types[key], value) for key, value in six.iteritems(arguments) } return base_query.format(**sanitized_arguments)
python
def insert_arguments_into_match_query(compilation_result, arguments): if compilation_result.language != MATCH_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query argument_types = compilation_result.input_metadata # The arguments are assumed to have already been validated against the query. sanitized_arguments = { key: _safe_match_argument(argument_types[key], value) for key, value in six.iteritems(arguments) } return base_query.format(**sanitized_arguments)
[ "def", "insert_arguments_into_match_query", "(", "compilation_result", ",", "arguments", ")", ":", "if", "compilation_result", ".", "language", "!=", "MATCH_LANGUAGE", ":", "raise", "AssertionError", "(", "u'Unexpected query output language: {}'", ".", "format", "(", "compilation_result", ")", ")", "base_query", "=", "compilation_result", ".", "query", "argument_types", "=", "compilation_result", ".", "input_metadata", "# The arguments are assumed to have already been validated against the query.", "sanitized_arguments", "=", "{", "key", ":", "_safe_match_argument", "(", "argument_types", "[", "key", "]", ",", "value", ")", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "arguments", ")", "}", "return", "base_query", ".", "format", "(", "*", "*", "sanitized_arguments", ")" ]
Insert the arguments into the compiled MATCH query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a MATCH query with inserted argument data
[ "Insert", "the", "arguments", "into", "the", "compiled", "MATCH", "query", "to", "form", "a", "complete", "query", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/match_formatting.py#L120-L142
246,864
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_sql/metadata.py
SqlMetadata.get_table
def get_table(self, schema_type): """Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name.""" table_name = schema_type.lower() if not self.has_table(table_name): raise exceptions.GraphQLCompilationError( 'No Table found in SQLAlchemy metadata for table name "{}"'.format(table_name) ) return self.table_name_to_table[table_name]
python
def get_table(self, schema_type): table_name = schema_type.lower() if not self.has_table(table_name): raise exceptions.GraphQLCompilationError( 'No Table found in SQLAlchemy metadata for table name "{}"'.format(table_name) ) return self.table_name_to_table[table_name]
[ "def", "get_table", "(", "self", ",", "schema_type", ")", ":", "table_name", "=", "schema_type", ".", "lower", "(", ")", "if", "not", "self", ".", "has_table", "(", "table_name", ")", ":", "raise", "exceptions", ".", "GraphQLCompilationError", "(", "'No Table found in SQLAlchemy metadata for table name \"{}\"'", ".", "format", "(", "table_name", ")", ")", "return", "self", ".", "table_name_to_table", "[", "table_name", "]" ]
Retrieve a SQLAlchemy table based on the supplied GraphQL schema type name.
[ "Retrieve", "a", "SQLAlchemy", "table", "based", "on", "the", "supplied", "GraphQL", "schema", "type", "name", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/metadata.py#L27-L34
246,865
kensho-technologies/graphql-compiler
graphql_compiler/compiler/match_query.py
_per_location_tuple_to_step
def _per_location_tuple_to_step(ir_tuple): """Construct a MatchStep from a tuple of its constituent blocks.""" root_block = ir_tuple[0] if not isinstance(root_block, root_block_types): raise AssertionError(u'Unexpected root block type for MatchStep: ' u'{} {}'.format(root_block, ir_tuple)) coerce_type_block = None where_block = None as_block = None for block in ir_tuple[1:]: if isinstance(block, CoerceType): if coerce_type_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "class" clause: ' u'{} {} {}'.format(block, coerce_type_block, ir_tuple)) coerce_type_block = block elif isinstance(block, MarkLocation): if as_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "as" clause: ' u'{} {} {}'.format(block, as_block, ir_tuple)) as_block = block elif isinstance(block, Filter): if where_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "where" clause: ' u'{} {} {}'.format(block, as_block, ir_tuple)) # Filter always comes before MarkLocation in a given MatchStep. if as_block is not None: raise AssertionError(u'Unexpectedly found MarkLocation before Filter in ' u'MatchStep: {} {} {}'.format(block, where_block, ir_tuple)) where_block = block else: raise AssertionError(u'Unexpected block encountered: {} {}'.format(block, ir_tuple)) step = MatchStep(root_block=root_block, coerce_type_block=coerce_type_block, where_block=where_block, as_block=as_block) # MatchSteps with Backtrack as the root block should only contain MarkLocation, # and not do filtering or type coercion. if isinstance(root_block, Backtrack): if where_block is not None or coerce_type_block is not None: raise AssertionError(u'Unexpected blocks in Backtrack-based MatchStep: {}'.format(step)) return step
python
def _per_location_tuple_to_step(ir_tuple): root_block = ir_tuple[0] if not isinstance(root_block, root_block_types): raise AssertionError(u'Unexpected root block type for MatchStep: ' u'{} {}'.format(root_block, ir_tuple)) coerce_type_block = None where_block = None as_block = None for block in ir_tuple[1:]: if isinstance(block, CoerceType): if coerce_type_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "class" clause: ' u'{} {} {}'.format(block, coerce_type_block, ir_tuple)) coerce_type_block = block elif isinstance(block, MarkLocation): if as_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "as" clause: ' u'{} {} {}'.format(block, as_block, ir_tuple)) as_block = block elif isinstance(block, Filter): if where_block is not None: raise AssertionError(u'Unexpectedly found two blocks eligible for "where" clause: ' u'{} {} {}'.format(block, as_block, ir_tuple)) # Filter always comes before MarkLocation in a given MatchStep. if as_block is not None: raise AssertionError(u'Unexpectedly found MarkLocation before Filter in ' u'MatchStep: {} {} {}'.format(block, where_block, ir_tuple)) where_block = block else: raise AssertionError(u'Unexpected block encountered: {} {}'.format(block, ir_tuple)) step = MatchStep(root_block=root_block, coerce_type_block=coerce_type_block, where_block=where_block, as_block=as_block) # MatchSteps with Backtrack as the root block should only contain MarkLocation, # and not do filtering or type coercion. if isinstance(root_block, Backtrack): if where_block is not None or coerce_type_block is not None: raise AssertionError(u'Unexpected blocks in Backtrack-based MatchStep: {}'.format(step)) return step
[ "def", "_per_location_tuple_to_step", "(", "ir_tuple", ")", ":", "root_block", "=", "ir_tuple", "[", "0", "]", "if", "not", "isinstance", "(", "root_block", ",", "root_block_types", ")", ":", "raise", "AssertionError", "(", "u'Unexpected root block type for MatchStep: '", "u'{} {}'", ".", "format", "(", "root_block", ",", "ir_tuple", ")", ")", "coerce_type_block", "=", "None", "where_block", "=", "None", "as_block", "=", "None", "for", "block", "in", "ir_tuple", "[", "1", ":", "]", ":", "if", "isinstance", "(", "block", ",", "CoerceType", ")", ":", "if", "coerce_type_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Unexpectedly found two blocks eligible for \"class\" clause: '", "u'{} {} {}'", ".", "format", "(", "block", ",", "coerce_type_block", ",", "ir_tuple", ")", ")", "coerce_type_block", "=", "block", "elif", "isinstance", "(", "block", ",", "MarkLocation", ")", ":", "if", "as_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Unexpectedly found two blocks eligible for \"as\" clause: '", "u'{} {} {}'", ".", "format", "(", "block", ",", "as_block", ",", "ir_tuple", ")", ")", "as_block", "=", "block", "elif", "isinstance", "(", "block", ",", "Filter", ")", ":", "if", "where_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Unexpectedly found two blocks eligible for \"where\" clause: '", "u'{} {} {}'", ".", "format", "(", "block", ",", "as_block", ",", "ir_tuple", ")", ")", "# Filter always comes before MarkLocation in a given MatchStep.", "if", "as_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Unexpectedly found MarkLocation before Filter in '", "u'MatchStep: {} {} {}'", ".", "format", "(", "block", ",", "where_block", ",", "ir_tuple", ")", ")", "where_block", "=", "block", "else", ":", "raise", "AssertionError", "(", "u'Unexpected block encountered: {} {}'", ".", "format", "(", "block", ",", "ir_tuple", ")", ")", "step", "=", "MatchStep", "(", "root_block", "=", "root_block", 
",", "coerce_type_block", "=", "coerce_type_block", ",", "where_block", "=", "where_block", ",", "as_block", "=", "as_block", ")", "# MatchSteps with Backtrack as the root block should only contain MarkLocation,", "# and not do filtering or type coercion.", "if", "isinstance", "(", "root_block", ",", "Backtrack", ")", ":", "if", "where_block", "is", "not", "None", "or", "coerce_type_block", "is", "not", "None", ":", "raise", "AssertionError", "(", "u'Unexpected blocks in Backtrack-based MatchStep: {}'", ".", "format", "(", "step", ")", ")", "return", "step" ]
Construct a MatchStep from a tuple of its constituent blocks.
[ "Construct", "a", "MatchStep", "from", "a", "tuple", "of", "its", "constituent", "blocks", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L39-L85
246,866
kensho-technologies/graphql-compiler
graphql_compiler/compiler/match_query.py
_split_ir_into_match_steps
def _split_ir_into_match_steps(pruned_ir_blocks): """Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that correspond to a single MATCH step. """ output = [] current_tuple = None for block in pruned_ir_blocks: if isinstance(block, OutputSource): # OutputSource blocks do not require any MATCH code, and only serve to help # optimizations and debugging. Simply omit them at this stage. continue elif isinstance(block, root_block_types): if current_tuple is not None: output.append(current_tuple) current_tuple = (block,) elif isinstance(block, (CoerceType, Filter, MarkLocation)): current_tuple += (block,) else: raise AssertionError(u'Unexpected block type when converting to MATCH query: ' u'{} {}'.format(block, pruned_ir_blocks)) if current_tuple is None: raise AssertionError(u'current_tuple was unexpectedly None: {}'.format(pruned_ir_blocks)) output.append(current_tuple) return [_per_location_tuple_to_step(x) for x in output]
python
def _split_ir_into_match_steps(pruned_ir_blocks): output = [] current_tuple = None for block in pruned_ir_blocks: if isinstance(block, OutputSource): # OutputSource blocks do not require any MATCH code, and only serve to help # optimizations and debugging. Simply omit them at this stage. continue elif isinstance(block, root_block_types): if current_tuple is not None: output.append(current_tuple) current_tuple = (block,) elif isinstance(block, (CoerceType, Filter, MarkLocation)): current_tuple += (block,) else: raise AssertionError(u'Unexpected block type when converting to MATCH query: ' u'{} {}'.format(block, pruned_ir_blocks)) if current_tuple is None: raise AssertionError(u'current_tuple was unexpectedly None: {}'.format(pruned_ir_blocks)) output.append(current_tuple) return [_per_location_tuple_to_step(x) for x in output]
[ "def", "_split_ir_into_match_steps", "(", "pruned_ir_blocks", ")", ":", "output", "=", "[", "]", "current_tuple", "=", "None", "for", "block", "in", "pruned_ir_blocks", ":", "if", "isinstance", "(", "block", ",", "OutputSource", ")", ":", "# OutputSource blocks do not require any MATCH code, and only serve to help", "# optimizations and debugging. Simply omit them at this stage.", "continue", "elif", "isinstance", "(", "block", ",", "root_block_types", ")", ":", "if", "current_tuple", "is", "not", "None", ":", "output", ".", "append", "(", "current_tuple", ")", "current_tuple", "=", "(", "block", ",", ")", "elif", "isinstance", "(", "block", ",", "(", "CoerceType", ",", "Filter", ",", "MarkLocation", ")", ")", ":", "current_tuple", "+=", "(", "block", ",", ")", "else", ":", "raise", "AssertionError", "(", "u'Unexpected block type when converting to MATCH query: '", "u'{} {}'", ".", "format", "(", "block", ",", "pruned_ir_blocks", ")", ")", "if", "current_tuple", "is", "None", ":", "raise", "AssertionError", "(", "u'current_tuple was unexpectedly None: {}'", ".", "format", "(", "pruned_ir_blocks", ")", ")", "output", ".", "append", "(", "current_tuple", ")", "return", "[", "_per_location_tuple_to_step", "(", "x", ")", "for", "x", "in", "output", "]" ]
Split a list of IR blocks into per-location MATCH steps. Args: pruned_ir_blocks: list of IR basic block objects that have gone through a lowering step. Returns: list of MatchStep namedtuples, each of which contains all basic blocks that correspond to a single MATCH step.
[ "Split", "a", "list", "of", "IR", "blocks", "into", "per", "-", "location", "MATCH", "steps", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L88-L119
246,867
kensho-technologies/graphql-compiler
graphql_compiler/compiler/match_query.py
_split_match_steps_into_match_traversals
def _split_match_steps_into_match_traversals(match_steps): """Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal.""" output = [] current_list = None for step in match_steps: if isinstance(step.root_block, QueryRoot): if current_list is not None: output.append(current_list) current_list = [step] else: current_list.append(step) if current_list is None: raise AssertionError(u'current_list was unexpectedly None: {}'.format(match_steps)) output.append(current_list) return output
python
def _split_match_steps_into_match_traversals(match_steps): output = [] current_list = None for step in match_steps: if isinstance(step.root_block, QueryRoot): if current_list is not None: output.append(current_list) current_list = [step] else: current_list.append(step) if current_list is None: raise AssertionError(u'current_list was unexpectedly None: {}'.format(match_steps)) output.append(current_list) return output
[ "def", "_split_match_steps_into_match_traversals", "(", "match_steps", ")", ":", "output", "=", "[", "]", "current_list", "=", "None", "for", "step", "in", "match_steps", ":", "if", "isinstance", "(", "step", ".", "root_block", ",", "QueryRoot", ")", ":", "if", "current_list", "is", "not", "None", ":", "output", ".", "append", "(", "current_list", ")", "current_list", "=", "[", "step", "]", "else", ":", "current_list", ".", "append", "(", "step", ")", "if", "current_list", "is", "None", ":", "raise", "AssertionError", "(", "u'current_list was unexpectedly None: {}'", ".", "format", "(", "match_steps", ")", ")", "output", ".", "append", "(", "current_list", ")", "return", "output" ]
Split a list of MatchSteps into multiple lists, each denoting a single MATCH traversal.
[ "Split", "a", "list", "of", "MatchSteps", "into", "multiple", "lists", "each", "denoting", "a", "single", "MATCH", "traversal", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L122-L138
246,868
kensho-technologies/graphql-compiler
graphql_compiler/compiler/match_query.py
convert_to_match_query
def convert_to_match_query(ir_blocks): """Convert the list of IR blocks into a MatchQuery object, for easier manipulation.""" output_block = ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected last IR block to be ConstructResult, found: ' u'{} {}'.format(output_block, ir_blocks)) ir_except_output = ir_blocks[:-1] folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output) # Extract WHERE Filter global_operation_ir_blocks_tuple = _extract_global_operations(ir_except_output_and_folds) global_operation_blocks, pruned_ir_blocks = global_operation_ir_blocks_tuple if len(global_operation_blocks) > 1: raise AssertionError(u'Received IR blocks with multiple global operation blocks. Only one ' u'is allowed: {} {}'.format(global_operation_blocks, ir_blocks)) if len(global_operation_blocks) == 1: if not isinstance(global_operation_blocks[0], Filter): raise AssertionError(u'Received non-Filter global operation block. {}' .format(global_operation_blocks[0])) where_block = global_operation_blocks[0] else: where_block = None match_steps = _split_ir_into_match_steps(pruned_ir_blocks) match_traversals = _split_match_steps_into_match_traversals(match_steps) return MatchQuery( match_traversals=match_traversals, folds=folds, output_block=output_block, where_block=where_block, )
python
def convert_to_match_query(ir_blocks): output_block = ir_blocks[-1] if not isinstance(output_block, ConstructResult): raise AssertionError(u'Expected last IR block to be ConstructResult, found: ' u'{} {}'.format(output_block, ir_blocks)) ir_except_output = ir_blocks[:-1] folds, ir_except_output_and_folds = extract_folds_from_ir_blocks(ir_except_output) # Extract WHERE Filter global_operation_ir_blocks_tuple = _extract_global_operations(ir_except_output_and_folds) global_operation_blocks, pruned_ir_blocks = global_operation_ir_blocks_tuple if len(global_operation_blocks) > 1: raise AssertionError(u'Received IR blocks with multiple global operation blocks. Only one ' u'is allowed: {} {}'.format(global_operation_blocks, ir_blocks)) if len(global_operation_blocks) == 1: if not isinstance(global_operation_blocks[0], Filter): raise AssertionError(u'Received non-Filter global operation block. {}' .format(global_operation_blocks[0])) where_block = global_operation_blocks[0] else: where_block = None match_steps = _split_ir_into_match_steps(pruned_ir_blocks) match_traversals = _split_match_steps_into_match_traversals(match_steps) return MatchQuery( match_traversals=match_traversals, folds=folds, output_block=output_block, where_block=where_block, )
[ "def", "convert_to_match_query", "(", "ir_blocks", ")", ":", "output_block", "=", "ir_blocks", "[", "-", "1", "]", "if", "not", "isinstance", "(", "output_block", ",", "ConstructResult", ")", ":", "raise", "AssertionError", "(", "u'Expected last IR block to be ConstructResult, found: '", "u'{} {}'", ".", "format", "(", "output_block", ",", "ir_blocks", ")", ")", "ir_except_output", "=", "ir_blocks", "[", ":", "-", "1", "]", "folds", ",", "ir_except_output_and_folds", "=", "extract_folds_from_ir_blocks", "(", "ir_except_output", ")", "# Extract WHERE Filter", "global_operation_ir_blocks_tuple", "=", "_extract_global_operations", "(", "ir_except_output_and_folds", ")", "global_operation_blocks", ",", "pruned_ir_blocks", "=", "global_operation_ir_blocks_tuple", "if", "len", "(", "global_operation_blocks", ")", ">", "1", ":", "raise", "AssertionError", "(", "u'Received IR blocks with multiple global operation blocks. Only one '", "u'is allowed: {} {}'", ".", "format", "(", "global_operation_blocks", ",", "ir_blocks", ")", ")", "if", "len", "(", "global_operation_blocks", ")", "==", "1", ":", "if", "not", "isinstance", "(", "global_operation_blocks", "[", "0", "]", ",", "Filter", ")", ":", "raise", "AssertionError", "(", "u'Received non-Filter global operation block. {}'", ".", "format", "(", "global_operation_blocks", "[", "0", "]", ")", ")", "where_block", "=", "global_operation_blocks", "[", "0", "]", "else", ":", "where_block", "=", "None", "match_steps", "=", "_split_ir_into_match_steps", "(", "pruned_ir_blocks", ")", "match_traversals", "=", "_split_match_steps_into_match_traversals", "(", "match_steps", ")", "return", "MatchQuery", "(", "match_traversals", "=", "match_traversals", ",", "folds", "=", "folds", ",", "output_block", "=", "output_block", ",", "where_block", "=", "where_block", ",", ")" ]
Convert the list of IR blocks into a MatchQuery object, for easier manipulation.
[ "Convert", "the", "list", "of", "IR", "blocks", "into", "a", "MatchQuery", "object", "for", "easier", "manipulation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/match_query.py#L178-L211
246,869
kensho-technologies/graphql-compiler
graphql_compiler/query_formatting/sql_formatting.py
insert_arguments_into_sql_query
def insert_arguments_into_sql_query(compilation_result, arguments): """Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectable, a executable SQL query with parameters bound. """ if compilation_result.language != SQL_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query return base_query.params(**arguments)
python
def insert_arguments_into_sql_query(compilation_result, arguments): if compilation_result.language != SQL_LANGUAGE: raise AssertionError(u'Unexpected query output language: {}'.format(compilation_result)) base_query = compilation_result.query return base_query.params(**arguments)
[ "def", "insert_arguments_into_sql_query", "(", "compilation_result", ",", "arguments", ")", ":", "if", "compilation_result", ".", "language", "!=", "SQL_LANGUAGE", ":", "raise", "AssertionError", "(", "u'Unexpected query output language: {}'", ".", "format", "(", "compilation_result", ")", ")", "base_query", "=", "compilation_result", ".", "query", "return", "base_query", ".", "params", "(", "*", "*", "arguments", ")" ]
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectable, a executable SQL query with parameters bound.
[ "Insert", "the", "arguments", "into", "the", "compiled", "SQL", "query", "to", "form", "a", "complete", "query", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/sql_formatting.py#L10-L23
246,870
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
convert_coerce_type_to_instanceof_filter
def convert_coerce_type_to_instanceof_filter(coerce_type_block): """Create an "INSTANCEOF" Filter block from a CoerceType block.""" coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class) # INSTANCEOF requires the target class to be passed in as a string, # so we make the target class a string literal. new_predicate = BinaryComposition( u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target)) return Filter(new_predicate)
python
def convert_coerce_type_to_instanceof_filter(coerce_type_block): coerce_type_target = get_only_element_from_collection(coerce_type_block.target_class) # INSTANCEOF requires the target class to be passed in as a string, # so we make the target class a string literal. new_predicate = BinaryComposition( u'INSTANCEOF', LocalField('@this'), Literal(coerce_type_target)) return Filter(new_predicate)
[ "def", "convert_coerce_type_to_instanceof_filter", "(", "coerce_type_block", ")", ":", "coerce_type_target", "=", "get_only_element_from_collection", "(", "coerce_type_block", ".", "target_class", ")", "# INSTANCEOF requires the target class to be passed in as a string,", "# so we make the target class a string literal.", "new_predicate", "=", "BinaryComposition", "(", "u'INSTANCEOF'", ",", "LocalField", "(", "'@this'", ")", ",", "Literal", "(", "coerce_type_target", ")", ")", "return", "Filter", "(", "new_predicate", ")" ]
Create an "INSTANCEOF" Filter block from a CoerceType block.
[ "Create", "an", "INSTANCEOF", "Filter", "block", "from", "a", "CoerceType", "block", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L15-L24
246,871
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
convert_coerce_type_and_add_to_where_block
def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block): """Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.""" instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block) if where_block: # There was already a Filter block -- we'll merge the two predicates together. return Filter(BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate)) else: return instanceof_filter
python
def convert_coerce_type_and_add_to_where_block(coerce_type_block, where_block): instanceof_filter = convert_coerce_type_to_instanceof_filter(coerce_type_block) if where_block: # There was already a Filter block -- we'll merge the two predicates together. return Filter(BinaryComposition(u'&&', instanceof_filter.predicate, where_block.predicate)) else: return instanceof_filter
[ "def", "convert_coerce_type_and_add_to_where_block", "(", "coerce_type_block", ",", "where_block", ")", ":", "instanceof_filter", "=", "convert_coerce_type_to_instanceof_filter", "(", "coerce_type_block", ")", "if", "where_block", ":", "# There was already a Filter block -- we'll merge the two predicates together.", "return", "Filter", "(", "BinaryComposition", "(", "u'&&'", ",", "instanceof_filter", ".", "predicate", ",", "where_block", ".", "predicate", ")", ")", "else", ":", "return", "instanceof_filter" ]
Create an "INSTANCEOF" Filter from a CoerceType, adding to an existing Filter if any.
[ "Create", "an", "INSTANCEOF", "Filter", "from", "a", "CoerceType", "adding", "to", "an", "existing", "Filter", "if", "any", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L27-L35
246,872
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
expression_list_to_conjunction
def expression_list_to_conjunction(expression_list): """Convert a list of expressions to an Expression that is the conjunction of all of them.""" if not isinstance(expression_list, list): raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list)) if len(expression_list) == 0: return TrueLiteral if not isinstance(expression_list[0], Expression): raise AssertionError(u'Non-Expression object {} found in expression_list' .format(expression_list[0])) if len(expression_list) == 1: return expression_list[0] else: return BinaryComposition(u'&&', expression_list_to_conjunction(expression_list[1:]), expression_list[0])
python
def expression_list_to_conjunction(expression_list): if not isinstance(expression_list, list): raise AssertionError(u'Expected `list`, Received {}.'.format(expression_list)) if len(expression_list) == 0: return TrueLiteral if not isinstance(expression_list[0], Expression): raise AssertionError(u'Non-Expression object {} found in expression_list' .format(expression_list[0])) if len(expression_list) == 1: return expression_list[0] else: return BinaryComposition(u'&&', expression_list_to_conjunction(expression_list[1:]), expression_list[0])
[ "def", "expression_list_to_conjunction", "(", "expression_list", ")", ":", "if", "not", "isinstance", "(", "expression_list", ",", "list", ")", ":", "raise", "AssertionError", "(", "u'Expected `list`, Received {}.'", ".", "format", "(", "expression_list", ")", ")", "if", "len", "(", "expression_list", ")", "==", "0", ":", "return", "TrueLiteral", "if", "not", "isinstance", "(", "expression_list", "[", "0", "]", ",", "Expression", ")", ":", "raise", "AssertionError", "(", "u'Non-Expression object {} found in expression_list'", ".", "format", "(", "expression_list", "[", "0", "]", ")", ")", "if", "len", "(", "expression_list", ")", "==", "1", ":", "return", "expression_list", "[", "0", "]", "else", ":", "return", "BinaryComposition", "(", "u'&&'", ",", "expression_list_to_conjunction", "(", "expression_list", "[", "1", ":", "]", ")", ",", "expression_list", "[", "0", "]", ")" ]
Convert a list of expressions to an Expression that is the conjunction of all of them.
[ "Convert", "a", "list", "of", "expressions", "to", "an", "Expression", "that", "is", "the", "conjunction", "of", "all", "of", "them", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L38-L54
246,873
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
construct_where_filter_predicate
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info): """Return an Expression that is True if and only if each simple optional filter is True. Construct filters for each simple optional, that are True if and only if `edge_field` does not exist in the `simple_optional_root_location` OR the `inner_location` is not defined. Return an Expression that evaluates to True if and only if *all* of the aforementioned filters evaluate to True (conjunction). Args: query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). simple_optional_root_info: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object correspoding to the unique MarkLocation present within a simple @optional (one that does not expands vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope Returns: a new Expression object """ inner_location_name_to_where_filter = {} for root_location, root_info_dict in six.iteritems(simple_optional_root_info): inner_location_name = root_info_dict['inner_location_name'] edge_field = root_info_dict['edge_field'] optional_edge_location = root_location.navigate_to_field(edge_field) optional_edge_where_filter = _filter_orientdb_simple_optional_edge( query_metadata_table, optional_edge_location, inner_location_name) inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter # Sort expressions by inner_location_name to obtain deterministic order where_filter_expressions = [ inner_location_name_to_where_filter[key] for key in sorted(inner_location_name_to_where_filter.keys()) ] return expression_list_to_conjunction(where_filter_expressions)
python
def construct_where_filter_predicate(query_metadata_table, simple_optional_root_info): inner_location_name_to_where_filter = {} for root_location, root_info_dict in six.iteritems(simple_optional_root_info): inner_location_name = root_info_dict['inner_location_name'] edge_field = root_info_dict['edge_field'] optional_edge_location = root_location.navigate_to_field(edge_field) optional_edge_where_filter = _filter_orientdb_simple_optional_edge( query_metadata_table, optional_edge_location, inner_location_name) inner_location_name_to_where_filter[inner_location_name] = optional_edge_where_filter # Sort expressions by inner_location_name to obtain deterministic order where_filter_expressions = [ inner_location_name_to_where_filter[key] for key in sorted(inner_location_name_to_where_filter.keys()) ] return expression_list_to_conjunction(where_filter_expressions)
[ "def", "construct_where_filter_predicate", "(", "query_metadata_table", ",", "simple_optional_root_info", ")", ":", "inner_location_name_to_where_filter", "=", "{", "}", "for", "root_location", ",", "root_info_dict", "in", "six", ".", "iteritems", "(", "simple_optional_root_info", ")", ":", "inner_location_name", "=", "root_info_dict", "[", "'inner_location_name'", "]", "edge_field", "=", "root_info_dict", "[", "'edge_field'", "]", "optional_edge_location", "=", "root_location", ".", "navigate_to_field", "(", "edge_field", ")", "optional_edge_where_filter", "=", "_filter_orientdb_simple_optional_edge", "(", "query_metadata_table", ",", "optional_edge_location", ",", "inner_location_name", ")", "inner_location_name_to_where_filter", "[", "inner_location_name", "]", "=", "optional_edge_where_filter", "# Sort expressions by inner_location_name to obtain deterministic order", "where_filter_expressions", "=", "[", "inner_location_name_to_where_filter", "[", "key", "]", "for", "key", "in", "sorted", "(", "inner_location_name_to_where_filter", ".", "keys", "(", ")", ")", "]", "return", "expression_list_to_conjunction", "(", "where_filter_expressions", ")" ]
Return an Expression that is True if and only if each simple optional filter is True. Construct filters for each simple optional, that are True if and only if `edge_field` does not exist in the `simple_optional_root_location` OR the `inner_location` is not defined. Return an Expression that evaluates to True if and only if *all* of the aforementioned filters evaluate to True (conjunction). Args: query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or optional). simple_optional_root_info: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object correspoding to the unique MarkLocation present within a simple @optional (one that does not expands vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope Returns: a new Expression object
[ "Return", "an", "Expression", "that", "is", "True", "if", "and", "only", "if", "each", "simple", "optional", "filter", "is", "True", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L192-L233
246,874
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
construct_optional_traversal_tree
def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots): """Return a tree of complex optional root locations. Args: complex_optional_roots: list of @optional locations (location immmediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: OptionalTraversalTree object representing the tree of complex optional roots """ tree = OptionalTraversalTree(complex_optional_roots) for optional_root_locations_stack in six.itervalues(location_to_optional_roots): tree.insert(list(optional_root_locations_stack)) return tree
python
def construct_optional_traversal_tree(complex_optional_roots, location_to_optional_roots): tree = OptionalTraversalTree(complex_optional_roots) for optional_root_locations_stack in six.itervalues(location_to_optional_roots): tree.insert(list(optional_root_locations_stack)) return tree
[ "def", "construct_optional_traversal_tree", "(", "complex_optional_roots", ",", "location_to_optional_roots", ")", ":", "tree", "=", "OptionalTraversalTree", "(", "complex_optional_roots", ")", "for", "optional_root_locations_stack", "in", "six", ".", "itervalues", "(", "location_to_optional_roots", ")", ":", "tree", ".", "insert", "(", "list", "(", "optional_root_locations_stack", ")", ")", "return", "tree" ]
Return a tree of complex optional root locations. Args: complex_optional_roots: list of @optional locations (location immmediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: OptionalTraversalTree object representing the tree of complex optional roots
[ "Return", "a", "tree", "of", "complex", "optional", "root", "locations", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L337-L355
246,875
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
BetweenClause.validate
def validate(self): """Validate that the Between Expression is correctly representable.""" if not isinstance(self.field, LocalField): raise TypeError(u'Expected LocalField field, got: {} {}'.format( type(self.field).__name__, self.field)) if not isinstance(self.lower_bound, Expression): raise TypeError(u'Expected Expression lower_bound, got: {} {}'.format( type(self.lower_bound).__name__, self.lower_bound)) if not isinstance(self.upper_bound, Expression): raise TypeError(u'Expected Expression upper_bound, got: {} {}'.format( type(self.upper_bound).__name__, self.upper_bound))
python
def validate(self): if not isinstance(self.field, LocalField): raise TypeError(u'Expected LocalField field, got: {} {}'.format( type(self.field).__name__, self.field)) if not isinstance(self.lower_bound, Expression): raise TypeError(u'Expected Expression lower_bound, got: {} {}'.format( type(self.lower_bound).__name__, self.lower_bound)) if not isinstance(self.upper_bound, Expression): raise TypeError(u'Expected Expression upper_bound, got: {} {}'.format( type(self.upper_bound).__name__, self.upper_bound))
[ "def", "validate", "(", "self", ")", ":", "if", "not", "isinstance", "(", "self", ".", "field", ",", "LocalField", ")", ":", "raise", "TypeError", "(", "u'Expected LocalField field, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "field", ")", ".", "__name__", ",", "self", ".", "field", ")", ")", "if", "not", "isinstance", "(", "self", ".", "lower_bound", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression lower_bound, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "lower_bound", ")", ".", "__name__", ",", "self", ".", "lower_bound", ")", ")", "if", "not", "isinstance", "(", "self", ".", "upper_bound", ",", "Expression", ")", ":", "raise", "TypeError", "(", "u'Expected Expression upper_bound, got: {} {}'", ".", "format", "(", "type", "(", "self", ".", "upper_bound", ")", ".", "__name__", ",", "self", ".", "upper_bound", ")", ")" ]
Validate that the Between Expression is correctly representable.
[ "Validate", "that", "the", "Between", "Expression", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L77-L89
246,876
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
BetweenClause.to_match
def to_match(self): """Return a unicode object with the MATCH representation of this BetweenClause.""" template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})' return template.format( field_name=self.field.to_match(), lower_bound=self.lower_bound.to_match(), upper_bound=self.upper_bound.to_match())
python
def to_match(self): template = u'({field_name} BETWEEN {lower_bound} AND {upper_bound})' return template.format( field_name=self.field.to_match(), lower_bound=self.lower_bound.to_match(), upper_bound=self.upper_bound.to_match())
[ "def", "to_match", "(", "self", ")", ":", "template", "=", "u'({field_name} BETWEEN {lower_bound} AND {upper_bound})'", "return", "template", ".", "format", "(", "field_name", "=", "self", ".", "field", ".", "to_match", "(", ")", ",", "lower_bound", "=", "self", ".", "lower_bound", ".", "to_match", "(", ")", ",", "upper_bound", "=", "self", ".", "upper_bound", ".", "to_match", "(", ")", ")" ]
Return a unicode object with the MATCH representation of this BetweenClause.
[ "Return", "a", "unicode", "object", "with", "the", "MATCH", "representation", "of", "this", "BetweenClause", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L101-L107
246,877
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/utils.py
OptionalTraversalTree.insert
def insert(self, optional_root_locations_path): """Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last of which must be present in complex_optional_roots """ encountered_simple_optional = False parent_location = self._root_location for optional_root_location in optional_root_locations_path: if encountered_simple_optional: raise AssertionError(u'Encountered simple optional root location {} in path, but' u'further locations are present. This should not happen: {}' .format(optional_root_location, optional_root_locations_path)) if optional_root_location not in self._location_to_children: # Simple optionals are ignored. # There should be no complex optionals after a simple optional. encountered_simple_optional = True else: self._location_to_children[parent_location].add(optional_root_location) parent_location = optional_root_location
python
def insert(self, optional_root_locations_path): encountered_simple_optional = False parent_location = self._root_location for optional_root_location in optional_root_locations_path: if encountered_simple_optional: raise AssertionError(u'Encountered simple optional root location {} in path, but' u'further locations are present. This should not happen: {}' .format(optional_root_location, optional_root_locations_path)) if optional_root_location not in self._location_to_children: # Simple optionals are ignored. # There should be no complex optionals after a simple optional. encountered_simple_optional = True else: self._location_to_children[parent_location].add(optional_root_location) parent_location = optional_root_location
[ "def", "insert", "(", "self", ",", "optional_root_locations_path", ")", ":", "encountered_simple_optional", "=", "False", "parent_location", "=", "self", ".", "_root_location", "for", "optional_root_location", "in", "optional_root_locations_path", ":", "if", "encountered_simple_optional", ":", "raise", "AssertionError", "(", "u'Encountered simple optional root location {} in path, but'", "u'further locations are present. This should not happen: {}'", ".", "format", "(", "optional_root_location", ",", "optional_root_locations_path", ")", ")", "if", "optional_root_location", "not", "in", "self", ".", "_location_to_children", ":", "# Simple optionals are ignored.", "# There should be no complex optionals after a simple optional.", "encountered_simple_optional", "=", "True", "else", ":", "self", ".", "_location_to_children", "[", "parent_location", "]", ".", "add", "(", "optional_root_location", ")", "parent_location", "=", "optional_root_location" ]
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last of which must be present in complex_optional_roots
[ "Insert", "a", "path", "of", "optional", "Locations", "into", "the", "tree", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/utils.py#L261-L285
246,878
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
emit_code_from_ir
def emit_code_from_ir(sql_query_tree, compiler_metadata): """Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query """ context = CompilationContext( query_path_to_selectable=dict(), query_path_to_location_info=sql_query_tree.query_path_to_location_info, query_path_to_output_fields=sql_query_tree.query_path_to_output_fields, query_path_to_filters=sql_query_tree.query_path_to_filters, query_path_to_node=sql_query_tree.query_path_to_node, compiler_metadata=compiler_metadata, ) return _query_tree_to_query(sql_query_tree.root, context)
python
def emit_code_from_ir(sql_query_tree, compiler_metadata): context = CompilationContext( query_path_to_selectable=dict(), query_path_to_location_info=sql_query_tree.query_path_to_location_info, query_path_to_output_fields=sql_query_tree.query_path_to_output_fields, query_path_to_filters=sql_query_tree.query_path_to_filters, query_path_to_node=sql_query_tree.query_path_to_node, compiler_metadata=compiler_metadata, ) return _query_tree_to_query(sql_query_tree.root, context)
[ "def", "emit_code_from_ir", "(", "sql_query_tree", ",", "compiler_metadata", ")", ":", "context", "=", "CompilationContext", "(", "query_path_to_selectable", "=", "dict", "(", ")", ",", "query_path_to_location_info", "=", "sql_query_tree", ".", "query_path_to_location_info", ",", "query_path_to_output_fields", "=", "sql_query_tree", ".", "query_path_to_output_fields", ",", "query_path_to_filters", "=", "sql_query_tree", ".", "query_path_to_filters", ",", "query_path_to_node", "=", "sql_query_tree", ".", "query_path_to_node", ",", "compiler_metadata", "=", "compiler_metadata", ",", ")", "return", "_query_tree_to_query", "(", "sql_query_tree", ".", "root", ",", "context", ")" ]
Return a SQLAlchemy Query from a passed SqlQueryTree. Args: sql_query_tree: SqlQueryTree, tree representation of the query to emit. compiler_metadata: SqlMetadata, SQLAlchemy specific metadata. Returns: SQLAlchemy Query
[ "Return", "a", "SQLAlchemy", "Query", "from", "a", "passed", "SqlQueryTree", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L39-L58
246,879
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_create_table_and_update_context
def _create_table_and_update_context(node, context): """Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly aliased SQLAlchemy table. """ schema_type_name = sql_context_helpers.get_schema_type_name(node, context) table = context.compiler_metadata.get_table(schema_type_name).alias() context.query_path_to_selectable[node.query_path] = table return table
python
def _create_table_and_update_context(node, context): schema_type_name = sql_context_helpers.get_schema_type_name(node, context) table = context.compiler_metadata.get_table(schema_type_name).alias() context.query_path_to_selectable[node.query_path] = table return table
[ "def", "_create_table_and_update_context", "(", "node", ",", "context", ")", ":", "schema_type_name", "=", "sql_context_helpers", ".", "get_schema_type_name", "(", "node", ",", "context", ")", "table", "=", "context", ".", "compiler_metadata", ".", "get_table", "(", "schema_type_name", ")", ".", "alias", "(", ")", "context", ".", "query_path_to_selectable", "[", "node", ".", "query_path", "]", "=", "table", "return", "table" ]
Create an aliased table for a SqlNode. Updates the relevant Selectable global context. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Table, the newly aliased SQLAlchemy table.
[ "Create", "an", "aliased", "table", "for", "a", "SqlNode", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L75-L90
246,880
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_create_query
def _create_query(node, context): """Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query. """ visited_nodes = [node] output_columns = _get_output_columns(visited_nodes, context) filters = _get_filters(visited_nodes, context) selectable = sql_context_helpers.get_node_selectable(node, context) query = select(output_columns).select_from(selectable).where(and_(*filters)) return query
python
def _create_query(node, context): visited_nodes = [node] output_columns = _get_output_columns(visited_nodes, context) filters = _get_filters(visited_nodes, context) selectable = sql_context_helpers.get_node_selectable(node, context) query = select(output_columns).select_from(selectable).where(and_(*filters)) return query
[ "def", "_create_query", "(", "node", ",", "context", ")", ":", "visited_nodes", "=", "[", "node", "]", "output_columns", "=", "_get_output_columns", "(", "visited_nodes", ",", "context", ")", "filters", "=", "_get_filters", "(", "visited_nodes", ",", "context", ")", "selectable", "=", "sql_context_helpers", ".", "get_node_selectable", "(", "node", ",", "context", ")", "query", "=", "select", "(", "output_columns", ")", ".", "select_from", "(", "selectable", ")", ".", "where", "(", "and_", "(", "*", "filters", ")", ")", "return", "query" ]
Create a query from a SqlNode. Args: node: SqlNode, the current node. context: CompilationContext, global compilation state and metadata. Returns: Selectable, selectable of the generated query.
[ "Create", "a", "query", "from", "a", "SqlNode", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L93-L108
246,881
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_get_output_columns
def _get_output_columns(nodes, context): """Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query. """ columns = [] for node in nodes: for sql_output in sql_context_helpers.get_outputs(node, context): field_name = sql_output.field_name column = sql_context_helpers.get_column(field_name, node, context) column = column.label(sql_output.output_name) columns.append(column) return columns
python
def _get_output_columns(nodes, context): columns = [] for node in nodes: for sql_output in sql_context_helpers.get_outputs(node, context): field_name = sql_output.field_name column = sql_context_helpers.get_column(field_name, node, context) column = column.label(sql_output.output_name) columns.append(column) return columns
[ "def", "_get_output_columns", "(", "nodes", ",", "context", ")", ":", "columns", "=", "[", "]", "for", "node", "in", "nodes", ":", "for", "sql_output", "in", "sql_context_helpers", ".", "get_outputs", "(", "node", ",", "context", ")", ":", "field_name", "=", "sql_output", ".", "field_name", "column", "=", "sql_context_helpers", ".", "get_column", "(", "field_name", ",", "node", ",", "context", ")", "column", "=", "column", ".", "label", "(", "sql_output", ".", "output_name", ")", "columns", ".", "append", "(", "column", ")", "return", "columns" ]
Get the output columns for a list of SqlNodes. Args: nodes: List[SqlNode], the nodes to get output columns from. context: CompilationContext, global compilation state and metadata. Returns: List[Column], list of SqlAlchemy Columns to output for this query.
[ "Get", "the", "output", "columns", "for", "a", "list", "of", "SqlNodes", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L111-L128
246,882
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_get_filters
def _get_filters(nodes, context): """Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions. """ filters = [] for node in nodes: for filter_block in sql_context_helpers.get_filters(node, context): filter_sql_expression = _transform_filter_to_sql(filter_block, node, context) filters.append(filter_sql_expression) return filters
python
def _get_filters(nodes, context): filters = [] for node in nodes: for filter_block in sql_context_helpers.get_filters(node, context): filter_sql_expression = _transform_filter_to_sql(filter_block, node, context) filters.append(filter_sql_expression) return filters
[ "def", "_get_filters", "(", "nodes", ",", "context", ")", ":", "filters", "=", "[", "]", "for", "node", "in", "nodes", ":", "for", "filter_block", "in", "sql_context_helpers", ".", "get_filters", "(", "node", ",", "context", ")", ":", "filter_sql_expression", "=", "_transform_filter_to_sql", "(", "filter_block", ",", "node", ",", "context", ")", "filters", ".", "append", "(", "filter_sql_expression", ")", "return", "filters" ]
Get filters to apply to a list of SqlNodes. Args: nodes: List[SqlNode], the SqlNodes to get filters for. context: CompilationContext, global compilation state and metadata. Returns: List[Expression], list of SQLAlchemy expressions.
[ "Get", "filters", "to", "apply", "to", "a", "list", "of", "SqlNodes", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L131-L146
246,883
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_transform_filter_to_sql
def _transform_filter_to_sql(filter_block, node, context): """Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression equivalent to the Filter.predicate expression. """ expression = filter_block.predicate return _expression_to_sql(expression, node, context)
python
def _transform_filter_to_sql(filter_block, node, context): expression = filter_block.predicate return _expression_to_sql(expression, node, context)
[ "def", "_transform_filter_to_sql", "(", "filter_block", ",", "node", ",", "context", ")", ":", "expression", "=", "filter_block", ".", "predicate", "return", "_expression_to_sql", "(", "expression", ",", "node", ",", "context", ")" ]
Transform a Filter block to its corresponding SQLAlchemy expression. Args: filter_block: Filter, the Filter block to transform. node: SqlNode, the node Filter block applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression equivalent to the Filter.predicate expression.
[ "Transform", "a", "Filter", "block", "to", "its", "corresponding", "SQLAlchemy", "expression", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L149-L161
246,884
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_expression_to_sql
def _expression_to_sql(expression, node, context): """Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy Expression equivalent to the passed compiler expression. """ _expression_transformers = { expressions.LocalField: _transform_local_field_to_expression, expressions.Variable: _transform_variable_to_expression, expressions.Literal: _transform_literal_to_expression, expressions.BinaryComposition: _transform_binary_composition_to_expression, } expression_type = type(expression) if expression_type not in _expression_transformers: raise NotImplementedError( u'Unsupported compiler expression "{}" of type "{}" cannot be converted to SQL ' u'expression.'.format(expression, type(expression))) return _expression_transformers[expression_type](expression, node, context)
python
def _expression_to_sql(expression, node, context): _expression_transformers = { expressions.LocalField: _transform_local_field_to_expression, expressions.Variable: _transform_variable_to_expression, expressions.Literal: _transform_literal_to_expression, expressions.BinaryComposition: _transform_binary_composition_to_expression, } expression_type = type(expression) if expression_type not in _expression_transformers: raise NotImplementedError( u'Unsupported compiler expression "{}" of type "{}" cannot be converted to SQL ' u'expression.'.format(expression, type(expression))) return _expression_transformers[expression_type](expression, node, context)
[ "def", "_expression_to_sql", "(", "expression", ",", "node", ",", "context", ")", ":", "_expression_transformers", "=", "{", "expressions", ".", "LocalField", ":", "_transform_local_field_to_expression", ",", "expressions", ".", "Variable", ":", "_transform_variable_to_expression", ",", "expressions", ".", "Literal", ":", "_transform_literal_to_expression", ",", "expressions", ".", "BinaryComposition", ":", "_transform_binary_composition_to_expression", ",", "}", "expression_type", "=", "type", "(", "expression", ")", "if", "expression_type", "not", "in", "_expression_transformers", ":", "raise", "NotImplementedError", "(", "u'Unsupported compiler expression \"{}\" of type \"{}\" cannot be converted to SQL '", "u'expression.'", ".", "format", "(", "expression", ",", "type", "(", "expression", ")", ")", ")", "return", "_expression_transformers", "[", "expression_type", "]", "(", "expression", ",", "node", ",", "context", ")" ]
Recursively transform a Filter block predicate to its SQLAlchemy expression representation. Args: expression: expression, the compiler expression to transform. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy Expression equivalent to the passed compiler expression.
[ "Recursively", "transform", "a", "Filter", "block", "predicate", "to", "its", "SQLAlchemy", "expression", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L164-L186
246,885
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_transform_binary_composition_to_expression
def _transform_binary_composition_to_expression(expression, node, context): """Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression. """ if expression.operator not in constants.SUPPORTED_OPERATORS: raise NotImplementedError( u'Filter operation "{}" is not supported by the SQL backend.'.format( expression.operator)) sql_operator = constants.SUPPORTED_OPERATORS[expression.operator] left = _expression_to_sql(expression.left, node, context) right = _expression_to_sql(expression.right, node, context) if sql_operator.cardinality == constants.CARDINALITY_UNARY: left, right = _get_column_and_bindparam(left, right, sql_operator) clause = getattr(left, sql_operator.name)(right) return clause elif sql_operator.cardinality == constants.CARDINALITY_BINARY: clause = getattr(sql_expressions, sql_operator.name)(left, right) return clause elif sql_operator.cardinality == constants.CARDINALITY_LIST_VALUED: left, right = _get_column_and_bindparam(left, right, sql_operator) # ensure that SQLAlchemy treats the right bind parameter as list valued right.expanding = True clause = getattr(left, sql_operator.name)(right) return clause raise AssertionError(u'Unreachable, operator cardinality {} for compiler expression {} is ' u'unknown'.format(sql_operator.cardinality, expression))
python
def _transform_binary_composition_to_expression(expression, node, context): if expression.operator not in constants.SUPPORTED_OPERATORS: raise NotImplementedError( u'Filter operation "{}" is not supported by the SQL backend.'.format( expression.operator)) sql_operator = constants.SUPPORTED_OPERATORS[expression.operator] left = _expression_to_sql(expression.left, node, context) right = _expression_to_sql(expression.right, node, context) if sql_operator.cardinality == constants.CARDINALITY_UNARY: left, right = _get_column_and_bindparam(left, right, sql_operator) clause = getattr(left, sql_operator.name)(right) return clause elif sql_operator.cardinality == constants.CARDINALITY_BINARY: clause = getattr(sql_expressions, sql_operator.name)(left, right) return clause elif sql_operator.cardinality == constants.CARDINALITY_LIST_VALUED: left, right = _get_column_and_bindparam(left, right, sql_operator) # ensure that SQLAlchemy treats the right bind parameter as list valued right.expanding = True clause = getattr(left, sql_operator.name)(right) return clause raise AssertionError(u'Unreachable, operator cardinality {} for compiler expression {} is ' u'unknown'.format(sql_operator.cardinality, expression))
[ "def", "_transform_binary_composition_to_expression", "(", "expression", ",", "node", ",", "context", ")", ":", "if", "expression", ".", "operator", "not", "in", "constants", ".", "SUPPORTED_OPERATORS", ":", "raise", "NotImplementedError", "(", "u'Filter operation \"{}\" is not supported by the SQL backend.'", ".", "format", "(", "expression", ".", "operator", ")", ")", "sql_operator", "=", "constants", ".", "SUPPORTED_OPERATORS", "[", "expression", ".", "operator", "]", "left", "=", "_expression_to_sql", "(", "expression", ".", "left", ",", "node", ",", "context", ")", "right", "=", "_expression_to_sql", "(", "expression", ".", "right", ",", "node", ",", "context", ")", "if", "sql_operator", ".", "cardinality", "==", "constants", ".", "CARDINALITY_UNARY", ":", "left", ",", "right", "=", "_get_column_and_bindparam", "(", "left", ",", "right", ",", "sql_operator", ")", "clause", "=", "getattr", "(", "left", ",", "sql_operator", ".", "name", ")", "(", "right", ")", "return", "clause", "elif", "sql_operator", ".", "cardinality", "==", "constants", ".", "CARDINALITY_BINARY", ":", "clause", "=", "getattr", "(", "sql_expressions", ",", "sql_operator", ".", "name", ")", "(", "left", ",", "right", ")", "return", "clause", "elif", "sql_operator", ".", "cardinality", "==", "constants", ".", "CARDINALITY_LIST_VALUED", ":", "left", ",", "right", "=", "_get_column_and_bindparam", "(", "left", ",", "right", ",", "sql_operator", ")", "# ensure that SQLAlchemy treats the right bind parameter as list valued", "right", ".", "expanding", "=", "True", "clause", "=", "getattr", "(", "left", ",", "sql_operator", ".", "name", ")", "(", "right", ")", "return", "clause", "raise", "AssertionError", "(", "u'Unreachable, operator cardinality {} for compiler expression {} is '", "u'unknown'", ".", "format", "(", "sql_operator", ".", "cardinality", ",", "expression", ")", ")" ]
Transform a BinaryComposition compiler expression into a SQLAlchemy expression. Recursively calls _expression_to_sql to convert its left and right sub-expressions. Args: expression: expression, BinaryComposition compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
[ "Transform", "a", "BinaryComposition", "compiler", "expression", "into", "a", "SQLAlchemy", "expression", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L189-L223
246,886
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_transform_variable_to_expression
def _transform_variable_to_expression(expression, node, context): """Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression. """ variable_name = expression.variable_name if not variable_name.startswith(u'$'): raise AssertionError(u'Unexpectedly received variable name {} that is not ' u'prefixed with "$"'.format(variable_name)) return bindparam(variable_name[1:])
python
def _transform_variable_to_expression(expression, node, context): variable_name = expression.variable_name if not variable_name.startswith(u'$'): raise AssertionError(u'Unexpectedly received variable name {} that is not ' u'prefixed with "$"'.format(variable_name)) return bindparam(variable_name[1:])
[ "def", "_transform_variable_to_expression", "(", "expression", ",", "node", ",", "context", ")", ":", "variable_name", "=", "expression", ".", "variable_name", "if", "not", "variable_name", ".", "startswith", "(", "u'$'", ")", ":", "raise", "AssertionError", "(", "u'Unexpectedly received variable name {} that is not '", "u'prefixed with \"$\"'", ".", "format", "(", "variable_name", ")", ")", "return", "bindparam", "(", "variable_name", "[", "1", ":", "]", ")" ]
Transform a Variable compiler expression into its SQLAlchemy expression representation. Args: expression: expression, Variable compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
[ "Transform", "a", "Variable", "compiler", "expression", "into", "its", "SQLAlchemy", "expression", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L255-L270
246,887
kensho-technologies/graphql-compiler
graphql_compiler/compiler/emit_sql.py
_transform_local_field_to_expression
def _transform_local_field_to_expression(expression, node, context): """Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression. """ column_name = expression.field_name column = sql_context_helpers.get_column(column_name, node, context) return column
python
def _transform_local_field_to_expression(expression, node, context): column_name = expression.field_name column = sql_context_helpers.get_column(column_name, node, context) return column
[ "def", "_transform_local_field_to_expression", "(", "expression", ",", "node", ",", "context", ")", ":", "column_name", "=", "expression", ".", "field_name", "column", "=", "sql_context_helpers", ".", "get_column", "(", "column_name", ",", "node", ",", "context", ")", "return", "column" ]
Transform a LocalField compiler expression into its SQLAlchemy expression representation. Args: expression: expression, LocalField compiler expression. node: SqlNode, the SqlNode the expression applies to. context: CompilationContext, global compilation state and metadata. Returns: Expression, SQLAlchemy expression.
[ "Transform", "a", "LocalField", "compiler", "expression", "into", "its", "SQLAlchemy", "expression", "representation", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/emit_sql.py#L273-L286
246,888
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_common.py
lower_context_field_existence
def lower_context_field_existence(ir_blocks, query_metadata_table): """Lower ContextFieldExistence expressions into lower-level expressions.""" def regular_visitor_fn(expression): """Expression visitor function that rewrites ContextFieldExistence expressions.""" if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type # Since this function is only used in blocks that aren't ConstructResult, # the location check is performed using a regular ContextField expression. return BinaryComposition( u'!=', ContextField(expression.location, location_type), NullLiteral) def construct_result_visitor_fn(expression): """Expression visitor function that rewrites ContextFieldExistence expressions.""" if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type # Since this function is only used in ConstructResult blocks, # the location check is performed using the special OutputContextVertex expression. return BinaryComposition( u'!=', OutputContextVertex(expression.location, location_type), NullLiteral) new_ir_blocks = [] for block in ir_blocks: new_block = None if isinstance(block, ConstructResult): new_block = block.visit_and_update_expressions(construct_result_visitor_fn) else: new_block = block.visit_and_update_expressions(regular_visitor_fn) new_ir_blocks.append(new_block) return new_ir_blocks
python
def lower_context_field_existence(ir_blocks, query_metadata_table): def regular_visitor_fn(expression): """Expression visitor function that rewrites ContextFieldExistence expressions.""" if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type # Since this function is only used in blocks that aren't ConstructResult, # the location check is performed using a regular ContextField expression. return BinaryComposition( u'!=', ContextField(expression.location, location_type), NullLiteral) def construct_result_visitor_fn(expression): """Expression visitor function that rewrites ContextFieldExistence expressions.""" if not isinstance(expression, ContextFieldExistence): return expression location_type = query_metadata_table.get_location_info(expression.location).type # Since this function is only used in ConstructResult blocks, # the location check is performed using the special OutputContextVertex expression. return BinaryComposition( u'!=', OutputContextVertex(expression.location, location_type), NullLiteral) new_ir_blocks = [] for block in ir_blocks: new_block = None if isinstance(block, ConstructResult): new_block = block.visit_and_update_expressions(construct_result_visitor_fn) else: new_block = block.visit_and_update_expressions(regular_visitor_fn) new_ir_blocks.append(new_block) return new_ir_blocks
[ "def", "lower_context_field_existence", "(", "ir_blocks", ",", "query_metadata_table", ")", ":", "def", "regular_visitor_fn", "(", "expression", ")", ":", "\"\"\"Expression visitor function that rewrites ContextFieldExistence expressions.\"\"\"", "if", "not", "isinstance", "(", "expression", ",", "ContextFieldExistence", ")", ":", "return", "expression", "location_type", "=", "query_metadata_table", ".", "get_location_info", "(", "expression", ".", "location", ")", ".", "type", "# Since this function is only used in blocks that aren't ConstructResult,", "# the location check is performed using a regular ContextField expression.", "return", "BinaryComposition", "(", "u'!='", ",", "ContextField", "(", "expression", ".", "location", ",", "location_type", ")", ",", "NullLiteral", ")", "def", "construct_result_visitor_fn", "(", "expression", ")", ":", "\"\"\"Expression visitor function that rewrites ContextFieldExistence expressions.\"\"\"", "if", "not", "isinstance", "(", "expression", ",", "ContextFieldExistence", ")", ":", "return", "expression", "location_type", "=", "query_metadata_table", ".", "get_location_info", "(", "expression", ".", "location", ")", ".", "type", "# Since this function is only used in ConstructResult blocks,", "# the location check is performed using the special OutputContextVertex expression.", "return", "BinaryComposition", "(", "u'!='", ",", "OutputContextVertex", "(", "expression", ".", "location", ",", "location_type", ")", ",", "NullLiteral", ")", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "new_block", "=", "None", "if", "isinstance", "(", "block", ",", "ConstructResult", ")", ":", "new_block", "=", "block", ".", "visit_and_update_expressions", "(", "construct_result_visitor_fn", ")", "else", ":", "new_block", "=", "block", ".", "visit_and_update_expressions", "(", "regular_visitor_fn", ")", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
Lower ContextFieldExistence expressions into lower-level expressions.
[ "Lower", "ContextFieldExistence", "expressions", "into", "lower", "-", "level", "expressions", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L56-L95
246,889
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_common.py
optimize_boolean_expression_comparisons
def optimize_boolean_expression_comparisons(ir_blocks): """Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is rewritten into: BinaryComposition('=', something, NullLiteral) Args: ir_blocks: list of basic block objects Returns: a new list of basic block objects, with the optimization applied """ operator_inverses = { u'=': u'!=', u'!=': u'=', } def visitor_fn(expression): """Expression visitor function that performs the above rewriting.""" if not isinstance(expression, BinaryComposition): return expression left_is_binary_composition = isinstance(expression.left, BinaryComposition) right_is_binary_composition = isinstance(expression.right, BinaryComposition) if not left_is_binary_composition and not right_is_binary_composition: # Nothing to rewrite, return the expression as-is. return expression identity_literal = None # The boolean literal for which we just use the inner expression. inverse_literal = None # The boolean literal for which we negate the inner expression. if expression.operator == u'=': identity_literal = TrueLiteral inverse_literal = FalseLiteral elif expression.operator == u'!=': identity_literal = FalseLiteral inverse_literal = TrueLiteral else: return expression expression_to_rewrite = None if expression.left == identity_literal and right_is_binary_composition: return expression.right elif expression.right == identity_literal and left_is_binary_composition: return expression.left elif expression.left == inverse_literal and right_is_binary_composition: expression_to_rewrite = expression.right elif expression.right == inverse_literal and left_is_binary_composition: expression_to_rewrite = expression.left if expression_to_rewrite is None: # We couldn't find anything to rewrite, return the expression as-is. 
return expression elif expression_to_rewrite.operator not in operator_inverses: # We can't rewrite the inner expression since we don't know its inverse operator. return expression else: return BinaryComposition( operator_inverses[expression_to_rewrite.operator], expression_to_rewrite.left, expression_to_rewrite.right) new_ir_blocks = [] for block in ir_blocks: new_block = block.visit_and_update_expressions(visitor_fn) new_ir_blocks.append(new_block) return new_ir_blocks
python
def optimize_boolean_expression_comparisons(ir_blocks): operator_inverses = { u'=': u'!=', u'!=': u'=', } def visitor_fn(expression): """Expression visitor function that performs the above rewriting.""" if not isinstance(expression, BinaryComposition): return expression left_is_binary_composition = isinstance(expression.left, BinaryComposition) right_is_binary_composition = isinstance(expression.right, BinaryComposition) if not left_is_binary_composition and not right_is_binary_composition: # Nothing to rewrite, return the expression as-is. return expression identity_literal = None # The boolean literal for which we just use the inner expression. inverse_literal = None # The boolean literal for which we negate the inner expression. if expression.operator == u'=': identity_literal = TrueLiteral inverse_literal = FalseLiteral elif expression.operator == u'!=': identity_literal = FalseLiteral inverse_literal = TrueLiteral else: return expression expression_to_rewrite = None if expression.left == identity_literal and right_is_binary_composition: return expression.right elif expression.right == identity_literal and left_is_binary_composition: return expression.left elif expression.left == inverse_literal and right_is_binary_composition: expression_to_rewrite = expression.right elif expression.right == inverse_literal and left_is_binary_composition: expression_to_rewrite = expression.left if expression_to_rewrite is None: # We couldn't find anything to rewrite, return the expression as-is. return expression elif expression_to_rewrite.operator not in operator_inverses: # We can't rewrite the inner expression since we don't know its inverse operator. return expression else: return BinaryComposition( operator_inverses[expression_to_rewrite.operator], expression_to_rewrite.left, expression_to_rewrite.right) new_ir_blocks = [] for block in ir_blocks: new_block = block.visit_and_update_expressions(visitor_fn) new_ir_blocks.append(new_block) return new_ir_blocks
[ "def", "optimize_boolean_expression_comparisons", "(", "ir_blocks", ")", ":", "operator_inverses", "=", "{", "u'='", ":", "u'!='", ",", "u'!='", ":", "u'='", ",", "}", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Expression visitor function that performs the above rewriting.\"\"\"", "if", "not", "isinstance", "(", "expression", ",", "BinaryComposition", ")", ":", "return", "expression", "left_is_binary_composition", "=", "isinstance", "(", "expression", ".", "left", ",", "BinaryComposition", ")", "right_is_binary_composition", "=", "isinstance", "(", "expression", ".", "right", ",", "BinaryComposition", ")", "if", "not", "left_is_binary_composition", "and", "not", "right_is_binary_composition", ":", "# Nothing to rewrite, return the expression as-is.", "return", "expression", "identity_literal", "=", "None", "# The boolean literal for which we just use the inner expression.", "inverse_literal", "=", "None", "# The boolean literal for which we negate the inner expression.", "if", "expression", ".", "operator", "==", "u'='", ":", "identity_literal", "=", "TrueLiteral", "inverse_literal", "=", "FalseLiteral", "elif", "expression", ".", "operator", "==", "u'!='", ":", "identity_literal", "=", "FalseLiteral", "inverse_literal", "=", "TrueLiteral", "else", ":", "return", "expression", "expression_to_rewrite", "=", "None", "if", "expression", ".", "left", "==", "identity_literal", "and", "right_is_binary_composition", ":", "return", "expression", ".", "right", "elif", "expression", ".", "right", "==", "identity_literal", "and", "left_is_binary_composition", ":", "return", "expression", ".", "left", "elif", "expression", ".", "left", "==", "inverse_literal", "and", "right_is_binary_composition", ":", "expression_to_rewrite", "=", "expression", ".", "right", "elif", "expression", ".", "right", "==", "inverse_literal", "and", "left_is_binary_composition", ":", "expression_to_rewrite", "=", "expression", ".", "left", "if", "expression_to_rewrite", "is", "None", 
":", "# We couldn't find anything to rewrite, return the expression as-is.", "return", "expression", "elif", "expression_to_rewrite", ".", "operator", "not", "in", "operator_inverses", ":", "# We can't rewrite the inner expression since we don't know its inverse operator.", "return", "expression", "else", ":", "return", "BinaryComposition", "(", "operator_inverses", "[", "expression_to_rewrite", ".", "operator", "]", ",", "expression_to_rewrite", ".", "left", ",", "expression_to_rewrite", ".", "right", ")", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "new_block", "=", "block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "new_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_ir_blocks" ]
Optimize comparisons of a boolean binary comparison expression against a boolean literal. Rewriting example: BinaryComposition( '=', BinaryComposition('!=', something, NullLiteral) False) The above is rewritten into: BinaryComposition('=', something, NullLiteral) Args: ir_blocks: list of basic block objects Returns: a new list of basic block objects, with the optimization applied
[ "Optimize", "comparisons", "of", "a", "boolean", "binary", "comparison", "expression", "against", "a", "boolean", "literal", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L98-L171
246,890
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_common.py
extract_simple_optional_location_info
def extract_simple_optional_location_info( ir_blocks, complex_optional_roots, location_to_optional_roots): """Construct a map from simple optional locations to their inner location and traversed edge. Args: ir_blocks: list of IR blocks to extract optional data from complex_optional_roots: list of @optional locations (location immmediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object correspoding to the unique MarkLocation present within a simple optional (one that does not expand vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope """ # Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots) # We filter out the ones that are also present in complex_optional_roots. location_to_preceding_optional_root_iteritems = six.iteritems({ location: optional_root_locations_stack[-1] for location, optional_root_locations_stack in six.iteritems(location_to_optional_roots) }) simple_optional_root_to_inner_location = { optional_root_location: inner_location for inner_location, optional_root_location in location_to_preceding_optional_root_iteritems if optional_root_location not in complex_optional_roots } simple_optional_root_locations = set(simple_optional_root_to_inner_location.keys()) # Blocks within folded scopes should not be taken into account in this function. 
_, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) simple_optional_root_info = {} preceding_location = None for current_block in non_folded_ir_blocks: if isinstance(current_block, MarkLocation): preceding_location = current_block.location elif isinstance(current_block, Traverse) and current_block.optional: if preceding_location in simple_optional_root_locations: # The current optional Traverse is "simple" # i.e. it does not contain any Traverses within. inner_location = simple_optional_root_to_inner_location[preceding_location] inner_location_name, _ = inner_location.get_location_name() simple_optional_info_dict = { 'inner_location_name': inner_location_name, 'edge_field': current_block.get_field_name(), } simple_optional_root_info[preceding_location] = simple_optional_info_dict return simple_optional_root_info
python
def extract_simple_optional_location_info( ir_blocks, complex_optional_roots, location_to_optional_roots): # Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots) # We filter out the ones that are also present in complex_optional_roots. location_to_preceding_optional_root_iteritems = six.iteritems({ location: optional_root_locations_stack[-1] for location, optional_root_locations_stack in six.iteritems(location_to_optional_roots) }) simple_optional_root_to_inner_location = { optional_root_location: inner_location for inner_location, optional_root_location in location_to_preceding_optional_root_iteritems if optional_root_location not in complex_optional_roots } simple_optional_root_locations = set(simple_optional_root_to_inner_location.keys()) # Blocks within folded scopes should not be taken into account in this function. _, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks) simple_optional_root_info = {} preceding_location = None for current_block in non_folded_ir_blocks: if isinstance(current_block, MarkLocation): preceding_location = current_block.location elif isinstance(current_block, Traverse) and current_block.optional: if preceding_location in simple_optional_root_locations: # The current optional Traverse is "simple" # i.e. it does not contain any Traverses within. inner_location = simple_optional_root_to_inner_location[preceding_location] inner_location_name, _ = inner_location.get_location_name() simple_optional_info_dict = { 'inner_location_name': inner_location_name, 'edge_field': current_block.get_field_name(), } simple_optional_root_info[preceding_location] = simple_optional_info_dict return simple_optional_root_info
[ "def", "extract_simple_optional_location_info", "(", "ir_blocks", ",", "complex_optional_roots", ",", "location_to_optional_roots", ")", ":", "# Simple optional roots are a subset of location_to_optional_roots.values() (all optional roots)", "# We filter out the ones that are also present in complex_optional_roots.", "location_to_preceding_optional_root_iteritems", "=", "six", ".", "iteritems", "(", "{", "location", ":", "optional_root_locations_stack", "[", "-", "1", "]", "for", "location", ",", "optional_root_locations_stack", "in", "six", ".", "iteritems", "(", "location_to_optional_roots", ")", "}", ")", "simple_optional_root_to_inner_location", "=", "{", "optional_root_location", ":", "inner_location", "for", "inner_location", ",", "optional_root_location", "in", "location_to_preceding_optional_root_iteritems", "if", "optional_root_location", "not", "in", "complex_optional_roots", "}", "simple_optional_root_locations", "=", "set", "(", "simple_optional_root_to_inner_location", ".", "keys", "(", ")", ")", "# Blocks within folded scopes should not be taken into account in this function.", "_", ",", "non_folded_ir_blocks", "=", "extract_folds_from_ir_blocks", "(", "ir_blocks", ")", "simple_optional_root_info", "=", "{", "}", "preceding_location", "=", "None", "for", "current_block", "in", "non_folded_ir_blocks", ":", "if", "isinstance", "(", "current_block", ",", "MarkLocation", ")", ":", "preceding_location", "=", "current_block", ".", "location", "elif", "isinstance", "(", "current_block", ",", "Traverse", ")", "and", "current_block", ".", "optional", ":", "if", "preceding_location", "in", "simple_optional_root_locations", ":", "# The current optional Traverse is \"simple\"", "# i.e. 
it does not contain any Traverses within.", "inner_location", "=", "simple_optional_root_to_inner_location", "[", "preceding_location", "]", "inner_location_name", ",", "_", "=", "inner_location", ".", "get_location_name", "(", ")", "simple_optional_info_dict", "=", "{", "'inner_location_name'", ":", "inner_location_name", ",", "'edge_field'", ":", "current_block", ".", "get_field_name", "(", ")", ",", "}", "simple_optional_root_info", "[", "preceding_location", "]", "=", "simple_optional_info_dict", "return", "simple_optional_root_info" ]
Construct a map from simple optional locations to their inner location and traversed edge. Args: ir_blocks: list of IR blocks to extract optional data from complex_optional_roots: list of @optional locations (location immmediately preceding an @optional traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides Returns: dict mapping from simple_optional_root_location -> dict containing keys - 'inner_location_name': Location object correspoding to the unique MarkLocation present within a simple optional (one that does not expand vertex fields) scope - 'edge_field': string representing the optional edge being traversed where simple_optional_root_to_inner_location is the location preceding the @optional scope
[ "Construct", "a", "map", "from", "simple", "optional", "locations", "to", "their", "inner", "location", "and", "traversed", "edge", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L283-L337
246,891
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_common.py
remove_end_optionals
def remove_end_optionals(ir_blocks): """Return a list of IR blocks as a copy of the original, with EndOptional blocks removed.""" new_ir_blocks = [] for block in ir_blocks: if not isinstance(block, EndOptional): new_ir_blocks.append(block) return new_ir_blocks
python
def remove_end_optionals(ir_blocks): new_ir_blocks = [] for block in ir_blocks: if not isinstance(block, EndOptional): new_ir_blocks.append(block) return new_ir_blocks
[ "def", "remove_end_optionals", "(", "ir_blocks", ")", ":", "new_ir_blocks", "=", "[", "]", "for", "block", "in", "ir_blocks", ":", "if", "not", "isinstance", "(", "block", ",", "EndOptional", ")", ":", "new_ir_blocks", ".", "append", "(", "block", ")", "return", "new_ir_blocks" ]
Return a list of IR blocks as a copy of the original, with EndOptional blocks removed.
[ "Return", "a", "list", "of", "IR", "blocks", "as", "a", "copy", "of", "the", "original", "with", "EndOptional", "blocks", "removed", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L340-L346
246,892
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_common.py
OutputContextVertex.validate
def validate(self): """Validate that the OutputContextVertex is correctly representable.""" super(OutputContextVertex, self).validate() if self.location.field is not None: raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location))
python
def validate(self): super(OutputContextVertex, self).validate() if self.location.field is not None: raise ValueError(u'Expected location at a vertex, but got: {}'.format(self.location))
[ "def", "validate", "(", "self", ")", ":", "super", "(", "OutputContextVertex", ",", "self", ")", ".", "validate", "(", ")", "if", "self", ".", "location", ".", "field", "is", "not", "None", ":", "raise", "ValueError", "(", "u'Expected location at a vertex, but got: {}'", ".", "format", "(", "self", ".", "location", ")", ")" ]
Validate that the OutputContextVertex is correctly representable.
[ "Validate", "that", "the", "OutputContextVertex", "is", "correctly", "representable", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_common.py#L35-L40
246,893
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
lower_has_substring_binary_compositions
def lower_has_substring_binary_compositions(ir_blocks): """Lower Filter blocks that use the "has_substring" operation into MATCH-representable form.""" def visitor_fn(expression): """Rewrite BinaryComposition expressions with "has_substring" into representable form.""" # The implementation of "has_substring" must use the LIKE operator in MATCH, and must # prepend and append "%" symbols to the substring being matched. # We transform any structures that resemble the following: # BinaryComposition(u'has_substring', X, Y) # into the following: # BinaryComposition( # u'LIKE', # X, # BinaryComposition( # u'+', # Literal("%"), # BinaryComposition( # u'+', # Y, # Literal("%") # ) # ) # ) if not isinstance(expression, BinaryComposition) or expression.operator != u'has_substring': return expression return BinaryComposition( u'LIKE', expression.left, BinaryComposition( u'+', Literal('%'), BinaryComposition( u'+', expression.right, Literal('%') ) ) ) new_ir_blocks = [ block.visit_and_update_expressions(visitor_fn) for block in ir_blocks ] return new_ir_blocks
python
def lower_has_substring_binary_compositions(ir_blocks): def visitor_fn(expression): """Rewrite BinaryComposition expressions with "has_substring" into representable form.""" # The implementation of "has_substring" must use the LIKE operator in MATCH, and must # prepend and append "%" symbols to the substring being matched. # We transform any structures that resemble the following: # BinaryComposition(u'has_substring', X, Y) # into the following: # BinaryComposition( # u'LIKE', # X, # BinaryComposition( # u'+', # Literal("%"), # BinaryComposition( # u'+', # Y, # Literal("%") # ) # ) # ) if not isinstance(expression, BinaryComposition) or expression.operator != u'has_substring': return expression return BinaryComposition( u'LIKE', expression.left, BinaryComposition( u'+', Literal('%'), BinaryComposition( u'+', expression.right, Literal('%') ) ) ) new_ir_blocks = [ block.visit_and_update_expressions(visitor_fn) for block in ir_blocks ] return new_ir_blocks
[ "def", "lower_has_substring_binary_compositions", "(", "ir_blocks", ")", ":", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Rewrite BinaryComposition expressions with \"has_substring\" into representable form.\"\"\"", "# The implementation of \"has_substring\" must use the LIKE operator in MATCH, and must", "# prepend and append \"%\" symbols to the substring being matched.", "# We transform any structures that resemble the following:", "# BinaryComposition(u'has_substring', X, Y)", "# into the following:", "# BinaryComposition(", "# u'LIKE',", "# X,", "# BinaryComposition(", "# u'+',", "# Literal(\"%\"),", "# BinaryComposition(", "# u'+',", "# Y,", "# Literal(\"%\")", "# )", "# )", "# )", "if", "not", "isinstance", "(", "expression", ",", "BinaryComposition", ")", "or", "expression", ".", "operator", "!=", "u'has_substring'", ":", "return", "expression", "return", "BinaryComposition", "(", "u'LIKE'", ",", "expression", ".", "left", ",", "BinaryComposition", "(", "u'+'", ",", "Literal", "(", "'%'", ")", ",", "BinaryComposition", "(", "u'+'", ",", "expression", ".", "right", ",", "Literal", "(", "'%'", ")", ")", ")", ")", "new_ir_blocks", "=", "[", "block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "for", "block", "in", "ir_blocks", "]", "return", "new_ir_blocks" ]
Lower Filter blocks that use the "has_substring" operation into MATCH-representable form.
[ "Lower", "Filter", "blocks", "that", "use", "the", "has_substring", "operation", "into", "MATCH", "-", "representable", "form", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L96-L140
246,894
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
truncate_repeated_single_step_traversals
def truncate_repeated_single_step_traversals(match_query): """Truncate one-step traversals that overlap a previous traversal location.""" # Such traversals frequently happen as side-effects of the lowering process # of Backtrack blocks, and needlessly complicate the executed queries. new_match_traversals = [] visited_locations = set() for current_match_traversal in match_query.match_traversals: ignore_traversal = False if len(current_match_traversal) == 1: # Single-step traversal detected. If its location was visited already, ignore it. single_step = current_match_traversal[0] if single_step.as_block is None: raise AssertionError(u'Unexpectedly found a single-step traversal with no as_block:' u' {} {}'.format(current_match_traversal, match_query)) if single_step.as_block.location in visited_locations: # This location was visited before, omit the traversal. ignore_traversal = True if not ignore_traversal: # For each step in this traversal, mark its location as visited. for step in current_match_traversal: if step.as_block is not None: visited_locations.add(step.as_block.location) new_match_traversals.append(current_match_traversal) return match_query._replace(match_traversals=new_match_traversals)
python
def truncate_repeated_single_step_traversals(match_query): # Such traversals frequently happen as side-effects of the lowering process # of Backtrack blocks, and needlessly complicate the executed queries. new_match_traversals = [] visited_locations = set() for current_match_traversal in match_query.match_traversals: ignore_traversal = False if len(current_match_traversal) == 1: # Single-step traversal detected. If its location was visited already, ignore it. single_step = current_match_traversal[0] if single_step.as_block is None: raise AssertionError(u'Unexpectedly found a single-step traversal with no as_block:' u' {} {}'.format(current_match_traversal, match_query)) if single_step.as_block.location in visited_locations: # This location was visited before, omit the traversal. ignore_traversal = True if not ignore_traversal: # For each step in this traversal, mark its location as visited. for step in current_match_traversal: if step.as_block is not None: visited_locations.add(step.as_block.location) new_match_traversals.append(current_match_traversal) return match_query._replace(match_traversals=new_match_traversals)
[ "def", "truncate_repeated_single_step_traversals", "(", "match_query", ")", ":", "# Such traversals frequently happen as side-effects of the lowering process", "# of Backtrack blocks, and needlessly complicate the executed queries.", "new_match_traversals", "=", "[", "]", "visited_locations", "=", "set", "(", ")", "for", "current_match_traversal", "in", "match_query", ".", "match_traversals", ":", "ignore_traversal", "=", "False", "if", "len", "(", "current_match_traversal", ")", "==", "1", ":", "# Single-step traversal detected. If its location was visited already, ignore it.", "single_step", "=", "current_match_traversal", "[", "0", "]", "if", "single_step", ".", "as_block", "is", "None", ":", "raise", "AssertionError", "(", "u'Unexpectedly found a single-step traversal with no as_block:'", "u' {} {}'", ".", "format", "(", "current_match_traversal", ",", "match_query", ")", ")", "if", "single_step", ".", "as_block", ".", "location", "in", "visited_locations", ":", "# This location was visited before, omit the traversal.", "ignore_traversal", "=", "True", "if", "not", "ignore_traversal", ":", "# For each step in this traversal, mark its location as visited.", "for", "step", "in", "current_match_traversal", ":", "if", "step", ".", "as_block", "is", "not", "None", ":", "visited_locations", ".", "add", "(", "step", ".", "as_block", ".", "location", ")", "new_match_traversals", ".", "append", "(", "current_match_traversal", ")", "return", "match_query", ".", "_replace", "(", "match_traversals", "=", "new_match_traversals", ")" ]
Truncate one-step traversals that overlap a previous traversal location.
[ "Truncate", "one", "-", "step", "traversals", "that", "overlap", "a", "previous", "traversal", "location", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L143-L171
246,895
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
_flatten_location_translations
def _flatten_location_translations(location_translations): """If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation. """ sources_to_process = set(six.iterkeys(location_translations)) def _update_translation(source): """Return the proper (fully-flattened) translation for the given location.""" destination = location_translations[source] if destination not in location_translations: # "destination" cannot be translated, no further flattening required. return destination else: # "destination" can itself be translated -- do so, # and then flatten "source" to the final translation as well. sources_to_process.discard(destination) final_destination = _update_translation(destination) location_translations[source] = final_destination return final_destination while sources_to_process: _update_translation(sources_to_process.pop())
python
def _flatten_location_translations(location_translations): sources_to_process = set(six.iterkeys(location_translations)) def _update_translation(source): """Return the proper (fully-flattened) translation for the given location.""" destination = location_translations[source] if destination not in location_translations: # "destination" cannot be translated, no further flattening required. return destination else: # "destination" can itself be translated -- do so, # and then flatten "source" to the final translation as well. sources_to_process.discard(destination) final_destination = _update_translation(destination) location_translations[source] = final_destination return final_destination while sources_to_process: _update_translation(sources_to_process.pop())
[ "def", "_flatten_location_translations", "(", "location_translations", ")", ":", "sources_to_process", "=", "set", "(", "six", ".", "iterkeys", "(", "location_translations", ")", ")", "def", "_update_translation", "(", "source", ")", ":", "\"\"\"Return the proper (fully-flattened) translation for the given location.\"\"\"", "destination", "=", "location_translations", "[", "source", "]", "if", "destination", "not", "in", "location_translations", ":", "# \"destination\" cannot be translated, no further flattening required.", "return", "destination", "else", ":", "# \"destination\" can itself be translated -- do so,", "# and then flatten \"source\" to the final translation as well.", "sources_to_process", ".", "discard", "(", "destination", ")", "final_destination", "=", "_update_translation", "(", "destination", ")", "location_translations", "[", "source", "]", "=", "final_destination", "return", "final_destination", "while", "sources_to_process", ":", "_update_translation", "(", "sources_to_process", ".", "pop", "(", ")", ")" ]
If location A translates to B, and B to C, then make A translate directly to C. Args: location_translations: dict of Location -> Location, where the key translates to the value. Mutated in place for efficiency and simplicity of implementation.
[ "If", "location", "A", "translates", "to", "B", "and", "B", "to", "C", "then", "make", "A", "translate", "directly", "to", "C", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L224-L248
246,896
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
_translate_equivalent_locations
def _translate_equivalent_locations(match_query, location_translations): """Translate Location objects into their equivalent locations, based on the given dict.""" new_match_traversals = [] def visitor_fn(expression): """Expression visitor function used to rewrite expressions with updated Location data.""" if isinstance(expression, (ContextField, GlobalContextField)): old_location = expression.location.at_vertex() new_location = location_translations.get(old_location, old_location) if expression.location.field is not None: new_location = new_location.navigate_to_field(expression.location.field) # The Expression could be one of many types, including: # - ContextField # - GlobalContextField # We determine its exact class to make sure we return an object of the same class # as the expression being replaced. expression_cls = type(expression) return expression_cls(new_location, expression.field_type) elif isinstance(expression, ContextFieldExistence): old_location = expression.location new_location = location_translations.get(old_location, old_location) return ContextFieldExistence(new_location) elif isinstance(expression, FoldedContextField): # Update the Location within FoldedContextField old_location = expression.fold_scope_location.base_location new_location = location_translations.get(old_location, old_location) fold_path = expression.fold_scope_location.fold_path fold_field = expression.fold_scope_location.field new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field) field_type = expression.field_type return FoldedContextField(new_fold_scope_location, field_type) else: return expression # Rewrite the Locations in the steps of each MATCH traversal. for current_match_traversal in match_query.match_traversals: new_traversal = [] for step in current_match_traversal: new_step = step # If the root_block is a Backtrack, translate its Location if necessary. 
if isinstance(new_step.root_block, Backtrack): old_location = new_step.root_block.location if old_location in location_translations: new_location = location_translations[old_location] new_step = new_step._replace(root_block=Backtrack(new_location)) # If the as_block exists, translate its Location if necessary. if new_step.as_block is not None: old_location = new_step.as_block.location if old_location in location_translations: new_location = location_translations[old_location] new_step = new_step._replace(as_block=MarkLocation(new_location)) # If the where_block exists, update any Location objects in its predicate. if new_step.where_block is not None: new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn) new_step = new_step._replace(where_block=new_where_block) new_traversal.append(new_step) new_match_traversals.append(new_traversal) new_folds = {} # Update the Location within each FoldScopeLocation for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds): fold_path = fold_scope_location.fold_path fold_field = fold_scope_location.field old_location = fold_scope_location.base_location new_location = location_translations.get(old_location, old_location) new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field) new_folds[new_fold_scope_location] = fold_ir_blocks # Rewrite the Locations in the ConstructResult output block. new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn) # Rewrite the Locations in the global where block. new_where_block = None if match_query.where_block is not None: new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn) return match_query._replace(match_traversals=new_match_traversals, folds=new_folds, output_block=new_output_block, where_block=new_where_block)
python
def _translate_equivalent_locations(match_query, location_translations): new_match_traversals = [] def visitor_fn(expression): """Expression visitor function used to rewrite expressions with updated Location data.""" if isinstance(expression, (ContextField, GlobalContextField)): old_location = expression.location.at_vertex() new_location = location_translations.get(old_location, old_location) if expression.location.field is not None: new_location = new_location.navigate_to_field(expression.location.field) # The Expression could be one of many types, including: # - ContextField # - GlobalContextField # We determine its exact class to make sure we return an object of the same class # as the expression being replaced. expression_cls = type(expression) return expression_cls(new_location, expression.field_type) elif isinstance(expression, ContextFieldExistence): old_location = expression.location new_location = location_translations.get(old_location, old_location) return ContextFieldExistence(new_location) elif isinstance(expression, FoldedContextField): # Update the Location within FoldedContextField old_location = expression.fold_scope_location.base_location new_location = location_translations.get(old_location, old_location) fold_path = expression.fold_scope_location.fold_path fold_field = expression.fold_scope_location.field new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field) field_type = expression.field_type return FoldedContextField(new_fold_scope_location, field_type) else: return expression # Rewrite the Locations in the steps of each MATCH traversal. for current_match_traversal in match_query.match_traversals: new_traversal = [] for step in current_match_traversal: new_step = step # If the root_block is a Backtrack, translate its Location if necessary. 
if isinstance(new_step.root_block, Backtrack): old_location = new_step.root_block.location if old_location in location_translations: new_location = location_translations[old_location] new_step = new_step._replace(root_block=Backtrack(new_location)) # If the as_block exists, translate its Location if necessary. if new_step.as_block is not None: old_location = new_step.as_block.location if old_location in location_translations: new_location = location_translations[old_location] new_step = new_step._replace(as_block=MarkLocation(new_location)) # If the where_block exists, update any Location objects in its predicate. if new_step.where_block is not None: new_where_block = new_step.where_block.visit_and_update_expressions(visitor_fn) new_step = new_step._replace(where_block=new_where_block) new_traversal.append(new_step) new_match_traversals.append(new_traversal) new_folds = {} # Update the Location within each FoldScopeLocation for fold_scope_location, fold_ir_blocks in six.iteritems(match_query.folds): fold_path = fold_scope_location.fold_path fold_field = fold_scope_location.field old_location = fold_scope_location.base_location new_location = location_translations.get(old_location, old_location) new_fold_scope_location = FoldScopeLocation(new_location, fold_path, field=fold_field) new_folds[new_fold_scope_location] = fold_ir_blocks # Rewrite the Locations in the ConstructResult output block. new_output_block = match_query.output_block.visit_and_update_expressions(visitor_fn) # Rewrite the Locations in the global where block. new_where_block = None if match_query.where_block is not None: new_where_block = match_query.where_block.visit_and_update_expressions(visitor_fn) return match_query._replace(match_traversals=new_match_traversals, folds=new_folds, output_block=new_output_block, where_block=new_where_block)
[ "def", "_translate_equivalent_locations", "(", "match_query", ",", "location_translations", ")", ":", "new_match_traversals", "=", "[", "]", "def", "visitor_fn", "(", "expression", ")", ":", "\"\"\"Expression visitor function used to rewrite expressions with updated Location data.\"\"\"", "if", "isinstance", "(", "expression", ",", "(", "ContextField", ",", "GlobalContextField", ")", ")", ":", "old_location", "=", "expression", ".", "location", ".", "at_vertex", "(", ")", "new_location", "=", "location_translations", ".", "get", "(", "old_location", ",", "old_location", ")", "if", "expression", ".", "location", ".", "field", "is", "not", "None", ":", "new_location", "=", "new_location", ".", "navigate_to_field", "(", "expression", ".", "location", ".", "field", ")", "# The Expression could be one of many types, including:", "# - ContextField", "# - GlobalContextField", "# We determine its exact class to make sure we return an object of the same class", "# as the expression being replaced.", "expression_cls", "=", "type", "(", "expression", ")", "return", "expression_cls", "(", "new_location", ",", "expression", ".", "field_type", ")", "elif", "isinstance", "(", "expression", ",", "ContextFieldExistence", ")", ":", "old_location", "=", "expression", ".", "location", "new_location", "=", "location_translations", ".", "get", "(", "old_location", ",", "old_location", ")", "return", "ContextFieldExistence", "(", "new_location", ")", "elif", "isinstance", "(", "expression", ",", "FoldedContextField", ")", ":", "# Update the Location within FoldedContextField", "old_location", "=", "expression", ".", "fold_scope_location", ".", "base_location", "new_location", "=", "location_translations", ".", "get", "(", "old_location", ",", "old_location", ")", "fold_path", "=", "expression", ".", "fold_scope_location", ".", "fold_path", "fold_field", "=", "expression", ".", "fold_scope_location", ".", "field", "new_fold_scope_location", "=", "FoldScopeLocation", "(", "new_location", 
",", "fold_path", ",", "field", "=", "fold_field", ")", "field_type", "=", "expression", ".", "field_type", "return", "FoldedContextField", "(", "new_fold_scope_location", ",", "field_type", ")", "else", ":", "return", "expression", "# Rewrite the Locations in the steps of each MATCH traversal.", "for", "current_match_traversal", "in", "match_query", ".", "match_traversals", ":", "new_traversal", "=", "[", "]", "for", "step", "in", "current_match_traversal", ":", "new_step", "=", "step", "# If the root_block is a Backtrack, translate its Location if necessary.", "if", "isinstance", "(", "new_step", ".", "root_block", ",", "Backtrack", ")", ":", "old_location", "=", "new_step", ".", "root_block", ".", "location", "if", "old_location", "in", "location_translations", ":", "new_location", "=", "location_translations", "[", "old_location", "]", "new_step", "=", "new_step", ".", "_replace", "(", "root_block", "=", "Backtrack", "(", "new_location", ")", ")", "# If the as_block exists, translate its Location if necessary.", "if", "new_step", ".", "as_block", "is", "not", "None", ":", "old_location", "=", "new_step", ".", "as_block", ".", "location", "if", "old_location", "in", "location_translations", ":", "new_location", "=", "location_translations", "[", "old_location", "]", "new_step", "=", "new_step", ".", "_replace", "(", "as_block", "=", "MarkLocation", "(", "new_location", ")", ")", "# If the where_block exists, update any Location objects in its predicate.", "if", "new_step", ".", "where_block", "is", "not", "None", ":", "new_where_block", "=", "new_step", ".", "where_block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "new_step", "=", "new_step", ".", "_replace", "(", "where_block", "=", "new_where_block", ")", "new_traversal", ".", "append", "(", "new_step", ")", "new_match_traversals", ".", "append", "(", "new_traversal", ")", "new_folds", "=", "{", "}", "# Update the Location within each FoldScopeLocation", "for", "fold_scope_location", ",", 
"fold_ir_blocks", "in", "six", ".", "iteritems", "(", "match_query", ".", "folds", ")", ":", "fold_path", "=", "fold_scope_location", ".", "fold_path", "fold_field", "=", "fold_scope_location", ".", "field", "old_location", "=", "fold_scope_location", ".", "base_location", "new_location", "=", "location_translations", ".", "get", "(", "old_location", ",", "old_location", ")", "new_fold_scope_location", "=", "FoldScopeLocation", "(", "new_location", ",", "fold_path", ",", "field", "=", "fold_field", ")", "new_folds", "[", "new_fold_scope_location", "]", "=", "fold_ir_blocks", "# Rewrite the Locations in the ConstructResult output block.", "new_output_block", "=", "match_query", ".", "output_block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "# Rewrite the Locations in the global where block.", "new_where_block", "=", "None", "if", "match_query", ".", "where_block", "is", "not", "None", ":", "new_where_block", "=", "match_query", ".", "where_block", ".", "visit_and_update_expressions", "(", "visitor_fn", ")", "return", "match_query", ".", "_replace", "(", "match_traversals", "=", "new_match_traversals", ",", "folds", "=", "new_folds", ",", "output_block", "=", "new_output_block", ",", "where_block", "=", "new_where_block", ")" ]
Translate Location objects into their equivalent locations, based on the given dict.
[ "Translate", "Location", "objects", "into", "their", "equivalent", "locations", "based", "on", "the", "given", "dict", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L251-L338
246,897
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
lower_folded_coerce_types_into_filter_blocks
def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks): """Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks.""" new_folded_ir_blocks = [] for block in folded_ir_blocks: if isinstance(block, CoerceType): new_block = convert_coerce_type_to_instanceof_filter(block) else: new_block = block new_folded_ir_blocks.append(new_block) return new_folded_ir_blocks
python
def lower_folded_coerce_types_into_filter_blocks(folded_ir_blocks): new_folded_ir_blocks = [] for block in folded_ir_blocks: if isinstance(block, CoerceType): new_block = convert_coerce_type_to_instanceof_filter(block) else: new_block = block new_folded_ir_blocks.append(new_block) return new_folded_ir_blocks
[ "def", "lower_folded_coerce_types_into_filter_blocks", "(", "folded_ir_blocks", ")", ":", "new_folded_ir_blocks", "=", "[", "]", "for", "block", "in", "folded_ir_blocks", ":", "if", "isinstance", "(", "block", ",", "CoerceType", ")", ":", "new_block", "=", "convert_coerce_type_to_instanceof_filter", "(", "block", ")", "else", ":", "new_block", "=", "block", "new_folded_ir_blocks", ".", "append", "(", "new_block", ")", "return", "new_folded_ir_blocks" ]
Lower CoerceType blocks into "INSTANCEOF" Filter blocks. Indended for folded IR blocks.
[ "Lower", "CoerceType", "blocks", "into", "INSTANCEOF", "Filter", "blocks", ".", "Indended", "for", "folded", "IR", "blocks", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L341-L352
246,898
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
remove_backtrack_blocks_from_fold
def remove_backtrack_blocks_from_fold(folded_ir_blocks): """Return a list of IR blocks with all Backtrack blocks removed.""" new_folded_ir_blocks = [] for block in folded_ir_blocks: if not isinstance(block, Backtrack): new_folded_ir_blocks.append(block) return new_folded_ir_blocks
python
def remove_backtrack_blocks_from_fold(folded_ir_blocks): new_folded_ir_blocks = [] for block in folded_ir_blocks: if not isinstance(block, Backtrack): new_folded_ir_blocks.append(block) return new_folded_ir_blocks
[ "def", "remove_backtrack_blocks_from_fold", "(", "folded_ir_blocks", ")", ":", "new_folded_ir_blocks", "=", "[", "]", "for", "block", "in", "folded_ir_blocks", ":", "if", "not", "isinstance", "(", "block", ",", "Backtrack", ")", ":", "new_folded_ir_blocks", ".", "append", "(", "block", ")", "return", "new_folded_ir_blocks" ]
Return a list of IR blocks with all Backtrack blocks removed.
[ "Return", "a", "list", "of", "IR", "blocks", "with", "all", "Backtrack", "blocks", "removed", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L355-L361
246,899
kensho-technologies/graphql-compiler
graphql_compiler/compiler/ir_lowering_match/ir_lowering.py
truncate_repeated_single_step_traversals_in_sub_queries
def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query): """For each sub-query, remove one-step traversals that overlap a previous traversal location.""" lowered_match_queries = [] for match_query in compound_match_query.match_queries: new_match_query = truncate_repeated_single_step_traversals(match_query) lowered_match_queries.append(new_match_query) return compound_match_query._replace(match_queries=lowered_match_queries)
python
def truncate_repeated_single_step_traversals_in_sub_queries(compound_match_query): lowered_match_queries = [] for match_query in compound_match_query.match_queries: new_match_query = truncate_repeated_single_step_traversals(match_query) lowered_match_queries.append(new_match_query) return compound_match_query._replace(match_queries=lowered_match_queries)
[ "def", "truncate_repeated_single_step_traversals_in_sub_queries", "(", "compound_match_query", ")", ":", "lowered_match_queries", "=", "[", "]", "for", "match_query", "in", "compound_match_query", ".", "match_queries", ":", "new_match_query", "=", "truncate_repeated_single_step_traversals", "(", "match_query", ")", "lowered_match_queries", ".", "append", "(", "new_match_query", ")", "return", "compound_match_query", ".", "_replace", "(", "match_queries", "=", "lowered_match_queries", ")" ]
For each sub-query, remove one-step traversals that overlap a previous traversal location.
[ "For", "each", "sub", "-", "query", "remove", "one", "-", "step", "traversals", "that", "overlap", "a", "previous", "traversal", "location", "." ]
f6079c6d10f64932f6b3af309b79bcea2123ca8f
https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/ir_lowering.py#L364-L371