id (int32, 0-252k) | repo (stringlengths, 7-55) | path (stringlengths, 4-127) | func_name (stringlengths, 1-88) | original_string (stringlengths, 75-19.8k) | language (stringclasses, 1 value) | code (stringlengths, 51-19.8k) | code_tokens (sequence) | docstring (stringlengths, 3-17.3k) | docstring_tokens (sequence) | sha (stringlengths, 40) | url (stringlengths, 87-242) |
---|---|---|---|---|---|---|---|---|---|---|---|
246,900 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _prune_traverse_using_omitted_locations | def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations,
complex_optional_roots, location_to_optional_roots):
"""Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
complex_optional_roots: list of all @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed.
"""
new_match_traversal = []
for step in match_traversal:
new_step = step
if isinstance(step.root_block, Traverse) and step.root_block.optional:
current_location = step.as_block.location
optional_root_locations_stack = location_to_optional_roots.get(current_location, None)
if optional_root_locations_stack is None:
raise AssertionError(u'Found optional Traverse location {} that was not present '
u'in location_to_optional_roots dict: {}'
.format(current_location, location_to_optional_roots))
optional_root_location = optional_root_locations_stack[-1]
if optional_root_location in omitted_locations:
# Add filter to indicate that the omitted edge(s) should not exist
field_name = step.root_block.get_field_name()
new_predicate = filter_edge_field_non_existence(LocalField(field_name))
old_filter = new_match_traversal[-1].where_block
if old_filter is not None:
new_predicate = BinaryComposition(u'&&', old_filter.predicate, new_predicate)
new_match_step = new_match_traversal[-1]._replace(
where_block=Filter(new_predicate))
new_match_traversal[-1] = new_match_step
# Discard all steps following the omitted @optional traverse
new_step = None
elif optional_root_location in complex_optional_roots:
# Any non-omitted @optional traverse (that expands vertex fields)
# becomes a normal mandatory traverse (discard the optional flag).
new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name)
new_step = step._replace(root_block=new_root_block)
else:
# The current optional traverse is a "simple optional" (one that does not
# expand vertex fields). No further action is required since MATCH supports it.
pass
# If new_step was set to None,
# we have encountered a Traverse that is within an omitted location.
# We discard the remainder of the match traversal (everything following is also omitted).
if new_step is None:
break
else:
new_match_traversal.append(new_step)
return new_match_traversal | python | def _prune_traverse_using_omitted_locations(match_traversal, omitted_locations,
complex_optional_roots, location_to_optional_roots):
new_match_traversal = []
for step in match_traversal:
new_step = step
if isinstance(step.root_block, Traverse) and step.root_block.optional:
current_location = step.as_block.location
optional_root_locations_stack = location_to_optional_roots.get(current_location, None)
if optional_root_locations_stack is None:
raise AssertionError(u'Found optional Traverse location {} that was not present '
u'in location_to_optional_roots dict: {}'
.format(current_location, location_to_optional_roots))
optional_root_location = optional_root_locations_stack[-1]
if optional_root_location in omitted_locations:
# Add filter to indicate that the omitted edge(s) should not exist
field_name = step.root_block.get_field_name()
new_predicate = filter_edge_field_non_existence(LocalField(field_name))
old_filter = new_match_traversal[-1].where_block
if old_filter is not None:
new_predicate = BinaryComposition(u'&&', old_filter.predicate, new_predicate)
new_match_step = new_match_traversal[-1]._replace(
where_block=Filter(new_predicate))
new_match_traversal[-1] = new_match_step
# Discard all steps following the omitted @optional traverse
new_step = None
elif optional_root_location in complex_optional_roots:
# Any non-omitted @optional traverse (that expands vertex fields)
# becomes a normal mandatory traverse (discard the optional flag).
new_root_block = Traverse(step.root_block.direction, step.root_block.edge_name)
new_step = step._replace(root_block=new_root_block)
else:
# The current optional traverse is a "simple optional" (one that does not
# expand vertex fields). No further action is required since MATCH supports it.
pass
# If new_step was set to None,
# we have encountered a Traverse that is within an omitted location.
# We discard the remainder of the match traversal (everything following is also omitted).
if new_step is None:
break
else:
new_match_traversal.append(new_step)
return new_match_traversal | [
"def",
"_prune_traverse_using_omitted_locations",
"(",
"match_traversal",
",",
"omitted_locations",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"new_match_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"match_traversal",
":",
"new_step",
"=",
"step",
"if",
"isinstance",
"(",
"step",
".",
"root_block",
",",
"Traverse",
")",
"and",
"step",
".",
"root_block",
".",
"optional",
":",
"current_location",
"=",
"step",
".",
"as_block",
".",
"location",
"optional_root_locations_stack",
"=",
"location_to_optional_roots",
".",
"get",
"(",
"current_location",
",",
"None",
")",
"optional_root_location",
"=",
"optional_root_locations_stack",
"[",
"-",
"1",
"]",
"if",
"optional_root_location",
"is",
"None",
":",
"raise",
"AssertionError",
"(",
"u'Found optional Traverse location {} that was not present '",
"u'in location_to_optional_roots dict: {}'",
".",
"format",
"(",
"current_location",
",",
"location_to_optional_roots",
")",
")",
"elif",
"optional_root_location",
"in",
"omitted_locations",
":",
"# Add filter to indicate that the omitted edge(s) shoud not exist",
"field_name",
"=",
"step",
".",
"root_block",
".",
"get_field_name",
"(",
")",
"new_predicate",
"=",
"filter_edge_field_non_existence",
"(",
"LocalField",
"(",
"field_name",
")",
")",
"old_filter",
"=",
"new_match_traversal",
"[",
"-",
"1",
"]",
".",
"where_block",
"if",
"old_filter",
"is",
"not",
"None",
":",
"new_predicate",
"=",
"BinaryComposition",
"(",
"u'&&'",
",",
"old_filter",
".",
"predicate",
",",
"new_predicate",
")",
"new_match_step",
"=",
"new_match_traversal",
"[",
"-",
"1",
"]",
".",
"_replace",
"(",
"where_block",
"=",
"Filter",
"(",
"new_predicate",
")",
")",
"new_match_traversal",
"[",
"-",
"1",
"]",
"=",
"new_match_step",
"# Discard all steps following the omitted @optional traverse",
"new_step",
"=",
"None",
"elif",
"optional_root_location",
"in",
"complex_optional_roots",
":",
"# Any non-omitted @optional traverse (that expands vertex fields)",
"# becomes a normal mandatory traverse (discard the optional flag).",
"new_root_block",
"=",
"Traverse",
"(",
"step",
".",
"root_block",
".",
"direction",
",",
"step",
".",
"root_block",
".",
"edge_name",
")",
"new_step",
"=",
"step",
".",
"_replace",
"(",
"root_block",
"=",
"new_root_block",
")",
"else",
":",
"# The current optional traverse is a \"simple optional\" (one that does not",
"# expand vertex fields). No further action is required since MATCH supports it.",
"pass",
"# If new_step was set to None,",
"# we have encountered a Traverse that is within an omitted location.",
"# We discard the remainder of the match traversal (everything following is also omitted).",
"if",
"new_step",
"is",
"None",
":",
"break",
"else",
":",
"new_match_traversal",
".",
"append",
"(",
"new_step",
")",
"return",
"new_match_traversal"
] | Return a prefix of the given traverse, excluding any blocks after an omitted optional.
Given a subset (omitted_locations) of complex_optional_roots, return a new match traversal
removing all MatchStep objects that are within any omitted location.
Args:
match_traversal: list of MatchStep objects to be pruned
omitted_locations: subset of complex_optional_roots to be omitted
complex_optional_roots: list of all @optional locations (location immediately preceding
an @optional traverse) that expand vertex fields
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
list of MatchStep objects as a copy of the given match traversal
with all steps within any omitted location removed. | [
"Return",
"a",
"prefix",
"of",
"the",
"given",
"traverse",
"excluding",
"any",
"blocks",
"after",
"an",
"omitted",
"optional",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L18-L82 |
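The pruning logic above cuts the traversal at the first step rooted in an omitted @optional and discards everything after it. A minimal sketch of that prefix-cutting loop follows, assuming a toy `ToyStep` namedtuple instead of the compiler's real `MatchStep` IR; `ToyStep`, `prune_at_omitted`, and the string locations are hypothetical names, and the sketch omits the edge-non-existence filter the real function attaches to the preceding step.

```python
from collections import namedtuple

# Hypothetical stand-in for the compiler's MatchStep: `optional_root` names the
# @optional root location a step descends from, or None for non-optional steps.
ToyStep = namedtuple('ToyStep', ['location', 'optional_root'])


def prune_at_omitted(steps, omitted_roots):
    """Keep only the prefix of steps that precedes any omitted optional root."""
    pruned = []
    for step in steps:
        if step.optional_root in omitted_roots:
            break  # this step and everything after it lies inside an omitted scope
        pruned.append(step)
    return pruned


steps = [ToyStep('base', None), ToyStep('child', 'base'), ToyStep('deeper', 'base')]
assert prune_at_omitted(steps, {'base'}) == [ToyStep('base', None)]
assert prune_at_omitted(steps, set()) == steps  # nothing omitted, nothing pruned
```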
246,901 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | convert_optional_traversals_to_compound_match_query | def convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots):
"""Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed
"""
tree = construct_optional_traversal_tree(
complex_optional_roots, location_to_optional_roots)
rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()
omitted_location_subsets = [
set(complex_optional_roots) - set(subset)
for subset in rooted_optional_root_location_subsets
]
sorted_omitted_location_subsets = sorted(omitted_location_subsets)
compound_match_traversals = []
for omitted_locations in reversed(sorted_omitted_location_subsets):
new_match_traversals = []
for match_traversal in match_query.match_traversals:
location = match_traversal[0].as_block.location
optional_root_locations_stack = location_to_optional_roots.get(location, None)
if optional_root_locations_stack is not None:
optional_root_location = optional_root_locations_stack[-1]
else:
optional_root_location = None
if optional_root_location is None or optional_root_location not in omitted_locations:
new_match_traversal = _prune_traverse_using_omitted_locations(
match_traversal, set(omitted_locations),
complex_optional_roots, location_to_optional_roots)
new_match_traversals.append(new_match_traversal)
else:
# The root_block is within an omitted scope.
# Discard the entire match traversal (do not append to new_match_traversals)
pass
compound_match_traversals.append(new_match_traversals)
match_queries = [
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
for match_traversals in compound_match_traversals
]
return CompoundMatchQuery(match_queries=match_queries) | python | def convert_optional_traversals_to_compound_match_query(
match_query, complex_optional_roots, location_to_optional_roots):
tree = construct_optional_traversal_tree(
complex_optional_roots, location_to_optional_roots)
rooted_optional_root_location_subsets = tree.get_all_rooted_subtrees_as_lists()
omitted_location_subsets = [
set(complex_optional_roots) - set(subset)
for subset in rooted_optional_root_location_subsets
]
sorted_omitted_location_subsets = sorted(omitted_location_subsets)
compound_match_traversals = []
for omitted_locations in reversed(sorted_omitted_location_subsets):
new_match_traversals = []
for match_traversal in match_query.match_traversals:
location = match_traversal[0].as_block.location
optional_root_locations_stack = location_to_optional_roots.get(location, None)
if optional_root_locations_stack is not None:
optional_root_location = optional_root_locations_stack[-1]
else:
optional_root_location = None
if optional_root_location is None or optional_root_location not in omitted_locations:
new_match_traversal = _prune_traverse_using_omitted_locations(
match_traversal, set(omitted_locations),
complex_optional_roots, location_to_optional_roots)
new_match_traversals.append(new_match_traversal)
else:
# The root_block is within an omitted scope.
# Discard the entire match traversal (do not append to new_match_traversals)
pass
compound_match_traversals.append(new_match_traversals)
match_queries = [
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
for match_traversals in compound_match_traversals
]
return CompoundMatchQuery(match_queries=match_queries) | [
"def",
"convert_optional_traversals_to_compound_match_query",
"(",
"match_query",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
":",
"tree",
"=",
"construct_optional_traversal_tree",
"(",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
"rooted_optional_root_location_subsets",
"=",
"tree",
".",
"get_all_rooted_subtrees_as_lists",
"(",
")",
"omitted_location_subsets",
"=",
"[",
"set",
"(",
"complex_optional_roots",
")",
"-",
"set",
"(",
"subset",
")",
"for",
"subset",
"in",
"rooted_optional_root_location_subsets",
"]",
"sorted_omitted_location_subsets",
"=",
"sorted",
"(",
"omitted_location_subsets",
")",
"compound_match_traversals",
"=",
"[",
"]",
"for",
"omitted_locations",
"in",
"reversed",
"(",
"sorted_omitted_location_subsets",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"for",
"match_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"location",
"=",
"match_traversal",
"[",
"0",
"]",
".",
"as_block",
".",
"location",
"optional_root_locations_stack",
"=",
"location_to_optional_roots",
".",
"get",
"(",
"location",
",",
"None",
")",
"if",
"optional_root_locations_stack",
"is",
"not",
"None",
":",
"optional_root_location",
"=",
"optional_root_locations_stack",
"[",
"-",
"1",
"]",
"else",
":",
"optional_root_location",
"=",
"None",
"if",
"optional_root_location",
"is",
"None",
"or",
"optional_root_location",
"not",
"in",
"omitted_locations",
":",
"new_match_traversal",
"=",
"_prune_traverse_using_omitted_locations",
"(",
"match_traversal",
",",
"set",
"(",
"omitted_locations",
")",
",",
"complex_optional_roots",
",",
"location_to_optional_roots",
")",
"new_match_traversals",
".",
"append",
"(",
"new_match_traversal",
")",
"else",
":",
"# The root_block is within an omitted scope.",
"# Discard the entire match traversal (do not append to new_match_traversals)",
"pass",
"compound_match_traversals",
".",
"append",
"(",
"new_match_traversals",
")",
"match_queries",
"=",
"[",
"MatchQuery",
"(",
"match_traversals",
"=",
"match_traversals",
",",
"folds",
"=",
"match_query",
".",
"folds",
",",
"output_block",
"=",
"match_query",
".",
"output_block",
",",
"where_block",
"=",
"match_query",
".",
"where_block",
",",
")",
"for",
"match_traversals",
"in",
"compound_match_traversals",
"]",
"return",
"CompoundMatchQuery",
"(",
"match_queries",
"=",
"match_queries",
")"
] | Return 2^n distinct MatchQuery objects in a CompoundMatchQuery.
Given a MatchQuery containing `n` optional traverses that expand vertex fields,
construct `2^n` different MatchQuery objects:
one for each possible subset of optional edges that can be followed.
For each edge `e` in a subset of optional edges chosen to be omitted,
discard all traversals following `e`, and add filters specifying that `e` *does not exist*.
Args:
match_query: MatchQuery object containing n `@optional` scopes which expand vertex fields
complex_optional_roots: list of @optional locations (location preceding an @optional
traverse) that expand vertex fields within
location_to_optional_roots: dict mapping from location -> optional_roots where location is
within some number of @optionals and optional_roots is a list
of optional root locations preceding the successive @optional
scopes within which the location resides
Returns:
CompoundMatchQuery object containing 2^n MatchQuery objects,
one for each possible subset of the n optional edges being followed | [
"Return",
"2^n",
"distinct",
"MatchQuery",
"objects",
"in",
"a",
"CompoundMatchQuery",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L85-L151 |
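The 2^n count above comes from enumerating subsets of the optional roots; the real code restricts itself to rooted subtrees of the optional-nesting tree (omitting a parent @optional necessarily omits its nested children), but the core enumeration is a power set. A brief sketch under that simplification, with hypothetical names:

```python
from itertools import chain, combinations


def power_set(items):
    """Yield all 2^n subsets of the given collection, as tuples."""
    items = list(items)
    return chain.from_iterable(combinations(items, r) for r in range(len(items) + 1))


complex_optional_roots = ['root_a', 'root_b']
kept_subsets = list(power_set(complex_optional_roots))
assert len(kept_subsets) == 2 ** len(complex_optional_roots)

# Each kept subset determines the complementary omitted set, mirroring
# `set(complex_optional_roots) - set(subset)` in the function above.
omitted = [set(complex_optional_roots) - set(kept) for kept in kept_subsets]
assert {frozenset(s) for s in omitted} == {
    frozenset(), frozenset(['root_a']), frozenset(['root_b']),
    frozenset(['root_a', 'root_b']),
}
```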
246,902 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _get_present_locations | def _get_present_locations(match_traversals):
"""Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations.
"""
present_locations = set()
present_non_optional_locations = set()
for match_traversal in match_traversals:
for step in match_traversal:
if step.as_block is not None:
location_name, _ = step.as_block.location.get_location_name()
present_locations.add(location_name)
if isinstance(step.root_block, Traverse) and not step.root_block.optional:
present_non_optional_locations.add(location_name)
if not present_non_optional_locations.issubset(present_locations):
raise AssertionError(u'present_non_optional_locations {} was not a subset of '
u'present_locations {}. This should never happen.'
.format(present_non_optional_locations, present_locations))
return present_locations, present_non_optional_locations | python | def _get_present_locations(match_traversals):
present_locations = set()
present_non_optional_locations = set()
for match_traversal in match_traversals:
for step in match_traversal:
if step.as_block is not None:
location_name, _ = step.as_block.location.get_location_name()
present_locations.add(location_name)
if isinstance(step.root_block, Traverse) and not step.root_block.optional:
present_non_optional_locations.add(location_name)
if not present_non_optional_locations.issubset(present_locations):
raise AssertionError(u'present_non_optional_locations {} was not a subset of '
u'present_locations {}. This should never happen.'
.format(present_non_optional_locations, present_locations))
return present_locations, present_non_optional_locations | [
"def",
"_get_present_locations",
"(",
"match_traversals",
")",
":",
"present_locations",
"=",
"set",
"(",
")",
"present_non_optional_locations",
"=",
"set",
"(",
")",
"for",
"match_traversal",
"in",
"match_traversals",
":",
"for",
"step",
"in",
"match_traversal",
":",
"if",
"step",
".",
"as_block",
"is",
"not",
"None",
":",
"location_name",
",",
"_",
"=",
"step",
".",
"as_block",
".",
"location",
".",
"get_location_name",
"(",
")",
"present_locations",
".",
"add",
"(",
"location_name",
")",
"if",
"isinstance",
"(",
"step",
".",
"root_block",
",",
"Traverse",
")",
"and",
"not",
"step",
".",
"root_block",
".",
"optional",
":",
"present_non_optional_locations",
".",
"add",
"(",
"location_name",
")",
"if",
"not",
"present_non_optional_locations",
".",
"issubset",
"(",
"present_locations",
")",
":",
"raise",
"AssertionError",
"(",
"u'present_non_optional_locations {} was not a subset of '",
"u'present_locations {}. THis hould never happen.'",
".",
"format",
"(",
"present_non_optional_locations",
",",
"present_locations",
")",
")",
"return",
"present_locations",
",",
"present_non_optional_locations"
] | Return the set of locations and non-optional locations present in the given match traversals.
When enumerating the possibilities for optional traversals,
the resulting match traversals may have sections of the query omitted.
These locations will not be included in the returned `present_locations`.
All of the above locations that are not optional traverse locations
will be included in present_non_optional_locations.
Args:
match_traversals: one possible list of match traversals generated from a query
containing @optional traversal(s)
Returns:
tuple (present_locations, present_non_optional_locations):
- present_locations: set of all locations present in the given match traversals
- present_non_optional_locations: set of all locations present in the match traversals
that are not reached through optional traverses.
Guaranteed to be a subset of present_locations. | [
"Return",
"the",
"set",
"of",
"locations",
"and",
"non",
"-",
"optional",
"locations",
"present",
"in",
"the",
"given",
"match",
"traversals",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L154-L190 |
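A self-contained sketch of what `_get_present_locations` computes, modeling each step as a `(location_name, is_optional_traverse)` pair rather than a real `MatchStep` (a toy representation, not the compiler's types):

```python
def get_present_locations(match_traversals):
    """Return (present, present_non_optional) sets of location names."""
    present, present_non_optional = set(), set()
    for traversal in match_traversals:
        for location_name, is_optional_traverse in traversal:
            present.add(location_name)
            if not is_optional_traverse:
                present_non_optional.add(location_name)
    return present, present_non_optional


present, non_optional = get_present_locations([[('base', False), ('child', True)]])
assert present == {'base', 'child'} and non_optional == {'base'}
assert non_optional.issubset(present)  # the invariant the real function asserts
```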
246,903 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | prune_non_existent_outputs | def prune_non_existent_outputs(compound_match_query):
"""Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks.
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects
"""
if len(compound_match_query.match_queries) == 1:
return compound_match_query
elif len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery with '
u'an empty list of MatchQuery objects.')
else:
match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
output_block = match_query.output_block
present_locations_tuple = _get_present_locations(match_traversals)
present_locations, present_non_optional_locations = present_locations_tuple
new_output_fields = {}
for output_name, expression in six.iteritems(output_block.fields):
if isinstance(expression, OutputContextField):
# An OutputContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
location_name, _ = expression.location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Non-optional output location {} was not found in '
u'present_locations: {}'
.format(expression.location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, FoldedContextField):
# A FoldedContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
base_location = expression.fold_scope_location.base_location
location_name, _ = base_location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Folded output location {} was not found in '
u'present_locations: {}'
.format(base_location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, TernaryConditional):
# A TernaryConditional indicates that this output is within some optional scope.
# This may be pruned away based on the contents of present_locations.
location_name, _ = expression.if_true.location.get_location_name()
if location_name in present_locations:
if location_name in present_non_optional_locations:
new_output_fields[output_name] = expression.if_true
else:
new_output_fields[output_name] = expression
else:
raise AssertionError(u'Invalid expression of type {} in output block: '
u'{}'.format(type(expression).__name__, output_block))
match_queries.append(
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=ConstructResult(new_output_fields),
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=match_queries) | python | def prune_non_existent_outputs(compound_match_query):
if len(compound_match_query.match_queries) == 1:
return compound_match_query
elif len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery with '
u'an empty list of MatchQuery objects.')
else:
match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
output_block = match_query.output_block
present_locations_tuple = _get_present_locations(match_traversals)
present_locations, present_non_optional_locations = present_locations_tuple
new_output_fields = {}
for output_name, expression in six.iteritems(output_block.fields):
if isinstance(expression, OutputContextField):
# An OutputContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
location_name, _ = expression.location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Non-optional output location {} was not found in '
u'present_locations: {}'
.format(expression.location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, FoldedContextField):
# A FoldedContextField as an output Expression indicates that we are not
# within an @optional scope. Therefore, the location this output uses must
# be in present_locations, and the output is never pruned.
base_location = expression.fold_scope_location.base_location
location_name, _ = base_location.get_location_name()
if location_name not in present_locations:
raise AssertionError(u'Folded output location {} was not found in '
u'present_locations: {}'
.format(base_location, present_locations))
new_output_fields[output_name] = expression
elif isinstance(expression, TernaryConditional):
# A TernaryConditional indicates that this output is within some optional scope.
# This may be pruned away based on the contents of present_locations.
location_name, _ = expression.if_true.location.get_location_name()
if location_name in present_locations:
if location_name in present_non_optional_locations:
new_output_fields[output_name] = expression.if_true
else:
new_output_fields[output_name] = expression
else:
raise AssertionError(u'Invalid expression of type {} in output block: '
u'{}'.format(type(expression).__name__, output_block))
match_queries.append(
MatchQuery(
match_traversals=match_traversals,
folds=match_query.folds,
output_block=ConstructResult(new_output_fields),
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=match_queries) | [
"def",
"prune_non_existent_outputs",
"(",
"compound_match_query",
")",
":",
"if",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"1",
":",
"return",
"compound_match_query",
"elif",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Received CompoundMatchQuery with '",
"u'an empty list of MatchQuery objects.'",
")",
"else",
":",
"match_queries",
"=",
"[",
"]",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"match_traversals",
"=",
"match_query",
".",
"match_traversals",
"output_block",
"=",
"match_query",
".",
"output_block",
"present_locations_tuple",
"=",
"_get_present_locations",
"(",
"match_traversals",
")",
"present_locations",
",",
"present_non_optional_locations",
"=",
"present_locations_tuple",
"new_output_fields",
"=",
"{",
"}",
"for",
"output_name",
",",
"expression",
"in",
"six",
".",
"iteritems",
"(",
"output_block",
".",
"fields",
")",
":",
"if",
"isinstance",
"(",
"expression",
",",
"OutputContextField",
")",
":",
"# An OutputContextField as an output Expression indicates that we are not",
"# within an @optional scope. Therefore, the location this output uses must",
"# be in present_locations, and the output is never pruned.",
"location_name",
",",
"_",
"=",
"expression",
".",
"location",
".",
"get_location_name",
"(",
")",
"if",
"location_name",
"not",
"in",
"present_locations",
":",
"raise",
"AssertionError",
"(",
"u'Non-optional output location {} was not found in '",
"u'present_locations: {}'",
".",
"format",
"(",
"expression",
".",
"location",
",",
"present_locations",
")",
")",
"new_output_fields",
"[",
"output_name",
"]",
"=",
"expression",
"elif",
"isinstance",
"(",
"expression",
",",
"FoldedContextField",
")",
":",
"# A FoldedContextField as an output Expression indicates that we are not",
"# within an @optional scope. Therefore, the location this output uses must",
"# be in present_locations, and the output is never pruned.",
"base_location",
"=",
"expression",
".",
"fold_scope_location",
".",
"base_location",
"location_name",
",",
"_",
"=",
"base_location",
".",
"get_location_name",
"(",
")",
"if",
"location_name",
"not",
"in",
"present_locations",
":",
"raise",
"AssertionError",
"(",
"u'Folded output location {} was found in '",
"u'present_locations: {}'",
".",
"format",
"(",
"base_location",
",",
"present_locations",
")",
")",
"new_output_fields",
"[",
"output_name",
"]",
"=",
"expression",
"elif",
"isinstance",
"(",
"expression",
",",
"TernaryConditional",
")",
":",
"# A TernaryConditional indicates that this output is within some optional scope.",
"# This may be pruned away based on the contents of present_locations.",
"location_name",
",",
"_",
"=",
"expression",
".",
"if_true",
".",
"location",
".",
"get_location_name",
"(",
")",
"if",
"location_name",
"in",
"present_locations",
":",
"if",
"location_name",
"in",
"present_non_optional_locations",
":",
"new_output_fields",
"[",
"output_name",
"]",
"=",
"expression",
".",
"if_true",
"else",
":",
"new_output_fields",
"[",
"output_name",
"]",
"=",
"expression",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Invalid expression of type {} in output block: '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"expression",
")",
".",
"__name__",
",",
"output_block",
")",
")",
"match_queries",
".",
"append",
"(",
"MatchQuery",
"(",
"match_traversals",
"=",
"match_traversals",
",",
"folds",
"=",
"match_query",
".",
"folds",
",",
"output_block",
"=",
"ConstructResult",
"(",
"new_output_fields",
")",
",",
"where_block",
"=",
"match_query",
".",
"where_block",
",",
")",
")",
"return",
"CompoundMatchQuery",
"(",
"match_queries",
"=",
"match_queries",
")"
] | Remove non-existent outputs from each MatchQuery in the given CompoundMatchQuery.
Each of the 2^n MatchQuery objects (except one) has been pruned to exclude some Traverse blocks.
For each of these, remove the outputs (that have been implicitly pruned away) from each
corresponding ConstructResult block.
Args:
compound_match_query: CompoundMatchQuery object containing 2^n pruned MatchQuery objects
(see convert_optional_traversals_to_compound_match_query)
Returns:
CompoundMatchQuery with pruned ConstructResult blocks for each of the 2^n MatchQuery objects | [
"Remove",
"non",
"-",
"existent",
"outputs",
"from",
"each",
"MatchQuery",
"in",
"the",
"given",
"CompoundMatchQuery",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L193-L266 |
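The per-output decision made for `TernaryConditional` expressions above reduces to three cases; a stand-alone sketch with outputs modeled as plain values instead of compiler Expression objects (the names and the tuple encoding are illustrative):

```python
def prune_ternary_output(location_name, if_true_value, present, present_non_optional):
    """Return the replacement for one ternary output, or None if it is pruned away."""
    if location_name not in present:
        return None  # the optional scope was omitted entirely: drop the output
    if location_name in present_non_optional:
        return if_true_value  # the traverse became mandatory: unwrap the ternary
    return ('ternary', location_name, if_true_value)  # still genuinely optional


present, non_optional = {'base', 'child'}, {'base'}
assert prune_ternary_output('missing', 'x.name', present, non_optional) is None
assert prune_ternary_output('base', 'x.name', present, non_optional) == 'x.name'
assert prune_ternary_output('child', 'x.name', present, non_optional) == \
    ('ternary', 'child', 'x.name')
```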
246,904 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _construct_location_to_filter_list | def _construct_location_to_filter_list(match_query):
"""Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location
"""
# For each location, all filters for that location should be applied at the first instance.
# This function collects a list of all filters corresponding to each location
# present in the given MatchQuery.
location_to_filters = {}
for match_traversal in match_query.match_traversals:
for match_step in match_traversal:
current_filter = match_step.where_block
if current_filter is not None:
current_location = match_step.as_block.location
location_to_filters.setdefault(current_location, []).append(
current_filter)
return location_to_filters | python | def _construct_location_to_filter_list(match_query):
# For each location, all filters for that location should be applied at the first instance.
# This function collects a list of all filters corresponding to each location
# present in the given MatchQuery.
location_to_filters = {}
for match_traversal in match_query.match_traversals:
for match_step in match_traversal:
current_filter = match_step.where_block
if current_filter is not None:
current_location = match_step.as_block.location
location_to_filters.setdefault(current_location, []).append(
current_filter)
return location_to_filters | [
"def",
"_construct_location_to_filter_list",
"(",
"match_query",
")",
":",
"# For each location, all filters for that location should be applied at the first instance.",
"# This function collects a list of all filters corresponding to each location",
"# present in the given MatchQuery.",
"location_to_filters",
"=",
"{",
"}",
"for",
"match_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"for",
"match_step",
"in",
"match_traversal",
":",
"current_filter",
"=",
"match_step",
".",
"where_block",
"if",
"current_filter",
"is",
"not",
"None",
":",
"current_location",
"=",
"match_step",
".",
"as_block",
".",
"location",
"location_to_filters",
".",
"setdefault",
"(",
"current_location",
",",
"[",
"]",
")",
".",
"append",
"(",
"current_filter",
")",
"return",
"location_to_filters"
] | Return a dict mapping location -> list of filters applied at that location.
Args:
match_query: MatchQuery object from which to extract location -> filters dict
Returns:
dict mapping each location in match_query to a list of
Filter objects applied at that location | [
"Return",
"a",
"dict",
"mapping",
"location",
"-",
">",
"list",
"of",
"filters",
"applied",
"at",
"that",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L269-L291 |
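The accumulation above is the standard `setdefault` dict-of-lists pattern; a short stand-alone demonstration where locations and filters are plain strings for illustration:

```python
location_to_filters = {}
steps = [('base', 'f1'), ('child', 'f2'), ('base', 'f3'), ('other', None)]
for location, where_block in steps:
    if where_block is not None:
        location_to_filters.setdefault(location, []).append(where_block)

# Each location maps to all filters applied at any of its occurrences, in order.
assert location_to_filters == {'base': ['f1', 'f3'], 'child': ['f2']}
```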
246,905 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _filter_list_to_conjunction_expression | def _filter_list_to_conjunction_expression(filter_list):
"""Convert a list of filters to an Expression that is the conjunction of all of them."""
if not isinstance(filter_list, list):
raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list))
if any((not isinstance(filter_block, Filter) for filter_block in filter_list)):
raise AssertionError(u'Expected list of Filter objects. Received: {}'.format(filter_list))
expression_list = [filter_block.predicate for filter_block in filter_list]
return expression_list_to_conjunction(expression_list) | python | def _filter_list_to_conjunction_expression(filter_list):
if not isinstance(filter_list, list):
raise AssertionError(u'Expected `list`, Received: {}.'.format(filter_list))
if any((not isinstance(filter_block, Filter) for filter_block in filter_list)):
raise AssertionError(u'Expected list of Filter objects. Received: {}'.format(filter_list))
expression_list = [filter_block.predicate for filter_block in filter_list]
return expression_list_to_conjunction(expression_list) | [
"def",
"_filter_list_to_conjunction_expression",
"(",
"filter_list",
")",
":",
"if",
"not",
"isinstance",
"(",
"filter_list",
",",
"list",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected `list`, Received: {}.'",
".",
"format",
"(",
"filter_list",
")",
")",
"if",
"any",
"(",
"(",
"not",
"isinstance",
"(",
"filter_block",
",",
"Filter",
")",
"for",
"filter_block",
"in",
"filter_list",
")",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected list of Filter objects. Received: {}'",
".",
"format",
"(",
"filter_list",
")",
")",
"expression_list",
"=",
"[",
"filter_block",
".",
"predicate",
"for",
"filter_block",
"in",
"filter_list",
"]",
"return",
"expression_list_to_conjunction",
"(",
"expression_list",
")"
] | Convert a list of filters to an Expression that is the conjunction of all of them. | [
"Convert",
"a",
"list",
"of",
"filters",
"to",
"an",
"Expression",
"that",
"is",
"the",
"conjunction",
"of",
"all",
"of",
"them",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L294-L302 |
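`expression_list_to_conjunction` is imported from elsewhere in the compiler; a hedged sketch of an equivalent left fold over string predicates (the name `conjoin` and the string encoding are assumptions for illustration, not the library's API):

```python
from functools import reduce


def conjoin(predicates):
    """Left-fold a non-empty list of predicate strings into one '&&' conjunction."""
    if not predicates:
        raise AssertionError(u'Expected a non-empty list of predicates.')
    return reduce(lambda left, right: u'({} && {})'.format(left, right), predicates)


assert conjoin([u'p']) == u'p'
assert conjoin([u'p', u'q', u'r']) == u'((p && q) && r)'
```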
246,906 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _apply_filters_to_first_location_occurrence | def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters,
already_filtered_locations):
"""Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location
"""
new_match_traversal = []
newly_filtered_locations = set()
for match_step in match_traversal:
# Apply all filters for a location to the first occurrence of that location
current_location = match_step.as_block.location
if current_location in newly_filtered_locations:
raise AssertionError(u'The same location {} was encountered twice in a single '
u'match traversal: {}. This should never happen.'
.format(current_location, match_traversal))
if all((current_location in location_to_filters,
current_location not in already_filtered_locations)):
where_block = Filter(
_filter_list_to_conjunction_expression(
location_to_filters[current_location]
)
)
# No further filters needed for this location. If the same location is found in
# another call to this function, no filters will be added.
newly_filtered_locations.add(current_location)
else:
where_block = None
new_match_step = MatchStep(
root_block=match_step.root_block,
coerce_type_block=match_step.coerce_type_block,
where_block=where_block,
as_block=match_step.as_block
)
new_match_traversal.append(new_match_step)
return new_match_traversal, newly_filtered_locations | python | def _apply_filters_to_first_location_occurrence(match_traversal, location_to_filters,
already_filtered_locations):
new_match_traversal = []
newly_filtered_locations = set()
for match_step in match_traversal:
# Apply all filters for a location to the first occurrence of that location
current_location = match_step.as_block.location
if current_location in newly_filtered_locations:
raise AssertionError(u'The same location {} was encountered twice in a single '
u'match traversal: {}. This should never happen.'
.format(current_location, match_traversal))
if all((current_location in location_to_filters,
current_location not in already_filtered_locations)):
where_block = Filter(
_filter_list_to_conjunction_expression(
location_to_filters[current_location]
)
)
# No further filters needed for this location. If the same location is found in
# another call to this function, no filters will be added.
newly_filtered_locations.add(current_location)
else:
where_block = None
new_match_step = MatchStep(
root_block=match_step.root_block,
coerce_type_block=match_step.coerce_type_block,
where_block=where_block,
as_block=match_step.as_block
)
new_match_traversal.append(new_match_step)
return new_match_traversal, newly_filtered_locations | [
"def",
"_apply_filters_to_first_location_occurrence",
"(",
"match_traversal",
",",
"location_to_filters",
",",
"already_filtered_locations",
")",
":",
"new_match_traversal",
"=",
"[",
"]",
"newly_filtered_locations",
"=",
"set",
"(",
")",
"for",
"match_step",
"in",
"match_traversal",
":",
"# Apply all filters for a location to the first occurence of that location",
"current_location",
"=",
"match_step",
".",
"as_block",
".",
"location",
"if",
"current_location",
"in",
"newly_filtered_locations",
":",
"raise",
"AssertionError",
"(",
"u'The same location {} was encountered twice in a single '",
"u'match traversal: {}. This should never happen.'",
".",
"format",
"(",
"current_location",
",",
"match_traversal",
")",
")",
"if",
"all",
"(",
"(",
"current_location",
"in",
"location_to_filters",
",",
"current_location",
"not",
"in",
"already_filtered_locations",
")",
")",
":",
"where_block",
"=",
"Filter",
"(",
"_filter_list_to_conjunction_expression",
"(",
"location_to_filters",
"[",
"current_location",
"]",
")",
")",
"# No further filters needed for this location. If the same location is found in",
"# another call to this function, no filters will be added.",
"newly_filtered_locations",
".",
"add",
"(",
"current_location",
")",
"else",
":",
"where_block",
"=",
"None",
"new_match_step",
"=",
"MatchStep",
"(",
"root_block",
"=",
"match_step",
".",
"root_block",
",",
"coerce_type_block",
"=",
"match_step",
".",
"coerce_type_block",
",",
"where_block",
"=",
"where_block",
",",
"as_block",
"=",
"match_step",
".",
"as_block",
")",
"new_match_traversal",
".",
"append",
"(",
"new_match_step",
")",
"return",
"new_match_traversal",
",",
"newly_filtered_locations"
] | Apply all filters for a specific location into its first occurrence in a given traversal.
For each location in the given match traversal,
construct a conjunction of all filters applied to that location,
and apply the resulting Filter to the first instance of the location.
Args:
match_traversal: list of MatchStep objects to be lowered
location_to_filters: dict mapping each location in the MatchQuery which contains
the given match traversal to a list of filters applied at that location
already_filtered_locations: set of locations that have already had their filters applied
Returns:
new list of MatchStep objects with all filters for any given location composed into
a single filter which is applied to the first instance of that location | [
"Apply",
"all",
"filters",
"for",
"a",
"specific",
"location",
"into",
"its",
"first",
"occurrence",
"in",
"a",
"given",
"traversal",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L305-L355 |
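A compact model of the function above, reducing a match step to a bare location string and a filter to a predicate string (all names are illustrative); it reproduces the first-occurrence placement, the returned set of newly filtered locations, and the repeated-location guard:

```python
def apply_filters_once(traversal, location_to_filters, already_filtered):
    """Attach each location's combined filter only at its first occurrence."""
    new_traversal, newly_filtered = [], set()
    for location in traversal:
        if location in newly_filtered:
            raise AssertionError(u'Location {} appeared twice in one traversal.'
                                 .format(location))
        if location in location_to_filters and location not in already_filtered:
            new_traversal.append((location, u' && '.join(location_to_filters[location])))
            newly_filtered.add(location)
        else:
            new_traversal.append((location, None))  # filtered earlier, or unfiltered
    return new_traversal, newly_filtered


result, newly = apply_filters_once(['base', 'child'], {'child': [u'p', u'q']}, set())
assert result == [('base', None), ('child', u'p && q')] and newly == {'child'}
```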
246,907 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | collect_filters_to_first_location_occurrence | def collect_filters_to_first_location_occurrence(compound_match_query):
"""Collect all filters for a particular location to the first instance of the location.
Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurrence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_filter_list_to_conjunction_expression`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location.
"""
new_match_queries = []
# Each MatchQuery has a different set of locations, and associated Filters.
# Hence, each of them is processed independently.
for match_query in compound_match_query.match_queries:
# Construct mapping from location -> list of filter predicates applied at that location
location_to_filters = _construct_location_to_filter_list(match_query)
already_filtered_locations = set()
new_match_traversals = []
for match_traversal in match_query.match_traversals:
result = _apply_filters_to_first_location_occurrence(
match_traversal, location_to_filters, already_filtered_locations)
new_match_traversal, newly_filtered_locations = result
new_match_traversals.append(new_match_traversal)
already_filtered_locations.update(newly_filtered_locations)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | python | def collect_filters_to_first_location_occurrence(compound_match_query):
new_match_queries = []
# Each MatchQuery has a different set of locations, and associated Filters.
# Hence, each of them is processed independently.
for match_query in compound_match_query.match_queries:
# Construct mapping from location -> list of filter predicates applied at that location
location_to_filters = _construct_location_to_filter_list(match_query)
already_filtered_locations = set()
new_match_traversals = []
for match_traversal in match_query.match_traversals:
result = _apply_filters_to_first_location_occurrence(
match_traversal, location_to_filters, already_filtered_locations)
new_match_traversal, newly_filtered_locations = result
new_match_traversals.append(new_match_traversal)
already_filtered_locations.update(newly_filtered_locations)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | [
"def",
"collect_filters_to_first_location_occurrence",
"(",
"compound_match_query",
")",
":",
"new_match_queries",
"=",
"[",
"]",
"# Each MatchQuery has a different set of locations, and associated Filters.",
"# Hence, each of them is processed independently.",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"# Construct mapping from location -> list of filter predicates applied at that location",
"location_to_filters",
"=",
"_construct_location_to_filter_list",
"(",
"match_query",
")",
"already_filtered_locations",
"=",
"set",
"(",
")",
"new_match_traversals",
"=",
"[",
"]",
"for",
"match_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"result",
"=",
"_apply_filters_to_first_location_occurrence",
"(",
"match_traversal",
",",
"location_to_filters",
",",
"already_filtered_locations",
")",
"new_match_traversal",
",",
"newly_filtered_locations",
"=",
"result",
"new_match_traversals",
".",
"append",
"(",
"new_match_traversal",
")",
"already_filtered_locations",
".",
"update",
"(",
"newly_filtered_locations",
")",
"new_match_queries",
".",
"append",
"(",
"MatchQuery",
"(",
"match_traversals",
"=",
"new_match_traversals",
",",
"folds",
"=",
"match_query",
".",
"folds",
",",
"output_block",
"=",
"match_query",
".",
"output_block",
",",
"where_block",
"=",
"match_query",
".",
"where_block",
",",
")",
")",
"return",
"CompoundMatchQuery",
"(",
"match_queries",
"=",
"new_match_queries",
")"
] | Collect all filters for a particular location to the first instance of the location.
Adding edge field non-existence filters in `_prune_traverse_using_omitted_locations` may
result in filters being applied to locations after their first occurrence.
OrientDB does not resolve this behavior correctly. Therefore, for each MatchQuery,
we collect all the filters for each location in a list. For each location,
we make a conjunction of the filter list (`_filter_list_to_conjunction_expression`) and apply
the new filter to only the first instance of that location.
All other instances will have no filters (None).
Args:
compound_match_query: CompoundMatchQuery object containing 2^n MatchQuery objects
Returns:
CompoundMatchQuery with all filters for each location applied to the first instance
of that location. | [
"Collect",
"all",
"filters",
"for",
"a",
"particular",
"location",
"to",
"the",
"first",
"instance",
"of",
"the",
"location",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L358-L402 |
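The driver above threads `already_filtered_locations` through successive traversals so that a location shared by several traversals is filtered exactly once; a self-contained sketch of that threading over toy data (names illustrative):

```python
def collect_filters(match_traversals, location_to_filters):
    """Place each location's filters at its first occurrence across all traversals."""
    already_filtered, lowered = set(), []
    for traversal in match_traversals:
        new_traversal = []
        for location in traversal:
            if location in location_to_filters and location not in already_filtered:
                combined = u' && '.join(location_to_filters[location])
                new_traversal.append((location, combined))
                already_filtered.add(location)
            else:
                new_traversal.append((location, None))
        lowered.append(new_traversal)
    return lowered


lowered = collect_filters([['a', 'b'], ['b', 'c']], {'b': [u'p1', u'p2']})
# 'b' carries its filter in the first traversal only; later occurrences get None.
assert lowered == [[('a', None), ('b', u'p1 && p2')], [('b', None), ('c', None)]]
```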
246,908 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _update_context_field_binary_composition | def _update_context_field_binary_composition(present_locations, expression):
"""Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise
"""
if not any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} without any ContextField '
u'operands. This should never happen.'.format(expression))
if isinstance(expression.left, ContextField):
context_field = expression.left
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
if isinstance(expression.right, ContextField):
context_field = expression.right
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
return expression | python | def _update_context_field_binary_composition(present_locations, expression):
if not any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} without any ContextField '
u'operands. This should never happen.'.format(expression))
if isinstance(expression.left, ContextField):
context_field = expression.left
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
if isinstance(expression.right, ContextField):
context_field = expression.right
location_name, _ = context_field.location.get_location_name()
if location_name not in present_locations:
return TrueLiteral
return expression | [
"def",
"_update_context_field_binary_composition",
"(",
"present_locations",
",",
"expression",
")",
":",
"if",
"not",
"any",
"(",
"(",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
",",
"isinstance",
"(",
"expression",
".",
"right",
",",
"ContextField",
")",
")",
")",
":",
"raise",
"AssertionError",
"(",
"u'Received a BinaryComposition {} without any ContextField '",
"u'operands. This should never happen.'",
".",
"format",
"(",
"expression",
")",
")",
"if",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
":",
"context_field",
"=",
"expression",
".",
"left",
"location_name",
",",
"_",
"=",
"context_field",
".",
"location",
".",
"get_location_name",
"(",
")",
"if",
"location_name",
"not",
"in",
"present_locations",
":",
"return",
"TrueLiteral",
"if",
"isinstance",
"(",
"expression",
".",
"right",
",",
"ContextField",
")",
":",
"context_field",
"=",
"expression",
".",
"right",
"location_name",
",",
"_",
"=",
"context_field",
".",
"location",
".",
"get_location_name",
"(",
")",
"if",
"location_name",
"not",
"in",
"present_locations",
":",
"return",
"TrueLiteral",
"return",
"expression"
] | Lower BinaryCompositions involving non-existent ContextFields to True.
Args:
present_locations: set of all locations in the current MatchQuery that have not been pruned
expression: BinaryComposition with at least one ContextField operand
Returns:
TrueLiteral iff either ContextField operand is not in `present_locations`,
and the original expression otherwise | [
"Lower",
"BinaryCompositions",
"involving",
"non",
"-",
"existent",
"ContextFields",
"to",
"True",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L405-L433 |
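A toy rendering of the lowering rule above: each operand is modeled as an optional location name (None standing in for a non-ContextField operand), and the whole predicate collapses to a true literal whenever either referenced location was pruned (the names are illustrative, not compiler types):

```python
TRUE_LITERAL = u'true'


def lower_missing_context(present_locations, left_location, right_location, expression):
    """Collapse the predicate to TRUE_LITERAL if either side references a pruned location."""
    for location in (left_location, right_location):
        if location is not None and location not in present_locations:
            return TRUE_LITERAL
    return expression


present = {'base'}
assert lower_missing_context(present, 'base', 'pruned', u'base.x = pruned.y') == TRUE_LITERAL
assert lower_missing_context(present, 'base', None, u'base.x = 3') == u'base.x = 3'
```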
246,909 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _simplify_non_context_field_binary_composition | def _simplify_non_context_field_binary_composition(expression):
"""Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of its operands is a TrueLiteral,
and the original expression otherwise
"""
if any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} with a ContextField '
u'operand. This should never happen.'.format(expression))
if expression.operator == u'||':
if expression.left == TrueLiteral or expression.right == TrueLiteral:
return TrueLiteral
else:
return expression
elif expression.operator == u'&&':
if expression.left == TrueLiteral:
return expression.right
if expression.right == TrueLiteral:
return expression.left
else:
return expression
else:
return expression | python | def _simplify_non_context_field_binary_composition(expression):
if any((isinstance(expression.left, ContextField),
isinstance(expression.right, ContextField))):
raise AssertionError(u'Received a BinaryComposition {} with a ContextField '
u'operand. This should never happen.'.format(expression))
if expression.operator == u'||':
if expression.left == TrueLiteral or expression.right == TrueLiteral:
return TrueLiteral
else:
return expression
elif expression.operator == u'&&':
if expression.left == TrueLiteral:
return expression.right
if expression.right == TrueLiteral:
return expression.left
else:
return expression
else:
return expression | [
"def",
"_simplify_non_context_field_binary_composition",
"(",
"expression",
")",
":",
"if",
"any",
"(",
"(",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
",",
"isinstance",
"(",
"expression",
".",
"right",
",",
"ContextField",
")",
")",
")",
":",
"raise",
"AssertionError",
"(",
"u'Received a BinaryComposition {} with a ContextField '",
"u'operand. This should never happen.'",
".",
"format",
"(",
"expression",
")",
")",
"if",
"expression",
".",
"operator",
"==",
"u'||'",
":",
"if",
"expression",
".",
"left",
"==",
"TrueLiteral",
"or",
"expression",
".",
"right",
"==",
"TrueLiteral",
":",
"return",
"TrueLiteral",
"else",
":",
"return",
"expression",
"elif",
"expression",
".",
"operator",
"==",
"u'&&'",
":",
"if",
"expression",
".",
"left",
"==",
"TrueLiteral",
":",
"return",
"expression",
".",
"right",
"if",
"expression",
".",
"right",
"==",
"TrueLiteral",
":",
"return",
"expression",
".",
"left",
"else",
":",
"return",
"expression",
"else",
":",
"return",
"expression"
] | Return a simplified BinaryComposition if either operand is a TrueLiteral.
Args:
expression: BinaryComposition without any ContextField operand(s)
Returns:
simplified expression if the given expression is a disjunction/conjunction
and one of its operands is a TrueLiteral,
and the original expression otherwise | [
"Return",
"a",
"simplified",
"BinaryComposition",
"if",
"either",
"operand",
"is",
"a",
"TrueLiteral",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L436-L465 |
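The simplification above is just two boolean identities applied to the IR: `True || x == True` and `True && x == x`. A tuple-based sketch of the same logic (assumed shapes, not the compiler's BinaryComposition class):

```python
TRUE = 'TrueLiteral'  # placeholder for the TrueLiteral singleton

def simplify(operator, left, right):
    """Apply (True || x) -> True and (True && x) -> x; otherwise keep as-is."""
    if operator == '||' and TRUE in (left, right):
        return TRUE
    if operator == '&&':
        if left == TRUE:
            return right
        if right == TRUE:
            return left
    return (operator, left, right)

assert simplify('||', TRUE, 'pred') == TRUE
assert simplify('&&', TRUE, 'pred') == 'pred'
assert simplify('&&', 'pred', TRUE) == 'pred'
assert simplify('&&', 'a', 'b') == ('&&', 'a', 'b')
```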
246,910 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _update_context_field_expression | def _update_context_field_expression(present_locations, expression):
"""Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result."""
no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable)
if isinstance(expression, BinaryComposition):
if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField):
return _update_context_field_binary_composition(present_locations, expression)
else:
return _simplify_non_context_field_binary_composition(expression)
elif isinstance(expression, TernaryConditional):
return _simplify_ternary_conditional(expression)
elif isinstance(expression, BetweenClause):
lower_bound = expression.lower_bound
upper_bound = expression.upper_bound
if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField):
raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. '
u'This should never happen: {}'.format(expression))
return expression
elif isinstance(expression, (OutputContextField, FoldedContextField)):
raise AssertionError(u'Found unexpected expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression))
elif isinstance(expression, no_op_blocks):
return expression
raise AssertionError(u'Found unhandled expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression)) | python | def _update_context_field_expression(present_locations, expression):
no_op_blocks = (ContextField, Literal, LocalField, UnaryTransformation, Variable)
if isinstance(expression, BinaryComposition):
if isinstance(expression.left, ContextField) or isinstance(expression.right, ContextField):
return _update_context_field_binary_composition(present_locations, expression)
else:
return _simplify_non_context_field_binary_composition(expression)
elif isinstance(expression, TernaryConditional):
return _simplify_ternary_conditional(expression)
elif isinstance(expression, BetweenClause):
lower_bound = expression.lower_bound
upper_bound = expression.upper_bound
if isinstance(lower_bound, ContextField) or isinstance(upper_bound, ContextField):
raise AssertionError(u'Found BetweenClause with ContextFields as lower/upper bounds. '
u'This should never happen: {}'.format(expression))
return expression
elif isinstance(expression, (OutputContextField, FoldedContextField)):
raise AssertionError(u'Found unexpected expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression))
elif isinstance(expression, no_op_blocks):
return expression
raise AssertionError(u'Found unhandled expression of type {}. This should never happen: '
u'{}'.format(type(expression).__name__, expression)) | [
"def",
"_update_context_field_expression",
"(",
"present_locations",
",",
"expression",
")",
":",
"no_op_blocks",
"=",
"(",
"ContextField",
",",
"Literal",
",",
"LocalField",
",",
"UnaryTransformation",
",",
"Variable",
")",
"if",
"isinstance",
"(",
"expression",
",",
"BinaryComposition",
")",
":",
"if",
"isinstance",
"(",
"expression",
".",
"left",
",",
"ContextField",
")",
"or",
"isinstance",
"(",
"expression",
".",
"right",
",",
"ContextField",
")",
":",
"return",
"_update_context_field_binary_composition",
"(",
"present_locations",
",",
"expression",
")",
"else",
":",
"return",
"_simplify_non_context_field_binary_composition",
"(",
"expression",
")",
"elif",
"isinstance",
"(",
"expression",
",",
"TernaryConditional",
")",
":",
"return",
"_simplify_ternary_conditional",
"(",
"expression",
")",
"elif",
"isinstance",
"(",
"expression",
",",
"BetweenClause",
")",
":",
"lower_bound",
"=",
"expression",
".",
"lower_bound",
"upper_bound",
"=",
"expression",
".",
"upper_bound",
"if",
"isinstance",
"(",
"lower_bound",
",",
"ContextField",
")",
"or",
"isinstance",
"(",
"upper_bound",
",",
"ContextField",
")",
":",
"raise",
"AssertionError",
"(",
"u'Found BetweenClause with ContextFields as lower/upper bounds. '",
"u'This should never happen: {}'",
".",
"format",
"(",
"expression",
")",
")",
"return",
"expression",
"elif",
"isinstance",
"(",
"expression",
",",
"(",
"OutputContextField",
",",
"FoldedContextField",
")",
")",
":",
"raise",
"AssertionError",
"(",
"u'Found unexpected expression of type {}. This should never happen: '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"expression",
")",
".",
"__name__",
",",
"expression",
")",
")",
"elif",
"isinstance",
"(",
"expression",
",",
"no_op_blocks",
")",
":",
"return",
"expression",
"raise",
"AssertionError",
"(",
"u'Found unhandled expression of type {}. This should never happen: '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"expression",
")",
".",
"__name__",
",",
"expression",
")",
")"
] | Lower Expressions involving non-existent ContextFields to TrueLiteral and simplify result. | [
"Lower",
"Expressions",
"involving",
"non",
"-",
"existent",
"ContextFields",
"to",
"TrueLiteral",
"and",
"simplify",
"result",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L484-L508 |
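The dispatcher above follows a common IR-rewriting shape: handle each expression kind explicitly, pass no-op kinds through unchanged, and raise on anything unhandled so new IR types cannot slip by silently. A toy version of that structure (placeholder classes, not the real IR):

```python
class Literal(object):
    pass

class Variable(object):
    pass

class TernaryConditional(object):
    pass

def rewrite(expression):
    no_op_types = (Literal, Variable)
    if isinstance(expression, TernaryConditional):
        return 'simplified'          # delegate to a specialized simplifier
    elif isinstance(expression, no_op_types):
        return expression            # nothing to do for these kinds
    # Fail loudly on unknown kinds instead of silently passing them through.
    raise AssertionError('Unhandled expression type {}: {}'.format(
        type(expression).__name__, expression))

assert rewrite(TernaryConditional()) == 'simplified'
literal = Literal()
assert rewrite(literal) is literal
```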
246,911 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | _lower_non_existent_context_field_filters | def _lower_non_existent_context_field_filters(match_traversals, visitor_fn):
"""Return new match traversals, lowering filters involving non-existent ContextFields.
Expressions involving non-existent ContextFields are evaluated to TrueLiteral.
BinaryCompositions, where one of the operands is lowered to a TrueLiteral,
are lowered appropriately based on the present operator (u'||' and u'&&' are affected).
TernaryConditionals, where the predicate is lowered to a TrueLiteral,
are replaced by their if_true predicate.
The `visitor_fn` implements these behaviors (see `_update_context_field_expression`).
Args:
match_traversals: list of match traversal entities to be lowered
visitor_fn: visit_and_update function for lowering expressions in given match traversal
Returns:
new list of match_traversals, with all filter expressions lowered
"""
new_match_traversals = []
for match_traversal in match_traversals:
new_match_traversal = []
for step in match_traversal:
if step.where_block is not None:
new_filter = step.where_block.visit_and_update_expressions(visitor_fn)
if new_filter.predicate == TrueLiteral:
new_filter = None
new_step = step._replace(where_block=new_filter)
else:
new_step = step
new_match_traversal.append(new_step)
new_match_traversals.append(new_match_traversal)
return new_match_traversals | python | def _lower_non_existent_context_field_filters(match_traversals, visitor_fn):
new_match_traversals = []
for match_traversal in match_traversals:
new_match_traversal = []
for step in match_traversal:
if step.where_block is not None:
new_filter = step.where_block.visit_and_update_expressions(visitor_fn)
if new_filter.predicate == TrueLiteral:
new_filter = None
new_step = step._replace(where_block=new_filter)
else:
new_step = step
new_match_traversal.append(new_step)
new_match_traversals.append(new_match_traversal)
return new_match_traversals | [
"def",
"_lower_non_existent_context_field_filters",
"(",
"match_traversals",
",",
"visitor_fn",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"for",
"match_traversal",
"in",
"match_traversals",
":",
"new_match_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"match_traversal",
":",
"if",
"step",
".",
"where_block",
"is",
"not",
"None",
":",
"new_filter",
"=",
"step",
".",
"where_block",
".",
"visit_and_update_expressions",
"(",
"visitor_fn",
")",
"if",
"new_filter",
".",
"predicate",
"==",
"TrueLiteral",
":",
"new_filter",
"=",
"None",
"new_step",
"=",
"step",
".",
"_replace",
"(",
"where_block",
"=",
"new_filter",
")",
"else",
":",
"new_step",
"=",
"step",
"new_match_traversal",
".",
"append",
"(",
"new_step",
")",
"new_match_traversals",
".",
"append",
"(",
"new_match_traversal",
")",
"return",
"new_match_traversals"
] | Return new match traversals, lowering filters involving non-existent ContextFields.
Expressions involving non-existent ContextFields are evaluated to TrueLiteral.
BinaryCompositions, where one of the operands is lowered to a TrueLiteral,
are lowered appropriately based on the present operator (u'||' and u'&&' are affected).
TernaryConditionals, where the predicate is lowered to a TrueLiteral,
are replaced by their if_true predicate.
The `visitor_fn` implements these behaviors (see `_update_context_field_expression`).
Args:
match_traversals: list of match traversal entities to be lowered
visitor_fn: visit_and_update function for lowering expressions in given match traversal
Returns:
new list of match_traversals, with all filter expressions lowered | [
"Return",
"new",
"match",
"traversals",
"lowering",
"filters",
"involving",
"non",
"-",
"existent",
"ContextFields",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L511-L544 |
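The loop above never mutates a step in place; it rebuilds each one with `namedtuple._replace`, dropping the Filter block entirely once its predicate has been lowered to an always-true value. A runnable sketch of that rebuild (simplified MatchStep, not the real one):

```python
from collections import namedtuple

MatchStep = namedtuple('MatchStep', ['root_block', 'where_block'])
TRUE = 'TrueLiteral'  # placeholder for the TrueLiteral singleton

def lower_filters(traversal, visit_fn):
    new_traversal = []
    for step in traversal:
        if step.where_block is not None:
            new_predicate = visit_fn(step.where_block)
            # An always-true filter constrains nothing, so drop it outright.
            replacement = None if new_predicate == TRUE else new_predicate
            step = step._replace(where_block=replacement)
        new_traversal.append(step)
    return new_traversal

steps = [MatchStep('A', 'some_predicate'), MatchStep('B', None)]
assert lower_filters(steps, lambda pred: TRUE) == [
    MatchStep('A', None), MatchStep('B', None)]
```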
246,912 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/optional_traversal.py | lower_context_field_expressions | def lower_context_field_expressions(compound_match_query):
"""Lower Expressons involving non-existent ContextFields."""
if len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.'
.format(compound_match_query))
elif len(compound_match_query.match_queries) == 1:
# All ContextFields exist if there is only one MatchQuery
# because none of the traverses were omitted, and all locations exist (are defined).
return compound_match_query
else:
new_match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
present_locations, _ = _get_present_locations(match_traversals)
current_visitor_fn = partial(_update_context_field_expression, present_locations)
new_match_traversals = _lower_non_existent_context_field_filters(
match_traversals, current_visitor_fn)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | python | def lower_context_field_expressions(compound_match_query):
if len(compound_match_query.match_queries) == 0:
raise AssertionError(u'Received CompoundMatchQuery {} with no MatchQuery objects.'
.format(compound_match_query))
elif len(compound_match_query.match_queries) == 1:
# All ContextFields exist if there is only one MatchQuery
# because none of the traverses were omitted, and all locations exist (are defined).
return compound_match_query
else:
new_match_queries = []
for match_query in compound_match_query.match_queries:
match_traversals = match_query.match_traversals
present_locations, _ = _get_present_locations(match_traversals)
current_visitor_fn = partial(_update_context_field_expression, present_locations)
new_match_traversals = _lower_non_existent_context_field_filters(
match_traversals, current_visitor_fn)
new_match_queries.append(
MatchQuery(
match_traversals=new_match_traversals,
folds=match_query.folds,
output_block=match_query.output_block,
where_block=match_query.where_block,
)
)
return CompoundMatchQuery(match_queries=new_match_queries) | [
"def",
"lower_context_field_expressions",
"(",
"compound_match_query",
")",
":",
"if",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Received CompoundMatchQuery {} with no MatchQuery objects.'",
".",
"format",
"(",
"compound_match_query",
")",
")",
"elif",
"len",
"(",
"compound_match_query",
".",
"match_queries",
")",
"==",
"1",
":",
"# All ContextFields exist if there is only one MatchQuery",
"# becuase none of the traverses were omitted, and all locations exist (are defined).",
"return",
"compound_match_query",
"else",
":",
"new_match_queries",
"=",
"[",
"]",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"match_traversals",
"=",
"match_query",
".",
"match_traversals",
"present_locations",
",",
"_",
"=",
"_get_present_locations",
"(",
"match_traversals",
")",
"current_visitor_fn",
"=",
"partial",
"(",
"_update_context_field_expression",
",",
"present_locations",
")",
"new_match_traversals",
"=",
"_lower_non_existent_context_field_filters",
"(",
"match_traversals",
",",
"current_visitor_fn",
")",
"new_match_queries",
".",
"append",
"(",
"MatchQuery",
"(",
"match_traversals",
"=",
"new_match_traversals",
",",
"folds",
"=",
"match_query",
".",
"folds",
",",
"output_block",
"=",
"match_query",
".",
"output_block",
",",
"where_block",
"=",
"match_query",
".",
"where_block",
",",
")",
")",
"return",
"CompoundMatchQuery",
"(",
"match_queries",
"=",
"new_match_queries",
")"
] | Lower Expressions involving non-existent ContextFields. | [
"Lower",
"Expressons",
"involving",
"non",
"-",
"existent",
"ContextFields",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/optional_traversal.py#L547-L574 |
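One detail worth calling out in the driver above: `functools.partial` bakes the per-query `present_locations` into the two-argument visitor so it can be passed around as a one-argument callback. The pattern in isolation, with a stand-in visitor:

```python
from functools import partial

def update_expression(present_locations, expression):
    # Stand-in visitor: lower anything that references a missing location.
    return 'true' if expression not in present_locations else expression

for present in ({'a', 'b'}, {'b'}):
    visitor_fn = partial(update_expression, present)  # now: expression -> expression
    print([visitor_fn(expression) for expression in ('a', 'b')])
# Prints ['a', 'b'] for the first variant and ['true', 'b'] for the second.
```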
246,913 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_edges_do_not_have_extra_links | def _validate_edges_do_not_have_extra_links(class_name, properties):
"""Validate that edges do not have properties of Link type that aren't the edge endpoints."""
for property_name, property_descriptor in six.iteritems(properties):
if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
continue
if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID:
raise IllegalSchemaStateError(u'Edge class "{}" has a property of type Link that is '
u'not an edge endpoint, this is not allowed: '
u'{}'.format(class_name, property_name)) | python | def _validate_edges_do_not_have_extra_links(class_name, properties):
for property_name, property_descriptor in six.iteritems(properties):
if property_name in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
continue
if property_descriptor.type_id == PROPERTY_TYPE_LINK_ID:
raise IllegalSchemaStateError(u'Edge class "{}" has a property of type Link that is '
u'not an edge endpoint, this is not allowed: '
u'{}'.format(class_name, property_name)) | [
"def",
"_validate_edges_do_not_have_extra_links",
"(",
"class_name",
",",
"properties",
")",
":",
"for",
"property_name",
",",
"property_descriptor",
"in",
"six",
".",
"iteritems",
"(",
"properties",
")",
":",
"if",
"property_name",
"in",
"{",
"EDGE_SOURCE_PROPERTY_NAME",
",",
"EDGE_DESTINATION_PROPERTY_NAME",
"}",
":",
"continue",
"if",
"property_descriptor",
".",
"type_id",
"==",
"PROPERTY_TYPE_LINK_ID",
":",
"raise",
"IllegalSchemaStateError",
"(",
"u'Edge class \"{}\" has a property of type Link that is '",
"u'not an edge endpoint, this is not allowed: '",
"u'{}'",
".",
"format",
"(",
"class_name",
",",
"property_name",
")",
")"
] | Validate that edges do not have properties of Link type that aren't the edge endpoints. | [
"Validate",
"that",
"edges",
"do",
"not",
"have",
"properties",
"of",
"Link",
"type",
"that",
"aren",
"t",
"the",
"edge",
"endpoints",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L44-L53 |
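A self-contained re-creation of the check above, using plain dicts instead of PropertyDescriptor objects; the endpoint property names and the Link type id are assumptions made for the sketch:

```python
EDGE_ENDPOINT_NAMES = {'out', 'in'}  # assumed endpoint property names
LINK_TYPE_ID = 13                    # assumed value of PROPERTY_TYPE_LINK_ID

def validate_no_extra_links(class_name, properties):
    for property_name, descriptor in properties.items():
        if property_name in EDGE_ENDPOINT_NAMES:
            continue  # the two endpoint links are the only Links allowed
        if descriptor['type_id'] == LINK_TYPE_ID:
            raise ValueError('Edge class "{}" has an illegal Link property: {}'
                             .format(class_name, property_name))

# Passes: the only Link-typed properties are the endpoints themselves.
validate_no_extra_links('Person_Knows',
                        {'out': {'type_id': 13}, 'in': {'type_id': 13},
                         'since': {'type_id': 6}})
```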
246,914 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_property_names | def _validate_property_names(class_name, properties):
"""Validate that properties do not have names that may cause problems in the GraphQL schema."""
for property_name in properties:
if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES):
raise IllegalSchemaStateError(u'Class "{}" has a property with an illegal name: '
u'{}'.format(class_name, property_name)) | python | def _validate_property_names(class_name, properties):
for property_name in properties:
if not property_name or property_name.startswith(ILLEGAL_PROPERTY_NAME_PREFIXES):
raise IllegalSchemaStateError(u'Class "{}" has a property with an illegal name: '
u'{}'.format(class_name, property_name)) | [
"def",
"_validate_property_names",
"(",
"class_name",
",",
"properties",
")",
":",
"for",
"property_name",
"in",
"properties",
":",
"if",
"not",
"property_name",
"or",
"property_name",
".",
"startswith",
"(",
"ILLEGAL_PROPERTY_NAME_PREFIXES",
")",
":",
"raise",
"IllegalSchemaStateError",
"(",
"u'Class \"{}\" has a property with an illegal name: '",
"u'{}'",
".",
"format",
"(",
"class_name",
",",
"property_name",
")",
")"
] | Validate that properties do not have names that may cause problems in the GraphQL schema. | [
"Validate",
"that",
"properties",
"do",
"not",
"have",
"names",
"that",
"may",
"cause",
"problems",
"in",
"the",
"GraphQL",
"schema",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L56-L61 |
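The check leans on the fact that `str.startswith` accepts a tuple of prefixes, so one call tests every illegal prefix at once. A standalone sketch (the prefix tuple is a placeholder, not the library's actual constant):

```python
ILLEGAL_PROPERTY_NAME_PREFIXES = ('_', '@')  # assumed values for the sketch

def has_legal_name(property_name):
    return bool(property_name) and not property_name.startswith(
        ILLEGAL_PROPERTY_NAME_PREFIXES)

assert has_legal_name('net_worth')
assert not has_legal_name('')           # empty names are rejected
assert not has_legal_name('_private')   # as is any illegally prefixed name
```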
246,915 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | _validate_collections_have_default_values | def _validate_collections_have_default_values(class_name, property_name, property_descriptor):
"""Validate that if the property is of collection type, it has a specified default value."""
# We don't want properties of collection type having "null" values, since that may cause
# unexpected errors during GraphQL query execution and other operations.
if property_descriptor.type_id in COLLECTION_PROPERTY_TYPES:
if property_descriptor.default is None:
raise IllegalSchemaStateError(u'Class "{}" has a property "{}" of collection type with '
u'no default value.'.format(class_name, property_name)) | python | def _validate_collections_have_default_values(class_name, property_name, property_descriptor):
# We don't want properties of collection type having "null" values, since that may cause
# unexpected errors during GraphQL query execution and other operations.
if property_descriptor.type_id in COLLECTION_PROPERTY_TYPES:
if property_descriptor.default is None:
raise IllegalSchemaStateError(u'Class "{}" has a property "{}" of collection type with '
u'no default value.'.format(class_name, property_name)) | [
"def",
"_validate_collections_have_default_values",
"(",
"class_name",
",",
"property_name",
",",
"property_descriptor",
")",
":",
"# We don't want properties of collection type having \"null\" values, since that may cause",
"# unexpected errors during GraphQL query execution and other operations.",
"if",
"property_descriptor",
".",
"type_id",
"in",
"COLLECTION_PROPERTY_TYPES",
":",
"if",
"property_descriptor",
".",
"default",
"is",
"None",
":",
"raise",
"IllegalSchemaStateError",
"(",
"u'Class \"{}\" has a property \"{}\" of collection type with '",
"u'no default value.'",
".",
"format",
"(",
"class_name",
",",
"property_name",
")",
")"
] | Validate that if the property is of collection type, it has a specified default value. | [
"Validate",
"that",
"if",
"the",
"property",
"is",
"of",
"collection",
"type",
"it",
"has",
"a",
"specified",
"default",
"value",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L64-L71 |
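The rule in miniature: a collection-typed property must carry a default so a query never observes a null list, set, or map. A runnable sketch with assumed type ids:

```python
COLLECTION_PROPERTY_TYPES = {10, 11, 12}  # assumed ids for embedded list/set/map

def check_collection_default(class_name, property_name, type_id, default):
    if type_id in COLLECTION_PROPERTY_TYPES and default is None:
        raise ValueError('Class "{}" property "{}" is a collection with no '
                         'default value.'.format(class_name, property_name))

check_collection_default('Person', 'aliases', 10, default=[])  # ok: default given
check_collection_default('Person', 'name', 7, default=None)    # ok: not a collection
```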
246,916 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | get_superclasses_from_class_definition | def get_superclasses_from_class_definition(class_definition):
"""Extract a list of all superclass names from a class definition dict."""
# New-style superclasses definition, supporting multiple-inheritance.
superclasses = class_definition.get('superClasses', None)
if superclasses:
return list(superclasses)
# Old-style superclass definition, single inheritance only.
superclass = class_definition.get('superClass', None)
if superclass:
return [superclass]
# No superclasses are present.
return [] | python | def get_superclasses_from_class_definition(class_definition):
# New-style superclasses definition, supporting multiple-inheritance.
superclasses = class_definition.get('superClasses', None)
if superclasses:
return list(superclasses)
# Old-style superclass definition, single inheritance only.
superclass = class_definition.get('superClass', None)
if superclass:
return [superclass]
# No superclasses are present.
return [] | [
"def",
"get_superclasses_from_class_definition",
"(",
"class_definition",
")",
":",
"# New-style superclasses definition, supporting multiple-inheritance.",
"superclasses",
"=",
"class_definition",
".",
"get",
"(",
"'superClasses'",
",",
"None",
")",
"if",
"superclasses",
":",
"return",
"list",
"(",
"superclasses",
")",
"# Old-style superclass definition, single inheritance only.",
"superclass",
"=",
"class_definition",
".",
"get",
"(",
"'superClass'",
",",
"None",
")",
"if",
"superclass",
":",
"return",
"[",
"superclass",
"]",
"# No superclasses are present.",
"return",
"[",
"]"
] | Extract a list of all superclass names from a class definition dict. | [
"Extract",
"a",
"list",
"of",
"all",
"superclass",
"names",
"from",
"a",
"class",
"definition",
"dict",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L74-L88 |
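The normalization above handles both OrientDB export styles: the plural `superClasses` key (new-style, multiple inheritance) wins, the singular `superClass` key is the legacy fallback, and the result is always a list. A copy of the logic with example inputs:

```python
def get_superclasses(class_definition):
    superclasses = class_definition.get('superClasses')
    if superclasses:
        return list(superclasses)
    superclass = class_definition.get('superClass')
    if superclass:
        return [superclass]
    return []

assert get_superclasses({'superClasses': ['V', 'Base']}) == ['V', 'Base']
assert get_superclasses({'superClass': 'V'}) == ['V']
assert get_superclasses({'name': 'RootClass'}) == []
```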
246,917 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaElement.freeze | def freeze(self):
"""Make the SchemaElement's connections immutable."""
self.in_connections = frozenset(self.in_connections)
self.out_connections = frozenset(self.out_connections) | python | def freeze(self):
self.in_connections = frozenset(self.in_connections)
self.out_connections = frozenset(self.out_connections) | [
"def",
"freeze",
"(",
"self",
")",
":",
"self",
".",
"in_connections",
"=",
"frozenset",
"(",
"self",
".",
"in_connections",
")",
"self",
".",
"out_connections",
"=",
"frozenset",
"(",
"self",
".",
"out_connections",
")"
] | Make the SchemaElement's connections immutable. | [
"Make",
"the",
"SchemaElement",
"s",
"connections",
"immutable",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L180-L183 |
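Freezing swaps the mutable sets for `frozenset`s so later code cannot accidentally grow the connection sets. A tiny demo of the failure mode this prevents:

```python
connections = {'Person_Knows'}
connections = frozenset(connections)  # what freeze() does to each connection set
try:
    connections.add('Person_LivesIn')
except AttributeError as error:
    print('mutation rejected:', error)  # frozenset has no .add method
```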
246,918 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_default_property_values | def get_default_property_values(self, classname):
"""Return a dict with default values for all properties declared on this class."""
schema_element = self.get_element_by_class_name(classname)
result = {
property_name: property_descriptor.default
for property_name, property_descriptor in six.iteritems(schema_element.properties)
}
if schema_element.is_edge:
# Remove the source/destination properties for edges, if they exist.
result.pop(EDGE_SOURCE_PROPERTY_NAME, None)
result.pop(EDGE_DESTINATION_PROPERTY_NAME, None)
return result | python | def get_default_property_values(self, classname):
schema_element = self.get_element_by_class_name(classname)
result = {
property_name: property_descriptor.default
for property_name, property_descriptor in six.iteritems(schema_element.properties)
}
if schema_element.is_edge:
# Remove the source/destination properties for edges, if they exist.
result.pop(EDGE_SOURCE_PROPERTY_NAME, None)
result.pop(EDGE_DESTINATION_PROPERTY_NAME, None)
return result | [
"def",
"get_default_property_values",
"(",
"self",
",",
"classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name",
"(",
"classname",
")",
"result",
"=",
"{",
"property_name",
":",
"property_descriptor",
".",
"default",
"for",
"property_name",
",",
"property_descriptor",
"in",
"six",
".",
"iteritems",
"(",
"schema_element",
".",
"properties",
")",
"}",
"if",
"schema_element",
".",
"is_edge",
":",
"# Remove the source/destination properties for edges, if they exist.",
"result",
".",
"pop",
"(",
"EDGE_SOURCE_PROPERTY_NAME",
",",
"None",
")",
"result",
".",
"pop",
"(",
"EDGE_DESTINATION_PROPERTY_NAME",
",",
"None",
")",
"return",
"result"
] | Return a dict with default values for all properties declared on this class. | [
"Return",
"a",
"dict",
"with",
"default",
"values",
"for",
"all",
"properties",
"declared",
"on",
"this",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L297-L311 |
246,919 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._get_property_values_with_defaults | def _get_property_values_with_defaults(self, classname, property_values):
"""Return the property values for the class, with default values applied where needed."""
# To uphold OrientDB semantics, make a new dict with all property values set
# to their default values, which are None if no default was set.
# Then, overwrite its data with the supplied property values.
final_values = self.get_default_property_values(classname)
final_values.update(property_values)
return final_values | python | def _get_property_values_with_defaults(self, classname, property_values):
# To uphold OrientDB semantics, make a new dict with all property values set
# to their default values, which are None if no default was set.
# Then, overwrite its data with the supplied property values.
final_values = self.get_default_property_values(classname)
final_values.update(property_values)
return final_values | [
"def",
"_get_property_values_with_defaults",
"(",
"self",
",",
"classname",
",",
"property_values",
")",
":",
"# To uphold OrientDB semantics, make a new dict with all property values set",
"# to their default values, which are None if no default was set.",
"# Then, overwrite its data with the supplied property values.",
"final_values",
"=",
"self",
".",
"get_default_property_values",
"(",
"classname",
")",
"final_values",
".",
"update",
"(",
"property_values",
")",
"return",
"final_values"
] | Return the property values for the class, with default values applied where needed. | [
"Return",
"the",
"property",
"values",
"for",
"the",
"class",
"with",
"default",
"values",
"applied",
"where",
"needed",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L313-L320 |
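The "defaults first, then overwrite" idiom at the heart of the method above, isolated from the schema graph:

```python
default_values = {'name': None, 'net_worth': 0, 'aliases': []}  # per-class defaults
supplied_values = {'name': 'Kensho'}

final_values = dict(default_values)   # copy so the defaults dict stays intact
final_values.update(supplied_values)  # supplied values override the defaults
assert final_values == {'name': 'Kensho', 'net_worth': 0, 'aliases': []}
```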
246,920 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_element_by_class_name_or_raise | def get_element_by_class_name_or_raise(self, class_name):
"""Return the SchemaElement for the specified class name, asserting that it exists."""
if class_name not in self._elements:
raise InvalidClassError(u'Class does not exist: {}'.format(class_name))
return self._elements[class_name] | python | def get_element_by_class_name_or_raise(self, class_name):
if class_name not in self._elements:
raise InvalidClassError(u'Class does not exist: {}'.format(class_name))
return self._elements[class_name] | [
"def",
"get_element_by_class_name_or_raise",
"(",
"self",
",",
"class_name",
")",
":",
"if",
"class_name",
"not",
"in",
"self",
".",
"_elements",
":",
"raise",
"InvalidClassError",
"(",
"u'Class does not exist: {}'",
".",
"format",
"(",
"class_name",
")",
")",
"return",
"self",
".",
"_elements",
"[",
"class_name",
"]"
] | Return the SchemaElement for the specified class name, asserting that it exists. | [
"Return",
"the",
"SchemaElement",
"for",
"the",
"specified",
"class",
"name",
"asserting",
"that",
"it",
"exists",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L322-L327 |
246,921 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_vertex_schema_element_or_raise | def get_vertex_schema_element_or_raise(self, vertex_classname):
"""Return the schema element with the given name, asserting that it's of vertex type."""
schema_element = self.get_element_by_class_name_or_raise(vertex_classname)
if not schema_element.is_vertex:
raise InvalidClassError(u'Non-vertex class provided: {}'.format(vertex_classname))
return schema_element | python | def get_vertex_schema_element_or_raise(self, vertex_classname):
schema_element = self.get_element_by_class_name_or_raise(vertex_classname)
if not schema_element.is_vertex:
raise InvalidClassError(u'Non-vertex class provided: {}'.format(vertex_classname))
return schema_element | [
"def",
"get_vertex_schema_element_or_raise",
"(",
"self",
",",
"vertex_classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name_or_raise",
"(",
"vertex_classname",
")",
"if",
"not",
"schema_element",
".",
"is_vertex",
":",
"raise",
"InvalidClassError",
"(",
"u'Non-vertex class provided: {}'",
".",
"format",
"(",
"vertex_classname",
")",
")",
"return",
"schema_element"
] | Return the schema element with the given name, asserting that it's of vertex type. | [
"Return",
"the",
"schema",
"element",
"with",
"the",
"given",
"name",
"asserting",
"that",
"it",
"s",
"of",
"vertex",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L329-L336 |
246,922 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.get_edge_schema_element_or_raise | def get_edge_schema_element_or_raise(self, edge_classname):
"""Return the schema element with the given name, asserting that it's of edge type."""
schema_element = self.get_element_by_class_name_or_raise(edge_classname)
if not schema_element.is_edge:
raise InvalidClassError(u'Non-edge class provided: {}'.format(edge_classname))
return schema_element | python | def get_edge_schema_element_or_raise(self, edge_classname):
schema_element = self.get_element_by_class_name_or_raise(edge_classname)
if not schema_element.is_edge:
raise InvalidClassError(u'Non-edge class provided: {}'.format(edge_classname))
return schema_element | [
"def",
"get_edge_schema_element_or_raise",
"(",
"self",
",",
"edge_classname",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name_or_raise",
"(",
"edge_classname",
")",
"if",
"not",
"schema_element",
".",
"is_edge",
":",
"raise",
"InvalidClassError",
"(",
"u'Non-edge class provided: {}'",
".",
"format",
"(",
"edge_classname",
")",
")",
"return",
"schema_element"
] | Return the schema element with the given name, asserting that it's of edge type. | [
"Return",
"the",
"schema",
"element",
"with",
"the",
"given",
"name",
"asserting",
"that",
"it",
"s",
"of",
"edge",
"type",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L338-L345 |
246,923 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_is_non_abstract_vertex_type | def validate_is_non_abstract_vertex_type(self, vertex_classname):
"""Validate that a vertex classname corresponds to a non-abstract vertex class."""
element = self.get_vertex_schema_element_or_raise(vertex_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract'
.format(vertex_classname)) | python | def validate_is_non_abstract_vertex_type(self, vertex_classname):
element = self.get_vertex_schema_element_or_raise(vertex_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract vertex class, but {} is abstract'
.format(vertex_classname)) | [
"def",
"validate_is_non_abstract_vertex_type",
"(",
"self",
",",
"vertex_classname",
")",
":",
"element",
"=",
"self",
".",
"get_vertex_schema_element_or_raise",
"(",
"vertex_classname",
")",
"if",
"element",
".",
"abstract",
":",
"raise",
"InvalidClassError",
"(",
"u'Expected a non-abstract vertex class, but {} is abstract'",
".",
"format",
"(",
"vertex_classname",
")",
")"
] | Validate that a vertex classname corresponds to a non-abstract vertex class. | [
"Validate",
"that",
"a",
"vertex",
"classname",
"corresponds",
"to",
"a",
"non",
"-",
"abstract",
"vertex",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L355-L361 |
246,924 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_is_non_abstract_edge_type | def validate_is_non_abstract_edge_type(self, edge_classname):
"""Validate that a edge classname corresponds to a non-abstract edge class."""
element = self.get_edge_schema_element_or_raise(edge_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract edge class, but {} is abstract'
.format(edge_classname)) | python | def validate_is_non_abstract_edge_type(self, edge_classname):
element = self.get_edge_schema_element_or_raise(edge_classname)
if element.abstract:
raise InvalidClassError(u'Expected a non-abstract edge class, but {} is abstract'
.format(edge_classname)) | [
"def",
"validate_is_non_abstract_edge_type",
"(",
"self",
",",
"edge_classname",
")",
":",
"element",
"=",
"self",
".",
"get_edge_schema_element_or_raise",
"(",
"edge_classname",
")",
"if",
"element",
".",
"abstract",
":",
"raise",
"InvalidClassError",
"(",
"u'Expected a non-abstract vertex class, but {} is abstract'",
".",
"format",
"(",
"edge_classname",
")",
")"
] | Validate that an edge classname corresponds to a non-abstract edge class. | [
"Validate",
"that",
"a",
"edge",
"classname",
"corresponds",
"to",
"a",
"non",
"-",
"abstract",
"edge",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L363-L369 |
246,925 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph.validate_properties_exist | def validate_properties_exist(self, classname, property_names):
"""Validate that the specified property names are indeed defined on the given class."""
schema_element = self.get_element_by_class_name(classname)
requested_properties = set(property_names)
available_properties = set(schema_element.properties.keys())
non_existent_properties = requested_properties - available_properties
if non_existent_properties:
raise InvalidPropertyError(
u'Class "{}" does not have definitions for properties "{}": '
u'{}'.format(classname, non_existent_properties, property_names)) | python | def validate_properties_exist(self, classname, property_names):
schema_element = self.get_element_by_class_name(classname)
requested_properties = set(property_names)
available_properties = set(schema_element.properties.keys())
non_existent_properties = requested_properties - available_properties
if non_existent_properties:
raise InvalidPropertyError(
u'Class "{}" does not have definitions for properties "{}": '
u'{}'.format(classname, non_existent_properties, property_names)) | [
"def",
"validate_properties_exist",
"(",
"self",
",",
"classname",
",",
"property_names",
")",
":",
"schema_element",
"=",
"self",
".",
"get_element_by_class_name",
"(",
"classname",
")",
"requested_properties",
"=",
"set",
"(",
"property_names",
")",
"available_properties",
"=",
"set",
"(",
"schema_element",
".",
"properties",
".",
"keys",
"(",
")",
")",
"non_existent_properties",
"=",
"requested_properties",
"-",
"available_properties",
"if",
"non_existent_properties",
":",
"raise",
"InvalidPropertyError",
"(",
"u'Class \"{}\" does not have definitions for properties \"{}\": '",
"u'{}'",
".",
"format",
"(",
"classname",
",",
"non_existent_properties",
",",
"property_names",
")",
")"
] | Validate that the specified property names are indeed defined on the given class. | [
"Validate",
"that",
"the",
"specified",
"property",
"names",
"are",
"indeed",
"defined",
"on",
"the",
"given",
"class",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L371-L381 |
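The core of the validation above is a single set difference, which reports every missing property at once rather than failing on the first absent one:

```python
available_properties = {'name', 'net_worth'}
requested_properties = {'name', 'birthday', 'alias'}

non_existent = requested_properties - available_properties
assert non_existent == {'birthday', 'alias'}  # both problems surface together
if non_existent:
    message = 'No definitions for properties: {}'.format(sorted(non_existent))
```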
246,926 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._split_classes_by_kind | def _split_classes_by_kind(self, class_name_to_definition):
"""Assign each class to the vertex, edge or non-graph type sets based on its kind."""
for class_name in class_name_to_definition:
inheritance_set = self._inheritance_sets[class_name]
is_vertex = ORIENTDB_BASE_VERTEX_CLASS_NAME in inheritance_set
is_edge = ORIENTDB_BASE_EDGE_CLASS_NAME in inheritance_set
if is_vertex and is_edge:
raise AssertionError(u'Class {} appears to be both a vertex and an edge class: '
u'{}'.format(class_name, inheritance_set))
elif is_vertex:
self._vertex_class_names.add(class_name)
elif is_edge:
self._edge_class_names.add(class_name)
else:
self._non_graph_class_names.add(class_name)
# Freeze the classname sets so they cannot be modified again.
self._vertex_class_names = frozenset(self._vertex_class_names)
self._edge_class_names = frozenset(self._edge_class_names)
self._non_graph_class_names = frozenset(self._non_graph_class_names) | python | def _split_classes_by_kind(self, class_name_to_definition):
for class_name in class_name_to_definition:
inheritance_set = self._inheritance_sets[class_name]
is_vertex = ORIENTDB_BASE_VERTEX_CLASS_NAME in inheritance_set
is_edge = ORIENTDB_BASE_EDGE_CLASS_NAME in inheritance_set
if is_vertex and is_edge:
raise AssertionError(u'Class {} appears to be both a vertex and an edge class: '
u'{}'.format(class_name, inheritance_set))
elif is_vertex:
self._vertex_class_names.add(class_name)
elif is_edge:
self._edge_class_names.add(class_name)
else:
self._non_graph_class_names.add(class_name)
# Freeze the classname sets so they cannot be modified again.
self._vertex_class_names = frozenset(self._vertex_class_names)
self._edge_class_names = frozenset(self._edge_class_names)
self._non_graph_class_names = frozenset(self._non_graph_class_names) | [
"def",
"_split_classes_by_kind",
"(",
"self",
",",
"class_name_to_definition",
")",
":",
"for",
"class_name",
"in",
"class_name_to_definition",
":",
"inheritance_set",
"=",
"self",
".",
"_inheritance_sets",
"[",
"class_name",
"]",
"is_vertex",
"=",
"ORIENTDB_BASE_VERTEX_CLASS_NAME",
"in",
"inheritance_set",
"is_edge",
"=",
"ORIENTDB_BASE_EDGE_CLASS_NAME",
"in",
"inheritance_set",
"if",
"is_vertex",
"and",
"is_edge",
":",
"raise",
"AssertionError",
"(",
"u'Class {} appears to be both a vertex and an edge class: '",
"u'{}'",
".",
"format",
"(",
"class_name",
",",
"inheritance_set",
")",
")",
"elif",
"is_vertex",
":",
"self",
".",
"_vertex_class_names",
".",
"add",
"(",
"class_name",
")",
"elif",
"is_edge",
":",
"self",
".",
"_edge_class_names",
".",
"add",
"(",
"class_name",
")",
"else",
":",
"self",
".",
"_non_graph_class_names",
".",
"add",
"(",
"class_name",
")",
"# Freeze the classname sets so they cannot be modified again.",
"self",
".",
"_vertex_class_names",
"=",
"frozenset",
"(",
"self",
".",
"_vertex_class_names",
")",
"self",
".",
"_edge_class_names",
"=",
"frozenset",
"(",
"self",
".",
"_edge_class_names",
")",
"self",
".",
"_non_graph_class_names",
"=",
"frozenset",
"(",
"self",
".",
"_non_graph_class_names",
")"
] | Assign each class to the vertex, edge or non-graph type sets based on its kind. | [
"Assign",
"each",
"class",
"to",
"the",
"vertex",
"edge",
"or",
"non",
"-",
"graph",
"type",
"sets",
"based",
"on",
"its",
"kind",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L440-L461 |
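Partitioning in miniature: each class lands in exactly one bucket depending on which graph base class appears in its inheritance set. OrientDB's base vertex and edge classes are conventionally named 'V' and 'E'; the sketch assumes those values for the constants in the record:

```python
inheritance_sets = {
    'Person': {'Person', 'V'},
    'Person_Knows': {'Person_Knows', 'E'},
    'UnionType': {'UnionType'},
}
vertex_names, edge_names, non_graph_names = set(), set(), set()
for class_name, bases in inheritance_sets.items():
    is_vertex, is_edge = 'V' in bases, 'E' in bases
    if is_vertex and is_edge:
        raise AssertionError('{} is both a vertex and an edge'.format(class_name))
    elif is_vertex:
        vertex_names.add(class_name)
    elif is_edge:
        edge_names.add(class_name)
    else:
        non_graph_names.add(class_name)

assert vertex_names == {'Person'}
assert edge_names == {'Person_Knows'}
assert non_graph_names == {'UnionType'}
```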
246,927 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._create_descriptor_from_property_definition | def _create_descriptor_from_property_definition(self, class_name, property_definition,
class_name_to_definition):
"""Return a PropertyDescriptor corresponding to the given OrientDB property definition."""
name = property_definition['name']
type_id = property_definition['type']
linked_class = property_definition.get('linkedClass', None)
linked_type = property_definition.get('linkedType', None)
qualifier = None
validate_supported_property_type_id(name, type_id)
if type_id == PROPERTY_TYPE_LINK_ID:
if class_name not in self._edge_class_names:
raise AssertionError(u'Found a property of type Link on a non-edge class: '
u'{} {}'.format(name, class_name))
if name not in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
raise AssertionError(u'Found a property of type Link with an unexpected name: '
u'{} {}'.format(name, class_name))
if linked_class is None:
raise AssertionError(u'Property "{}" is declared with type Link but has no '
u'linked class: {}'.format(name, property_definition))
if linked_class not in self._vertex_class_names:
is_linked_class_abstract = class_name_to_definition[linked_class]['abstract']
all_subclasses_are_vertices = True
for subclass in self._subclass_sets[linked_class]:
if subclass != linked_class and subclass not in self.vertex_class_names:
all_subclasses_are_vertices = False
break
if not (is_linked_class_abstract and all_subclasses_are_vertices):
raise AssertionError(u'Property "{}" is declared as a Link to class {}, but '
u'that class is neither a vertex nor is it an '
u'abstract class whose subclasses are all vertices!'
.format(name, linked_class))
qualifier = linked_class
elif type_id in COLLECTION_PROPERTY_TYPES:
if linked_class is not None and linked_type is not None:
raise AssertionError(u'Property "{}" unexpectedly has both a linked class and '
u'a linked type: {}'.format(name, property_definition))
elif linked_type is not None and linked_class is None:
# No linked class, must be a linked native OrientDB type.
validate_supported_property_type_id(name + ' inner type', linked_type)
qualifier = linked_type
elif linked_class is not None and linked_type is None:
# No linked type, must be a linked non-graph user-defined type.
if linked_class not in self._non_graph_class_names:
raise AssertionError(u'Property "{}" is declared as the inner type of '
u'an embedded collection, but is not a non-graph class: '
u'{}'.format(name, linked_class))
qualifier = linked_class
else:
raise AssertionError(u'Property "{}" is an embedded collection but has '
u'neither a linked class nor a linked type: '
u'{}'.format(name, property_definition))
default_value = None
default_value_string = property_definition.get('defaultValue', None)
if default_value_string is not None:
default_value = parse_default_property_value(name, type_id, default_value_string)
descriptor = PropertyDescriptor(type_id=type_id, qualifier=qualifier, default=default_value)
# Sanity-check the descriptor before returning it.
_validate_collections_have_default_values(class_name, name, descriptor)
return descriptor | python | def _create_descriptor_from_property_definition(self, class_name, property_definition,
class_name_to_definition):
name = property_definition['name']
type_id = property_definition['type']
linked_class = property_definition.get('linkedClass', None)
linked_type = property_definition.get('linkedType', None)
qualifier = None
validate_supported_property_type_id(name, type_id)
if type_id == PROPERTY_TYPE_LINK_ID:
if class_name not in self._edge_class_names:
raise AssertionError(u'Found a property of type Link on a non-edge class: '
u'{} {}'.format(name, class_name))
if name not in {EDGE_SOURCE_PROPERTY_NAME, EDGE_DESTINATION_PROPERTY_NAME}:
raise AssertionError(u'Found a property of type Link with an unexpected name: '
u'{} {}'.format(name, class_name))
if linked_class is None:
raise AssertionError(u'Property "{}" is declared with type Link but has no '
u'linked class: {}'.format(name, property_definition))
if linked_class not in self._vertex_class_names:
is_linked_class_abstract = class_name_to_definition[linked_class]['abstract']
all_subclasses_are_vertices = True
for subclass in self._subclass_sets[linked_class]:
if subclass != linked_class and subclass not in self.vertex_class_names:
all_subclasses_are_vertices = False
break
if not (is_linked_class_abstract and all_subclasses_are_vertices):
raise AssertionError(u'Property "{}" is declared as a Link to class {}, but '
u'that class is neither a vertex nor is it an '
u'abstract class whose subclasses are all vertices!'
.format(name, linked_class))
qualifier = linked_class
elif type_id in COLLECTION_PROPERTY_TYPES:
if linked_class is not None and linked_type is not None:
raise AssertionError(u'Property "{}" unexpectedly has both a linked class and '
u'a linked type: {}'.format(name, property_definition))
elif linked_type is not None and linked_class is None:
# No linked class, must be a linked native OrientDB type.
validate_supported_property_type_id(name + ' inner type', linked_type)
qualifier = linked_type
elif linked_class is not None and linked_type is None:
# No linked type, must be a linked non-graph user-defined type.
if linked_class not in self._non_graph_class_names:
raise AssertionError(u'Property "{}" is declared as the inner type of '
u'an embedded collection, but is not a non-graph class: '
u'{}'.format(name, linked_class))
qualifier = linked_class
else:
raise AssertionError(u'Property "{}" is an embedded collection but has '
u'neither a linked class nor a linked type: '
u'{}'.format(name, property_definition))
default_value = None
default_value_string = property_definition.get('defaultValue', None)
if default_value_string is not None:
default_value = parse_default_property_value(name, type_id, default_value_string)
descriptor = PropertyDescriptor(type_id=type_id, qualifier=qualifier, default=default_value)
# Sanity-check the descriptor before returning it.
_validate_collections_have_default_values(class_name, name, descriptor)
return descriptor | [
"def",
"_create_descriptor_from_property_definition",
"(",
"self",
",",
"class_name",
",",
"property_definition",
",",
"class_name_to_definition",
")",
":",
"name",
"=",
"property_definition",
"[",
"'name'",
"]",
"type_id",
"=",
"property_definition",
"[",
"'type'",
"]",
"linked_class",
"=",
"property_definition",
".",
"get",
"(",
"'linkedClass'",
",",
"None",
")",
"linked_type",
"=",
"property_definition",
".",
"get",
"(",
"'linkedType'",
",",
"None",
")",
"qualifier",
"=",
"None",
"validate_supported_property_type_id",
"(",
"name",
",",
"type_id",
")",
"if",
"type_id",
"==",
"PROPERTY_TYPE_LINK_ID",
":",
"if",
"class_name",
"not",
"in",
"self",
".",
"_edge_class_names",
":",
"raise",
"AssertionError",
"(",
"u'Found a property of type Link on a non-edge class: '",
"u'{} {}'",
".",
"format",
"(",
"name",
",",
"class_name",
")",
")",
"if",
"name",
"not",
"in",
"{",
"EDGE_SOURCE_PROPERTY_NAME",
",",
"EDGE_DESTINATION_PROPERTY_NAME",
"}",
":",
"raise",
"AssertionError",
"(",
"u'Found a property of type Link with an unexpected name: '",
"u'{} {}'",
".",
"format",
"(",
"name",
",",
"class_name",
")",
")",
"if",
"linked_class",
"is",
"None",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" is declared with type Link but has no '",
"u'linked class: {}'",
".",
"format",
"(",
"name",
",",
"property_definition",
")",
")",
"if",
"linked_class",
"not",
"in",
"self",
".",
"_vertex_class_names",
":",
"is_linked_class_abstract",
"=",
"class_name_to_definition",
"[",
"linked_class",
"]",
"[",
"'abstract'",
"]",
"all_subclasses_are_vertices",
"=",
"True",
"for",
"subclass",
"in",
"self",
".",
"_subclass_sets",
"[",
"linked_class",
"]",
":",
"if",
"subclass",
"!=",
"linked_class",
"and",
"subclass",
"not",
"in",
"self",
".",
"vertex_class_names",
":",
"all_subclasses_are_vertices",
"=",
"False",
"break",
"if",
"not",
"(",
"is_linked_class_abstract",
"and",
"all_subclasses_are_vertices",
")",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" is declared as a Link to class {}, but '",
"u'that class is neither a vertex nor is it an '",
"u'abstract class whose subclasses are all vertices!'",
".",
"format",
"(",
"name",
",",
"linked_class",
")",
")",
"qualifier",
"=",
"linked_class",
"elif",
"type_id",
"in",
"COLLECTION_PROPERTY_TYPES",
":",
"if",
"linked_class",
"is",
"not",
"None",
"and",
"linked_type",
"is",
"not",
"None",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" unexpectedly has both a linked class and '",
"u'a linked type: {}'",
".",
"format",
"(",
"name",
",",
"property_definition",
")",
")",
"elif",
"linked_type",
"is",
"not",
"None",
"and",
"linked_class",
"is",
"None",
":",
"# No linked class, must be a linked native OrientDB type.",
"validate_supported_property_type_id",
"(",
"name",
"+",
"' inner type'",
",",
"linked_type",
")",
"qualifier",
"=",
"linked_type",
"elif",
"linked_class",
"is",
"not",
"None",
"and",
"linked_type",
"is",
"None",
":",
"# No linked type, must be a linked non-graph user-defined type.",
"if",
"linked_class",
"not",
"in",
"self",
".",
"_non_graph_class_names",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" is declared as the inner type of '",
"u'an embedded collection, but is not a non-graph class: '",
"u'{}'",
".",
"format",
"(",
"name",
",",
"linked_class",
")",
")",
"qualifier",
"=",
"linked_class",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Property \"{}\" is an embedded collection but has '",
"u'neither a linked class nor a linked type: '",
"u'{}'",
".",
"format",
"(",
"name",
",",
"property_definition",
")",
")",
"default_value",
"=",
"None",
"default_value_string",
"=",
"property_definition",
".",
"get",
"(",
"'defaultValue'",
",",
"None",
")",
"if",
"default_value_string",
"is",
"not",
"None",
":",
"default_value",
"=",
"parse_default_property_value",
"(",
"name",
",",
"type_id",
",",
"default_value_string",
")",
"descriptor",
"=",
"PropertyDescriptor",
"(",
"type_id",
"=",
"type_id",
",",
"qualifier",
"=",
"qualifier",
",",
"default",
"=",
"default_value",
")",
"# Sanity-check the descriptor before returning it.",
"_validate_collections_have_default_values",
"(",
"class_name",
",",
"name",
",",
"descriptor",
")",
"return",
"descriptor"
] | Return a PropertyDescriptor corresponding to the given OrientDB property definition. | [
"Return",
"a",
"PropertyDescriptor",
"corresponding",
"to",
"the",
"given",
"OrientDB",
"property",
"definition",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L548-L616 |
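For collection properties, the qualifier logic above boils down to a small decision table: exactly one of `linkedType` (native inner type) or `linkedClass` (embedded non-graph class) must be set. A reduced sketch of that branch, with hypothetical names and values:

```python
def resolve_collection_qualifier(name, linked_type, linked_class, non_graph_classes):
    if linked_class is not None and linked_type is not None:
        raise AssertionError('"{}" has both a linked class and a linked type'.format(name))
    if linked_type is not None:
        return linked_type                      # native inner type, e.g. a type id
    if linked_class is not None:
        if linked_class not in non_graph_classes:
            raise AssertionError('"{}" inner type {} is not a non-graph class'
                                 .format(name, linked_class))
        return linked_class                     # embedded user-defined type
    raise AssertionError('"{}" has neither a linked class nor a linked type'.format(name))

assert resolve_collection_qualifier('aliases', 7, None, set()) == 7
assert resolve_collection_qualifier('tags', None, 'TagInfo', {'TagInfo'}) == 'TagInfo'
```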
246,928 | kensho-technologies/graphql-compiler | graphql_compiler/schema_generation/schema_graph.py | SchemaGraph._link_vertex_and_edge_types | def _link_vertex_and_edge_types(self):
"""For each edge, link it to the vertex types it connects to each other."""
for edge_class_name in self._edge_class_names:
edge_element = self._elements[edge_class_name]
if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
if edge_element.abstract:
continue
else:
raise AssertionError(u'Found a non-abstract edge class with undefined '
u'endpoint types: {}'.format(edge_element))
from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
edge_schema_element = self._elements[edge_class_name]
# Link from_class_name with edge_class_name
for from_class in self._subclass_sets[from_class_name]:
from_schema_element = self._elements[from_class]
from_schema_element.out_connections.add(edge_class_name)
edge_schema_element.in_connections.add(from_class)
# Link edge_class_name with to_class_name
for to_class in self._subclass_sets[to_class_name]:
to_schema_element = self._elements[to_class]
edge_schema_element.out_connections.add(to_class)
to_schema_element.in_connections.add(edge_class_name) | python | def _link_vertex_and_edge_types(self):
for edge_class_name in self._edge_class_names:
edge_element = self._elements[edge_class_name]
if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or
EDGE_DESTINATION_PROPERTY_NAME not in edge_element.properties):
if edge_element.abstract:
continue
else:
raise AssertionError(u'Found a non-abstract edge class with undefined '
u'endpoint types: {}'.format(edge_element))
from_class_name = edge_element.properties[EDGE_SOURCE_PROPERTY_NAME].qualifier
to_class_name = edge_element.properties[EDGE_DESTINATION_PROPERTY_NAME].qualifier
edge_schema_element = self._elements[edge_class_name]
# Link from_class_name with edge_class_name
for from_class in self._subclass_sets[from_class_name]:
from_schema_element = self._elements[from_class]
from_schema_element.out_connections.add(edge_class_name)
edge_schema_element.in_connections.add(from_class)
# Link edge_class_name with to_class_name
for to_class in self._subclass_sets[to_class_name]:
to_schema_element = self._elements[to_class]
edge_schema_element.out_connections.add(to_class)
to_schema_element.in_connections.add(edge_class_name) | [
"def",
"_link_vertex_and_edge_types",
"(",
"self",
")",
":",
"for",
"edge_class_name",
"in",
"self",
".",
"_edge_class_names",
":",
"edge_element",
"=",
"self",
".",
"_elements",
"[",
"edge_class_name",
"]",
"if",
"(",
"EDGE_SOURCE_PROPERTY_NAME",
"not",
"in",
"edge_element",
".",
"properties",
"or",
"EDGE_DESTINATION_PROPERTY_NAME",
"not",
"in",
"edge_element",
".",
"properties",
")",
":",
"if",
"edge_element",
".",
"abstract",
":",
"continue",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Found a non-abstract edge class with undefined '",
"u'endpoint types: {}'",
".",
"format",
"(",
"edge_element",
")",
")",
"from_class_name",
"=",
"edge_element",
".",
"properties",
"[",
"EDGE_SOURCE_PROPERTY_NAME",
"]",
".",
"qualifier",
"to_class_name",
"=",
"edge_element",
".",
"properties",
"[",
"EDGE_DESTINATION_PROPERTY_NAME",
"]",
".",
"qualifier",
"edge_schema_element",
"=",
"self",
".",
"_elements",
"[",
"edge_class_name",
"]",
"# Link from_class_name with edge_class_name",
"for",
"from_class",
"in",
"self",
".",
"_subclass_sets",
"[",
"from_class_name",
"]",
":",
"from_schema_element",
"=",
"self",
".",
"_elements",
"[",
"from_class",
"]",
"from_schema_element",
".",
"out_connections",
".",
"add",
"(",
"edge_class_name",
")",
"edge_schema_element",
".",
"in_connections",
".",
"add",
"(",
"from_class",
")",
"# Link edge_class_name with to_class_name",
"for",
"to_class",
"in",
"self",
".",
"_subclass_sets",
"[",
"to_class_name",
"]",
":",
"to_schema_element",
"=",
"self",
".",
"_elements",
"[",
"to_class",
"]",
"edge_schema_element",
".",
"out_connections",
".",
"add",
"(",
"to_class",
")",
"to_schema_element",
".",
"in_connections",
".",
"add",
"(",
"edge_class_name",
")"
] | For each edge, link it to the vertex types it connects. | [
"For",
"each",
"edge",
"link",
"it",
"to",
"the",
"vertex",
"types",
"it",
"connects",
"to",
"each",
"other",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/schema_generation/schema_graph.py#L618-L646 |
246,929 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _is_local_filter | def _is_local_filter(filter_block):
"""Return True if the Filter block references no non-local fields, and False otherwise."""
# We need the "result" value of this function to be mutated within the "visitor_fn".
# Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here:
# https://www.python.org/dev/peps/pep-3104/
# Instead, we use a dict to store the value we need mutated, since the "visitor_fn"
# can mutate state in the parent scope, but not rebind variables in it without "nonlocal".
# TODO(predrag): Revisit this if we drop support for Python 2.
result = {
'is_local_filter': True
}
filter_predicate = filter_block.predicate
def visitor_fn(expression):
"""Expression visitor function that looks for uses of non-local fields."""
non_local_expression_types = (ContextField, ContextFieldExistence)
if isinstance(expression, non_local_expression_types):
result['is_local_filter'] = False
# Don't change the expression.
return expression
filter_predicate.visit_and_update(visitor_fn)
return result['is_local_filter'] | python | def _is_local_filter(filter_block):
# We need the "result" value of this function to be mutated within the "visitor_fn".
# Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here:
# https://www.python.org/dev/peps/pep-3104/
# Instead, we use a dict to store the value we need mutated, since the "visitor_fn"
# can mutate state in the parent scope, but not rebind variables in it without "nonlocal".
# TODO(predrag): Revisit this if we drop support for Python 2.
result = {
'is_local_filter': True
}
filter_predicate = filter_block.predicate
def visitor_fn(expression):
"""Expression visitor function that looks for uses of non-local fields."""
non_local_expression_types = (ContextField, ContextFieldExistence)
if isinstance(expression, non_local_expression_types):
result['is_local_filter'] = False
# Don't change the expression.
return expression
filter_predicate.visit_and_update(visitor_fn)
return result['is_local_filter'] | [
"def",
"_is_local_filter",
"(",
"filter_block",
")",
":",
"# We need the \"result\" value of this function to be mutated within the \"visitor_fn\".",
"# Since we support both Python 2 and Python 3, we can't use the \"nonlocal\" keyword here:",
"# https://www.python.org/dev/peps/pep-3104/",
"# Instead, we use a dict to store the value we need mutated, since the \"visitor_fn\"",
"# can mutate state in the parent scope, but not rebind variables in it without \"nonlocal\".",
"# TODO(predrag): Revisit this if we drop support for Python 2.",
"result",
"=",
"{",
"'is_local_filter'",
":",
"True",
"}",
"filter_predicate",
"=",
"filter_block",
".",
"predicate",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Expression visitor function that looks for uses of non-local fields.\"\"\"",
"non_local_expression_types",
"=",
"(",
"ContextField",
",",
"ContextFieldExistence",
")",
"if",
"isinstance",
"(",
"expression",
",",
"non_local_expression_types",
")",
":",
"result",
"[",
"'is_local_filter'",
"]",
"=",
"False",
"# Don't change the expression.",
"return",
"expression",
"filter_predicate",
".",
"visit_and_update",
"(",
"visitor_fn",
")",
"return",
"result",
"[",
"'is_local_filter'",
"]"
] | Return True if the Filter block references no non-local fields, and False otherwise. | [
"Return",
"True",
"if",
"the",
"Filter",
"block",
"references",
"no",
"non",
"-",
"local",
"fields",
"and",
"False",
"otherwise",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L53-L78 |
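The dict-based state mutation in `_is_local_filter` above is a general workaround for Python 2's lack of the `nonlocal` keyword: a closure can mutate objects in the enclosing scope but cannot rebind its variables. A minimal self-contained sketch of the same pattern, using a toy visitor over nested lists (nothing below is compiler API):

def contains_negative(tree):
    """Return True if any leaf of the nested list `tree` is negative."""
    # A one-entry dict stands in for Python 3's `nonlocal`: the closure
    # below may mutate state['found'], but could not rebind a plain
    # local variable of the enclosing function.
    state = {'found': False}

    def visit(node):
        if isinstance(node, list):
            for child in node:
                visit(child)
        elif node < 0:
            state['found'] = True

    visit(tree)
    return state['found']

assert contains_negative([1, [2, [-3]], 4]) is True
assert contains_negative([1, [2, 3]]) is False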
246,930 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _calculate_type_bound_at_step | def _calculate_type_bound_at_step(match_step):
"""Return the GraphQL type bound at the given step, or None if no bound is given."""
current_type_bounds = []
if isinstance(match_step.root_block, QueryRoot):
# The QueryRoot start class is a type bound.
current_type_bounds.extend(match_step.root_block.start_class)
if match_step.coerce_type_block is not None:
# The CoerceType target class is also a type bound.
current_type_bounds.extend(match_step.coerce_type_block.target_class)
if current_type_bounds:
# A type bound exists. Assert that there is exactly one bound, defined in precisely one way.
return get_only_element_from_collection(current_type_bounds)
else:
# No type bound exists at this MATCH step.
return None | python | def _calculate_type_bound_at_step(match_step):
current_type_bounds = []
if isinstance(match_step.root_block, QueryRoot):
# The QueryRoot start class is a type bound.
current_type_bounds.extend(match_step.root_block.start_class)
if match_step.coerce_type_block is not None:
# The CoerceType target class is also a type bound.
current_type_bounds.extend(match_step.coerce_type_block.target_class)
if current_type_bounds:
# A type bound exists. Assert that there is exactly one bound, defined in precisely one way.
return get_only_element_from_collection(current_type_bounds)
else:
# No type bound exists at this MATCH step.
return None | [
"def",
"_calculate_type_bound_at_step",
"(",
"match_step",
")",
":",
"current_type_bounds",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"match_step",
".",
"root_block",
",",
"QueryRoot",
")",
":",
"# The QueryRoot start class is a type bound.",
"current_type_bounds",
".",
"extend",
"(",
"match_step",
".",
"root_block",
".",
"start_class",
")",
"if",
"match_step",
".",
"coerce_type_block",
"is",
"not",
"None",
":",
"# The CoerceType target class is also a type bound.",
"current_type_bounds",
".",
"extend",
"(",
"match_step",
".",
"coerce_type_block",
".",
"target_class",
")",
"if",
"current_type_bounds",
":",
"# A type bound exists. Assert that there is exactly one bound, defined in precisely one way.",
"return",
"get_only_element_from_collection",
"(",
"current_type_bounds",
")",
"else",
":",
"# No type bound exists at this MATCH step.",
"return",
"None"
] | Return the GraphQL type bound at the given step, or None if no bound is given. | [
"Return",
"the",
"GraphQL",
"type",
"bound",
"at",
"the",
"given",
"step",
"or",
"None",
"if",
"no",
"bound",
"is",
"given",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L188-L205 |
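The record above relies on `get_only_element_from_collection` to enforce that exactly one type bound was found, but that helper's body is not shown here. The following is an assumed, hypothetical equivalent, included only for illustration:

def get_only_element(collection):
    # Assumed behavior of get_only_element_from_collection (not shown in
    # this record): exactly one element must be present.
    if len(collection) != 1:
        raise AssertionError(u'Expected exactly one element, got: {}'.format(collection))
    return next(iter(collection))

assert get_only_element(['Animal']) == 'Animal'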
246,931 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _assert_type_bounds_are_not_conflicting | def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound,
location, match_query):
"""Ensure that the two bounds either are an exact match, or one of them is None."""
if all((current_type_bound is not None,
previous_type_bound is not None,
current_type_bound != previous_type_bound)):
raise AssertionError(
u'Conflicting type bounds calculated at location {}: {} vs {} '
u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query)) | python | def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound,
location, match_query):
if all((current_type_bound is not None,
previous_type_bound is not None,
current_type_bound != previous_type_bound)):
raise AssertionError(
u'Conflicting type bounds calculated at location {}: {} vs {} '
u'for query {}'.format(location, previous_type_bound, current_type_bound, match_query)) | [
"def",
"_assert_type_bounds_are_not_conflicting",
"(",
"current_type_bound",
",",
"previous_type_bound",
",",
"location",
",",
"match_query",
")",
":",
"if",
"all",
"(",
"(",
"current_type_bound",
"is",
"not",
"None",
",",
"previous_type_bound",
"is",
"not",
"None",
",",
"current_type_bound",
"!=",
"previous_type_bound",
")",
")",
":",
"raise",
"AssertionError",
"(",
"u'Conflicting type bounds calculated at location {}: {} vs {} '",
"u'for query {}'",
".",
"format",
"(",
"location",
",",
"previous_type_bound",
",",
"current_type_bound",
",",
"match_query",
")",
")"
] | Ensure that the two bounds either are an exact match, or one of them is None. | [
"Ensure",
"that",
"the",
"two",
"bounds",
"either",
"are",
"an",
"exact",
"match",
"or",
"one",
"of",
"them",
"is",
"None",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L208-L216 |
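Restating the conflict check above with plain strings may make the `all((...))` idiom clearer: two bounds conflict only when both are set and they disagree, so a `None` bound is compatible with anything. A self-contained sketch:

def bounds_conflict(current_bound, previous_bound):
    # Conflict requires all three conditions at once; a None bound never
    # conflicts with anything.
    return all((current_bound is not None,
                previous_bound is not None,
                current_bound != previous_bound))

assert bounds_conflict('Animal', 'Person') is True
assert bounds_conflict('Animal', None) is False
assert bounds_conflict('Animal', 'Animal') is False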
246,932 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _expose_only_preferred_locations | def _expose_only_preferred_locations(match_query, location_types, coerced_locations,
preferred_locations, eligible_locations):
"""Return a MATCH query where only preferred locations are valid as query start locations."""
preferred_location_types = dict()
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in preferred_locations:
# This location is preferred. We have to make sure that at least one occurrence
# of this location in the MATCH query has an associated "class:" clause,
# which would be generated by a type bound at the corresponding MATCH step.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = preferred_location_types.get(current_step_location, None)
if previous_type_bound is not None:
# The location is already valid. If so, make sure that this step either does
# not have any type bounds (e.g. via QueryRoot or CoerceType blocks),
# or has type bounds that match the previously-decided type bound.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
else:
# The location is not yet known to be valid. If it does not have
# a type bound in this MATCH step, add a type coercion to the type
# registered in "location_types".
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_step = match_step._replace(
coerce_type_block=CoerceType({current_type_bound}))
preferred_location_types[current_step_location] = current_type_bound
elif current_step_location in eligible_locations:
                # This location is eligible, but not preferred. We have to make sure
# none of the MATCH steps with this location have type bounds, and therefore
# will not produce a corresponding "class:" clause in the resulting MATCH query.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is not None:
# There is a type bound here that we need to neutralize.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
if (current_step_location not in coerced_locations or
previous_type_bound is not None):
# The type bound here is already implied by the GraphQL query structure,
# or has already been applied at a previous occurrence of this location.
# We can simply delete the QueryRoot / CoerceType blocks that impart it.
if isinstance(match_step.root_block, QueryRoot):
new_root_block = None
else:
new_root_block = match_step.root_block
new_step = match_step._replace(
root_block=new_root_block, coerce_type_block=None)
else:
# The type bound here is not already implied by the GraphQL query structure.
# This should only be possible via a CoerceType block. Lower this CoerceType
# block into a Filter with INSTANCEOF to ensure the resulting query has the
# same semantics, while making the location invalid as a query start point.
if (isinstance(match_step.root_block, QueryRoot) or
match_step.coerce_type_block is None):
raise AssertionError(u'Unexpected MATCH step applying a type bound not '
u'already implied by the GraphQL query structure: '
u'{} {}'.format(match_step, match_query))
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_step = match_step._replace(
coerce_type_block=None, where_block=new_where_block)
else:
# There is no type bound that OrientDB can find defined at this location.
# No action is necessary.
pass
else:
# This location is neither preferred nor eligible.
# No action is necessary at this location.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def _expose_only_preferred_locations(match_query, location_types, coerced_locations,
preferred_locations, eligible_locations):
preferred_location_types = dict()
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in preferred_locations:
# This location is preferred. We have to make sure that at least one occurrence
# of this location in the MATCH query has an associated "class:" clause,
# which would be generated by a type bound at the corresponding MATCH step.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = preferred_location_types.get(current_step_location, None)
if previous_type_bound is not None:
# The location is already valid. If so, make sure that this step either does
# not have any type bounds (e.g. via QueryRoot or CoerceType blocks),
# or has type bounds that match the previously-decided type bound.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
else:
# The location is not yet known to be valid. If it does not have
# a type bound in this MATCH step, add a type coercion to the type
# registered in "location_types".
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_step = match_step._replace(
coerce_type_block=CoerceType({current_type_bound}))
preferred_location_types[current_step_location] = current_type_bound
elif current_step_location in eligible_locations:
                # This location is eligible, but not preferred. We have to make sure
# none of the MATCH steps with this location have type bounds, and therefore
# will not produce a corresponding "class:" clause in the resulting MATCH query.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is not None:
# There is a type bound here that we need to neutralize.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
if (current_step_location not in coerced_locations or
previous_type_bound is not None):
# The type bound here is already implied by the GraphQL query structure,
# or has already been applied at a previous occurrence of this location.
# We can simply delete the QueryRoot / CoerceType blocks that impart it.
if isinstance(match_step.root_block, QueryRoot):
new_root_block = None
else:
new_root_block = match_step.root_block
new_step = match_step._replace(
root_block=new_root_block, coerce_type_block=None)
else:
# The type bound here is not already implied by the GraphQL query structure.
# This should only be possible via a CoerceType block. Lower this CoerceType
# block into a Filter with INSTANCEOF to ensure the resulting query has the
# same semantics, while making the location invalid as a query start point.
if (isinstance(match_step.root_block, QueryRoot) or
match_step.coerce_type_block is None):
raise AssertionError(u'Unexpected MATCH step applying a type bound not '
u'already implied by the GraphQL query structure: '
u'{} {}'.format(match_step, match_query))
new_where_block = convert_coerce_type_and_add_to_where_block(
match_step.coerce_type_block, match_step.where_block)
new_step = match_step._replace(
coerce_type_block=None, where_block=new_where_block)
else:
# There is no type bound that OrientDB can find defined at this location.
# No action is necessary.
pass
else:
# This location is neither preferred nor eligible.
# No action is necessary at this location.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"_expose_only_preferred_locations",
"(",
"match_query",
",",
"location_types",
",",
"coerced_locations",
",",
"preferred_locations",
",",
"eligible_locations",
")",
":",
"preferred_location_types",
"=",
"dict",
"(",
")",
"eligible_location_types",
"=",
"dict",
"(",
")",
"new_match_traversals",
"=",
"[",
"]",
"for",
"current_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"new_traversal",
"=",
"[",
"]",
"for",
"match_step",
"in",
"current_traversal",
":",
"new_step",
"=",
"match_step",
"current_step_location",
"=",
"match_step",
".",
"as_block",
".",
"location",
"if",
"current_step_location",
"in",
"preferred_locations",
":",
"# This location is preferred. We have to make sure that at least one occurrence",
"# of this location in the MATCH query has an associated \"class:\" clause,",
"# which would be generated by a type bound at the corresponding MATCH step.",
"current_type_bound",
"=",
"_calculate_type_bound_at_step",
"(",
"match_step",
")",
"previous_type_bound",
"=",
"preferred_location_types",
".",
"get",
"(",
"current_step_location",
",",
"None",
")",
"if",
"previous_type_bound",
"is",
"not",
"None",
":",
"# The location is already valid. If so, make sure that this step either does",
"# not have any type bounds (e.g. via QueryRoot or CoerceType blocks),",
"# or has type bounds that match the previously-decided type bound.",
"_assert_type_bounds_are_not_conflicting",
"(",
"current_type_bound",
",",
"previous_type_bound",
",",
"current_step_location",
",",
"match_query",
")",
"else",
":",
"# The location is not yet known to be valid. If it does not have",
"# a type bound in this MATCH step, add a type coercion to the type",
"# registered in \"location_types\".",
"if",
"current_type_bound",
"is",
"None",
":",
"current_type_bound",
"=",
"location_types",
"[",
"current_step_location",
"]",
".",
"name",
"new_step",
"=",
"match_step",
".",
"_replace",
"(",
"coerce_type_block",
"=",
"CoerceType",
"(",
"{",
"current_type_bound",
"}",
")",
")",
"preferred_location_types",
"[",
"current_step_location",
"]",
"=",
"current_type_bound",
"elif",
"current_step_location",
"in",
"eligible_locations",
":",
"# This location is eligible, but not preferred. We have not make sure",
"# none of the MATCH steps with this location have type bounds, and therefore",
"# will not produce a corresponding \"class:\" clause in the resulting MATCH query.",
"current_type_bound",
"=",
"_calculate_type_bound_at_step",
"(",
"match_step",
")",
"previous_type_bound",
"=",
"eligible_location_types",
".",
"get",
"(",
"current_step_location",
",",
"None",
")",
"if",
"current_type_bound",
"is",
"not",
"None",
":",
"# There is a type bound here that we need to neutralize.",
"_assert_type_bounds_are_not_conflicting",
"(",
"current_type_bound",
",",
"previous_type_bound",
",",
"current_step_location",
",",
"match_query",
")",
"# Record the deduced type bound, so that if we encounter this location again,",
"# we ensure that we again infer the same type bound.",
"eligible_location_types",
"[",
"current_step_location",
"]",
"=",
"current_type_bound",
"if",
"(",
"current_step_location",
"not",
"in",
"coerced_locations",
"or",
"previous_type_bound",
"is",
"not",
"None",
")",
":",
"# The type bound here is already implied by the GraphQL query structure,",
"# or has already been applied at a previous occurrence of this location.",
"# We can simply delete the QueryRoot / CoerceType blocks that impart it.",
"if",
"isinstance",
"(",
"match_step",
".",
"root_block",
",",
"QueryRoot",
")",
":",
"new_root_block",
"=",
"None",
"else",
":",
"new_root_block",
"=",
"match_step",
".",
"root_block",
"new_step",
"=",
"match_step",
".",
"_replace",
"(",
"root_block",
"=",
"new_root_block",
",",
"coerce_type_block",
"=",
"None",
")",
"else",
":",
"# The type bound here is not already implied by the GraphQL query structure.",
"# This should only be possible via a CoerceType block. Lower this CoerceType",
"# block into a Filter with INSTANCEOF to ensure the resulting query has the",
"# same semantics, while making the location invalid as a query start point.",
"if",
"(",
"isinstance",
"(",
"match_step",
".",
"root_block",
",",
"QueryRoot",
")",
"or",
"match_step",
".",
"coerce_type_block",
"is",
"None",
")",
":",
"raise",
"AssertionError",
"(",
"u'Unexpected MATCH step applying a type bound not '",
"u'already implied by the GraphQL query structure: '",
"u'{} {}'",
".",
"format",
"(",
"match_step",
",",
"match_query",
")",
")",
"new_where_block",
"=",
"convert_coerce_type_and_add_to_where_block",
"(",
"match_step",
".",
"coerce_type_block",
",",
"match_step",
".",
"where_block",
")",
"new_step",
"=",
"match_step",
".",
"_replace",
"(",
"coerce_type_block",
"=",
"None",
",",
"where_block",
"=",
"new_where_block",
")",
"else",
":",
"# There is no type bound that OrientDB can find defined at this location.",
"# No action is necessary.",
"pass",
"else",
":",
"# This location is neither preferred nor eligible.",
"# No action is necessary at this location.",
"pass",
"new_traversal",
".",
"append",
"(",
"new_step",
")",
"new_match_traversals",
".",
"append",
"(",
"new_traversal",
")",
"return",
"match_query",
".",
"_replace",
"(",
"match_traversals",
"=",
"new_match_traversals",
")"
] | Return a MATCH query where only preferred locations are valid as query start locations. | [
"Return",
"a",
"MATCH",
"query",
"where",
"only",
"preferred",
"locations",
"are",
"valid",
"as",
"query",
"start",
"locations",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L219-L308 |
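A pattern worth noting in the record above (and throughout these records): MATCH steps are immutable namedtuples, so every rewrite builds a modified copy via `_replace` rather than mutating in place. A sketch with a toy `MatchStep` whose field names mirror the records but whose values are made up:

from collections import namedtuple

MatchStep = namedtuple('MatchStep', ['root_block', 'coerce_type_block', 'where_block'])

step = MatchStep(root_block='QueryRoot', coerce_type_block=None, where_block=None)
new_step = step._replace(coerce_type_block={'Animal'})

assert step.coerce_type_block is None            # the original is untouched
assert new_step.coerce_type_block == {'Animal'}  # the copy carries the change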
246,933 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | _expose_all_eligible_locations | def _expose_all_eligible_locations(match_query, location_types, eligible_locations):
"""Return a MATCH query where all eligible locations are valid as query start locations."""
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in eligible_locations:
# This location is eligible. We need to make sure it has an associated type bound,
# so that it produces a "class:" clause that will make it a valid query start
# location. It either already has such a type bound, or we can use the type
# implied by the GraphQL query structure to add one.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_coerce_type_block = CoerceType({current_type_bound})
new_step = match_step._replace(coerce_type_block=new_coerce_type_block)
else:
# There is a type bound here. We simply ensure that the bound is not conflicting
# with any other type bound at a different MATCH step with the same location.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
else:
# This function may only be called if there are no preferred locations. Since this
# location cannot be preferred, and is not eligible, it must be ineligible.
# No action is necessary in this case.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def _expose_all_eligible_locations(match_query, location_types, eligible_locations):
eligible_location_types = dict()
new_match_traversals = []
for current_traversal in match_query.match_traversals:
new_traversal = []
for match_step in current_traversal:
new_step = match_step
current_step_location = match_step.as_block.location
if current_step_location in eligible_locations:
# This location is eligible. We need to make sure it has an associated type bound,
# so that it produces a "class:" clause that will make it a valid query start
# location. It either already has such a type bound, or we can use the type
# implied by the GraphQL query structure to add one.
current_type_bound = _calculate_type_bound_at_step(match_step)
previous_type_bound = eligible_location_types.get(current_step_location, None)
if current_type_bound is None:
current_type_bound = location_types[current_step_location].name
new_coerce_type_block = CoerceType({current_type_bound})
new_step = match_step._replace(coerce_type_block=new_coerce_type_block)
else:
# There is a type bound here. We simply ensure that the bound is not conflicting
# with any other type bound at a different MATCH step with the same location.
_assert_type_bounds_are_not_conflicting(
current_type_bound, previous_type_bound, current_step_location, match_query)
# Record the deduced type bound, so that if we encounter this location again,
# we ensure that we again infer the same type bound.
eligible_location_types[current_step_location] = current_type_bound
else:
# This function may only be called if there are no preferred locations. Since this
# location cannot be preferred, and is not eligible, it must be ineligible.
# No action is necessary in this case.
pass
new_traversal.append(new_step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"_expose_all_eligible_locations",
"(",
"match_query",
",",
"location_types",
",",
"eligible_locations",
")",
":",
"eligible_location_types",
"=",
"dict",
"(",
")",
"new_match_traversals",
"=",
"[",
"]",
"for",
"current_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"new_traversal",
"=",
"[",
"]",
"for",
"match_step",
"in",
"current_traversal",
":",
"new_step",
"=",
"match_step",
"current_step_location",
"=",
"match_step",
".",
"as_block",
".",
"location",
"if",
"current_step_location",
"in",
"eligible_locations",
":",
"# This location is eligible. We need to make sure it has an associated type bound,",
"# so that it produces a \"class:\" clause that will make it a valid query start",
"# location. It either already has such a type bound, or we can use the type",
"# implied by the GraphQL query structure to add one.",
"current_type_bound",
"=",
"_calculate_type_bound_at_step",
"(",
"match_step",
")",
"previous_type_bound",
"=",
"eligible_location_types",
".",
"get",
"(",
"current_step_location",
",",
"None",
")",
"if",
"current_type_bound",
"is",
"None",
":",
"current_type_bound",
"=",
"location_types",
"[",
"current_step_location",
"]",
".",
"name",
"new_coerce_type_block",
"=",
"CoerceType",
"(",
"{",
"current_type_bound",
"}",
")",
"new_step",
"=",
"match_step",
".",
"_replace",
"(",
"coerce_type_block",
"=",
"new_coerce_type_block",
")",
"else",
":",
"# There is a type bound here. We simply ensure that the bound is not conflicting",
"# with any other type bound at a different MATCH step with the same location.",
"_assert_type_bounds_are_not_conflicting",
"(",
"current_type_bound",
",",
"previous_type_bound",
",",
"current_step_location",
",",
"match_query",
")",
"# Record the deduced type bound, so that if we encounter this location again,",
"# we ensure that we again infer the same type bound.",
"eligible_location_types",
"[",
"current_step_location",
"]",
"=",
"current_type_bound",
"else",
":",
"# This function may only be called if there are no preferred locations. Since this",
"# location cannot be preferred, and is not eligible, it must be ineligible.",
"# No action is necessary in this case.",
"pass",
"new_traversal",
".",
"append",
"(",
"new_step",
")",
"new_match_traversals",
".",
"append",
"(",
"new_traversal",
")",
"return",
"match_query",
".",
"_replace",
"(",
"match_traversals",
"=",
"new_match_traversals",
")"
] | Return a MATCH query where all eligible locations are valid as query start locations. | [
"Return",
"a",
"MATCH",
"query",
"where",
"all",
"eligible",
"locations",
"are",
"valid",
"as",
"query",
"start",
"locations",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L311-L350 |
246,934 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/workarounds/orientdb_query_execution.py | expose_ideal_query_execution_start_points | def expose_ideal_query_execution_start_points(compound_match_query, location_types,
coerced_locations):
"""Ensure that OrientDB only considers desirable query start points in query planning."""
new_queries = []
for match_query in compound_match_query.match_queries:
location_classification = _classify_query_locations(match_query)
preferred_locations, eligible_locations, _ = location_classification
if preferred_locations:
# Convert all eligible locations into non-eligible ones, by removing
# their "class:" clause. The "class:" clause is provided either by having
# a QueryRoot block or a CoerceType block in the MatchStep corresponding
# to the location. We remove it by converting the class check into
# an "INSTANCEOF" Filter block, which OrientDB is unable to optimize away.
new_query = _expose_only_preferred_locations(
match_query, location_types, coerced_locations,
preferred_locations, eligible_locations)
elif eligible_locations:
# Make sure that all eligible locations have a "class:" clause by adding
# a CoerceType block that is a no-op as guaranteed by the schema. This merely
# ensures that OrientDB is able to use each of these locations as a query start point,
# and will choose the one whose class is of lowest cardinality.
new_query = _expose_all_eligible_locations(
match_query, location_types, eligible_locations)
else:
raise AssertionError(u'This query has no preferred or eligible query start locations. '
u'This is almost certainly a bug: {}'.format(match_query))
new_queries.append(new_query)
return compound_match_query._replace(match_queries=new_queries) | python | def expose_ideal_query_execution_start_points(compound_match_query, location_types,
coerced_locations):
new_queries = []
for match_query in compound_match_query.match_queries:
location_classification = _classify_query_locations(match_query)
preferred_locations, eligible_locations, _ = location_classification
if preferred_locations:
# Convert all eligible locations into non-eligible ones, by removing
# their "class:" clause. The "class:" clause is provided either by having
# a QueryRoot block or a CoerceType block in the MatchStep corresponding
# to the location. We remove it by converting the class check into
# an "INSTANCEOF" Filter block, which OrientDB is unable to optimize away.
new_query = _expose_only_preferred_locations(
match_query, location_types, coerced_locations,
preferred_locations, eligible_locations)
elif eligible_locations:
# Make sure that all eligible locations have a "class:" clause by adding
# a CoerceType block that is a no-op as guaranteed by the schema. This merely
# ensures that OrientDB is able to use each of these locations as a query start point,
# and will choose the one whose class is of lowest cardinality.
new_query = _expose_all_eligible_locations(
match_query, location_types, eligible_locations)
else:
raise AssertionError(u'This query has no preferred or eligible query start locations. '
u'This is almost certainly a bug: {}'.format(match_query))
new_queries.append(new_query)
return compound_match_query._replace(match_queries=new_queries) | [
"def",
"expose_ideal_query_execution_start_points",
"(",
"compound_match_query",
",",
"location_types",
",",
"coerced_locations",
")",
":",
"new_queries",
"=",
"[",
"]",
"for",
"match_query",
"in",
"compound_match_query",
".",
"match_queries",
":",
"location_classification",
"=",
"_classify_query_locations",
"(",
"match_query",
")",
"preferred_locations",
",",
"eligible_locations",
",",
"_",
"=",
"location_classification",
"if",
"preferred_locations",
":",
"# Convert all eligible locations into non-eligible ones, by removing",
"# their \"class:\" clause. The \"class:\" clause is provided either by having",
"# a QueryRoot block or a CoerceType block in the MatchStep corresponding",
"# to the location. We remove it by converting the class check into",
"# an \"INSTANCEOF\" Filter block, which OrientDB is unable to optimize away.",
"new_query",
"=",
"_expose_only_preferred_locations",
"(",
"match_query",
",",
"location_types",
",",
"coerced_locations",
",",
"preferred_locations",
",",
"eligible_locations",
")",
"elif",
"eligible_locations",
":",
"# Make sure that all eligible locations have a \"class:\" clause by adding",
"# a CoerceType block that is a no-op as guaranteed by the schema. This merely",
"# ensures that OrientDB is able to use each of these locations as a query start point,",
"# and will choose the one whose class is of lowest cardinality.",
"new_query",
"=",
"_expose_all_eligible_locations",
"(",
"match_query",
",",
"location_types",
",",
"eligible_locations",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"u'This query has no preferred or eligible query start locations. '",
"u'This is almost certainly a bug: {}'",
".",
"format",
"(",
"match_query",
")",
")",
"new_queries",
".",
"append",
"(",
"new_query",
")",
"return",
"compound_match_query",
".",
"_replace",
"(",
"match_queries",
"=",
"new_queries",
")"
] | Ensure that OrientDB only considers desirable query start points in query planning. | [
"Ensure",
"that",
"OrientDB",
"only",
"considers",
"desirable",
"query",
"start",
"points",
"in",
"query",
"planning",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/workarounds/orientdb_query_execution.py#L353-L384 |
246,935 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _expression_list_to_conjunction | def _expression_list_to_conjunction(expression_list):
"""Return an Expression that is the `&&` of all the expressions in the given list."""
if not isinstance(expression_list, list):
raise AssertionError(u'Expected list. Received {}: '
u'{}'.format(type(expression_list).__name__, expression_list))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list '
u'(function should never be called with empty list): '
u'{}'.format(expression_list))
elif len(expression_list) == 1:
return expression_list[0]
else:
remaining_conjunction = _expression_list_to_conjunction(expression_list[1:])
return BinaryComposition(u'&&', expression_list[0], remaining_conjunction) | python | def _expression_list_to_conjunction(expression_list):
if not isinstance(expression_list, list):
raise AssertionError(u'Expected list. Received {}: '
u'{}'.format(type(expression_list).__name__, expression_list))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list '
u'(function should never be called with empty list): '
u'{}'.format(expression_list))
elif len(expression_list) == 1:
return expression_list[0]
else:
remaining_conjunction = _expression_list_to_conjunction(expression_list[1:])
return BinaryComposition(u'&&', expression_list[0], remaining_conjunction) | [
"def",
"_expression_list_to_conjunction",
"(",
"expression_list",
")",
":",
"if",
"not",
"isinstance",
"(",
"expression_list",
",",
"list",
")",
":",
"raise",
"AssertionError",
"(",
"u'Expected list. Received {}: '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"expression_list",
")",
".",
"__name__",
",",
"expression_list",
")",
")",
"if",
"len",
"(",
"expression_list",
")",
"==",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Received empty expression_list '",
"u'(function should never be called with empty list): '",
"u'{}'",
".",
"format",
"(",
"expression_list",
")",
")",
"elif",
"len",
"(",
"expression_list",
")",
"==",
"1",
":",
"return",
"expression_list",
"[",
"0",
"]",
"else",
":",
"remaining_conjunction",
"=",
"_expression_list_to_conjunction",
"(",
"expression_list",
"[",
"1",
":",
"]",
")",
"return",
"BinaryComposition",
"(",
"u'&&'",
",",
"expression_list",
"[",
"0",
"]",
",",
"remaining_conjunction",
")"
] | Return an Expression that is the `&&` of all the expressions in the given list. | [
"Return",
"an",
"Expression",
"that",
"is",
"the",
"&&",
"of",
"all",
"the",
"expressions",
"in",
"the",
"given",
"list",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L9-L22 |
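`_expression_list_to_conjunction` above folds a list into a right-nested chain of `&&` nodes. A self-contained sketch of the same fold, with a namedtuple standing in for the compiler's `BinaryComposition` IR class:

from collections import namedtuple

# Illustrative stand-in for the compiler's BinaryComposition IR class.
BinaryComposition = namedtuple('BinaryComposition', ['operator', 'left', 'right'])

def to_conjunction(expressions):
    """Fold a non-empty list into a right-nested chain of '&&' nodes."""
    if len(expressions) == 1:
        return expressions[0]
    return BinaryComposition(u'&&', expressions[0], to_conjunction(expressions[1:]))

# Three leaves produce ('a' && ('b' && 'c')): nesting associates to the right.
assert to_conjunction(['a', 'b', 'c']) == BinaryComposition(
    u'&&', 'a', BinaryComposition(u'&&', 'b', 'c'))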
246,936 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _extract_conjuction_elements_from_expression | def _extract_conjuction_elements_from_expression(expression):
"""Return a generator for expressions that are connected by `&&`s in the given expression."""
if isinstance(expression, BinaryComposition) and expression.operator == u'&&':
for element in _extract_conjuction_elements_from_expression(expression.left):
yield element
for element in _extract_conjuction_elements_from_expression(expression.right):
yield element
else:
yield expression | python | def _extract_conjuction_elements_from_expression(expression):
if isinstance(expression, BinaryComposition) and expression.operator == u'&&':
for element in _extract_conjuction_elements_from_expression(expression.left):
yield element
for element in _extract_conjuction_elements_from_expression(expression.right):
yield element
else:
yield expression | [
"def",
"_extract_conjuction_elements_from_expression",
"(",
"expression",
")",
":",
"if",
"isinstance",
"(",
"expression",
",",
"BinaryComposition",
")",
"and",
"expression",
".",
"operator",
"==",
"u'&&'",
":",
"for",
"element",
"in",
"_extract_conjuction_elements_from_expression",
"(",
"expression",
".",
"left",
")",
":",
"yield",
"element",
"for",
"element",
"in",
"_extract_conjuction_elements_from_expression",
"(",
"expression",
".",
"right",
")",
":",
"yield",
"element",
"else",
":",
"yield",
"expression"
] | Return a generator for expressions that are connected by `&&`s in the given expression. | [
"Return",
"a",
"generator",
"for",
"expressions",
"that",
"are",
"connected",
"by",
"&&",
"s",
"in",
"the",
"given",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L25-L33 |
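`_extract_conjuction_elements_from_expression` above performs the inverse operation: flattening a nested `&&` tree back into its leaves. A self-contained sketch, again with a namedtuple stand-in for `BinaryComposition`:

from collections import namedtuple

# Illustrative stand-in for the compiler's BinaryComposition IR class.
BinaryComposition = namedtuple('BinaryComposition', ['operator', 'left', 'right'])

def iter_conjuncts(expression):
    """Yield the leaves of a (possibly nested) '&&' tree, left to right."""
    if isinstance(expression, BinaryComposition) and expression.operator == u'&&':
        for side in (expression.left, expression.right):
            for leaf in iter_conjuncts(side):
                yield leaf
    else:
        yield expression

tree = BinaryComposition(u'&&', 'a', BinaryComposition(u'&&', 'b', 'c'))
assert list(iter_conjuncts(tree)) == ['a', 'b', 'c']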
246,937 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _construct_field_operator_expression_dict | def _construct_field_operator_expression_dict(expression_list):
"""Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators
"""
between_operators = (u'<=', u'>=')
inverse_operator = {u'>=': u'<=', u'<=': u'>='}
local_field_to_expressions = {}
remaining_expression_list = deque([])
for expression in expression_list:
if all((
isinstance(expression, BinaryComposition),
expression.operator in between_operators,
isinstance(expression.left, LocalField) or isinstance(expression.right, LocalField)
)):
if isinstance(expression.right, LocalField):
new_operator = inverse_operator[expression.operator]
new_expression = BinaryComposition(new_operator, expression.right, expression.left)
else:
new_expression = expression
field_name = new_expression.left.field_name
expressions_dict = local_field_to_expressions.setdefault(field_name, {})
expressions_dict.setdefault(new_expression.operator, []).append(new_expression)
else:
remaining_expression_list.append(expression)
return local_field_to_expressions, remaining_expression_list | python | def _construct_field_operator_expression_dict(expression_list):
between_operators = (u'<=', u'>=')
inverse_operator = {u'>=': u'<=', u'<=': u'>='}
local_field_to_expressions = {}
remaining_expression_list = deque([])
for expression in expression_list:
if all((
isinstance(expression, BinaryComposition),
expression.operator in between_operators,
isinstance(expression.left, LocalField) or isinstance(expression.right, LocalField)
)):
if isinstance(expression.right, LocalField):
new_operator = inverse_operator[expression.operator]
new_expression = BinaryComposition(new_operator, expression.right, expression.left)
else:
new_expression = expression
field_name = new_expression.left.field_name
expressions_dict = local_field_to_expressions.setdefault(field_name, {})
expressions_dict.setdefault(new_expression.operator, []).append(new_expression)
else:
remaining_expression_list.append(expression)
return local_field_to_expressions, remaining_expression_list | [
"def",
"_construct_field_operator_expression_dict",
"(",
"expression_list",
")",
":",
"between_operators",
"=",
"(",
"u'<='",
",",
"u'>='",
")",
"inverse_operator",
"=",
"{",
"u'>='",
":",
"u'<='",
",",
"u'<='",
":",
"u'>='",
"}",
"local_field_to_expressions",
"=",
"{",
"}",
"remaining_expression_list",
"=",
"deque",
"(",
"[",
"]",
")",
"for",
"expression",
"in",
"expression_list",
":",
"if",
"all",
"(",
"(",
"isinstance",
"(",
"expression",
",",
"BinaryComposition",
")",
",",
"expression",
".",
"operator",
"in",
"between_operators",
",",
"isinstance",
"(",
"expression",
".",
"left",
",",
"LocalField",
")",
"or",
"isinstance",
"(",
"expression",
".",
"right",
",",
"LocalField",
")",
")",
")",
":",
"if",
"isinstance",
"(",
"expression",
".",
"right",
",",
"LocalField",
")",
":",
"new_operator",
"=",
"inverse_operator",
"[",
"expression",
".",
"operator",
"]",
"new_expression",
"=",
"BinaryComposition",
"(",
"new_operator",
",",
"expression",
".",
"right",
",",
"expression",
".",
"left",
")",
"else",
":",
"new_expression",
"=",
"expression",
"field_name",
"=",
"new_expression",
".",
"left",
".",
"field_name",
"expressions_dict",
"=",
"local_field_to_expressions",
".",
"setdefault",
"(",
"field_name",
",",
"{",
"}",
")",
"expressions_dict",
".",
"setdefault",
"(",
"new_expression",
".",
"operator",
",",
"[",
"]",
")",
".",
"append",
"(",
"new_expression",
")",
"else",
":",
"remaining_expression_list",
".",
"append",
"(",
"expression",
")",
"return",
"local_field_to_expressions",
",",
"remaining_expression_list"
] | Construct a mapping from local fields to specified operators, and corresponding expressions.
Args:
expression_list: list of expressions to analyze
Returns:
local_field_to_expressions:
dict mapping local field names to "operator -> list of BinaryComposition" dictionaries,
for each BinaryComposition operator involving the LocalField
remaining_expression_list:
list of remaining expressions that were *not*
BinaryCompositions on a LocalField using any of the between operators | [
"Construct",
"a",
"mapping",
"from",
"local",
"fields",
"to",
"specified",
"operators",
"and",
"corresponding",
"expressions",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L36-L70 |
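The nested `setdefault` calls in the record above implement two-level grouping: first by field name, then by operator. The same idiom with plain tuples in place of IR nodes:

# Two-level grouping by (field, operator) with nested setdefault calls,
# using plain tuples in place of BinaryComposition nodes.
comparisons = [('name', u'>=', 'Alice'), ('name', u'<=', 'Zoe'), ('age', u'>=', 18)]

field_to_ops = {}
for field, operator, value in comparisons:
    field_to_ops.setdefault(field, {}).setdefault(operator, []).append(value)

assert field_to_ops == {
    'name': {u'>=': ['Alice'], u'<=': ['Zoe']},
    'age': {u'>=': [18]},
}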
246,938 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | _lower_expressions_to_between | def _lower_expressions_to_between(base_expression):
"""Return a new expression, with any eligible comparisons lowered to `between` clauses."""
expression_list = list(_extract_conjuction_elements_from_expression(base_expression))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list {} from base_expression: '
u'{}'.format(expression_list, base_expression))
elif len(expression_list) == 1:
return base_expression
else:
between_operators = (u'<=', u'>=')
local_field_to_expressions, new_expression_list = _construct_field_operator_expression_dict(
expression_list)
lowering_occurred = False
for field_name in local_field_to_expressions:
expressions_dict = local_field_to_expressions[field_name]
if all(operator in expressions_dict and len(expressions_dict[operator]) == 1
for operator in between_operators):
field = LocalField(field_name)
lower_bound = expressions_dict[u'>='][0].right
upper_bound = expressions_dict[u'<='][0].right
new_expression_list.appendleft(BetweenClause(field, lower_bound, upper_bound))
lowering_occurred = True
else:
for expression in expressions_dict.values():
new_expression_list.extend(expression)
if lowering_occurred:
return _expression_list_to_conjunction(list(new_expression_list))
else:
return base_expression | python | def _lower_expressions_to_between(base_expression):
expression_list = list(_extract_conjuction_elements_from_expression(base_expression))
if len(expression_list) == 0:
raise AssertionError(u'Received empty expression_list {} from base_expression: '
u'{}'.format(expression_list, base_expression))
elif len(expression_list) == 1:
return base_expression
else:
between_operators = (u'<=', u'>=')
local_field_to_expressions, new_expression_list = _construct_field_operator_expression_dict(
expression_list)
lowering_occurred = False
for field_name in local_field_to_expressions:
expressions_dict = local_field_to_expressions[field_name]
if all(operator in expressions_dict and len(expressions_dict[operator]) == 1
for operator in between_operators):
field = LocalField(field_name)
lower_bound = expressions_dict[u'>='][0].right
upper_bound = expressions_dict[u'<='][0].right
new_expression_list.appendleft(BetweenClause(field, lower_bound, upper_bound))
lowering_occurred = True
else:
for expression in expressions_dict.values():
new_expression_list.extend(expression)
if lowering_occurred:
return _expression_list_to_conjunction(list(new_expression_list))
else:
return base_expression | [
"def",
"_lower_expressions_to_between",
"(",
"base_expression",
")",
":",
"expression_list",
"=",
"list",
"(",
"_extract_conjuction_elements_from_expression",
"(",
"base_expression",
")",
")",
"if",
"len",
"(",
"expression_list",
")",
"==",
"0",
":",
"raise",
"AssertionError",
"(",
"u'Received empty expression_list {} from base_expression: '",
"u'{}'",
".",
"format",
"(",
"expression_list",
",",
"base_expression",
")",
")",
"elif",
"len",
"(",
"expression_list",
")",
"==",
"1",
":",
"return",
"base_expression",
"else",
":",
"between_operators",
"=",
"(",
"u'<='",
",",
"u'>='",
")",
"local_field_to_expressions",
",",
"new_expression_list",
"=",
"_construct_field_operator_expression_dict",
"(",
"expression_list",
")",
"lowering_occurred",
"=",
"False",
"for",
"field_name",
"in",
"local_field_to_expressions",
":",
"expressions_dict",
"=",
"local_field_to_expressions",
"[",
"field_name",
"]",
"if",
"all",
"(",
"operator",
"in",
"expressions_dict",
"and",
"len",
"(",
"expressions_dict",
"[",
"operator",
"]",
")",
"==",
"1",
"for",
"operator",
"in",
"between_operators",
")",
":",
"field",
"=",
"LocalField",
"(",
"field_name",
")",
"lower_bound",
"=",
"expressions_dict",
"[",
"u'>='",
"]",
"[",
"0",
"]",
".",
"right",
"upper_bound",
"=",
"expressions_dict",
"[",
"u'<='",
"]",
"[",
"0",
"]",
".",
"right",
"new_expression_list",
".",
"appendleft",
"(",
"BetweenClause",
"(",
"field",
",",
"lower_bound",
",",
"upper_bound",
")",
")",
"lowering_occurred",
"=",
"True",
"else",
":",
"for",
"expression",
"in",
"expressions_dict",
".",
"values",
"(",
")",
":",
"new_expression_list",
".",
"extend",
"(",
"expression",
")",
"if",
"lowering_occurred",
":",
"return",
"_expression_list_to_conjunction",
"(",
"list",
"(",
"new_expression_list",
")",
")",
"else",
":",
"return",
"base_expression"
] | Return a new expression, with any eligible comparisons lowered to `between` clauses. | [
"Return",
"a",
"new",
"expression",
"with",
"any",
"eligible",
"comparisons",
"lowered",
"to",
"between",
"clauses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L73-L103 |
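The eligibility condition in `_lower_expressions_to_between` above is easy to miss: a field is lowered to a `BETWEEN` clause only when it has exactly one `>=` and exactly one `<=` comparison. A self-contained restatement of just that predicate:

def eligible_for_between(expressions_dict):
    # Lower to BETWEEN only when the field has exactly one lower bound
    # ('>=') and exactly one upper bound ('<='); a missing or duplicated
    # side keeps the original comparisons instead.
    return all(operator in expressions_dict and len(expressions_dict[operator]) == 1
               for operator in (u'<=', u'>='))

assert eligible_for_between({u'>=': ['low'], u'<=': ['high']}) is True
assert eligible_for_between({u'>=': ['low']}) is False
assert eligible_for_between({u'>=': ['a', 'b'], u'<=': ['high']}) is False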
246,939 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_match/between_lowering.py | lower_comparisons_to_between | def lower_comparisons_to_between(match_query):
"""Return a new MatchQuery, with all eligible comparison filters lowered to between clauses."""
new_match_traversals = []
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
if step.where_block:
expression = step.where_block.predicate
new_where_block = Filter(_lower_expressions_to_between(expression))
new_traversal.append(step._replace(where_block=new_where_block))
else:
new_traversal.append(step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | python | def lower_comparisons_to_between(match_query):
new_match_traversals = []
for current_match_traversal in match_query.match_traversals:
new_traversal = []
for step in current_match_traversal:
if step.where_block:
expression = step.where_block.predicate
new_where_block = Filter(_lower_expressions_to_between(expression))
new_traversal.append(step._replace(where_block=new_where_block))
else:
new_traversal.append(step)
new_match_traversals.append(new_traversal)
return match_query._replace(match_traversals=new_match_traversals) | [
"def",
"lower_comparisons_to_between",
"(",
"match_query",
")",
":",
"new_match_traversals",
"=",
"[",
"]",
"for",
"current_match_traversal",
"in",
"match_query",
".",
"match_traversals",
":",
"new_traversal",
"=",
"[",
"]",
"for",
"step",
"in",
"current_match_traversal",
":",
"if",
"step",
".",
"where_block",
":",
"expression",
"=",
"step",
".",
"where_block",
".",
"predicate",
"new_where_block",
"=",
"Filter",
"(",
"_lower_expressions_to_between",
"(",
"expression",
")",
")",
"new_traversal",
".",
"append",
"(",
"step",
".",
"_replace",
"(",
"where_block",
"=",
"new_where_block",
")",
")",
"else",
":",
"new_traversal",
".",
"append",
"(",
"step",
")",
"new_match_traversals",
".",
"append",
"(",
"new_traversal",
")",
"return",
"match_query",
".",
"_replace",
"(",
"match_traversals",
"=",
"new_match_traversals",
")"
] | Return a new MatchQuery, with all eligible comparison filters lowered to between clauses. | [
"Return",
"a",
"new",
"MatchQuery",
"with",
"all",
"eligible",
"comparison",
"filters",
"lowered",
"to",
"between",
"clauses",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_match/between_lowering.py#L106-L122 |
246,940 | kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/common.py | _ensure_arguments_are_provided | def _ensure_arguments_are_provided(expected_types, arguments):
"""Ensure that all arguments expected by the query were actually provided."""
# This function only checks that the arguments were specified,
# and does not check types. Type checking is done as part of the actual formatting step.
expected_arg_names = set(six.iterkeys(expected_types))
provided_arg_names = set(six.iterkeys(arguments))
if expected_arg_names != provided_arg_names:
missing_args = expected_arg_names - provided_arg_names
unexpected_args = provided_arg_names - expected_arg_names
raise GraphQLInvalidArgumentError(u'Missing or unexpected arguments found: '
u'missing {}, unexpected '
u'{}'.format(missing_args, unexpected_args)) | python | def _ensure_arguments_are_provided(expected_types, arguments):
# This function only checks that the arguments were specified,
# and does not check types. Type checking is done as part of the actual formatting step.
expected_arg_names = set(six.iterkeys(expected_types))
provided_arg_names = set(six.iterkeys(arguments))
if expected_arg_names != provided_arg_names:
missing_args = expected_arg_names - provided_arg_names
unexpected_args = provided_arg_names - expected_arg_names
raise GraphQLInvalidArgumentError(u'Missing or unexpected arguments found: '
u'missing {}, unexpected '
u'{}'.format(missing_args, unexpected_args)) | [
"def",
"_ensure_arguments_are_provided",
"(",
"expected_types",
",",
"arguments",
")",
":",
"# This function only checks that the arguments were specified,",
"# and does not check types. Type checking is done as part of the actual formatting step.",
"expected_arg_names",
"=",
"set",
"(",
"six",
".",
"iterkeys",
"(",
"expected_types",
")",
")",
"provided_arg_names",
"=",
"set",
"(",
"six",
".",
"iterkeys",
"(",
"arguments",
")",
")",
"if",
"expected_arg_names",
"!=",
"provided_arg_names",
":",
"missing_args",
"=",
"expected_arg_names",
"-",
"provided_arg_names",
"unexpected_args",
"=",
"provided_arg_names",
"-",
"expected_arg_names",
"raise",
"GraphQLInvalidArgumentError",
"(",
"u'Missing or unexpected arguments found: '",
"u'missing {}, unexpected '",
"u'{}'",
".",
"format",
"(",
"missing_args",
",",
"unexpected_args",
")",
")"
] | Ensure that all arguments expected by the query were actually provided. | [
"Ensure",
"that",
"all",
"arguments",
"expected",
"by",
"the",
"query",
"were",
"actually",
"provided",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/common.py#L12-L24 |
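The set-difference check in `_ensure_arguments_are_provided` above reports both missing and unexpected names in one pass. A self-contained sketch of the same check, raising a plain `ValueError` in place of `GraphQLInvalidArgumentError`:

def check_arguments(expected_names, provided_names):
    """Report missing and unexpected argument names via set differences."""
    expected, provided = set(expected_names), set(provided_names)
    missing = expected - provided
    unexpected = provided - expected
    if missing or unexpected:
        raise ValueError(u'Missing or unexpected arguments found: missing {}, '
                         u'unexpected {}'.format(missing, unexpected))

check_arguments({'min_age'}, {'min_age'})  # equal sets: passes silently
try:
    check_arguments({'min_age'}, {'max_age'})
except ValueError as error:
    assert 'min_age' in str(error) and 'max_age' in str(error)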
246,941 | kensho-technologies/graphql-compiler | graphql_compiler/query_formatting/common.py | insert_arguments_into_query | def insert_arguments_into_query(compilation_result, arguments):
"""Insert the arguments into the compiled GraphQL query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a query in the appropriate output language, with inserted argument data
"""
_ensure_arguments_are_provided(compilation_result.input_metadata, arguments)
if compilation_result.language == MATCH_LANGUAGE:
return insert_arguments_into_match_query(compilation_result, arguments)
elif compilation_result.language == GREMLIN_LANGUAGE:
return insert_arguments_into_gremlin_query(compilation_result, arguments)
elif compilation_result.language == SQL_LANGUAGE:
return insert_arguments_into_sql_query(compilation_result, arguments)
else:
raise AssertionError(u'Unrecognized language in compilation result: '
u'{}'.format(compilation_result)) | python | def insert_arguments_into_query(compilation_result, arguments):
_ensure_arguments_are_provided(compilation_result.input_metadata, arguments)
if compilation_result.language == MATCH_LANGUAGE:
return insert_arguments_into_match_query(compilation_result, arguments)
elif compilation_result.language == GREMLIN_LANGUAGE:
return insert_arguments_into_gremlin_query(compilation_result, arguments)
elif compilation_result.language == SQL_LANGUAGE:
return insert_arguments_into_sql_query(compilation_result, arguments)
else:
raise AssertionError(u'Unrecognized language in compilation result: '
u'{}'.format(compilation_result)) | [
"def",
"insert_arguments_into_query",
"(",
"compilation_result",
",",
"arguments",
")",
":",
"_ensure_arguments_are_provided",
"(",
"compilation_result",
".",
"input_metadata",
",",
"arguments",
")",
"if",
"compilation_result",
".",
"language",
"==",
"MATCH_LANGUAGE",
":",
"return",
"insert_arguments_into_match_query",
"(",
"compilation_result",
",",
"arguments",
")",
"elif",
"compilation_result",
".",
"language",
"==",
"GREMLIN_LANGUAGE",
":",
"return",
"insert_arguments_into_gremlin_query",
"(",
"compilation_result",
",",
"arguments",
")",
"elif",
"compilation_result",
".",
"language",
"==",
"SQL_LANGUAGE",
":",
"return",
"insert_arguments_into_sql_query",
"(",
"compilation_result",
",",
"arguments",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Unrecognized language in compilation result: '",
"u'{}'",
".",
"format",
"(",
"compilation_result",
")",
")"
] | Insert the arguments into the compiled GraphQL query to form a complete query.
Args:
compilation_result: a CompilationResult object derived from the GraphQL compiler
arguments: dict, mapping argument name to its value, for every parameter the query expects.
Returns:
string, a query in the appropriate output language, with inserted argument data | [
"Insert",
"the",
"arguments",
"into",
"the",
"compiled",
"GraphQL",
"query",
"to",
"form",
"a",
"complete",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/query_formatting/common.py#L31-L51 |
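A hedged usage sketch for this dispatcher; compile_graphql_to_match is assumed to be the compiler entry point that produces the CompilationResult (it is defined elsewhere in this repo, not in this record):

from graphql_compiler.compiler import compile_graphql_to_match  # assumed entry point
from graphql_compiler.query_formatting.common import insert_arguments_into_query

# schema and graphql_query are supplied by the caller.
compilation_result = compile_graphql_to_match(schema, graphql_query)
match_query = insert_arguments_into_query(compilation_result, {'wanted_name': 'Alice'})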
246,942 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | QueryRoot.validate | def validate(self):
"""Ensure that the QueryRoot block is valid."""
if not (isinstance(self.start_class, set) and
all(isinstance(x, six.string_types) for x in self.start_class)):
raise TypeError(u'Expected set of string start_class, got: {} {}'.format(
type(self.start_class).__name__, self.start_class))
for cls in self.start_class:
validate_safe_string(cls) | python | def validate(self):
if not (isinstance(self.start_class, set) and
all(isinstance(x, six.string_types) for x in self.start_class)):
raise TypeError(u'Expected set of string start_class, got: {} {}'.format(
type(self.start_class).__name__, self.start_class))
for cls in self.start_class:
validate_safe_string(cls) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"self",
".",
"start_class",
",",
"set",
")",
"and",
"all",
"(",
"isinstance",
"(",
"x",
",",
"six",
".",
"string_types",
")",
"for",
"x",
"in",
"self",
".",
"start_class",
")",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected set of string start_class, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"start_class",
")",
".",
"__name__",
",",
"self",
".",
"start_class",
")",
")",
"for",
"cls",
"in",
"self",
".",
"start_class",
":",
"validate_safe_string",
"(",
"cls",
")"
] | Ensure that the QueryRoot block is valid. | [
"Ensure",
"that",
"the",
"QueryRoot",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L34-L42 |
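A minimal check of the contract above (class names hypothetical); the CoerceType, ConstructResult, Filter, and Backtrack validate records that follow use the same pattern, so only this one gets a sketch:

from graphql_compiler.compiler.blocks import QueryRoot

block = QueryRoot({'Animal', 'Species'})  # a set of strings is accepted
block.validate()                          # no exception raised
# QueryRoot(['Animal']) with a list instead of a set would fail validation with
# TypeError: Expected set of string start_class, got: list ['Animal']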
246,943 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | CoerceType.validate | def validate(self):
"""Ensure that the CoerceType block is valid."""
if not (isinstance(self.target_class, set) and
all(isinstance(x, six.string_types) for x in self.target_class)):
raise TypeError(u'Expected set of string target_class, got: {} {}'.format(
type(self.target_class).__name__, self.target_class))
for cls in self.target_class:
validate_safe_string(cls) | python | def validate(self):
if not (isinstance(self.target_class, set) and
all(isinstance(x, six.string_types) for x in self.target_class)):
raise TypeError(u'Expected set of string target_class, got: {} {}'.format(
type(self.target_class).__name__, self.target_class))
for cls in self.target_class:
validate_safe_string(cls) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"(",
"isinstance",
"(",
"self",
".",
"target_class",
",",
"set",
")",
"and",
"all",
"(",
"isinstance",
"(",
"x",
",",
"six",
".",
"string_types",
")",
"for",
"x",
"in",
"self",
".",
"target_class",
")",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected set of string target_class, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"target_class",
")",
".",
"__name__",
",",
"self",
".",
"target_class",
")",
")",
"for",
"cls",
"in",
"self",
".",
"target_class",
":",
"validate_safe_string",
"(",
"cls",
")"
] | Ensure that the CoerceType block is valid. | [
"Ensure",
"that",
"the",
"CoerceType",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L79-L87 |
246,944 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | ConstructResult.validate | def validate(self):
"""Ensure that the ConstructResult block is valid."""
if not isinstance(self.fields, dict):
raise TypeError(u'Expected dict fields, got: {} {}'.format(
type(self.fields).__name__, self.fields))
for key, value in six.iteritems(self.fields):
validate_safe_string(key)
if not isinstance(value, Expression):
raise TypeError(
u'Expected Expression values in the fields dict, got: '
u'{} -> {}'.format(key, value)) | python | def validate(self):
if not isinstance(self.fields, dict):
raise TypeError(u'Expected dict fields, got: {} {}'.format(
type(self.fields).__name__, self.fields))
for key, value in six.iteritems(self.fields):
validate_safe_string(key)
if not isinstance(value, Expression):
raise TypeError(
u'Expected Expression values in the fields dict, got: '
u'{} -> {}'.format(key, value)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fields",
",",
"dict",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected dict fields, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"fields",
")",
".",
"__name__",
",",
"self",
".",
"fields",
")",
")",
"for",
"key",
",",
"value",
"in",
"six",
".",
"iteritems",
"(",
"self",
".",
"fields",
")",
":",
"validate_safe_string",
"(",
"key",
")",
"if",
"not",
"isinstance",
"(",
"value",
",",
"Expression",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Expression values in the fields dict, got: '",
"u'{} -> {}'",
".",
"format",
"(",
"key",
",",
"value",
")",
")"
] | Ensure that the ConstructResult block is valid. | [
"Ensure",
"that",
"the",
"ConstructResult",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L120-L131 |
246,945 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Filter.validate | def validate(self):
"""Ensure that the Filter block is valid."""
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate)) | python | def validate(self):
if not isinstance(self.predicate, Expression):
raise TypeError(u'Expected Expression predicate, got: {} {}'.format(
type(self.predicate).__name__, self.predicate)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"predicate",
",",
"Expression",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected Expression predicate, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"predicate",
")",
".",
"__name__",
",",
"self",
".",
"predicate",
")",
")"
] | Ensure that the Filter block is valid. | [
"Ensure",
"that",
"the",
"Filter",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L174-L178 |
246,946 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Backtrack.validate | def validate(self):
"""Ensure that the Backtrack block is valid."""
validate_marked_location(self.location)
if not isinstance(self.optional, bool):
raise TypeError(u'Expected bool optional, got: {} {}'.format(
type(self.optional).__name__, self.optional)) | python | def validate(self):
validate_marked_location(self.location)
if not isinstance(self.optional, bool):
raise TypeError(u'Expected bool optional, got: {} {}'.format(
type(self.optional).__name__, self.optional)) | [
"def",
"validate",
"(",
"self",
")",
":",
"validate_marked_location",
"(",
"self",
".",
"location",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"optional",
",",
"bool",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected bool optional, got: {} {}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"optional",
")",
".",
"__name__",
",",
"self",
".",
"optional",
")",
")"
] | Ensure that the Backtrack block is valid. | [
"Ensure",
"that",
"the",
"Backtrack",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L395-L400 |
246,947 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Backtrack.to_gremlin | def to_gremlin(self):
"""Return a unicode object with the Gremlin representation of this BasicBlock."""
self.validate()
if self.optional:
operation = u'optional'
else:
operation = u'back'
mark_name, _ = self.location.get_location_name()
return u'{operation}({mark_name})'.format(
operation=operation,
mark_name=safe_quoted_string(mark_name)) | python | def to_gremlin(self):
self.validate()
if self.optional:
operation = u'optional'
else:
operation = u'back'
mark_name, _ = self.location.get_location_name()
return u'{operation}({mark_name})'.format(
operation=operation,
mark_name=safe_quoted_string(mark_name)) | [
"def",
"to_gremlin",
"(",
"self",
")",
":",
"self",
".",
"validate",
"(",
")",
"if",
"self",
".",
"optional",
":",
"operation",
"=",
"u'optional'",
"else",
":",
"operation",
"=",
"u'back'",
"mark_name",
",",
"_",
"=",
"self",
".",
"location",
".",
"get_location_name",
"(",
")",
"return",
"u'{operation}({mark_name})'",
".",
"format",
"(",
"operation",
"=",
"operation",
",",
"mark_name",
"=",
"safe_quoted_string",
"(",
"mark_name",
")",
")"
] | Return a unicode object with the Gremlin representation of this BasicBlock. | [
"Return",
"a",
"unicode",
"object",
"with",
"the",
"Gremlin",
"representation",
"of",
"this",
"BasicBlock",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L402-L414 |
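The two renderings the branch above can produce, mimicked standalone without the Location machinery (mark names are hypothetical):

def render_backtrack(mark_name, optional):
    # Mirrors Backtrack.to_gremlin: 'optional' for optional traversals, 'back' otherwise,
    # with the mark name single-quoted as safe_quoted_string does.
    operation = u'optional' if optional else u'back'
    return u"{}('{}')".format(operation, mark_name)

assert render_backtrack('Animal__1', False) == u"back('Animal__1')"
assert render_backtrack('Animal__1', True) == u"optional('Animal__1')"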
246,948 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/blocks.py | Fold.validate | def validate(self):
"""Ensure the Fold block is valid."""
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected a FoldScopeLocation for fold_scope_location, got: {} '
u'{}'.format(type(self.fold_scope_location), self.fold_scope_location)) | python | def validate(self):
if not isinstance(self.fold_scope_location, FoldScopeLocation):
raise TypeError(u'Expected a FoldScopeLocation for fold_scope_location, got: {} '
u'{}'.format(type(self.fold_scope_location), self.fold_scope_location)) | [
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"fold_scope_location",
",",
"FoldScopeLocation",
")",
":",
"raise",
"TypeError",
"(",
"u'Expected a FoldScopeLocation for fold_scope_location, got: {} '",
"u'{}'",
".",
"format",
"(",
"type",
"(",
"self",
".",
"fold_scope_location",
")",
",",
"self",
".",
"fold_scope_location",
")",
")"
] | Ensure the Fold block is valid. | [
"Ensure",
"the",
"Fold",
"block",
"is",
"valid",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/blocks.py#L446-L450 |
246,949 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_ir | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
"""Lower the IR blocks into a form that can be represented by a SQL query.
Args:
ir_blocks: list of IR blocks to lower into SQL-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
tree representation of IR blocks for recursive traversal by the SQL backend.
"""
_validate_all_blocks_supported(ir_blocks, query_metadata_table)
construct_result = _get_construct_result(ir_blocks)
query_path_to_location_info = _map_query_path_to_location_info(query_metadata_table)
query_path_to_output_fields = _map_query_path_to_outputs(
construct_result, query_path_to_location_info)
block_index_to_location = _map_block_index_to_location(ir_blocks)
# perform lowering steps
ir_blocks = lower_unary_transformations(ir_blocks)
ir_blocks = lower_unsupported_metafield_expressions(ir_blocks)
# iteratively construct SqlTree
query_path_to_node = {}
query_path_to_filters = {}
tree_root = None
for index, block in enumerate(ir_blocks):
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
location = block_index_to_location[index]
if isinstance(block, (blocks.QueryRoot,)):
query_path = location.query_path
if tree_root is not None:
raise AssertionError(
u'Encountered QueryRoot {} but tree root is already set to {} during '
u'construction of SQL query tree for IR blocks {} with query '
u'metadata table {}'.format(
block, tree_root, ir_blocks, query_metadata_table))
tree_root = SqlNode(block=block, query_path=query_path)
query_path_to_node[query_path] = tree_root
elif isinstance(block, blocks.Filter):
query_path_to_filters.setdefault(query_path, []).append(block)
else:
raise AssertionError(
u'Unsupported block {} unexpectedly passed validation for IR blocks '
u'{} with query metadata table {} .'.format(block, ir_blocks, query_metadata_table))
return SqlQueryTree(tree_root, query_path_to_location_info, query_path_to_output_fields,
query_path_to_filters, query_path_to_node) | python | def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None):
_validate_all_blocks_supported(ir_blocks, query_metadata_table)
construct_result = _get_construct_result(ir_blocks)
query_path_to_location_info = _map_query_path_to_location_info(query_metadata_table)
query_path_to_output_fields = _map_query_path_to_outputs(
construct_result, query_path_to_location_info)
block_index_to_location = _map_block_index_to_location(ir_blocks)
# perform lowering steps
ir_blocks = lower_unary_transformations(ir_blocks)
ir_blocks = lower_unsupported_metafield_expressions(ir_blocks)
# iteratively construct SqlTree
query_path_to_node = {}
query_path_to_filters = {}
tree_root = None
for index, block in enumerate(ir_blocks):
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
location = block_index_to_location[index]
if isinstance(block, (blocks.QueryRoot,)):
query_path = location.query_path
if tree_root is not None:
raise AssertionError(
u'Encountered QueryRoot {} but tree root is already set to {} during '
u'construction of SQL query tree for IR blocks {} with query '
u'metadata table {}'.format(
block, tree_root, ir_blocks, query_metadata_table))
tree_root = SqlNode(block=block, query_path=query_path)
query_path_to_node[query_path] = tree_root
elif isinstance(block, blocks.Filter):
query_path_to_filters.setdefault(query_path, []).append(block)
else:
raise AssertionError(
u'Unsupported block {} unexpectedly passed validation for IR blocks '
u'{} with query metadata table {} .'.format(block, ir_blocks, query_metadata_table))
return SqlQueryTree(tree_root, query_path_to_location_info, query_path_to_output_fields,
query_path_to_filters, query_path_to_node) | [
"def",
"lower_ir",
"(",
"ir_blocks",
",",
"query_metadata_table",
",",
"type_equivalence_hints",
"=",
"None",
")",
":",
"_validate_all_blocks_supported",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
"construct_result",
"=",
"_get_construct_result",
"(",
"ir_blocks",
")",
"query_path_to_location_info",
"=",
"_map_query_path_to_location_info",
"(",
"query_metadata_table",
")",
"query_path_to_output_fields",
"=",
"_map_query_path_to_outputs",
"(",
"construct_result",
",",
"query_path_to_location_info",
")",
"block_index_to_location",
"=",
"_map_block_index_to_location",
"(",
"ir_blocks",
")",
"# perform lowering steps",
"ir_blocks",
"=",
"lower_unary_transformations",
"(",
"ir_blocks",
")",
"ir_blocks",
"=",
"lower_unsupported_metafield_expressions",
"(",
"ir_blocks",
")",
"# iteratively construct SqlTree",
"query_path_to_node",
"=",
"{",
"}",
"query_path_to_filters",
"=",
"{",
"}",
"tree_root",
"=",
"None",
"for",
"index",
",",
"block",
"in",
"enumerate",
"(",
"ir_blocks",
")",
":",
"if",
"isinstance",
"(",
"block",
",",
"constants",
".",
"SKIPPABLE_BLOCK_TYPES",
")",
":",
"continue",
"location",
"=",
"block_index_to_location",
"[",
"index",
"]",
"if",
"isinstance",
"(",
"block",
",",
"(",
"blocks",
".",
"QueryRoot",
",",
")",
")",
":",
"query_path",
"=",
"location",
".",
"query_path",
"if",
"tree_root",
"is",
"not",
"None",
":",
"raise",
"AssertionError",
"(",
"u'Encountered QueryRoot {} but tree root is already set to {} during '",
"u'construction of SQL query tree for IR blocks {} with query '",
"u'metadata table {}'",
".",
"format",
"(",
"block",
",",
"tree_root",
",",
"ir_blocks",
",",
"query_metadata_table",
")",
")",
"tree_root",
"=",
"SqlNode",
"(",
"block",
"=",
"block",
",",
"query_path",
"=",
"query_path",
")",
"query_path_to_node",
"[",
"query_path",
"]",
"=",
"tree_root",
"elif",
"isinstance",
"(",
"block",
",",
"blocks",
".",
"Filter",
")",
":",
"query_path_to_filters",
".",
"setdefault",
"(",
"query_path",
",",
"[",
"]",
")",
".",
"append",
"(",
"block",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"u'Unsupported block {} unexpectedly passed validation for IR blocks '",
"u'{} with query metadata table {} .'",
".",
"format",
"(",
"block",
",",
"ir_blocks",
",",
"query_metadata_table",
")",
")",
"return",
"SqlQueryTree",
"(",
"tree_root",
",",
"query_path_to_location_info",
",",
"query_path_to_output_fields",
",",
"query_path_to_filters",
",",
"query_path_to_node",
")"
] | Lower the IR blocks into a form that can be represented by a SQL query.
Args:
ir_blocks: list of IR blocks to lower into SQL-compatible form
query_metadata_table: QueryMetadataTable object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
type_equivalence_hints: optional dict of GraphQL interface or type -> GraphQL union.
Used as a workaround for GraphQL's lack of support for
inheritance across "types" (i.e. non-interfaces), as well as a
workaround for Gremlin's total lack of inheritance-awareness.
The key-value pairs in the dict specify that the "key" type
is equivalent to the "value" type, i.e. that the GraphQL type or
interface in the key is the most-derived common supertype
of every GraphQL type in the "value" GraphQL union.
Recursive expansion of type equivalence hints is not performed,
and only type-level correctness of this argument is enforced.
See README.md for more details on everything this parameter does.
*****
Be very careful with this option, as bad input here will
lead to incorrect output queries being generated.
*****
Returns:
tree representation of IR blocks for recursive traversal by the SQL backend. | [
"Lower",
"the",
"IR",
"blocks",
"into",
"a",
"form",
"that",
"can",
"be",
"represented",
"by",
"a",
"SQL",
"query",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L17-L81 |
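A hedged sketch of the call as the SQL backend might make it; ir_blocks and query_metadata_table are assumed to come from an earlier compilation pass and are not constructed here:

query_tree = lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None)
# query_tree is the SqlQueryTree built above: the SqlNode tree root plus the
# query_path -> location info / output fields / filters / node mappings.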
246,950 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _validate_all_blocks_supported | def _validate_all_blocks_supported(ir_blocks, query_metadata_table):
"""Validate that all IR blocks and ConstructResult fields passed to the backend are supported.
Args:
ir_blocks: List[BasicBlock], IR blocks to validate.
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Raises:
NotImplementedError, if any block or ConstructResult field is unsupported.
"""
if len(ir_blocks) < 3:
raise AssertionError(
u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '
u'query is expected to have at least a QueryRoot, GlobalOperationsStart, and '
u'ConstructResult block. The query metadata table is {}.'.format(query_metadata_table))
construct_result = _get_construct_result(ir_blocks)
unsupported_blocks = []
unsupported_fields = []
for block in ir_blocks[:-1]:
if isinstance(block, constants.SUPPORTED_BLOCK_TYPES):
continue
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
unsupported_blocks.append(block)
for field_name, field in six.iteritems(construct_result.fields):
if not isinstance(field, constants.SUPPORTED_OUTPUT_EXPRESSION_TYPES):
unsupported_fields.append((field_name, field))
elif field.location.field in constants.UNSUPPORTED_META_FIELDS:
unsupported_fields.append((field_name, field))
if len(unsupported_blocks) > 0 or len(unsupported_fields) > 0:
raise NotImplementedError(
u'Encountered unsupported blocks {} and unsupported fields {} during construction of '
u'SQL query tree for IR blocks {} with query metadata table {}.'.format(
unsupported_blocks, unsupported_fields, ir_blocks, query_metadata_table)) | python | def _validate_all_blocks_supported(ir_blocks, query_metadata_table):
if len(ir_blocks) < 3:
raise AssertionError(
u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '
u'query is expected to have at least a QueryRoot, GlobalOperationsStart, and '
u'ConstructResult block. The query metadata table is {}.'.format(query_metadata_table))
construct_result = _get_construct_result(ir_blocks)
unsupported_blocks = []
unsupported_fields = []
for block in ir_blocks[:-1]:
if isinstance(block, constants.SUPPORTED_BLOCK_TYPES):
continue
if isinstance(block, constants.SKIPPABLE_BLOCK_TYPES):
continue
unsupported_blocks.append(block)
for field_name, field in six.iteritems(construct_result.fields):
if not isinstance(field, constants.SUPPORTED_OUTPUT_EXPRESSION_TYPES):
unsupported_fields.append((field_name, field))
elif field.location.field in constants.UNSUPPORTED_META_FIELDS:
unsupported_fields.append((field_name, field))
if len(unsupported_blocks) > 0 or len(unsupported_fields) > 0:
raise NotImplementedError(
u'Encountered unsupported blocks {} and unsupported fields {} during construction of '
u'SQL query tree for IR blocks {} with query metadata table {}.'.format(
unsupported_blocks, unsupported_fields, ir_blocks, query_metadata_table)) | [
"def",
"_validate_all_blocks_supported",
"(",
"ir_blocks",
",",
"query_metadata_table",
")",
":",
"if",
"len",
"(",
"ir_blocks",
")",
"<",
"3",
":",
"raise",
"AssertionError",
"(",
"u'Unexpectedly attempting to validate IR blocks with fewer than 3 blocks. A minimal '",
"u'query is expected to have at least a QueryRoot, GlobalOperationsStart, and '",
"u'ConstructResult block. The query metadata table is {}.'",
".",
"format",
"(",
"query_metadata_table",
")",
")",
"construct_result",
"=",
"_get_construct_result",
"(",
"ir_blocks",
")",
"unsupported_blocks",
"=",
"[",
"]",
"unsupported_fields",
"=",
"[",
"]",
"for",
"block",
"in",
"ir_blocks",
"[",
":",
"-",
"1",
"]",
":",
"if",
"isinstance",
"(",
"block",
",",
"constants",
".",
"SUPPORTED_BLOCK_TYPES",
")",
":",
"continue",
"if",
"isinstance",
"(",
"block",
",",
"constants",
".",
"SKIPPABLE_BLOCK_TYPES",
")",
":",
"continue",
"unsupported_blocks",
".",
"append",
"(",
"block",
")",
"for",
"field_name",
",",
"field",
"in",
"six",
".",
"iteritems",
"(",
"construct_result",
".",
"fields",
")",
":",
"if",
"not",
"isinstance",
"(",
"field",
",",
"constants",
".",
"SUPPORTED_OUTPUT_EXPRESSION_TYPES",
")",
":",
"unsupported_fields",
".",
"append",
"(",
"(",
"field_name",
",",
"field",
")",
")",
"elif",
"field",
".",
"location",
".",
"field",
"in",
"constants",
".",
"UNSUPPORTED_META_FIELDS",
":",
"unsupported_fields",
".",
"append",
"(",
"(",
"field_name",
",",
"field",
")",
")",
"if",
"len",
"(",
"unsupported_blocks",
")",
">",
"0",
"or",
"len",
"(",
"unsupported_fields",
")",
">",
"0",
":",
"raise",
"NotImplementedError",
"(",
"u'Encountered unsupported blocks {} and unsupported fields {} during construction of '",
"u'SQL query tree for IR blocks {} with query metadata table {}.'",
".",
"format",
"(",
"unsupported_blocks",
",",
"unsupported_fields",
",",
"ir_blocks",
",",
"query_metadata_table",
")",
")"
] | Validate that all IR blocks and ConstructResult fields passed to the backend are supported.
Args:
ir_blocks: List[BasicBlock], IR blocks to validate.
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Raises:
NotImplementedError, if any block or ConstructResult field is unsupported. | [
"Validate",
"that",
"all",
"IR",
"blocks",
"and",
"ConstructResult",
"fields",
"passed",
"to",
"the",
"backend",
"are",
"supported",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L84-L121 |
246,951 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _get_construct_result | def _get_construct_result(ir_blocks):
"""Return the ConstructResult block from a list of IR blocks."""
last_block = ir_blocks[-1]
if not isinstance(last_block, blocks.ConstructResult):
raise AssertionError(
u'The last IR block {} for IR blocks {} was unexpectedly not '
u'a ConstructResult block.'.format(last_block, ir_blocks))
return last_block | python | def _get_construct_result(ir_blocks):
last_block = ir_blocks[-1]
if not isinstance(last_block, blocks.ConstructResult):
raise AssertionError(
u'The last IR block {} for IR blocks {} was unexpectedly not '
u'a ConstructResult block.'.format(last_block, ir_blocks))
return last_block | [
"def",
"_get_construct_result",
"(",
"ir_blocks",
")",
":",
"last_block",
"=",
"ir_blocks",
"[",
"-",
"1",
"]",
"if",
"not",
"isinstance",
"(",
"last_block",
",",
"blocks",
".",
"ConstructResult",
")",
":",
"raise",
"AssertionError",
"(",
"u'The last IR block {} for IR blocks {} was unexpectedly not '",
"u'a ConstructResult block.'",
".",
"format",
"(",
"last_block",
",",
"ir_blocks",
")",
")",
"return",
"last_block"
] | Return the ConstructResult block from a list of IR blocks. | [
"Return",
"the",
"ConstructResult",
"block",
"from",
"a",
"list",
"of",
"IR",
"blocks",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L124-L131 |
246,952 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_query_path_to_location_info | def _map_query_path_to_location_info(query_metadata_table):
"""Create a map from each query path to a LocationInfo at that path.
Args:
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Returns:
Dict[Tuple[str], LocationInfo], dictionary mapping query path to LocationInfo at that path.
"""
query_path_to_location_info = {}
for location, location_info in query_metadata_table.registered_locations:
if not isinstance(location, Location):
continue
if location.query_path in query_path_to_location_info:
# make sure the stored location information equals the new location information
# for the fields the SQL backend requires.
equivalent_location_info = query_path_to_location_info[location.query_path]
if not _location_infos_equal(location_info, equivalent_location_info):
raise AssertionError(
u'Differing LocationInfos at query_path {} between {} and {}. Expected '
u'parent_location.query_path, optional_scopes_depth, recursive_scopes_depth '
u'and types to be equal for LocationInfos sharing the same query path.'.format(
location.query_path, location_info, equivalent_location_info))
query_path_to_location_info[location.query_path] = location_info
return query_path_to_location_info | python | def _map_query_path_to_location_info(query_metadata_table):
query_path_to_location_info = {}
for location, location_info in query_metadata_table.registered_locations:
if not isinstance(location, Location):
continue
if location.query_path in query_path_to_location_info:
# make sure the stored location information equals the new location information
# for the fields the SQL backend requires.
equivalent_location_info = query_path_to_location_info[location.query_path]
if not _location_infos_equal(location_info, equivalent_location_info):
raise AssertionError(
u'Differing LocationInfos at query_path {} between {} and {}. Expected '
u'parent_location.query_path, optional_scopes_depth, recursive_scopes_depth '
u'and types to be equal for LocationInfos sharing the same query path.'.format(
location.query_path, location_info, equivalent_location_info))
query_path_to_location_info[location.query_path] = location_info
return query_path_to_location_info | [
"def",
"_map_query_path_to_location_info",
"(",
"query_metadata_table",
")",
":",
"query_path_to_location_info",
"=",
"{",
"}",
"for",
"location",
",",
"location_info",
"in",
"query_metadata_table",
".",
"registered_locations",
":",
"if",
"not",
"isinstance",
"(",
"location",
",",
"Location",
")",
":",
"continue",
"if",
"location",
".",
"query_path",
"in",
"query_path_to_location_info",
":",
"# make sure the stored location information equals the new location information",
"# for the fields the SQL backend requires.",
"equivalent_location_info",
"=",
"query_path_to_location_info",
"[",
"location",
".",
"query_path",
"]",
"if",
"not",
"_location_infos_equal",
"(",
"location_info",
",",
"equivalent_location_info",
")",
":",
"raise",
"AssertionError",
"(",
"u'Differing LocationInfos at query_path {} between {} and {}. Expected '",
"u'parent_location.query_path, optional_scopes_depth, recursive_scopes_depth '",
"u'and types to be equal for LocationInfos sharing the same query path.'",
".",
"format",
"(",
"location",
".",
"query_path",
",",
"location_info",
",",
"equivalent_location_info",
")",
")",
"query_path_to_location_info",
"[",
"location",
".",
"query_path",
"]",
"=",
"location_info",
"return",
"query_path_to_location_info"
] | Create a map from each query path to a LocationInfo at that path.
Args:
query_metadata_table: QueryMetadataTable, object containing all metadata collected during
query processing, including location metadata (e.g. which locations
are folded or optional).
Returns:
Dict[Tuple[str], LocationInfo], dictionary mapping query path to LocationInfo at that path. | [
"Create",
"a",
"map",
"from",
"each",
"query",
"path",
"to",
"a",
"LocationInfo",
"at",
"that",
"path",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L134-L161 |
246,953 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _location_infos_equal | def _location_infos_equal(left, right):
"""Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise.
LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth,
recursive scopes depth, types and parent query paths are equal.
Args:
left: LocationInfo, left location info object to compare.
right: LocationInfo, right location info object to compare.
Returns:
bool, True if LocationInfo objects equivalent, False otherwise.
"""
if not isinstance(left, LocationInfo) or not isinstance(right, LocationInfo):
raise AssertionError(
u'Unsupported LocationInfo comparison between types {} and {} '
u'with values {}, {}'.format(type(left), type(right), left, right))
optional_scopes_depth_equal = (left.optional_scopes_depth == right.optional_scopes_depth)
parent_query_paths_equal = (
(left.parent_location is None and right.parent_location is None) or
(left.parent_location.query_path == right.parent_location.query_path))
recursive_scopes_depths_equal = (left.recursive_scopes_depth == right.recursive_scopes_depth)
types_equal = left.type == right.type
return all([
optional_scopes_depth_equal,
parent_query_paths_equal,
recursive_scopes_depths_equal,
types_equal,
]) | python | def _location_infos_equal(left, right):
if not isinstance(left, LocationInfo) or not isinstance(right, LocationInfo):
raise AssertionError(
u'Unsupported LocationInfo comparison between types {} and {} '
u'with values {}, {}'.format(type(left), type(right), left, right))
optional_scopes_depth_equal = (left.optional_scopes_depth == right.optional_scopes_depth)
parent_query_paths_equal = (
(left.parent_location is None and right.parent_location is None) or
(left.parent_location.query_path == right.parent_location.query_path))
recursive_scopes_depths_equal = (left.recursive_scopes_depth == right.recursive_scopes_depth)
types_equal = left.type == right.type
return all([
optional_scopes_depth_equal,
parent_query_paths_equal,
recursive_scopes_depths_equal,
types_equal,
]) | [
"def",
"_location_infos_equal",
"(",
"left",
",",
"right",
")",
":",
"if",
"not",
"isinstance",
"(",
"left",
",",
"LocationInfo",
")",
"or",
"not",
"isinstance",
"(",
"right",
",",
"LocationInfo",
")",
":",
"raise",
"AssertionError",
"(",
"u'Unsupported LocationInfo comparison between types {} and {} '",
"u'with values {}, {}'",
".",
"format",
"(",
"type",
"(",
"left",
")",
",",
"type",
"(",
"right",
")",
",",
"left",
",",
"right",
")",
")",
"optional_scopes_depth_equal",
"=",
"(",
"left",
".",
"optional_scopes_depth",
"==",
"right",
".",
"optional_scopes_depth",
")",
"parent_query_paths_equal",
"=",
"(",
"(",
"left",
".",
"parent_location",
"is",
"None",
"and",
"right",
".",
"parent_location",
"is",
"None",
")",
"or",
"(",
"left",
".",
"parent_location",
".",
"query_path",
"==",
"right",
".",
"parent_location",
".",
"query_path",
")",
")",
"recursive_scopes_depths_equal",
"=",
"(",
"left",
".",
"recursive_scopes_depth",
"==",
"right",
".",
"recursive_scopes_depth",
")",
"types_equal",
"=",
"left",
".",
"type",
"==",
"right",
".",
"type",
"return",
"all",
"(",
"[",
"optional_scopes_depth_equal",
",",
"parent_query_paths_equal",
",",
"recursive_scopes_depths_equal",
",",
"types_equal",
",",
"]",
")"
] | Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise.
LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth,
recursive scopes depth, types and parent query paths are equal.
Args:
left: LocationInfo, left location info object to compare.
right: LocationInfo, right location info object to compare.
Returns:
bool, True if LocationInfo objects equivalent, False otherwise. | [
"Return",
"True",
"if",
"LocationInfo",
"objects",
"are",
"equivalent",
"for",
"the",
"SQL",
"backend",
"False",
"otherwise",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L164-L196 |
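One caveat worth noting: if exactly one of the two parent_locations is None, the parent-path clause above dereferences None and raises AttributeError instead of returning False. A hypothetical defensive rewrite of just that clause (not the repo's actual code):

parent_query_paths_equal = (
    (left.parent_location is None) == (right.parent_location is None) and
    (left.parent_location is None or
     left.parent_location.query_path == right.parent_location.query_path)
)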
246,954 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_query_path_to_outputs | def _map_query_path_to_outputs(construct_result, query_path_to_location_info):
"""Assign the output fields of a ConstructResult block to their respective query_path."""
query_path_to_output_fields = {}
for output_name, field in six.iteritems(construct_result.fields):
field_name = field.location.field
output_query_path = field.location.query_path
output_field_info = constants.SqlOutput(
field_name=field_name,
output_name=output_name,
graphql_type=query_path_to_location_info[output_query_path].type)
output_field_mapping = query_path_to_output_fields.setdefault(output_query_path, [])
output_field_mapping.append(output_field_info)
return query_path_to_output_fields | python | def _map_query_path_to_outputs(construct_result, query_path_to_location_info):
query_path_to_output_fields = {}
for output_name, field in six.iteritems(construct_result.fields):
field_name = field.location.field
output_query_path = field.location.query_path
output_field_info = constants.SqlOutput(
field_name=field_name,
output_name=output_name,
graphql_type=query_path_to_location_info[output_query_path].type)
output_field_mapping = query_path_to_output_fields.setdefault(output_query_path, [])
output_field_mapping.append(output_field_info)
return query_path_to_output_fields | [
"def",
"_map_query_path_to_outputs",
"(",
"construct_result",
",",
"query_path_to_location_info",
")",
":",
"query_path_to_output_fields",
"=",
"{",
"}",
"for",
"output_name",
",",
"field",
"in",
"six",
".",
"iteritems",
"(",
"construct_result",
".",
"fields",
")",
":",
"field_name",
"=",
"field",
".",
"location",
".",
"field",
"output_query_path",
"=",
"field",
".",
"location",
".",
"query_path",
"output_field_info",
"=",
"constants",
".",
"SqlOutput",
"(",
"field_name",
"=",
"field_name",
",",
"output_name",
"=",
"output_name",
",",
"graphql_type",
"=",
"query_path_to_location_info",
"[",
"output_query_path",
"]",
".",
"type",
")",
"output_field_mapping",
"=",
"query_path_to_output_fields",
".",
"setdefault",
"(",
"output_query_path",
",",
"[",
"]",
")",
"output_field_mapping",
".",
"append",
"(",
"output_field_info",
")",
"return",
"query_path_to_output_fields"
] | Assign the output fields of a ConstructResult block to their respective query_path. | [
"Assign",
"the",
"output",
"fields",
"of",
"a",
"ConstructResult",
"block",
"to",
"their",
"respective",
"query_path",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L199-L211 |
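The grouping idiom above, reduced to a runnable standalone (paths and field names hypothetical):

outputs = {'animal_name': (('Animal',), 'name'), 'animal_uuid': (('Animal',), 'uuid')}
query_path_to_output_fields = {}
for output_name, (query_path, field_name) in outputs.items():
    # setdefault creates the per-path list on first use, then appends, as above.
    query_path_to_output_fields.setdefault(query_path, []).append((field_name, output_name))
assert sorted(query_path_to_output_fields[('Animal',)]) == [('name', 'animal_name'), ('uuid', 'animal_uuid')]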
246,955 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | _map_block_index_to_location | def _map_block_index_to_location(ir_blocks):
"""Associate each IR block with its corresponding location, by index."""
block_index_to_location = {}
# MarkLocation blocks occur after the blocks related to that location.
# The core approach here is to buffer blocks until their MarkLocation is encountered
# after which all buffered blocks can be associated with the encountered MarkLocation.location.
current_block_ixs = []
for num, ir_block in enumerate(ir_blocks):
if isinstance(ir_block, blocks.GlobalOperationsStart):
if len(current_block_ixs) > 0:
unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs]
raise AssertionError(
u'Unexpectedly encountered global operations before mapping blocks '
u'{} to their respective locations.'.format(unassociated_blocks))
break
current_block_ixs.append(num)
if isinstance(ir_block, blocks.MarkLocation):
for ix in current_block_ixs:
block_index_to_location[ix] = ir_block.location
current_block_ixs = []
return block_index_to_location | python | def _map_block_index_to_location(ir_blocks):
block_index_to_location = {}
# MarkLocation blocks occur after the blocks related to that location.
# The core approach here is to buffer blocks until their MarkLocation is encountered
# after which all buffered blocks can be associated with the encountered MarkLocation.location.
current_block_ixs = []
for num, ir_block in enumerate(ir_blocks):
if isinstance(ir_block, blocks.GlobalOperationsStart):
if len(current_block_ixs) > 0:
unassociated_blocks = [ir_blocks[ix] for ix in current_block_ixs]
raise AssertionError(
u'Unexpectedly encountered global operations before mapping blocks '
u'{} to their respective locations.'.format(unassociated_blocks))
break
current_block_ixs.append(num)
if isinstance(ir_block, blocks.MarkLocation):
for ix in current_block_ixs:
block_index_to_location[ix] = ir_block.location
current_block_ixs = []
return block_index_to_location | [
"def",
"_map_block_index_to_location",
"(",
"ir_blocks",
")",
":",
"block_index_to_location",
"=",
"{",
"}",
"# MarkLocation blocks occur after the blocks related to that location.",
"# The core approach here is to buffer blocks until their MarkLocation is encountered",
"# after which all buffered blocks can be associated with the encountered MarkLocation.location.",
"current_block_ixs",
"=",
"[",
"]",
"for",
"num",
",",
"ir_block",
"in",
"enumerate",
"(",
"ir_blocks",
")",
":",
"if",
"isinstance",
"(",
"ir_block",
",",
"blocks",
".",
"GlobalOperationsStart",
")",
":",
"if",
"len",
"(",
"current_block_ixs",
")",
">",
"0",
":",
"unassociated_blocks",
"=",
"[",
"ir_blocks",
"[",
"ix",
"]",
"for",
"ix",
"in",
"current_block_ixs",
"]",
"raise",
"AssertionError",
"(",
"u'Unexpectedly encountered global operations before mapping blocks '",
"u'{} to their respective locations.'",
".",
"format",
"(",
"unassociated_blocks",
")",
")",
"break",
"current_block_ixs",
".",
"append",
"(",
"num",
")",
"if",
"isinstance",
"(",
"ir_block",
",",
"blocks",
".",
"MarkLocation",
")",
":",
"for",
"ix",
"in",
"current_block_ixs",
":",
"block_index_to_location",
"[",
"ix",
"]",
"=",
"ir_block",
".",
"location",
"current_block_ixs",
"=",
"[",
"]",
"return",
"block_index_to_location"
] | Associate each IR block with its corresponding location, by index. | [
"Associate",
"each",
"IR",
"block",
"with",
"its",
"corresponding",
"location",
"by",
"index",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L214-L234 |
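The buffer-until-MarkLocation idea above, sketched standalone with tuples standing in for MarkLocation blocks:

ir = ['QueryRoot', 'Filter', ('MarkLocation', 'base'), 'Traverse', ('MarkLocation', 'child')]
block_index_to_location, buffered = {}, []
for num, block in enumerate(ir):
    buffered.append(num)
    if isinstance(block, tuple):  # stand-in for isinstance(block, blocks.MarkLocation)
        for ix in buffered:
            block_index_to_location[ix] = block[1]  # assign every buffered index this location
        buffered = []
assert block_index_to_location == {0: 'base', 1: 'base', 2: 'base', 3: 'child', 4: 'child'}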
246,956 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_unary_transformations | def lower_unary_transformations(ir_blocks):
"""Raise exception if any unary transformation block encountered."""
def visitor_fn(expression):
"""Raise error if current expression is a UnaryTransformation."""
if not isinstance(expression, expressions.UnaryTransformation):
return expression
raise NotImplementedError(
u'UnaryTransformation expression "{}" encountered with IR blocks {} is unsupported by '
u'the SQL backend.'.format(expression, ir_blocks)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | python | def lower_unary_transformations(ir_blocks):
def visitor_fn(expression):
"""Raise error if current expression is a UnaryTransformation."""
if not isinstance(expression, expressions.UnaryTransformation):
return expression
raise NotImplementedError(
u'UnaryTransformation expression "{}" encountered with IR blocks {} is unsupported by '
u'the SQL backend.'.format(expression, ir_blocks)
)
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | [
"def",
"lower_unary_transformations",
"(",
"ir_blocks",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Raise error if current expression is a UnaryTransformation.\"\"\"",
"if",
"not",
"isinstance",
"(",
"expression",
",",
"expressions",
".",
"UnaryTransformation",
")",
":",
"return",
"expression",
"raise",
"NotImplementedError",
"(",
"u'UnaryTransformation expression \"{}\" encountered with IR blocks {} is unsupported by '",
"u'the SQL backend.'",
".",
"format",
"(",
"expression",
",",
"ir_blocks",
")",
")",
"new_ir_blocks",
"=",
"[",
"block",
".",
"visit_and_update_expressions",
"(",
"visitor_fn",
")",
"for",
"block",
"in",
"ir_blocks",
"]",
"return",
"new_ir_blocks"
] | Raise exception if any unary transformation block encountered. | [
"Raise",
"exception",
"if",
"any",
"unary",
"transformation",
"block",
"encountered",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L237-L252 |
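Both this pass and the metafield pass in the next record share one shape: a visitor that returns expressions unchanged unless they match the unsupported case. A generic standalone version of that shape:

def make_rejecting_visitor(rejected_type, message):
    def visitor_fn(expression):
        # Return the expression unchanged unless it is of the rejected type.
        if isinstance(expression, rejected_type):
            raise NotImplementedError(message.format(expression))
        return expression
    return visitor_fn

# Applied exactly as above:
#   visitor_fn = make_rejecting_visitor(expressions.UnaryTransformation, u'unsupported: {}')
#   new_ir_blocks = [block.visit_and_update_expressions(visitor_fn) for block in ir_blocks]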
246,957 | kensho-technologies/graphql-compiler | graphql_compiler/compiler/ir_lowering_sql/__init__.py | lower_unsupported_metafield_expressions | def lower_unsupported_metafield_expressions(ir_blocks):
"""Raise exception if an unsupported metafield is encountered in any LocalField expression."""
def visitor_fn(expression):
"""Visitor function raising exception for any unsupported metafield."""
if not isinstance(expression, expressions.LocalField):
return expression
if expression.field_name not in constants.UNSUPPORTED_META_FIELDS:
return expression
raise NotImplementedError(
u'Encountered unsupported metafield {} in LocalField {} during construction of '
u'SQL query tree for IR blocks {}.'.format(
constants.UNSUPPORTED_META_FIELDS[expression.field_name], expression, ir_blocks))
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | python | def lower_unsupported_metafield_expressions(ir_blocks):
def visitor_fn(expression):
"""Visitor function raising exception for any unsupported metafield."""
if not isinstance(expression, expressions.LocalField):
return expression
if expression.field_name not in constants.UNSUPPORTED_META_FIELDS:
return expression
raise NotImplementedError(
u'Encountered unsupported metafield {} in LocalField {} during construction of '
u'SQL query tree for IR blocks {}.'.format(
constants.UNSUPPORTED_META_FIELDS[expression.field_name], expression, ir_blocks))
new_ir_blocks = [
block.visit_and_update_expressions(visitor_fn)
for block in ir_blocks
]
return new_ir_blocks | [
"def",
"lower_unsupported_metafield_expressions",
"(",
"ir_blocks",
")",
":",
"def",
"visitor_fn",
"(",
"expression",
")",
":",
"\"\"\"Visitor function raising exception for any unsupported metafield.\"\"\"",
"if",
"not",
"isinstance",
"(",
"expression",
",",
"expressions",
".",
"LocalField",
")",
":",
"return",
"expression",
"if",
"expression",
".",
"field_name",
"not",
"in",
"constants",
".",
"UNSUPPORTED_META_FIELDS",
":",
"return",
"expression",
"raise",
"NotImplementedError",
"(",
"u'Encountered unsupported metafield {} in LocalField {} during construction of '",
"u'SQL query tree for IR blocks {}.'",
".",
"format",
"(",
"constants",
".",
"UNSUPPORTED_META_FIELDS",
"[",
"expression",
".",
"field_name",
"]",
",",
"expression",
",",
"ir_blocks",
")",
")",
"new_ir_blocks",
"=",
"[",
"block",
".",
"visit_and_update_expressions",
"(",
"visitor_fn",
")",
"for",
"block",
"in",
"ir_blocks",
"]",
"return",
"new_ir_blocks"
] | Raise exception if an unsupported metafield is encountered in any LocalField expression. | [
"Raise",
"exception",
"if",
"an",
"unsupported",
"metafield",
"is",
"encountered",
"in",
"any",
"LocalField",
"expression",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/compiler/ir_lowering_sql/__init__.py#L255-L272 |
246,958 | kensho-technologies/graphql-compiler | graphql_compiler/__init__.py | get_graphql_schema_from_orientdb_schema_data | def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None,
hidden_classes=None):
"""Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data.
Each dict has the following string fields:
- name: string, the name of the class.
- superClasses (optional): list of strings, the name of the class's
superclasses.
- superClass (optional): string, the name of the class's superclass. May be
used instead of superClasses if there is only one
superClass. Used for backwards compatibility with
OrientDB.
- customFields (optional): dict, string -> string, data defined on the class
instead of instances of the class.
- abstract: bool, true if the class is abstract.
- properties: list of dicts, describing the class's properties.
Each property dictionary has the following string fields:
- name: string, the name of the property.
- type: int, builtin OrientDB type ID of the property.
See schema_properties.py for the mapping.
- linkedType (optional): int, if the property is a
collection of builtin OrientDB
objects, then it indicates their
type ID.
- linkedClass (optional): string, if the property is a
collection of class instances,
then it indicates the name of
the class. If class is an edge
class, and the field name is
either 'in' or 'out', then it
describes the name of an
endpoint of the edge.
- defaultValue: string, the textual representation of the
default value for the property, as
returned by OrientDB's schema
introspection code, e.g., '{}' for
the embedded set type. Note that if the
property is a collection type, it must
have a default value.
class_to_field_type_overrides: optional dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: optional set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}).
"""
if class_to_field_type_overrides is None:
class_to_field_type_overrides = dict()
if hidden_classes is None:
hidden_classes = set()
schema_graph = SchemaGraph(schema_data)
return get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
hidden_classes) | python | def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None,
hidden_classes=None):
if class_to_field_type_overrides is None:
class_to_field_type_overrides = dict()
if hidden_classes is None:
hidden_classes = set()
schema_graph = SchemaGraph(schema_data)
return get_graphql_schema_from_schema_graph(schema_graph, class_to_field_type_overrides,
hidden_classes) | [
"def",
"get_graphql_schema_from_orientdb_schema_data",
"(",
"schema_data",
",",
"class_to_field_type_overrides",
"=",
"None",
",",
"hidden_classes",
"=",
"None",
")",
":",
"if",
"class_to_field_type_overrides",
"is",
"None",
":",
"class_to_field_type_overrides",
"=",
"dict",
"(",
")",
"if",
"hidden_classes",
"is",
"None",
":",
"hidden_classes",
"=",
"set",
"(",
")",
"schema_graph",
"=",
"SchemaGraph",
"(",
"schema_data",
")",
"return",
"get_graphql_schema_from_schema_graph",
"(",
"schema_graph",
",",
"class_to_field_type_overrides",
",",
"hidden_classes",
")"
] | Construct a GraphQL schema from an OrientDB schema.
Args:
schema_data: list of dicts describing the classes in the OrientDB schema. The following
format is the way the data is structured in OrientDB 2. See
the README.md file for an example of how to query this data.
Each dict has the following string fields:
- name: string, the name of the class.
- superClasses (optional): list of strings, the name of the class's
superclasses.
- superClass (optional): string, the name of the class's superclass. May be
used instead of superClasses if there is only one
superClass. Used for backwards compatibility with
OrientDB.
- customFields (optional): dict, string -> string, data defined on the class
instead of instances of the class.
- abstract: bool, true if the class is abstract.
- properties: list of dicts, describing the class's properties.
Each property dictionary has the following string fields:
- name: string, the name of the property.
- type: int, builtin OrientDB type ID of the property.
See schema_properties.py for the mapping.
- linkedType (optional): int, if the property is a
collection of builtin OrientDB
objects, then it indicates their
type ID.
- linkedClass (optional): string, if the property is a
collection of class instances,
then it indicates the name of
the class. If class is an edge
class, and the field name is
either 'in' or 'out', then it
describes the name of an
endpoint of the edge.
- defaultValue: string, the textual representation of the
default value for the property, as
returned by OrientDB's schema
introspection code, e.g., '{}' for
the embedded set type. Note that if the
property is a collection type, it must
have a default value.
class_to_field_type_overrides: optional dict, class name -> {field name -> field type},
(string -> {string -> GraphQLType}). Used to override the
type of a field in the class where it's first defined and all
the class's subclasses.
hidden_classes: optional set of strings, classes to not include in the GraphQL schema.
Returns:
tuple of (GraphQL schema object, GraphQL type equivalence hints dict).
The tuple is of type (GraphQLSchema, {GraphQLObjectType -> GraphQLUnionType}). | [
"Construct",
"a",
"GraphQL",
"schema",
"from",
"an",
"OrientDB",
"schema",
"."
] | f6079c6d10f64932f6b3af309b79bcea2123ca8f | https://github.com/kensho-technologies/graphql-compiler/blob/f6079c6d10f64932f6b3af309b79bcea2123ca8f/graphql_compiler/__init__.py#L139-L199 |
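A minimal schema_data payload in the shape the docstring describes; class names are hypothetical, real payloads come from querying OrientDB's schema metadata as the README shows, and this sketch assumes the base vertex class 'V' must be present:

schema_data = [
    {'name': 'V', 'abstract': True, 'properties': []},
    {'name': 'Animal', 'superClasses': ['V'], 'abstract': False,
     'properties': [{'name': 'name', 'type': 7, 'defaultValue': None}]},  # 7 is OrientDB's STRING type ID
]
schema, type_equivalence_hints = get_graphql_schema_from_orientdb_schema_data(schema_data)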
246,959 | slackapi/python-slack-events-api | slackeventsapi/__init__.py | SlackEventAdapter.start | def start(self, host='127.0.0.1', port=None, debug=False, **kwargs):
"""
Start the built in webserver, bound to the host and port you'd like.
Default host is `127.0.0.1` and port 8080.
:param host: The host you want to bind the built in webserver to
:param port: The port number you want the webserver to run on
:param debug: Set to `True` to enable debug level logging
:param kwargs: Additional arguments you'd like to pass to Flask
"""
self.server.run(host=host, port=port, debug=debug, **kwargs) | python | def start(self, host='127.0.0.1', port=None, debug=False, **kwargs):
self.server.run(host=host, port=port, debug=debug, **kwargs) | [
"def",
"start",
"(",
"self",
",",
"host",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"None",
",",
"debug",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"server",
".",
"run",
"(",
"host",
"=",
"host",
",",
"port",
"=",
"port",
",",
"debug",
"=",
"debug",
",",
"*",
"*",
"kwargs",
")"
] | Start the built in webserver, bound to the host and port you'd like.
Default host is `127.0.0.1` and port 8080.
:param host: The host you want to bind the built in webserver to
:param port: The port number you want the webserver to run on
:param debug: Set to `True` to enable debug level logging
:param kwargs: Additional arguments you'd like to pass to Flask | [
"Start",
"the",
"built",
"in",
"webserver",
"bound",
"to",
"the",
"host",
"and",
"port",
"you",
"d",
"like",
".",
"Default",
"host",
"is",
"127",
".",
"0",
".",
"0",
".",
"1",
"and",
"port",
"8080",
"."
] | 1254d83181eb939f124a0e4746dafea7e14047c1 | https://github.com/slackapi/python-slack-events-api/blob/1254d83181eb939f124a0e4746dafea7e14047c1/slackeventsapi/__init__.py#L13-L23 |
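Typical use, per this library's README; SLACK_SIGNING_SECRET is a placeholder for your app's signing secret:

from slackeventsapi import SlackEventAdapter

slack_events_adapter = SlackEventAdapter(SLACK_SIGNING_SECRET, "/slack/events")

@slack_events_adapter.on("message")
def handle_message(event_data):
    # Print the inner event payload that Slack delivered.
    print(event_data["event"])

slack_events_adapter.start(port=3000)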
246,960 | apragacz/django-rest-registration | rest_registration/api/views/login.py | login | def login(request):
'''
Logs in the user via given login and password.
'''
serializer_class = registration_settings.LOGIN_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.get_authenticated_user()
if not user:
raise BadRequest('Login or password invalid.')
extra_data = perform_login(request, user)
return get_ok_response('Login successful', extra_data=extra_data) | python | def login(request):
'''
Logs in the user via given login and password.
'''
serializer_class = registration_settings.LOGIN_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.get_authenticated_user()
if not user:
raise BadRequest('Login or password invalid.')
extra_data = perform_login(request, user)
return get_ok_response('Login successful', extra_data=extra_data) | [
"def",
"login",
"(",
"request",
")",
":",
"serializer_class",
"=",
"registration_settings",
".",
"LOGIN_SERIALIZER_CLASS",
"serializer",
"=",
"serializer_class",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"user",
"=",
"serializer",
".",
"get_authenticated_user",
"(",
")",
"if",
"not",
"user",
":",
"raise",
"BadRequest",
"(",
"'Login or password invalid.'",
")",
"extra_data",
"=",
"perform_login",
"(",
"request",
",",
"user",
")",
"return",
"get_ok_response",
"(",
"'Login successful'",
",",
"extra_data",
"=",
"extra_data",
")"
] | Logs in the user via given login and password. | [
"Logs",
"in",
"the",
"user",
"via",
"given",
"login",
"and",
"password",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/login.py#L25-L39 |
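A hedged client-side sketch of calling this view. The `/accounts/login/` route is an assumption about how `rest_registration.api.urls` is mounted, and the `login`/`password` field names assume the default `LOGIN_SERIALIZER_CLASS`:

import requests

resp = requests.post(
    "http://localhost:8000/accounts/login/",  # assumed mount point
    data={"login": "alice", "password": "secret"},
)
# On success: HTTP 200 with a 'Login successful' detail (plus extra data
# such as a token, depending on settings).
print(resp.status_code, resp.json())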
246,961 | apragacz/django-rest-registration | rest_registration/api/views/login.py | logout | def logout(request):
'''
Logs out the user. Returns an error if the user is not
authenticated.
'''
user = request.user
serializer = LogoutSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
if should_authenticate_session():
auth.logout(request)
if should_retrieve_token() and data['revoke_token']:
try:
user.auth_token.delete()
except Token.DoesNotExist:
raise BadRequest('Cannot remove non-existent token')
return get_ok_response('Logout successful') | python | def logout(request):
'''
Logs out the user. Returns an error if the user is not
authenticated.
'''
user = request.user
serializer = LogoutSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
data = serializer.validated_data
if should_authenticate_session():
auth.logout(request)
if should_retrieve_token() and data['revoke_token']:
try:
user.auth_token.delete()
except Token.DoesNotExist:
raise BadRequest('Cannot remove non-existent token')
return get_ok_response('Logout successful') | [
"def",
"logout",
"(",
"request",
")",
":",
"user",
"=",
"request",
".",
"user",
"serializer",
"=",
"LogoutSerializer",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"data",
"=",
"serializer",
".",
"validated_data",
"if",
"should_authenticate_session",
"(",
")",
":",
"auth",
".",
"logout",
"(",
"request",
")",
"if",
"should_retrieve_token",
"(",
")",
"and",
"data",
"[",
"'revoke_token'",
"]",
":",
"try",
":",
"user",
".",
"auth_token",
".",
"delete",
"(",
")",
"except",
"Token",
".",
"DoesNotExist",
":",
"raise",
"BadRequest",
"(",
"'Cannot remove non-existent token'",
")",
"return",
"get_ok_response",
"(",
"'Logout successful'",
")"
] | Logs out the user. Returns an error if the user is not
authenticated. | [
"Logs",
"out",
"the",
"user",
".",
"returns",
"an",
"error",
"if",
"the",
"user",
"is",
"not",
"authenticated",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/login.py#L49-L67 |
246,962 | apragacz/django-rest-registration | rest_registration/utils/users.py | get_object_or_404 | def get_object_or_404(queryset, *filter_args, **filter_kwargs):
"""
Same as Django's standard shortcut, but make sure to also raise 404
if the filter_kwargs don't match the required types.
This function was copied from rest_framework.generics because of issue #36.
"""
try:
return _get_object_or_404(queryset, *filter_args, **filter_kwargs)
except (TypeError, ValueError, ValidationError):
raise Http404 | python | def get_object_or_404(queryset, *filter_args, **filter_kwargs):
try:
return _get_object_or_404(queryset, *filter_args, **filter_kwargs)
except (TypeError, ValueError, ValidationError):
raise Http404 | [
"def",
"get_object_or_404",
"(",
"queryset",
",",
"*",
"filter_args",
",",
"*",
"*",
"filter_kwargs",
")",
":",
"try",
":",
"return",
"_get_object_or_404",
"(",
"queryset",
",",
"*",
"filter_args",
",",
"*",
"*",
"filter_kwargs",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
",",
"ValidationError",
")",
":",
"raise",
"Http404"
] | Same as Django's standard shortcut, but make sure to also raise 404
if the filter_kwargs don't match the required types.
This function was copied from rest_framework.generics because of issue #36. | [
"Same",
"as",
"Django",
"s",
"standard",
"shortcut",
"but",
"make",
"sure",
"to",
"also",
"raise",
"404",
"if",
"the",
"filter_kwargs",
"don",
"t",
"match",
"the",
"required",
"types",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/utils/users.py#L13-L23 |
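A short sketch of why the wrapper exists: a lookup value of the wrong type becomes `Http404` instead of bubbling up as a `ValueError`. Assumes a configured Django project:

from django.contrib.auth import get_user_model

User = get_user_model()

# With the plain shortcut, pk="not-an-int" can raise ValueError when the
# queryset is filtered; this wrapper turns that into Http404.
user = get_object_or_404(User.objects.all(), pk="not-an-int")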
246,963 | apragacz/django-rest-registration | rest_registration/api/views/profile.py | profile | def profile(request):
'''
Get or set user profile.
'''
serializer_class = registration_settings.PROFILE_SERIALIZER_CLASS
if request.method in ['POST', 'PUT', 'PATCH']:
partial = request.method == 'PATCH'
serializer = serializer_class(
instance=request.user,
data=request.data,
partial=partial,
)
serializer.is_valid(raise_exception=True)
serializer.save()
else: # request.method == 'GET':
serializer = serializer_class(instance=request.user)
return Response(serializer.data) | python | def profile(request):
'''
Get or set user profile.
'''
serializer_class = registration_settings.PROFILE_SERIALIZER_CLASS
if request.method in ['POST', 'PUT', 'PATCH']:
partial = request.method == 'PATCH'
serializer = serializer_class(
instance=request.user,
data=request.data,
partial=partial,
)
serializer.is_valid(raise_exception=True)
serializer.save()
else: # request.method == 'GET':
serializer = serializer_class(instance=request.user)
return Response(serializer.data) | [
"def",
"profile",
"(",
"request",
")",
":",
"serializer_class",
"=",
"registration_settings",
".",
"PROFILE_SERIALIZER_CLASS",
"if",
"request",
".",
"method",
"in",
"[",
"'POST'",
",",
"'PUT'",
",",
"'PATCH'",
"]",
":",
"partial",
"=",
"request",
".",
"method",
"==",
"'PATCH'",
"serializer",
"=",
"serializer_class",
"(",
"instance",
"=",
"request",
".",
"user",
",",
"data",
"=",
"request",
".",
"data",
",",
"partial",
"=",
"partial",
",",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"serializer",
".",
"save",
"(",
")",
"else",
":",
"# request.method == 'GET':",
"serializer",
"=",
"serializer_class",
"(",
"instance",
"=",
"request",
".",
"user",
")",
"return",
"Response",
"(",
"serializer",
".",
"data",
")"
] | Get or set user profile. | [
"Get",
"or",
"set",
"user",
"profile",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/profile.py#L13-L30 |
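A hedged sketch exercising both branches of the view (GET and partial PATCH) with DRF's test client, inside a configured Django test case; the route is an assumed mount point and `existing_user` stands for any saved user instance:

from rest_framework.test import APIClient

client = APIClient()
client.force_authenticate(user=existing_user)  # existing_user: a saved User

print(client.get("/accounts/profile/").json())       # read the profile
print(client.patch("/accounts/profile/",             # partial update
                   {"first_name": "Alice"}).json())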
246,964 | apragacz/django-rest-registration | rest_registration/api/views/register.py | register | def register(request):
'''
Register new user.
'''
serializer_class = registration_settings.REGISTER_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
kwargs = {}
if registration_settings.REGISTER_VERIFICATION_ENABLED:
verification_flag_field = get_user_setting('VERIFICATION_FLAG_FIELD')
kwargs[verification_flag_field] = False
email_field = get_user_setting('EMAIL_FIELD')
if (email_field not in serializer.validated_data
or not serializer.validated_data[email_field]):
raise BadRequest("User without email cannot be verified")
user = serializer.save(**kwargs)
output_serializer_class = registration_settings.REGISTER_OUTPUT_SERIALIZER_CLASS # noqa: E501
output_serializer = output_serializer_class(instance=user)
user_data = output_serializer.data
if registration_settings.REGISTER_VERIFICATION_ENABLED:
signer = RegisterSigner({
'user_id': user.pk,
}, request=request)
template_config = (
registration_settings.REGISTER_VERIFICATION_EMAIL_TEMPLATES)
send_verification_notification(user, signer, template_config)
return Response(user_data, status=status.HTTP_201_CREATED) | python | def register(request):
'''
Register new user.
'''
serializer_class = registration_settings.REGISTER_SERIALIZER_CLASS
serializer = serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
kwargs = {}
if registration_settings.REGISTER_VERIFICATION_ENABLED:
verification_flag_field = get_user_setting('VERIFICATION_FLAG_FIELD')
kwargs[verification_flag_field] = False
email_field = get_user_setting('EMAIL_FIELD')
if (email_field not in serializer.validated_data
or not serializer.validated_data[email_field]):
raise BadRequest("User without email cannot be verified")
user = serializer.save(**kwargs)
output_serializer_class = registration_settings.REGISTER_OUTPUT_SERIALIZER_CLASS # noqa: E501
output_serializer = output_serializer_class(instance=user)
user_data = output_serializer.data
if registration_settings.REGISTER_VERIFICATION_ENABLED:
signer = RegisterSigner({
'user_id': user.pk,
}, request=request)
template_config = (
registration_settings.REGISTER_VERIFICATION_EMAIL_TEMPLATES)
send_verification_notification(user, signer, template_config)
return Response(user_data, status=status.HTTP_201_CREATED) | [
"def",
"register",
"(",
"request",
")",
":",
"serializer_class",
"=",
"registration_settings",
".",
"REGISTER_SERIALIZER_CLASS",
"serializer",
"=",
"serializer_class",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"kwargs",
"=",
"{",
"}",
"if",
"registration_settings",
".",
"REGISTER_VERIFICATION_ENABLED",
":",
"verification_flag_field",
"=",
"get_user_setting",
"(",
"'VERIFICATION_FLAG_FIELD'",
")",
"kwargs",
"[",
"verification_flag_field",
"]",
"=",
"False",
"email_field",
"=",
"get_user_setting",
"(",
"'EMAIL_FIELD'",
")",
"if",
"(",
"email_field",
"not",
"in",
"serializer",
".",
"validated_data",
"or",
"not",
"serializer",
".",
"validated_data",
"[",
"email_field",
"]",
")",
":",
"raise",
"BadRequest",
"(",
"\"User without email cannot be verified\"",
")",
"user",
"=",
"serializer",
".",
"save",
"(",
"*",
"*",
"kwargs",
")",
"output_serializer_class",
"=",
"registration_settings",
".",
"REGISTER_OUTPUT_SERIALIZER_CLASS",
"# noqa: E501",
"output_serializer",
"=",
"output_serializer_class",
"(",
"instance",
"=",
"user",
")",
"user_data",
"=",
"output_serializer",
".",
"data",
"if",
"registration_settings",
".",
"REGISTER_VERIFICATION_ENABLED",
":",
"signer",
"=",
"RegisterSigner",
"(",
"{",
"'user_id'",
":",
"user",
".",
"pk",
",",
"}",
",",
"request",
"=",
"request",
")",
"template_config",
"=",
"(",
"registration_settings",
".",
"REGISTER_VERIFICATION_EMAIL_TEMPLATES",
")",
"send_verification_notification",
"(",
"user",
",",
"signer",
",",
"template_config",
")",
"return",
"Response",
"(",
"user_data",
",",
"status",
"=",
"status",
".",
"HTTP_201_CREATED",
")"
] | Register new user. | [
"Register",
"new",
"user",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/register.py#L54-L86 |
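A hedged client-side sketch of the registration call. The route and the field names are assumptions; `password_confirm` is only present when the default serializer's password-confirmation option is enabled:

import requests

payload = {
    "username": "alice",
    "email": "alice@example.com",       # required when verification is on
    "password": "s3cret-pass",
    "password_confirm": "s3cret-pass",  # assumed serializer field
}
resp = requests.post("http://localhost:8000/accounts/register/", data=payload)
print(resp.status_code)  # 201 on success; a verification email goes out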
246,965 | apragacz/django-rest-registration | rest_registration/api/views/register.py | verify_registration | def verify_registration(request):
"""
Verify registration via signature.
"""
user = process_verify_registration_data(request.data)
extra_data = None
if registration_settings.REGISTER_VERIFICATION_AUTO_LOGIN:
extra_data = perform_login(request, user)
return get_ok_response('User verified successfully', extra_data=extra_data) | python | def verify_registration(request):
user = process_verify_registration_data(request.data)
extra_data = None
if registration_settings.REGISTER_VERIFICATION_AUTO_LOGIN:
extra_data = perform_login(request, user)
return get_ok_response('User verified successfully', extra_data=extra_data) | [
"def",
"verify_registration",
"(",
"request",
")",
":",
"user",
"=",
"process_verify_registration_data",
"(",
"request",
".",
"data",
")",
"extra_data",
"=",
"None",
"if",
"registration_settings",
".",
"REGISTER_VERIFICATION_AUTO_LOGIN",
":",
"extra_data",
"=",
"perform_login",
"(",
"request",
",",
"user",
")",
"return",
"get_ok_response",
"(",
"'User verified successfully'",
",",
"extra_data",
"=",
"extra_data",
")"
] | Verify registration via signature. | [
"Verify",
"registration",
"via",
"signature",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/register.py#L98-L106 |
246,966 | apragacz/django-rest-registration | setup.py | get_requirements | def get_requirements(requirements_filepath):
'''
Return the list of this package's requirements via a local filepath.
'''
requirements = []
with open(os.path.join(ROOT_DIR, requirements_filepath), 'rt') as f:
for line in f:
if line.startswith('#'):
continue
line = line.rstrip()
if not line:
continue
requirements.append(line)
return requirements | python | def get_requirements(requirements_filepath):
'''
Return the list of this package's requirements via a local filepath.
'''
requirements = []
with open(os.path.join(ROOT_DIR, requirements_filepath), 'rt') as f:
for line in f:
if line.startswith('#'):
continue
line = line.rstrip()
if not line:
continue
requirements.append(line)
return requirements | [
"def",
"get_requirements",
"(",
"requirements_filepath",
")",
":",
"requirements",
"=",
"[",
"]",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"ROOT_DIR",
",",
"requirements_filepath",
")",
",",
"'rt'",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"if",
"line",
".",
"startswith",
"(",
"'#'",
")",
":",
"continue",
"line",
"=",
"line",
".",
"rstrip",
"(",
")",
"if",
"not",
"line",
":",
"continue",
"requirements",
".",
"append",
"(",
"line",
")",
"return",
"requirements"
] | Return the list of this package's requirements via a local filepath. | [
"Return",
"list",
"of",
"this",
"package",
"requirements",
"via",
"local",
"filepath",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/setup.py#L15-L28 |
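The parsing rules (skip `#` comment lines, drop blank lines, strip trailing whitespace) are easy to verify in isolation; a standalone, runnable sketch that replicates the loop above on a throwaway file:

import os
import tempfile

root = tempfile.mkdtemp()
path = os.path.join(root, "requirements.txt")
with open(path, "wt") as f:
    f.write("# build deps\nDjango>=1.11\n\ndjangorestframework\n")

requirements = []
with open(path, "rt") as f:
    for line in f:
        if line.startswith("#"):
            continue
        line = line.rstrip()
        if not line:
            continue
        requirements.append(line)

print(requirements)  # ['Django>=1.11', 'djangorestframework']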
246,967 | apragacz/django-rest-registration | rest_registration/api/views/reset_password.py | send_reset_password_link | def send_reset_password_link(request):
'''
Send email with reset password link.
'''
if not registration_settings.RESET_PASSWORD_VERIFICATION_ENABLED:
raise Http404()
serializer = SendResetPasswordLinkSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
login = serializer.validated_data['login']
user = None
for login_field in get_login_fields():
user = get_user_by_lookup_dict(
{login_field: login}, default=None, require_verified=False)
if user:
break
if not user:
raise UserNotFound()
signer = ResetPasswordSigner({
'user_id': user.pk,
}, request=request)
template_config = (
registration_settings.RESET_PASSWORD_VERIFICATION_EMAIL_TEMPLATES)
send_verification_notification(user, signer, template_config)
return get_ok_response('Reset link sent') | python | def send_reset_password_link(request):
'''
Send email with reset password link.
'''
if not registration_settings.RESET_PASSWORD_VERIFICATION_ENABLED:
raise Http404()
serializer = SendResetPasswordLinkSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
login = serializer.validated_data['login']
user = None
for login_field in get_login_fields():
user = get_user_by_lookup_dict(
{login_field: login}, default=None, require_verified=False)
if user:
break
if not user:
raise UserNotFound()
signer = ResetPasswordSigner({
'user_id': user.pk,
}, request=request)
template_config = (
registration_settings.RESET_PASSWORD_VERIFICATION_EMAIL_TEMPLATES)
send_verification_notification(user, signer, template_config)
return get_ok_response('Reset link sent') | [
"def",
"send_reset_password_link",
"(",
"request",
")",
":",
"if",
"not",
"registration_settings",
".",
"RESET_PASSWORD_VERIFICATION_ENABLED",
":",
"raise",
"Http404",
"(",
")",
"serializer",
"=",
"SendResetPasswordLinkSerializer",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"login",
"=",
"serializer",
".",
"validated_data",
"[",
"'login'",
"]",
"user",
"=",
"None",
"for",
"login_field",
"in",
"get_login_fields",
"(",
")",
":",
"user",
"=",
"get_user_by_lookup_dict",
"(",
"{",
"login_field",
":",
"login",
"}",
",",
"default",
"=",
"None",
",",
"require_verified",
"=",
"False",
")",
"if",
"user",
":",
"break",
"if",
"not",
"user",
":",
"raise",
"UserNotFound",
"(",
")",
"signer",
"=",
"ResetPasswordSigner",
"(",
"{",
"'user_id'",
":",
"user",
".",
"pk",
",",
"}",
",",
"request",
"=",
"request",
")",
"template_config",
"=",
"(",
"registration_settings",
".",
"RESET_PASSWORD_VERIFICATION_EMAIL_TEMPLATES",
")",
"send_verification_notification",
"(",
"user",
",",
"signer",
",",
"template_config",
")",
"return",
"get_ok_response",
"(",
"'Reset link sent'",
")"
] | Send email with reset password link. | [
"Send",
"email",
"with",
"reset",
"password",
"link",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/reset_password.py#L61-L89 |
246,968 | apragacz/django-rest-registration | rest_registration/api/views/register_email.py | register_email | def register_email(request):
'''
Register new email.
'''
user = request.user
serializer = RegisterEmailSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
email = serializer.validated_data['email']
template_config = (
registration_settings.REGISTER_EMAIL_VERIFICATION_EMAIL_TEMPLATES)
if registration_settings.REGISTER_EMAIL_VERIFICATION_ENABLED:
signer = RegisterEmailSigner({
'user_id': user.pk,
'email': email,
}, request=request)
send_verification_notification(
user, signer, template_config, email=email)
else:
email_field = get_user_setting('EMAIL_FIELD')
setattr(user, email_field, email)
user.save()
return get_ok_response('Register email link email sent') | python | def register_email(request):
'''
Register new email.
'''
user = request.user
serializer = RegisterEmailSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
email = serializer.validated_data['email']
template_config = (
registration_settings.REGISTER_EMAIL_VERIFICATION_EMAIL_TEMPLATES)
if registration_settings.REGISTER_EMAIL_VERIFICATION_ENABLED:
signer = RegisterEmailSigner({
'user_id': user.pk,
'email': email,
}, request=request)
send_verification_notification(
user, signer, template_config, email=email)
else:
email_field = get_user_setting('EMAIL_FIELD')
setattr(user, email_field, email)
user.save()
return get_ok_response('Register email link email sent') | [
"def",
"register_email",
"(",
"request",
")",
":",
"user",
"=",
"request",
".",
"user",
"serializer",
"=",
"RegisterEmailSerializer",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"email",
"=",
"serializer",
".",
"validated_data",
"[",
"'email'",
"]",
"template_config",
"=",
"(",
"registration_settings",
".",
"REGISTER_EMAIL_VERIFICATION_EMAIL_TEMPLATES",
")",
"if",
"registration_settings",
".",
"REGISTER_EMAIL_VERIFICATION_ENABLED",
":",
"signer",
"=",
"RegisterEmailSigner",
"(",
"{",
"'user_id'",
":",
"user",
".",
"pk",
",",
"'email'",
":",
"email",
",",
"}",
",",
"request",
"=",
"request",
")",
"send_verification_notification",
"(",
"user",
",",
"signer",
",",
"template_config",
",",
"email",
"=",
"email",
")",
"else",
":",
"email_field",
"=",
"get_user_setting",
"(",
"'EMAIL_FIELD'",
")",
"setattr",
"(",
"user",
",",
"email_field",
",",
"email",
")",
"user",
".",
"save",
"(",
")",
"return",
"get_ok_response",
"(",
"'Register email link email sent'",
")"
] | Register new email. | [
"Register",
"new",
"email",
"."
] | 7373571264dd567c2a73a97ff4c45b64f113605b | https://github.com/apragacz/django-rest-registration/blob/7373571264dd567c2a73a97ff4c45b64f113605b/rest_registration/api/views/register_email.py#L33-L58 |
246,969 | nschloe/matplotlib2tikz | matplotlib2tikz/axes.py | _is_colorbar_heuristic | def _is_colorbar_heuristic(obj):
"""Find out if the object is in fact a color bar.
"""
# TODO come up with something more accurate here
# Might help:
# TODO Are the colorbars exactly the l.collections.PolyCollection's?
try:
aspect = float(obj.get_aspect())
except ValueError:
# e.g., aspect == 'equal'
return False
# Assume that something is a colorbar if and only if the ratio is above 5.0
# and there are no ticks on the corresponding axis. This isn't always true,
# though: The ratio of a colorbar can be freely adjusted by the aspect
# keyword, e.g.,
#
# plt.colorbar(im, aspect=5)
#
limit_ratio = 5.0
return (aspect >= limit_ratio and len(obj.get_xticks()) == 0) or (
aspect <= 1.0 / limit_ratio and len(obj.get_yticks()) == 0
) | python | def _is_colorbar_heuristic(obj):
# TODO come up with something more accurate here
# Might help:
# TODO Are the colorbars exactly the l.collections.PolyCollection's?
try:
aspect = float(obj.get_aspect())
except ValueError:
# e.g., aspect == 'equal'
return False
# Assume that something is a colorbar if and only if the ratio is above 5.0
# and there are no ticks on the corresponding axis. This isn't always true,
# though: The ratio of a colorbar can be freely adjusted by the aspect
# keyword, e.g.,
#
# plt.colorbar(im, aspect=5)
#
limit_ratio = 5.0
return (aspect >= limit_ratio and len(obj.get_xticks()) == 0) or (
aspect <= 1.0 / limit_ratio and len(obj.get_yticks()) == 0
) | [
"def",
"_is_colorbar_heuristic",
"(",
"obj",
")",
":",
"# TODO come up with something more accurate here",
"# Might help:",
"# TODO Are the colorbars exactly the l.collections.PolyCollection's?",
"try",
":",
"aspect",
"=",
"float",
"(",
"obj",
".",
"get_aspect",
"(",
")",
")",
"except",
"ValueError",
":",
"# e.g., aspect == 'equal'",
"return",
"False",
"# Assume that something is a colorbar if and only if the ratio is above 5.0",
"# and there are no ticks on the corresponding axis. This isn't always true,",
"# though: The ratio of a color can be freely adjusted by the aspect",
"# keyword, e.g.,",
"#",
"# plt.colorbar(im, aspect=5)",
"#",
"limit_ratio",
"=",
"5.0",
"return",
"(",
"aspect",
">=",
"limit_ratio",
"and",
"len",
"(",
"obj",
".",
"get_xticks",
"(",
")",
")",
"==",
"0",
")",
"or",
"(",
"aspect",
"<=",
"1.0",
"/",
"limit_ratio",
"and",
"len",
"(",
"obj",
".",
"get_yticks",
"(",
")",
")",
"==",
"0",
")"
] | Find out if the object is in fact a color bar. | [
"Find",
"out",
"if",
"the",
"object",
"is",
"in",
"fact",
"a",
"color",
"bar",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/axes.py#L582-L605 |
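A quick sanity check of the heuristic. The expected outputs are assumptions based on matplotlib's default vertical colorbar (aspect 20, y ticks only):

import matplotlib
matplotlib.use("Agg")  # headless backend
import matplotlib.pyplot as plt
import numpy as np

fig, ax = plt.subplots()
im = ax.imshow(np.random.rand(4, 4))
cbar = fig.colorbar(im)  # tall axes, no x ticks

print(_is_colorbar_heuristic(cbar.ax))  # True expected: aspect 20 >= 5.0
print(_is_colorbar_heuristic(ax))       # False: image axes aspect is ~1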
246,970 | nschloe/matplotlib2tikz | matplotlib2tikz/axes.py | _mpl_cmap2pgf_cmap | def _mpl_cmap2pgf_cmap(cmap, data):
"""Converts a color map as given in matplotlib to a color map as
represented in PGFPlots.
"""
if isinstance(cmap, mpl.colors.LinearSegmentedColormap):
return _handle_linear_segmented_color_map(cmap, data)
assert isinstance(
cmap, mpl.colors.ListedColormap
), "Only LinearSegmentedColormap and ListedColormap are supported"
return _handle_listed_color_map(cmap, data) | python | def _mpl_cmap2pgf_cmap(cmap, data):
if isinstance(cmap, mpl.colors.LinearSegmentedColormap):
return _handle_linear_segmented_color_map(cmap, data)
assert isinstance(
cmap, mpl.colors.ListedColormap
), "Only LinearSegmentedColormap and ListedColormap are supported"
return _handle_listed_color_map(cmap, data) | [
"def",
"_mpl_cmap2pgf_cmap",
"(",
"cmap",
",",
"data",
")",
":",
"if",
"isinstance",
"(",
"cmap",
",",
"mpl",
".",
"colors",
".",
"LinearSegmentedColormap",
")",
":",
"return",
"_handle_linear_segmented_color_map",
"(",
"cmap",
",",
"data",
")",
"assert",
"isinstance",
"(",
"cmap",
",",
"mpl",
".",
"colors",
".",
"ListedColormap",
")",
",",
"\"Only LinearSegmentedColormap and ListedColormap are supported\"",
"return",
"_handle_listed_color_map",
"(",
"cmap",
",",
"data",
")"
] | Converts a color map as given in matplotlib to a color map as
represented in PGFPlots. | [
"Converts",
"a",
"color",
"map",
"as",
"given",
"in",
"matplotlib",
"to",
"a",
"color",
"map",
"as",
"represented",
"in",
"PGFPlots",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/axes.py#L608-L618 |
246,971 | nschloe/matplotlib2tikz | matplotlib2tikz/axes.py | _scale_to_int | def _scale_to_int(X, max_val=None):
"""
Scales the array X such that it contains only integers.
"""
if max_val is None:
X = X / _gcd_array(X)
else:
X = X / max(1 / max_val, _gcd_array(X))
return [int(entry) for entry in X] | python | def _scale_to_int(X, max_val=None):
if max_val is None:
X = X / _gcd_array(X)
else:
X = X / max(1 / max_val, _gcd_array(X))
return [int(entry) for entry in X] | [
"def",
"_scale_to_int",
"(",
"X",
",",
"max_val",
"=",
"None",
")",
":",
"if",
"max_val",
"is",
"None",
":",
"X",
"=",
"X",
"/",
"_gcd_array",
"(",
"X",
")",
"else",
":",
"X",
"=",
"X",
"/",
"max",
"(",
"1",
"/",
"max_val",
",",
"_gcd_array",
"(",
"X",
")",
")",
"return",
"[",
"int",
"(",
"entry",
")",
"for",
"entry",
"in",
"X",
"]"
] | Scales the array X such that it contains only integers. | [
"Scales",
"the",
"array",
"X",
"such",
"that",
"it",
"contains",
"only",
"integers",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/axes.py#L771-L780 |
246,972 | nschloe/matplotlib2tikz | matplotlib2tikz/axes.py | _gcd_array | def _gcd_array(X):
"""
Return the largest real value h such that all elements in X are integer
multiples of h.
"""
greatest_common_divisor = 0.0
for x in X:
greatest_common_divisor = _gcd(greatest_common_divisor, x)
return greatest_common_divisor | python | def _gcd_array(X):
greatest_common_divisor = 0.0
for x in X:
greatest_common_divisor = _gcd(greatest_common_divisor, x)
return greatest_common_divisor | [
"def",
"_gcd_array",
"(",
"X",
")",
":",
"greatest_common_divisor",
"=",
"0.0",
"for",
"x",
"in",
"X",
":",
"greatest_common_divisor",
"=",
"_gcd",
"(",
"greatest_common_divisor",
",",
"x",
")",
"return",
"greatest_common_divisor"
] | Return the largest real value h such that all elements in X are integer
multiples of h. | [
"Return",
"the",
"largest",
"real",
"value",
"h",
"such",
"that",
"all",
"elements",
"in",
"x",
"are",
"integer",
"multiples",
"of",
"h",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/axes.py#L783-L792 |
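`_gcd` itself is defined elsewhere in the module; the float Euclid variant below is an assumption about its behavior, used only to trace the reduction and the follow-up scaling step:

def _gcd(a, b, eps=1.0e-12):
    # Euclid's algorithm on floats (assumed shape of the module's helper).
    while b > eps:
        a, b = b, a % b
    return a

X = [0.25, 0.75, 1.25]
h = 0.0
for x in X:
    h = _gcd(h, x)

print(h)                        # 0.25
print([int(x / h) for x in X])  # [1, 3, 5] -- what _scale_to_int produces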
246,973 | nschloe/matplotlib2tikz | matplotlib2tikz/files.py | new_filename | def new_filename(data, file_kind, ext):
"""Returns an available filename.
:param file_kind: Name under which numbering is recorded, such as 'img' or
'table'.
:type file_kind: str
:param ext: Filename extension.
:type ext: str
:returns: (filename, rel_filepath) where filename is a path in the
filesystem and rel_filepath is the path to be used in the tex
code.
"""
nb_key = file_kind + "number"
if nb_key not in data.keys():
data[nb_key] = -1
if not data["override externals"]:
# Make sure not to overwrite anything.
file_exists = True
while file_exists:
data[nb_key] = data[nb_key] + 1
filename, name = _gen_filename(data, nb_key, ext)
file_exists = os.path.isfile(filename)
else:
data[nb_key] = data[nb_key] + 1
filename, name = _gen_filename(data, nb_key, ext)
if data["rel data path"]:
rel_filepath = posixpath.join(data["rel data path"], name)
else:
rel_filepath = name
return filename, rel_filepath | python | def new_filename(data, file_kind, ext):
nb_key = file_kind + "number"
if nb_key not in data.keys():
data[nb_key] = -1
if not data["override externals"]:
# Make sure not to overwrite anything.
file_exists = True
while file_exists:
data[nb_key] = data[nb_key] + 1
filename, name = _gen_filename(data, nb_key, ext)
file_exists = os.path.isfile(filename)
else:
data[nb_key] = data[nb_key] + 1
filename, name = _gen_filename(data, nb_key, ext)
if data["rel data path"]:
rel_filepath = posixpath.join(data["rel data path"], name)
else:
rel_filepath = name
return filename, rel_filepath | [
"def",
"new_filename",
"(",
"data",
",",
"file_kind",
",",
"ext",
")",
":",
"nb_key",
"=",
"file_kind",
"+",
"\"number\"",
"if",
"nb_key",
"not",
"in",
"data",
".",
"keys",
"(",
")",
":",
"data",
"[",
"nb_key",
"]",
"=",
"-",
"1",
"if",
"not",
"data",
"[",
"\"override externals\"",
"]",
":",
"# Make sure not to overwrite anything.",
"file_exists",
"=",
"True",
"while",
"file_exists",
":",
"data",
"[",
"nb_key",
"]",
"=",
"data",
"[",
"nb_key",
"]",
"+",
"1",
"filename",
",",
"name",
"=",
"_gen_filename",
"(",
"data",
",",
"nb_key",
",",
"ext",
")",
"file_exists",
"=",
"os",
".",
"path",
".",
"isfile",
"(",
"filename",
")",
"else",
":",
"data",
"[",
"nb_key",
"]",
"=",
"data",
"[",
"nb_key",
"]",
"+",
"1",
"filename",
",",
"name",
"=",
"_gen_filename",
"(",
"data",
",",
"nb_key",
",",
"ext",
")",
"if",
"data",
"[",
"\"rel data path\"",
"]",
":",
"rel_filepath",
"=",
"posixpath",
".",
"join",
"(",
"data",
"[",
"\"rel data path\"",
"]",
",",
"name",
")",
"else",
":",
"rel_filepath",
"=",
"name",
"return",
"filename",
",",
"rel_filepath"
] | Returns an available filename.
:param file_kind: Name under which numbering is recorded, such as 'img' or
'table'.
:type file_kind: str
:param ext: Filename extension.
:type ext: str
:returns: (filename, rel_filepath) where filename is a path in the
filesystem and rel_filepath is the path to be used in the tex
code. | [
"Returns",
"an",
"available",
"filename",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/files.py#L12-L47 |
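The non-overwriting branch boils down to probing the filesystem for the next free index. A standalone sketch of that pattern (deliberately not using the module's private `_gen_filename`, whose exact naming scheme is not shown here):

import os

def next_free_name(dirname, stem, ext):
    # Increment the counter until the candidate file does not exist yet.
    n = 0
    while os.path.isfile(os.path.join(dirname, "{}-{:03d}{}".format(stem, n, ext))):
        n += 1
    return "{}-{:03d}{}".format(stem, n, ext)

# With myfig-000.png already on disk, this returns 'myfig-001.png'.
print(next_free_name(".", "myfig", ".png"))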
246,974 | nschloe/matplotlib2tikz | matplotlib2tikz/path.py | mpl_linestyle2pgfplots_linestyle | def mpl_linestyle2pgfplots_linestyle(line_style, line=None):
"""Translates a line style of matplotlib to the corresponding style
in PGFPlots.
"""
# linestyle is a string or dash tuple. Legal string values are
# solid|dashed|dashdot|dotted. The dash tuple is (offset, onoffseq) where onoffseq
# is an even length tuple of on and off ink in points.
#
# solid: [(None, None), (None, None), ..., (None, None)]
# dashed: (0, (6.0, 6.0))
# dotted: (0, (1.0, 3.0))
# dashdot: (0, (3.0, 5.0, 1.0, 5.0))
if isinstance(line_style, tuple):
if line_style[0] is None:
return None
if len(line_style[1]) == 2:
return "dash pattern=on {}pt off {}pt".format(*line_style[1])
assert len(line_style[1]) == 4
return "dash pattern=on {}pt off {}pt on {}pt off {}pt".format(*line_style[1])
if isinstance(line, mpl.lines.Line2D) and line.is_dashed():
# see matplotlib.lines.Line2D.set_dashes
# get defaults
default_dashOffset, default_dashSeq = mpl.lines._get_dash_pattern(line_style)
# get dash format of line under test
dashSeq = line._us_dashSeq
dashOffset = line._us_dashOffset
lst = list()
if dashSeq != default_dashSeq:
# generate own dash sequence
format_string = " ".join(len(dashSeq) // 2 * ["on {}pt off {}pt"])
lst.append("dash pattern=" + format_string.format(*dashSeq))
if dashOffset != default_dashOffset:
lst.append("dash phase={}pt".format(dashOffset))
if len(lst) > 0:
return ", ".join(lst)
return {
"": None,
"None": None,
"none": None, # happens when using plt.boxplot()
"-": "solid",
"solid": "solid",
":": "dotted",
"--": "dashed",
"-.": "dash pattern=on 1pt off 3pt on 3pt off 3pt",
}[line_style] | python | def mpl_linestyle2pgfplots_linestyle(line_style, line=None):
# linestyle is a string or dash tuple. Legal string values are
# solid|dashed|dashdot|dotted. The dash tuple is (offset, onoffseq) where onoffseq
# is an even length tuple of on and off ink in points.
#
# solid: [(None, None), (None, None), ..., (None, None)]
# dashed: (0, (6.0, 6.0))
# dotted: (0, (1.0, 3.0))
# dashdot: (0, (3.0, 5.0, 1.0, 5.0))
if isinstance(line_style, tuple):
if line_style[0] is None:
return None
if len(line_style[1]) == 2:
return "dash pattern=on {}pt off {}pt".format(*line_style[1])
assert len(line_style[1]) == 4
return "dash pattern=on {}pt off {}pt on {}pt off {}pt".format(*line_style[1])
if isinstance(line, mpl.lines.Line2D) and line.is_dashed():
# see matplotlib.lines.Line2D.set_dashes
# get defaults
default_dashOffset, default_dashSeq = mpl.lines._get_dash_pattern(line_style)
# get dash format of line under test
dashSeq = line._us_dashSeq
dashOffset = line._us_dashOffset
lst = list()
if dashSeq != default_dashSeq:
# generate own dash sequence
format_string = " ".join(len(dashSeq) // 2 * ["on {}pt off {}pt"])
lst.append("dash pattern=" + format_string.format(*dashSeq))
if dashOffset != default_dashOffset:
lst.append("dash phase={}pt".format(dashOffset))
if len(lst) > 0:
return ", ".join(lst)
return {
"": None,
"None": None,
"none": None, # happens when using plt.boxplot()
"-": "solid",
"solid": "solid",
":": "dotted",
"--": "dashed",
"-.": "dash pattern=on 1pt off 3pt on 3pt off 3pt",
}[line_style] | [
"def",
"mpl_linestyle2pgfplots_linestyle",
"(",
"line_style",
",",
"line",
"=",
"None",
")",
":",
"# linestyle is a string or dash tuple. Legal string values are",
"# solid|dashed|dashdot|dotted. The dash tuple is (offset, onoffseq) where onoffseq",
"# is an even length tuple of on and off ink in points.",
"#",
"# solid: [(None, None), (None, None), ..., (None, None)]",
"# dashed: (0, (6.0, 6.0))",
"# dotted: (0, (1.0, 3.0))",
"# dashdot: (0, (3.0, 5.0, 1.0, 5.0))",
"if",
"isinstance",
"(",
"line_style",
",",
"tuple",
")",
":",
"if",
"line_style",
"[",
"0",
"]",
"is",
"None",
":",
"return",
"None",
"if",
"len",
"(",
"line_style",
"[",
"1",
"]",
")",
"==",
"2",
":",
"return",
"\"dash pattern=on {}pt off {}pt\"",
".",
"format",
"(",
"*",
"line_style",
"[",
"1",
"]",
")",
"assert",
"len",
"(",
"line_style",
"[",
"1",
"]",
")",
"==",
"4",
"return",
"\"dash pattern=on {}pt off {}pt on {}pt off {}pt\"",
".",
"format",
"(",
"*",
"line_style",
"[",
"1",
"]",
")",
"if",
"isinstance",
"(",
"line",
",",
"mpl",
".",
"lines",
".",
"Line2D",
")",
"and",
"line",
".",
"is_dashed",
"(",
")",
":",
"# see matplotlib.lines.Line2D.set_dashes",
"# get defaults",
"default_dashOffset",
",",
"default_dashSeq",
"=",
"mpl",
".",
"lines",
".",
"_get_dash_pattern",
"(",
"line_style",
")",
"# get dash format of line under test",
"dashSeq",
"=",
"line",
".",
"_us_dashSeq",
"dashOffset",
"=",
"line",
".",
"_us_dashOffset",
"lst",
"=",
"list",
"(",
")",
"if",
"dashSeq",
"!=",
"default_dashSeq",
":",
"# generate own dash sequence",
"format_string",
"=",
"\" \"",
".",
"join",
"(",
"len",
"(",
"dashSeq",
")",
"//",
"2",
"*",
"[",
"\"on {}pt off {}pt\"",
"]",
")",
"lst",
".",
"append",
"(",
"\"dash pattern=\"",
"+",
"format_string",
".",
"format",
"(",
"*",
"dashSeq",
")",
")",
"if",
"dashOffset",
"!=",
"default_dashOffset",
":",
"lst",
".",
"append",
"(",
"\"dash phase={}pt\"",
".",
"format",
"(",
"dashOffset",
")",
")",
"if",
"len",
"(",
"lst",
")",
">",
"0",
":",
"return",
"\", \"",
".",
"join",
"(",
"lst",
")",
"return",
"{",
"\"\"",
":",
"None",
",",
"\"None\"",
":",
"None",
",",
"\"none\"",
":",
"None",
",",
"# happens when using plt.boxplot()",
"\"-\"",
":",
"\"solid\"",
",",
"\"solid\"",
":",
"\"solid\"",
",",
"\":\"",
":",
"\"dotted\"",
",",
"\"--\"",
":",
"\"dashed\"",
",",
"\"-.\"",
":",
"\"dash pattern=on 1pt off 3pt on 3pt off 3pt\"",
",",
"}",
"[",
"line_style",
"]"
] | Translates a line style of matplotlib to the corresponding style
in PGFPlots. | [
"Translates",
"a",
"line",
"style",
"of",
"matplotlib",
"to",
"the",
"corresponding",
"style",
"in",
"PGFPlots",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/path.py#L296-L349 |
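Sample translations, assuming the function is imported from the module above:

print(mpl_linestyle2pgfplots_linestyle("--"))    # 'dashed'
print(mpl_linestyle2pgfplots_linestyle(":"))     # 'dotted'
print(mpl_linestyle2pgfplots_linestyle("None"))  # None (nothing drawn)
print(mpl_linestyle2pgfplots_linestyle((0, (6.0, 6.0))))
# 'dash pattern=on 6.0pt off 6.0pt'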
246,975 | nschloe/matplotlib2tikz | matplotlib2tikz/quadmesh.py | draw_quadmesh | def draw_quadmesh(data, obj):
"""Returns the PGFPlots code for an graphics environment holding a
rendering of the object.
"""
content = []
# Generate file name for current object
filename, rel_filepath = files.new_filename(data, "img", ".png")
# Get the dpi for rendering and store the original dpi of the figure
dpi = data["dpi"]
fig_dpi = obj.figure.get_dpi()
obj.figure.set_dpi(dpi)
# Render the object and save as png file
from matplotlib.backends.backend_agg import RendererAgg
cbox = obj.get_clip_box()
width = int(round(cbox.extents[2]))
height = int(round(cbox.extents[3]))
ren = RendererAgg(width, height, dpi)
obj.draw(ren)
# Generate an image from the render buffer
image = Image.frombuffer(
"RGBA", ren.get_canvas_width_height(), ren.buffer_rgba(), "raw", "RGBA", 0, 1
)
# Crop the image to the actual content (removing the regions otherwise
# used for axes, etc.)
# 'image.crop' expects the crop box to specify the left, upper, right, and
# lower pixel. 'cbox.extents' gives the left, lower, right, and upper
# pixel.
box = (
int(round(cbox.extents[0])),
0,
int(round(cbox.extents[2])),
int(round(cbox.extents[3] - cbox.extents[1])),
)
cropped = image.crop(box)
cropped.save(filename)
# Restore the original dpi of the figure
obj.figure.set_dpi(fig_dpi)
# write the corresponding information to the TikZ file
extent = obj.axes.get_xlim() + obj.axes.get_ylim()
# Explicitly use \pgfimage as includegraphics command, as the default
# \includegraphics fails unexpectedly in some cases
ff = data["float format"]
content.append(
(
"\\addplot graphics [includegraphics cmd=\\pgfimage,"
"xmin=" + ff + ", xmax=" + ff + ", "
"ymin=" + ff + ", ymax=" + ff + "] {{{}}};\n"
).format(*(extent + (rel_filepath,)))
)
return data, content | python | def draw_quadmesh(data, obj):
content = []
# Generate file name for current object
filename, rel_filepath = files.new_filename(data, "img", ".png")
# Get the dpi for rendering and store the original dpi of the figure
dpi = data["dpi"]
fig_dpi = obj.figure.get_dpi()
obj.figure.set_dpi(dpi)
# Render the object and save as png file
from matplotlib.backends.backend_agg import RendererAgg
cbox = obj.get_clip_box()
width = int(round(cbox.extents[2]))
height = int(round(cbox.extents[3]))
ren = RendererAgg(width, height, dpi)
obj.draw(ren)
# Generate an image from the render buffer
image = Image.frombuffer(
"RGBA", ren.get_canvas_width_height(), ren.buffer_rgba(), "raw", "RGBA", 0, 1
)
# Crop the image to the actual content (removing the regions otherwise
# used for axes, etc.)
# 'image.crop' expects the crop box to specify the left, upper, right, and
# lower pixel. 'cbox.extents' gives the left, lower, right, and upper
# pixel.
box = (
int(round(cbox.extents[0])),
0,
int(round(cbox.extents[2])),
int(round(cbox.extents[3] - cbox.extents[1])),
)
cropped = image.crop(box)
cropped.save(filename)
# Restore the original dpi of the figure
obj.figure.set_dpi(fig_dpi)
# write the corresponding information to the TikZ file
extent = obj.axes.get_xlim() + obj.axes.get_ylim()
# Explicitly use \pgfimage as includegraphics command, as the default
# \includegraphics fails unexpectedly in some cases
ff = data["float format"]
content.append(
(
"\\addplot graphics [includegraphics cmd=\\pgfimage,"
"xmin=" + ff + ", xmax=" + ff + ", "
"ymin=" + ff + ", ymax=" + ff + "] {{{}}};\n"
).format(*(extent + (rel_filepath,)))
)
return data, content | [
"def",
"draw_quadmesh",
"(",
"data",
",",
"obj",
")",
":",
"content",
"=",
"[",
"]",
"# Generate file name for current object",
"filename",
",",
"rel_filepath",
"=",
"files",
".",
"new_filename",
"(",
"data",
",",
"\"img\"",
",",
"\".png\"",
")",
"# Get the dpi for rendering and store the original dpi of the figure",
"dpi",
"=",
"data",
"[",
"\"dpi\"",
"]",
"fig_dpi",
"=",
"obj",
".",
"figure",
".",
"get_dpi",
"(",
")",
"obj",
".",
"figure",
".",
"set_dpi",
"(",
"dpi",
")",
"# Render the object and save as png file",
"from",
"matplotlib",
".",
"backends",
".",
"backend_agg",
"import",
"RendererAgg",
"cbox",
"=",
"obj",
".",
"get_clip_box",
"(",
")",
"width",
"=",
"int",
"(",
"round",
"(",
"cbox",
".",
"extents",
"[",
"2",
"]",
")",
")",
"height",
"=",
"int",
"(",
"round",
"(",
"cbox",
".",
"extents",
"[",
"3",
"]",
")",
")",
"ren",
"=",
"RendererAgg",
"(",
"width",
",",
"height",
",",
"dpi",
")",
"obj",
".",
"draw",
"(",
"ren",
")",
"# Generate a image from the render buffer",
"image",
"=",
"Image",
".",
"frombuffer",
"(",
"\"RGBA\"",
",",
"ren",
".",
"get_canvas_width_height",
"(",
")",
",",
"ren",
".",
"buffer_rgba",
"(",
")",
",",
"\"raw\"",
",",
"\"RGBA\"",
",",
"0",
",",
"1",
")",
"# Crop the image to the actual content (removing the the regions otherwise",
"# used for axes, etc.)",
"# 'image.crop' expects the crop box to specify the left, upper, right, and",
"# lower pixel. 'cbox.extents' gives the left, lower, right, and upper",
"# pixel.",
"box",
"=",
"(",
"int",
"(",
"round",
"(",
"cbox",
".",
"extents",
"[",
"0",
"]",
")",
")",
",",
"0",
",",
"int",
"(",
"round",
"(",
"cbox",
".",
"extents",
"[",
"2",
"]",
")",
")",
",",
"int",
"(",
"round",
"(",
"cbox",
".",
"extents",
"[",
"3",
"]",
"-",
"cbox",
".",
"extents",
"[",
"1",
"]",
")",
")",
",",
")",
"cropped",
"=",
"image",
".",
"crop",
"(",
"box",
")",
"cropped",
".",
"save",
"(",
"filename",
")",
"# Restore the original dpi of the figure",
"obj",
".",
"figure",
".",
"set_dpi",
"(",
"fig_dpi",
")",
"# write the corresponding information to the TikZ file",
"extent",
"=",
"obj",
".",
"axes",
".",
"get_xlim",
"(",
")",
"+",
"obj",
".",
"axes",
".",
"get_ylim",
"(",
")",
"# Explicitly use \\pgfimage as includegrapics command, as the default",
"# \\includegraphics fails unexpectedly in some cases",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"content",
".",
"append",
"(",
"(",
"\"\\\\addplot graphics [includegraphics cmd=\\\\pgfimage,\"",
"\"xmin=\"",
"+",
"ff",
"+",
"\", xmax=\"",
"+",
"ff",
"+",
"\", \"",
"\"ymin=\"",
"+",
"ff",
"+",
"\", ymax=\"",
"+",
"ff",
"+",
"\"] {{{}}};\\n\"",
")",
".",
"format",
"(",
"*",
"(",
"extent",
"+",
"(",
"rel_filepath",
",",
")",
")",
")",
")",
"return",
"data",
",",
"content"
] | Returns the PGFPlots code for a graphics environment holding a
rendering of the object. | [
"Returns",
"the",
"PGFPlots",
"code",
"for",
"an",
"graphics",
"environment",
"holding",
"a",
"rendering",
"of",
"the",
"object",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/quadmesh.py#L8-L66 |
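For context, the artist this handles is what `pcolormesh` produces; a minimal, headless figure whose mesh would route through `draw_quadmesh` during conversion:

import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np

x, y = np.meshgrid(np.linspace(0.0, 1.0, 10), np.linspace(0.0, 1.0, 10))
z = np.sin(10.0 * x[:-1, :-1]) * np.cos(10.0 * y[:-1, :-1])
mesh = plt.pcolormesh(x, y, z)  # a matplotlib QuadMesh artist
print(type(mesh).__name__)      # 'QuadMesh'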
246,976 | nschloe/matplotlib2tikz | matplotlib2tikz/color.py | mpl_color2xcolor | def mpl_color2xcolor(data, matplotlib_color):
"""Translates a matplotlib color specification into a proper LaTeX xcolor.
"""
# Convert it to RGBA.
my_col = numpy.array(mpl.colors.ColorConverter().to_rgba(matplotlib_color))
# If the alpha channel is exactly 0, then the color is really 'none'
# regardless of the RGB channels.
if my_col[-1] == 0.0:
return data, "none", my_col
xcol = None
# RGB values (as taken from xcolor.dtx):
available_colors = {
# List white first such that for gray values, the combination
# white!<x>!black is preferred over, e.g., gray!<y>!black. Note that
# the order of the dictionary is respected from Python 3.6 on.
"white": numpy.array([1, 1, 1]),
"lightgray": numpy.array([0.75, 0.75, 0.75]),
"gray": numpy.array([0.5, 0.5, 0.5]),
"darkgray": numpy.array([0.25, 0.25, 0.25]),
"black": numpy.array([0, 0, 0]),
#
"red": numpy.array([1, 0, 0]),
"green": numpy.array([0, 1, 0]),
"blue": numpy.array([0, 0, 1]),
"brown": numpy.array([0.75, 0.5, 0.25]),
"lime": numpy.array([0.75, 1, 0]),
"orange": numpy.array([1, 0.5, 0]),
"pink": numpy.array([1, 0.75, 0.75]),
"purple": numpy.array([0.75, 0, 0.25]),
"teal": numpy.array([0, 0.5, 0.5]),
"violet": numpy.array([0.5, 0, 0.5]),
# The colors cyan, magenta, yellow, and olive are also
# predefined by xcolor, but their RGB approximation of the
# native CMYK values is not very good. Don't use them here.
}
available_colors.update(data["custom colors"])
# Check if it exactly matches any of the colors already available.
# This case is actually treated below (alpha==1), but that loop
# may pick up combinations with black before finding the exact
# match. Hence, first check all colors.
for name, rgb in available_colors.items():
if all(my_col[:3] == rgb):
xcol = name
return data, xcol, my_col
# Check if my_col is a multiple of a predefined color and 'black'.
for name, rgb in available_colors.items():
if name == "black":
continue
if rgb[0] != 0.0:
alpha = my_col[0] / rgb[0]
elif rgb[1] != 0.0:
alpha = my_col[1] / rgb[1]
else:
assert rgb[2] != 0.0
alpha = my_col[2] / rgb[2]
# The cases 0.0 (my_col == black) and 1.0 (my_col == rgb) are
# already accounted for by checking in available_colors above.
if all(my_col[:3] == alpha * rgb) and 0.0 < alpha < 1.0:
xcol = name + ("!{}!black".format(alpha * 100))
return data, xcol, my_col
# Lookup failed, add it to the custom list.
xcol = "color" + str(len(data["custom colors"]))
data["custom colors"][xcol] = my_col[:3]
return data, xcol, my_col | python | def mpl_color2xcolor(data, matplotlib_color):
# Convert it to RGBA.
my_col = numpy.array(mpl.colors.ColorConverter().to_rgba(matplotlib_color))
# If the alpha channel is exactly 0, then the color is really 'none'
# regardless of the RGB channels.
if my_col[-1] == 0.0:
return data, "none", my_col
xcol = None
# RGB values (as taken from xcolor.dtx):
available_colors = {
# List white first such that for gray values, the combination
# white!<x>!black is preferred over, e.g., gray!<y>!black. Note that
# the order of the dictionary is respected from Python 3.6 on.
"white": numpy.array([1, 1, 1]),
"lightgray": numpy.array([0.75, 0.75, 0.75]),
"gray": numpy.array([0.5, 0.5, 0.5]),
"darkgray": numpy.array([0.25, 0.25, 0.25]),
"black": numpy.array([0, 0, 0]),
#
"red": numpy.array([1, 0, 0]),
"green": numpy.array([0, 1, 0]),
"blue": numpy.array([0, 0, 1]),
"brown": numpy.array([0.75, 0.5, 0.25]),
"lime": numpy.array([0.75, 1, 0]),
"orange": numpy.array([1, 0.5, 0]),
"pink": numpy.array([1, 0.75, 0.75]),
"purple": numpy.array([0.75, 0, 0.25]),
"teal": numpy.array([0, 0.5, 0.5]),
"violet": numpy.array([0.5, 0, 0.5]),
# The colors cyan, magenta, yellow, and olive are also
# predefined by xcolor, but their RGB approximation of the
# native CMYK values is not very good. Don't use them here.
}
available_colors.update(data["custom colors"])
# Check if it exactly matches any of the colors already available.
# This case is actually treated below (alpha==1), but that loop
# may pick up combinations with black before finding the exact
# match. Hence, first check all colors.
for name, rgb in available_colors.items():
if all(my_col[:3] == rgb):
xcol = name
return data, xcol, my_col
# Check if my_col is a multiple of a predefined color and 'black'.
for name, rgb in available_colors.items():
if name == "black":
continue
if rgb[0] != 0.0:
alpha = my_col[0] / rgb[0]
elif rgb[1] != 0.0:
alpha = my_col[1] / rgb[1]
else:
assert rgb[2] != 0.0
alpha = my_col[2] / rgb[2]
# The cases 0.0 (my_col == black) and 1.0 (my_col == rgb) are
# already accounted for by checking in available_colors above.
if all(my_col[:3] == alpha * rgb) and 0.0 < alpha < 1.0:
xcol = name + ("!{}!black".format(alpha * 100))
return data, xcol, my_col
# Lookup failed, add it to the custom list.
xcol = "color" + str(len(data["custom colors"]))
data["custom colors"][xcol] = my_col[:3]
return data, xcol, my_col | [
"def",
"mpl_color2xcolor",
"(",
"data",
",",
"matplotlib_color",
")",
":",
"# Convert it to RGBA.",
"my_col",
"=",
"numpy",
".",
"array",
"(",
"mpl",
".",
"colors",
".",
"ColorConverter",
"(",
")",
".",
"to_rgba",
"(",
"matplotlib_color",
")",
")",
"# If the alpha channel is exactly 0, then the color is really 'none'",
"# regardless of the RGB channels.",
"if",
"my_col",
"[",
"-",
"1",
"]",
"==",
"0.0",
":",
"return",
"data",
",",
"\"none\"",
",",
"my_col",
"xcol",
"=",
"None",
"# RGB values (as taken from xcolor.dtx):",
"available_colors",
"=",
"{",
"# List white first such that for gray values, the combination",
"# white!<x>!black is preferred over, e.g., gray!<y>!black. Note that",
"# the order of the dictionary is respected from Python 3.6 on.",
"\"white\"",
":",
"numpy",
".",
"array",
"(",
"[",
"1",
",",
"1",
",",
"1",
"]",
")",
",",
"\"lightgray\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.75",
",",
"0.75",
",",
"0.75",
"]",
")",
",",
"\"gray\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
")",
",",
"\"darkgray\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.25",
",",
"0.25",
",",
"0.25",
"]",
")",
",",
"\"black\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0",
",",
"0",
",",
"0",
"]",
")",
",",
"#",
"\"red\"",
":",
"numpy",
".",
"array",
"(",
"[",
"1",
",",
"0",
",",
"0",
"]",
")",
",",
"\"green\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0",
",",
"1",
",",
"0",
"]",
")",
",",
"\"blue\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0",
",",
"0",
",",
"1",
"]",
")",
",",
"\"brown\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.75",
",",
"0.5",
",",
"0.25",
"]",
")",
",",
"\"lime\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.75",
",",
"1",
",",
"0",
"]",
")",
",",
"\"orange\"",
":",
"numpy",
".",
"array",
"(",
"[",
"1",
",",
"0.5",
",",
"0",
"]",
")",
",",
"\"pink\"",
":",
"numpy",
".",
"array",
"(",
"[",
"1",
",",
"0.75",
",",
"0.75",
"]",
")",
",",
"\"purple\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.75",
",",
"0",
",",
"0.25",
"]",
")",
",",
"\"teal\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0",
",",
"0.5",
",",
"0.5",
"]",
")",
",",
"\"violet\"",
":",
"numpy",
".",
"array",
"(",
"[",
"0.5",
",",
"0",
",",
"0.5",
"]",
")",
",",
"# The colors cyan, magenta, yellow, and olive are also",
"# predefined by xcolor, but their RGB approximation of the",
"# native CMYK values is not very good. Don't use them here.",
"}",
"available_colors",
".",
"update",
"(",
"data",
"[",
"\"custom colors\"",
"]",
")",
"# Check if it exactly matches any of the colors already available.",
"# This case is actually treated below (alpha==1), but that loop",
"# may pick up combinations with black before finding the exact",
"# match. Hence, first check all colors.",
"for",
"name",
",",
"rgb",
"in",
"available_colors",
".",
"items",
"(",
")",
":",
"if",
"all",
"(",
"my_col",
"[",
":",
"3",
"]",
"==",
"rgb",
")",
":",
"xcol",
"=",
"name",
"return",
"data",
",",
"xcol",
",",
"my_col",
"# Check if my_col is a multiple of a predefined color and 'black'.",
"for",
"name",
",",
"rgb",
"in",
"available_colors",
".",
"items",
"(",
")",
":",
"if",
"name",
"==",
"\"black\"",
":",
"continue",
"if",
"rgb",
"[",
"0",
"]",
"!=",
"0.0",
":",
"alpha",
"=",
"my_col",
"[",
"0",
"]",
"/",
"rgb",
"[",
"0",
"]",
"elif",
"rgb",
"[",
"1",
"]",
"!=",
"0.0",
":",
"alpha",
"=",
"my_col",
"[",
"1",
"]",
"/",
"rgb",
"[",
"1",
"]",
"else",
":",
"assert",
"rgb",
"[",
"2",
"]",
"!=",
"0.0",
"alpha",
"=",
"my_col",
"[",
"2",
"]",
"/",
"rgb",
"[",
"2",
"]",
"# The cases 0.0 (my_col == black) and 1.0 (my_col == rgb) are",
"# already accounted for by checking in available_colors above.",
"if",
"all",
"(",
"my_col",
"[",
":",
"3",
"]",
"==",
"alpha",
"*",
"rgb",
")",
"and",
"0.0",
"<",
"alpha",
"<",
"1.0",
":",
"xcol",
"=",
"name",
"+",
"(",
"\"!{}!black\"",
".",
"format",
"(",
"alpha",
"*",
"100",
")",
")",
"return",
"data",
",",
"xcol",
",",
"my_col",
"# Lookup failed, add it to the custom list.",
"xcol",
"=",
"\"color\"",
"+",
"str",
"(",
"len",
"(",
"data",
"[",
"\"custom colors\"",
"]",
")",
")",
"data",
"[",
"\"custom colors\"",
"]",
"[",
"xcol",
"]",
"=",
"my_col",
"[",
":",
"3",
"]",
"return",
"data",
",",
"xcol",
",",
"my_col"
] | Translates a matplotlib color specification into a proper LaTeX xcolor. | [
"Translates",
"a",
"matplotlib",
"color",
"specification",
"into",
"a",
"proper",
"LaTeX",
"xcolor",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/color.py#L9-L81 |
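A short trace of the lookup logic; `data` only needs the 'custom colors' key here, and the function is assumed imported from the module above:

data = {"custom colors": {}}

data, name, rgba = mpl_color2xcolor(data, "red")
print(name)  # 'red' -- exact match against the predefined table

data, name, rgba = mpl_color2xcolor(data, (0.5, 0.0, 0.0))
print(name)  # 'red!50.0!black' -- scalar multiple of a predefined color

data, name, rgba = mpl_color2xcolor(data, (0.1, 0.2, 0.3))
print(name)  # 'color0' -- falls through to the custom-color list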
246,977 | nschloe/matplotlib2tikz | matplotlib2tikz/patch.py | draw_patch | def draw_patch(data, obj):
"""Return the PGFPlots code for patches.
"""
# Gather the draw options.
data, draw_options = mypath.get_draw_options(
data,
obj,
obj.get_edgecolor(),
obj.get_facecolor(),
obj.get_linestyle(),
obj.get_linewidth(),
)
if isinstance(obj, mpl.patches.Rectangle):
# rectangle specialization
return _draw_rectangle(data, obj, draw_options)
elif isinstance(obj, mpl.patches.Ellipse):
# ellipse specialization
return _draw_ellipse(data, obj, draw_options)
# regular patch
data, path_command, _, _ = mypath.draw_path(
data, obj.get_path(), draw_options=draw_options
)
return data, path_command | python | def draw_patch(data, obj):
# Gather the draw options.
data, draw_options = mypath.get_draw_options(
data,
obj,
obj.get_edgecolor(),
obj.get_facecolor(),
obj.get_linestyle(),
obj.get_linewidth(),
)
if isinstance(obj, mpl.patches.Rectangle):
# rectangle specialization
return _draw_rectangle(data, obj, draw_options)
elif isinstance(obj, mpl.patches.Ellipse):
# ellipse specialization
return _draw_ellipse(data, obj, draw_options)
# regular patch
data, path_command, _, _ = mypath.draw_path(
data, obj.get_path(), draw_options=draw_options
)
return data, path_command | [
"def",
"draw_patch",
"(",
"data",
",",
"obj",
")",
":",
"# Gather the draw options.",
"data",
",",
"draw_options",
"=",
"mypath",
".",
"get_draw_options",
"(",
"data",
",",
"obj",
",",
"obj",
".",
"get_edgecolor",
"(",
")",
",",
"obj",
".",
"get_facecolor",
"(",
")",
",",
"obj",
".",
"get_linestyle",
"(",
")",
",",
"obj",
".",
"get_linewidth",
"(",
")",
",",
")",
"if",
"isinstance",
"(",
"obj",
",",
"mpl",
".",
"patches",
".",
"Rectangle",
")",
":",
"# rectangle specialization",
"return",
"_draw_rectangle",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"mpl",
".",
"patches",
".",
"Ellipse",
")",
":",
"# ellipse specialization",
"return",
"_draw_ellipse",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
"# regular patch",
"data",
",",
"path_command",
",",
"_",
",",
"_",
"=",
"mypath",
".",
"draw_path",
"(",
"data",
",",
"obj",
".",
"get_path",
"(",
")",
",",
"draw_options",
"=",
"draw_options",
")",
"return",
"data",
",",
"path_command"
] | Return the PGFPlots code for patches. | [
"Return",
"the",
"PGFPlots",
"code",
"for",
"patches",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/patch.py#L8-L32 |
246,978 | nschloe/matplotlib2tikz | matplotlib2tikz/patch.py | _draw_rectangle | def _draw_rectangle(data, obj, draw_options):
"""Return the PGFPlots code for rectangles.
"""
# Objects with labels are plot objects (from bar charts, etc). Even those without
# labels explicitly set have a label of "_nolegend_". Everything else should be
# skipped because they likely correspond to axis/legend objects which are handled by
# PGFPlots
label = obj.get_label()
if label == "":
return data, []
# Get actual label, bar charts by default only give rectangles labels of
# "_nolegend_". See <https://stackoverflow.com/q/35881290/353337>.
handles, labels = obj.axes.get_legend_handles_labels()
labelsFound = [
label for h, label in zip(handles, labels) if obj in h.get_children()
]
if len(labelsFound) == 1:
label = labelsFound[0]
left_lower_x = obj.get_x()
left_lower_y = obj.get_y()
ff = data["float format"]
cont = (
"\\draw[{}] (axis cs:" + ff + "," + ff + ") "
"rectangle (axis cs:" + ff + "," + ff + ");\n"
).format(
",".join(draw_options),
left_lower_x,
left_lower_y,
left_lower_x + obj.get_width(),
left_lower_y + obj.get_height(),
)
if label != "_nolegend_" and label not in data["rectangle_legends"]:
data["rectangle_legends"].add(label)
cont += "\\addlegendimage{{ybar,ybar legend,{}}};\n".format(
",".join(draw_options)
)
cont += "\\addlegendentry{{{}}}\n\n".format(label)
return data, cont | python | def _draw_rectangle(data, obj, draw_options):
# Objects with labels are plot objects (from bar charts, etc). Even those without
# labels explicitly set have a label of "_nolegend_". Everything else should be
# skipped because they likely correspond to axis/legend objects which are handled by
# PGFPlots
label = obj.get_label()
if label == "":
return data, []
# Get actual label, bar charts by default only give rectangles labels of
# "_nolegend_". See <https://stackoverflow.com/q/35881290/353337>.
handles, labels = obj.axes.get_legend_handles_labels()
labelsFound = [
label for h, label in zip(handles, labels) if obj in h.get_children()
]
if len(labelsFound) == 1:
label = labelsFound[0]
left_lower_x = obj.get_x()
left_lower_y = obj.get_y()
ff = data["float format"]
cont = (
"\\draw[{}] (axis cs:" + ff + "," + ff + ") "
"rectangle (axis cs:" + ff + "," + ff + ");\n"
).format(
",".join(draw_options),
left_lower_x,
left_lower_y,
left_lower_x + obj.get_width(),
left_lower_y + obj.get_height(),
)
if label != "_nolegend_" and label not in data["rectangle_legends"]:
data["rectangle_legends"].add(label)
cont += "\\addlegendimage{{ybar,ybar legend,{}}};\n".format(
",".join(draw_options)
)
cont += "\\addlegendentry{{{}}}\n\n".format(label)
return data, cont | [
"def",
"_draw_rectangle",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
":",
"# Objects with labels are plot objects (from bar charts, etc). Even those without",
"# labels explicitly set have a label of \"_nolegend_\". Everything else should be",
"# skipped because they likely correspong to axis/legend objects which are handled by",
"# PGFPlots",
"label",
"=",
"obj",
".",
"get_label",
"(",
")",
"if",
"label",
"==",
"\"\"",
":",
"return",
"data",
",",
"[",
"]",
"# Get actual label, bar charts by default only give rectangles labels of",
"# \"_nolegend_\". See <https://stackoverflow.com/q/35881290/353337>.",
"handles",
",",
"labels",
"=",
"obj",
".",
"axes",
".",
"get_legend_handles_labels",
"(",
")",
"labelsFound",
"=",
"[",
"label",
"for",
"h",
",",
"label",
"in",
"zip",
"(",
"handles",
",",
"labels",
")",
"if",
"obj",
"in",
"h",
".",
"get_children",
"(",
")",
"]",
"if",
"len",
"(",
"labelsFound",
")",
"==",
"1",
":",
"label",
"=",
"labelsFound",
"[",
"0",
"]",
"left_lower_x",
"=",
"obj",
".",
"get_x",
"(",
")",
"left_lower_y",
"=",
"obj",
".",
"get_y",
"(",
")",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"cont",
"=",
"(",
"\"\\\\draw[{}] (axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\") \"",
"\"rectangle (axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\");\\n\"",
")",
".",
"format",
"(",
"\",\"",
".",
"join",
"(",
"draw_options",
")",
",",
"left_lower_x",
",",
"left_lower_y",
",",
"left_lower_x",
"+",
"obj",
".",
"get_width",
"(",
")",
",",
"left_lower_y",
"+",
"obj",
".",
"get_height",
"(",
")",
",",
")",
"if",
"label",
"!=",
"\"_nolegend_\"",
"and",
"label",
"not",
"in",
"data",
"[",
"\"rectangle_legends\"",
"]",
":",
"data",
"[",
"\"rectangle_legends\"",
"]",
".",
"add",
"(",
"label",
")",
"cont",
"+=",
"\"\\\\addlegendimage{{ybar,ybar legend,{}}};\\n\"",
".",
"format",
"(",
"\",\"",
".",
"join",
"(",
"draw_options",
")",
")",
"cont",
"+=",
"\"\\\\addlegendentry{{{}}}\\n\\n\"",
".",
"format",
"(",
"label",
")",
"return",
"data",
",",
"cont"
] | Return the PGFPlots code for rectangles. | [
"Return",
"the",
"PGFPlots",
"code",
"for",
"rectangles",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/patch.py#L91-L131 |
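
A minimal sketch of the string assembly _draw_rectangle performs, with a hand-rolled `data` dict and literal draw options standing in for a real matplotlib Rectangle; the "{:.15g}" float format and the option list are assumptions, not values taken from the converter:

# Hedged sketch: only the formatting step of _draw_rectangle, no matplotlib objects.
data = {"float format": "{:.15g}"}  # assumed; the real value comes from the converter settings
ff = data["float format"]
draw_options = ["red", "fill=red!20"]  # hypothetical output of get_draw_options
x, y, width, height = 1.0, 2.0, 3.0, 0.5
cont = (
    "\\draw[{}] (axis cs:" + ff + "," + ff + ") "
    "rectangle (axis cs:" + ff + "," + ff + ");\n"
).format(",".join(draw_options), x, y, x + width, y + height)
print(cont)  # \draw[red,fill=red!20] (axis cs:1,2) rectangle (axis cs:4,2.5);
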
246,979 | nschloe/matplotlib2tikz | matplotlib2tikz/patch.py | _draw_ellipse | def _draw_ellipse(data, obj, draw_options):
"""Return the PGFPlots code for ellipses.
"""
if isinstance(obj, mpl.patches.Circle):
# circle specialization
return _draw_circle(data, obj, draw_options)
x, y = obj.center
ff = data["float format"]
if obj.angle != 0:
fmt = "rotate around={{" + ff + ":(axis cs:" + ff + "," + ff + ")}}"
draw_options.append(fmt.format(obj.angle, x, y))
cont = (
"\\draw[{}] (axis cs:"
+ ff
+ ","
+ ff
+ ") ellipse ("
+ ff
+ " and "
+ ff
+ ");\n"
).format(",".join(draw_options), x, y, 0.5 * obj.width, 0.5 * obj.height)
return data, cont | python | def _draw_ellipse(data, obj, draw_options):
if isinstance(obj, mpl.patches.Circle):
# circle specialization
return _draw_circle(data, obj, draw_options)
x, y = obj.center
ff = data["float format"]
if obj.angle != 0:
fmt = "rotate around={{" + ff + ":(axis cs:" + ff + "," + ff + ")}}"
draw_options.append(fmt.format(obj.angle, x, y))
cont = (
"\\draw[{}] (axis cs:"
+ ff
+ ","
+ ff
+ ") ellipse ("
+ ff
+ " and "
+ ff
+ ");\n"
).format(",".join(draw_options), x, y, 0.5 * obj.width, 0.5 * obj.height)
return data, cont | [
"def",
"_draw_ellipse",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"mpl",
".",
"patches",
".",
"Circle",
")",
":",
"# circle specialization",
"return",
"_draw_circle",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
"x",
",",
"y",
"=",
"obj",
".",
"center",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"if",
"obj",
".",
"angle",
"!=",
"0",
":",
"fmt",
"=",
"\"rotate around={{\"",
"+",
"ff",
"+",
"\":(axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\")}}\"",
"draw_options",
".",
"append",
"(",
"fmt",
".",
"format",
"(",
"obj",
".",
"angle",
",",
"x",
",",
"y",
")",
")",
"cont",
"=",
"(",
"\"\\\\draw[{}] (axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\") ellipse (\"",
"+",
"ff",
"+",
"\" and \"",
"+",
"ff",
"+",
"\");\\n\"",
")",
".",
"format",
"(",
"\",\"",
".",
"join",
"(",
"draw_options",
")",
",",
"x",
",",
"y",
",",
"0.5",
"*",
"obj",
".",
"width",
",",
"0.5",
"*",
"obj",
".",
"height",
")",
"return",
"data",
",",
"cont"
] | Return the PGFPlots code for ellipses. | [
"Return",
"the",
"PGFPlots",
"code",
"for",
"ellipses",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/patch.py#L134-L158 |
246,980 | nschloe/matplotlib2tikz | matplotlib2tikz/patch.py | _draw_circle | def _draw_circle(data, obj, draw_options):
"""Return the PGFPlots code for circles.
"""
x, y = obj.center
ff = data["float format"]
cont = ("\\draw[{}] (axis cs:" + ff + "," + ff + ") circle (" + ff + ");\n").format(
",".join(draw_options), x, y, obj.get_radius()
)
return data, cont | python | def _draw_circle(data, obj, draw_options):
x, y = obj.center
ff = data["float format"]
cont = ("\\draw[{}] (axis cs:" + ff + "," + ff + ") circle (" + ff + ");\n").format(
",".join(draw_options), x, y, obj.get_radius()
)
return data, cont | [
"def",
"_draw_circle",
"(",
"data",
",",
"obj",
",",
"draw_options",
")",
":",
"x",
",",
"y",
"=",
"obj",
".",
"center",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"cont",
"=",
"(",
"\"\\\\draw[{}] (axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\") circle (\"",
"+",
"ff",
"+",
"\");\\n\"",
")",
".",
"format",
"(",
"\",\"",
".",
"join",
"(",
"draw_options",
")",
",",
"x",
",",
"y",
",",
"obj",
".",
"get_radius",
"(",
")",
")",
"return",
"data",
",",
"cont"
] | Return the PGFPlots code for circles. | [
"Return",
"the",
"PGFPlots",
"code",
"for",
"circles",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/patch.py#L161-L169 |
246,981 | nschloe/matplotlib2tikz | matplotlib2tikz/image.py | draw_image | def draw_image(data, obj):
"""Returns the PGFPlots code for an image environment.
"""
content = []
filename, rel_filepath = files.new_filename(data, "img", ".png")
# store the image in a file
img_array = obj.get_array()
dims = img_array.shape
if len(dims) == 2: # the values are given as one real number: look at cmap
clims = obj.get_clim()
mpl.pyplot.imsave(
fname=filename,
arr=img_array,
cmap=obj.get_cmap(),
vmin=clims[0],
vmax=clims[1],
origin=obj.origin,
)
else:
# RGB (+alpha) information at each point
assert len(dims) == 3 and dims[2] in [3, 4]
# convert to PIL image
if obj.origin == "lower":
img_array = numpy.flipud(img_array)
# Convert mpl image to PIL
image = PIL.Image.fromarray(numpy.uint8(img_array * 255))
# If the input image is PIL:
# image = PIL.Image.fromarray(img_array)
image.save(filename, origin=obj.origin)
# write the corresponding information to the TikZ file
extent = obj.get_extent()
# the format specification will only accept tuples
if not isinstance(extent, tuple):
extent = tuple(extent)
# Explicitly use \pgfimage as includegraphics command, as the default
# \includegraphics fails unexpectedly in some cases
ff = data["float format"]
content.append(
(
"\\addplot graphics [includegraphics cmd=\\pgfimage,"
"xmin=" + ff + ", xmax=" + ff + ", "
"ymin=" + ff + ", ymax=" + ff + "] {{{}}};\n"
).format(*(extent + (rel_filepath,)))
)
return data, content | python | def draw_image(data, obj):
content = []
filename, rel_filepath = files.new_filename(data, "img", ".png")
# store the image in a file
img_array = obj.get_array()
dims = img_array.shape
if len(dims) == 2: # the values are given as one real number: look at cmap
clims = obj.get_clim()
mpl.pyplot.imsave(
fname=filename,
arr=img_array,
cmap=obj.get_cmap(),
vmin=clims[0],
vmax=clims[1],
origin=obj.origin,
)
else:
# RGB (+alpha) information at each point
assert len(dims) == 3 and dims[2] in [3, 4]
# convert to PIL image
if obj.origin == "lower":
img_array = numpy.flipud(img_array)
# Convert mpl image to PIL
image = PIL.Image.fromarray(numpy.uint8(img_array * 255))
# If the input image is PIL:
# image = PIL.Image.fromarray(img_array)
image.save(filename, origin=obj.origin)
# write the corresponding information to the TikZ file
extent = obj.get_extent()
# the format specification will only accept tuples
if not isinstance(extent, tuple):
extent = tuple(extent)
# Explicitly use \pgfimage as includegraphics command, as the default
# \includegraphics fails unexpectedly in some cases
ff = data["float format"]
content.append(
(
"\\addplot graphics [includegraphics cmd=\\pgfimage,"
"xmin=" + ff + ", xmax=" + ff + ", "
"ymin=" + ff + ", ymax=" + ff + "] {{{}}};\n"
).format(*(extent + (rel_filepath,)))
)
return data, content | [
"def",
"draw_image",
"(",
"data",
",",
"obj",
")",
":",
"content",
"=",
"[",
"]",
"filename",
",",
"rel_filepath",
"=",
"files",
".",
"new_filename",
"(",
"data",
",",
"\"img\"",
",",
"\".png\"",
")",
"# store the image as in a file",
"img_array",
"=",
"obj",
".",
"get_array",
"(",
")",
"dims",
"=",
"img_array",
".",
"shape",
"if",
"len",
"(",
"dims",
")",
"==",
"2",
":",
"# the values are given as one real number: look at cmap",
"clims",
"=",
"obj",
".",
"get_clim",
"(",
")",
"mpl",
".",
"pyplot",
".",
"imsave",
"(",
"fname",
"=",
"filename",
",",
"arr",
"=",
"img_array",
",",
"cmap",
"=",
"obj",
".",
"get_cmap",
"(",
")",
",",
"vmin",
"=",
"clims",
"[",
"0",
"]",
",",
"vmax",
"=",
"clims",
"[",
"1",
"]",
",",
"origin",
"=",
"obj",
".",
"origin",
",",
")",
"else",
":",
"# RGB (+alpha) information at each point",
"assert",
"len",
"(",
"dims",
")",
"==",
"3",
"and",
"dims",
"[",
"2",
"]",
"in",
"[",
"3",
",",
"4",
"]",
"# convert to PIL image",
"if",
"obj",
".",
"origin",
"==",
"\"lower\"",
":",
"img_array",
"=",
"numpy",
".",
"flipud",
"(",
"img_array",
")",
"# Convert mpl image to PIL",
"image",
"=",
"PIL",
".",
"Image",
".",
"fromarray",
"(",
"numpy",
".",
"uint8",
"(",
"img_array",
"*",
"255",
")",
")",
"# If the input image is PIL:",
"# image = PIL.Image.fromarray(img_array)",
"image",
".",
"save",
"(",
"filename",
",",
"origin",
"=",
"obj",
".",
"origin",
")",
"# write the corresponding information to the TikZ file",
"extent",
"=",
"obj",
".",
"get_extent",
"(",
")",
"# the format specification will only accept tuples",
"if",
"not",
"isinstance",
"(",
"extent",
",",
"tuple",
")",
":",
"extent",
"=",
"tuple",
"(",
"extent",
")",
"# Explicitly use \\pgfimage as includegrapics command, as the default",
"# \\includegraphics fails unexpectedly in some cases",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"content",
".",
"append",
"(",
"(",
"\"\\\\addplot graphics [includegraphics cmd=\\\\pgfimage,\"",
"\"xmin=\"",
"+",
"ff",
"+",
"\", xmax=\"",
"+",
"ff",
"+",
"\", \"",
"\"ymin=\"",
"+",
"ff",
"+",
"\", ymax=\"",
"+",
"ff",
"+",
"\"] {{{}}};\\n\"",
")",
".",
"format",
"(",
"*",
"(",
"extent",
"+",
"(",
"rel_filepath",
",",
")",
")",
")",
")",
"return",
"data",
",",
"content"
] | Returns the PGFPlots code for an image environment. | [
"Returns",
"the",
"PGFPlots",
"code",
"for",
"an",
"image",
"environment",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/image.py#L10-L64 |
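
A self-contained sketch of the RGB(A) branch above, using a synthetic array instead of a real AxesImage; the flip mirrors the origin="lower" handling (the `origin` keyword the original passes to PIL's save is omitted here for simplicity):

import numpy
import PIL.Image

img_array = numpy.random.rand(4, 4, 3)  # synthetic RGB values in [0, 1]
origin = "lower"
if origin == "lower":
    img_array = numpy.flipud(img_array)  # PNG rows run top to bottom
image = PIL.Image.fromarray(numpy.uint8(img_array * 255))
image.save("img.png")
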
246,982 | nschloe/matplotlib2tikz | matplotlib2tikz/util.py | get_legend_text | def get_legend_text(obj):
"""Check if line is in legend.
"""
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [l.get_label() for l in leg.legendHandles if l is not None]
values = [l.get_text() for l in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None | python | def get_legend_text(obj):
leg = obj.axes.get_legend()
if leg is None:
return None
keys = [l.get_label() for l in leg.legendHandles if l is not None]
values = [l.get_text() for l in leg.texts]
label = obj.get_label()
d = dict(zip(keys, values))
if label in d:
return d[label]
return None | [
"def",
"get_legend_text",
"(",
"obj",
")",
":",
"leg",
"=",
"obj",
".",
"axes",
".",
"get_legend",
"(",
")",
"if",
"leg",
"is",
"None",
":",
"return",
"None",
"keys",
"=",
"[",
"l",
".",
"get_label",
"(",
")",
"for",
"l",
"in",
"leg",
".",
"legendHandles",
"if",
"l",
"is",
"not",
"None",
"]",
"values",
"=",
"[",
"l",
".",
"get_text",
"(",
")",
"for",
"l",
"in",
"leg",
".",
"texts",
"]",
"label",
"=",
"obj",
".",
"get_label",
"(",
")",
"d",
"=",
"dict",
"(",
"zip",
"(",
"keys",
",",
"values",
")",
")",
"if",
"label",
"in",
"d",
":",
"return",
"d",
"[",
"label",
"]",
"return",
"None"
] | Return the legend text for a line if it is in the legend, else None. | [
"Check",
"if",
"line",
"is",
"in",
"legend",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/util.py#L11-L26 |
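
The handle/label matching above boils down to a dict lookup; a standalone sketch with plain strings standing in for matplotlib artists (all values are illustrative):

keys = ["line-a", "line-b"]        # labels of the legend handles
values = ["Series A", "Series B"]  # visible legend texts
label = "line-b"                   # label of the artist being rendered
d = dict(zip(keys, values))
print(d.get(label))                # "Series B"; None when the line is not in the legend
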
246,983 | nschloe/matplotlib2tikz | matplotlib2tikz/save.py | _get_color_definitions | def _get_color_definitions(data):
"""Returns the list of custom color definitions for the TikZ file.
"""
definitions = []
fmt = "\\definecolor{{{}}}{{rgb}}{{" + ",".join(3 * [data["float format"]]) + "}}"
for name, rgb in data["custom colors"].items():
definitions.append(fmt.format(name, rgb[0], rgb[1], rgb[2]))
return definitions | python | def _get_color_definitions(data):
definitions = []
fmt = "\\definecolor{{{}}}{{rgb}}{{" + ",".join(3 * [data["float format"]]) + "}}"
for name, rgb in data["custom colors"].items():
definitions.append(fmt.format(name, rgb[0], rgb[1], rgb[2]))
return definitions | [
"def",
"_get_color_definitions",
"(",
"data",
")",
":",
"definitions",
"=",
"[",
"]",
"fmt",
"=",
"\"\\\\definecolor{{{}}}{{rgb}}{{\"",
"+",
"\",\"",
".",
"join",
"(",
"3",
"*",
"[",
"data",
"[",
"\"float format\"",
"]",
"]",
")",
"+",
"\"}}\"",
"for",
"name",
",",
"rgb",
"in",
"data",
"[",
"\"custom colors\"",
"]",
".",
"items",
"(",
")",
":",
"definitions",
".",
"append",
"(",
"fmt",
".",
"format",
"(",
"name",
",",
"rgb",
"[",
"0",
"]",
",",
"rgb",
"[",
"1",
"]",
",",
"rgb",
"[",
"2",
"]",
")",
")",
"return",
"definitions"
] | Returns the list of custom color definitions for the TikZ file. | [
"Returns",
"the",
"list",
"of",
"custom",
"color",
"definitions",
"for",
"the",
"TikZ",
"file",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/save.py#L283-L290 |
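
A standalone sketch of the color-definition formatting, with a hand-rolled `data` dict; the "{:.15g}" float format and the color entry are assumptions:

data = {
    "float format": "{:.15g}",  # assumed; supplied by the converter in practice
    "custom colors": {"color0": (0.2, 0.4, 0.6)},
}
fmt = "\\definecolor{{{}}}{{rgb}}{{" + ",".join(3 * [data["float format"]]) + "}}"
definitions = [fmt.format(name, *rgb) for name, rgb in data["custom colors"].items()]
print(definitions[0])  # \definecolor{color0}{rgb}{0.2,0.4,0.6}
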
246,984 | nschloe/matplotlib2tikz | matplotlib2tikz/save.py | _print_pgfplot_libs_message | def _print_pgfplot_libs_message(data):
"""Prints message to screen indicating the use of PGFPlots and its
libraries."""
pgfplotslibs = ",".join(list(data["pgfplots libs"]))
tikzlibs = ",".join(list(data["tikz libs"]))
print(70 * "=")
print("Please add the following lines to your LaTeX preamble:\n")
print("\\usepackage[utf8]{inputenc}")
print("\\usepackage{fontspec} % This line only for XeLaTeX and LuaLaTeX")
print("\\usepackage{pgfplots}")
if tikzlibs:
print("\\usetikzlibrary{" + tikzlibs + "}")
if pgfplotslibs:
print("\\usepgfplotslibrary{" + pgfplotslibs + "}")
print(70 * "=")
return | python | def _print_pgfplot_libs_message(data):
pgfplotslibs = ",".join(list(data["pgfplots libs"]))
tikzlibs = ",".join(list(data["tikz libs"]))
print(70 * "=")
print("Please add the following lines to your LaTeX preamble:\n")
print("\\usepackage[utf8]{inputenc}")
print("\\usepackage{fontspec} % This line only for XeLaTeX and LuaLaTeX")
print("\\usepackage{pgfplots}")
if tikzlibs:
print("\\usetikzlibrary{" + tikzlibs + "}")
if pgfplotslibs:
print("\\usepgfplotslibrary{" + pgfplotslibs + "}")
print(70 * "=")
return | [
"def",
"_print_pgfplot_libs_message",
"(",
"data",
")",
":",
"pgfplotslibs",
"=",
"\",\"",
".",
"join",
"(",
"list",
"(",
"data",
"[",
"\"pgfplots libs\"",
"]",
")",
")",
"tikzlibs",
"=",
"\",\"",
".",
"join",
"(",
"list",
"(",
"data",
"[",
"\"tikz libs\"",
"]",
")",
")",
"print",
"(",
"70",
"*",
"\"=\"",
")",
"print",
"(",
"\"Please add the following lines to your LaTeX preamble:\\n\"",
")",
"print",
"(",
"\"\\\\usepackage[utf8]{inputenc}\"",
")",
"print",
"(",
"\"\\\\usepackage{fontspec} % This line only for XeLaTeX and LuaLaTeX\"",
")",
"print",
"(",
"\"\\\\usepackage{pgfplots}\"",
")",
"if",
"tikzlibs",
":",
"print",
"(",
"\"\\\\usetikzlibrary{\"",
"+",
"tikzlibs",
"+",
"\"}\"",
")",
"if",
"pgfplotslibs",
":",
"print",
"(",
"\"\\\\usepgfplotslibrary{\"",
"+",
"pgfplotslibs",
"+",
"\"}\"",
")",
"print",
"(",
"70",
"*",
"\"=\"",
")",
"return"
] | Prints message to screen indicating the use of PGFPlots and its
libraries. | [
"Prints",
"message",
"to",
"screen",
"indicating",
"the",
"use",
"of",
"PGFPlots",
"and",
"its",
"libraries",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/save.py#L293-L309 |
246,985 | nschloe/matplotlib2tikz | matplotlib2tikz/save.py | _ContentManager.extend | def extend(self, content, zorder):
""" Extends with a list and a z-order
"""
if zorder not in self._content:
self._content[zorder] = []
self._content[zorder].extend(content) | python | def extend(self, content, zorder):
if zorder not in self._content:
self._content[zorder] = []
self._content[zorder].extend(content) | [
"def",
"extend",
"(",
"self",
",",
"content",
",",
"zorder",
")",
":",
"if",
"zorder",
"not",
"in",
"self",
".",
"_content",
":",
"self",
".",
"_content",
"[",
"zorder",
"]",
"=",
"[",
"]",
"self",
".",
"_content",
"[",
"zorder",
"]",
".",
"extend",
"(",
"content",
")"
] | Extends with a list and a z-order | [
"Extends",
"with",
"a",
"list",
"and",
"a",
"z",
"-",
"order"
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/save.py#L322-L327 |
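
A sketch of the z-order bucketing idea behind _ContentManager, plus the flatten step one would use when emitting the collected code; the flatten is an assumption about how the manager is consumed, not code from the library:

content = {}

def extend(chunk, zorder):
    # bucket TikZ code fragments by z-order
    content.setdefault(zorder, []).extend(chunk)

extend(["% background"], 0)
extend(["\\addplot ...;"], 2)
extend(["\\node ...;"], 1)
flat = [line for z in sorted(content) for line in content[z]]
print(flat)  # ['% background', '\\node ...;', '\\addplot ...;']
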
246,986 | nschloe/matplotlib2tikz | matplotlib2tikz/line2d.py | draw_line2d | def draw_line2d(data, obj):
"""Returns the PGFPlots code for an Line2D environment.
"""
content = []
addplot_options = []
# If line is of length 0, do nothing. Otherwise, an empty \addplot table will be
# created, which will be interpreted as an external data source in either the file
# '' or '.tex'. Instead, render nothing.
if len(obj.get_xdata()) == 0:
return data, []
# get the linewidth (in pt)
line_width = mypath.mpl_linewidth2pgfp_linewidth(data, obj.get_linewidth())
if line_width:
addplot_options.append(line_width)
# get line color
color = obj.get_color()
data, line_xcolor, _ = mycol.mpl_color2xcolor(data, color)
addplot_options.append(line_xcolor)
alpha = obj.get_alpha()
if alpha is not None:
addplot_options.append("opacity={}".format(alpha))
linestyle = mypath.mpl_linestyle2pgfplots_linestyle(obj.get_linestyle(), line=obj)
if linestyle is not None and linestyle != "solid":
addplot_options.append(linestyle)
marker_face_color = obj.get_markerfacecolor()
marker_edge_color = obj.get_markeredgecolor()
data, marker, extra_mark_options = _mpl_marker2pgfp_marker(
data, obj.get_marker(), marker_face_color
)
if marker:
_marker(
obj,
data,
marker,
addplot_options,
extra_mark_options,
marker_face_color,
marker_edge_color,
line_xcolor,
)
if marker and linestyle is None:
addplot_options.append("only marks")
# Check if a line is in a legend and forget it if not.
# Fixes <https://github.com/nschloe/matplotlib2tikz/issues/167>.
legend_text = get_legend_text(obj)
if legend_text is None and has_legend(obj.axes):
addplot_options.append("forget plot")
# process options
content.append("\\addplot ")
if addplot_options:
content.append("[{}]\n".format(", ".join(addplot_options)))
c, axis_options = _table(obj, data)
content += c
if legend_text is not None:
content.append("\\addlegendentry{{{}}}\n".format(legend_text))
return data, content | python | def draw_line2d(data, obj):
content = []
addplot_options = []
# If line is of length 0, do nothing. Otherwise, an empty \addplot table will be
# created, which will be interpreted as an external data source in either the file
# '' or '.tex'. Instead, render nothing.
if len(obj.get_xdata()) == 0:
return data, []
# get the linewidth (in pt)
line_width = mypath.mpl_linewidth2pgfp_linewidth(data, obj.get_linewidth())
if line_width:
addplot_options.append(line_width)
# get line color
color = obj.get_color()
data, line_xcolor, _ = mycol.mpl_color2xcolor(data, color)
addplot_options.append(line_xcolor)
alpha = obj.get_alpha()
if alpha is not None:
addplot_options.append("opacity={}".format(alpha))
linestyle = mypath.mpl_linestyle2pgfplots_linestyle(obj.get_linestyle(), line=obj)
if linestyle is not None and linestyle != "solid":
addplot_options.append(linestyle)
marker_face_color = obj.get_markerfacecolor()
marker_edge_color = obj.get_markeredgecolor()
data, marker, extra_mark_options = _mpl_marker2pgfp_marker(
data, obj.get_marker(), marker_face_color
)
if marker:
_marker(
obj,
data,
marker,
addplot_options,
extra_mark_options,
marker_face_color,
marker_edge_color,
line_xcolor,
)
if marker and linestyle is None:
addplot_options.append("only marks")
# Check if a line is in a legend and forget it if not.
# Fixes <https://github.com/nschloe/matplotlib2tikz/issues/167>.
legend_text = get_legend_text(obj)
if legend_text is None and has_legend(obj.axes):
addplot_options.append("forget plot")
# process options
content.append("\\addplot ")
if addplot_options:
content.append("[{}]\n".format(", ".join(addplot_options)))
c, axis_options = _table(obj, data)
content += c
if legend_text is not None:
content.append("\\addlegendentry{{{}}}\n".format(legend_text))
return data, content | [
"def",
"draw_line2d",
"(",
"data",
",",
"obj",
")",
":",
"content",
"=",
"[",
"]",
"addplot_options",
"=",
"[",
"]",
"# If line is of length 0, do nothing. Otherwise, an empty \\addplot table will be",
"# created, which will be interpreted as an external data source in either the file",
"# '' or '.tex'. Instead, render nothing.",
"if",
"len",
"(",
"obj",
".",
"get_xdata",
"(",
")",
")",
"==",
"0",
":",
"return",
"data",
",",
"[",
"]",
"# get the linewidth (in pt)",
"line_width",
"=",
"mypath",
".",
"mpl_linewidth2pgfp_linewidth",
"(",
"data",
",",
"obj",
".",
"get_linewidth",
"(",
")",
")",
"if",
"line_width",
":",
"addplot_options",
".",
"append",
"(",
"line_width",
")",
"# get line color",
"color",
"=",
"obj",
".",
"get_color",
"(",
")",
"data",
",",
"line_xcolor",
",",
"_",
"=",
"mycol",
".",
"mpl_color2xcolor",
"(",
"data",
",",
"color",
")",
"addplot_options",
".",
"append",
"(",
"line_xcolor",
")",
"alpha",
"=",
"obj",
".",
"get_alpha",
"(",
")",
"if",
"alpha",
"is",
"not",
"None",
":",
"addplot_options",
".",
"append",
"(",
"\"opacity={}\"",
".",
"format",
"(",
"alpha",
")",
")",
"linestyle",
"=",
"mypath",
".",
"mpl_linestyle2pgfplots_linestyle",
"(",
"obj",
".",
"get_linestyle",
"(",
")",
",",
"line",
"=",
"obj",
")",
"if",
"linestyle",
"is",
"not",
"None",
"and",
"linestyle",
"!=",
"\"solid\"",
":",
"addplot_options",
".",
"append",
"(",
"linestyle",
")",
"marker_face_color",
"=",
"obj",
".",
"get_markerfacecolor",
"(",
")",
"marker_edge_color",
"=",
"obj",
".",
"get_markeredgecolor",
"(",
")",
"data",
",",
"marker",
",",
"extra_mark_options",
"=",
"_mpl_marker2pgfp_marker",
"(",
"data",
",",
"obj",
".",
"get_marker",
"(",
")",
",",
"marker_face_color",
")",
"if",
"marker",
":",
"_marker",
"(",
"obj",
",",
"data",
",",
"marker",
",",
"addplot_options",
",",
"extra_mark_options",
",",
"marker_face_color",
",",
"marker_edge_color",
",",
"line_xcolor",
",",
")",
"if",
"marker",
"and",
"linestyle",
"is",
"None",
":",
"addplot_options",
".",
"append",
"(",
"\"only marks\"",
")",
"# Check if a line is in a legend and forget it if not.",
"# Fixes <https://github.com/nschloe/matplotlib2tikz/issues/167>.",
"legend_text",
"=",
"get_legend_text",
"(",
"obj",
")",
"if",
"legend_text",
"is",
"None",
"and",
"has_legend",
"(",
"obj",
".",
"axes",
")",
":",
"addplot_options",
".",
"append",
"(",
"\"forget plot\"",
")",
"# process options",
"content",
".",
"append",
"(",
"\"\\\\addplot \"",
")",
"if",
"addplot_options",
":",
"content",
".",
"append",
"(",
"\"[{}]\\n\"",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"addplot_options",
")",
")",
")",
"c",
",",
"axis_options",
"=",
"_table",
"(",
"obj",
",",
"data",
")",
"content",
"+=",
"c",
"if",
"legend_text",
"is",
"not",
"None",
":",
"content",
".",
"append",
"(",
"\"\\\\addlegendentry{{{}}}\\n\"",
".",
"format",
"(",
"legend_text",
")",
")",
"return",
"data",
",",
"content"
] | Returns the PGFPlots code for a Line2D environment. | [
"Returns",
"the",
"PGFPlots",
"code",
"for",
"an",
"Line2D",
"environment",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/line2d.py#L18-L85 |
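
The final emission step of draw_line2d reduces to joining the collected option strings; a minimal sketch with hypothetical options and a placeholder for the coordinate table produced by _table:

addplot_options = ["semithick", "color0", "mark=*"]  # hypothetical collected options
content = ["\\addplot "]
if addplot_options:
    content.append("[{}]\n".format(", ".join(addplot_options)))
content.append("table {...};\n")  # placeholder for the output of _table
print("".join(content))
# \addplot [semithick, color0, mark=*]
# table {...};
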
246,987 | nschloe/matplotlib2tikz | matplotlib2tikz/line2d.py | draw_linecollection | def draw_linecollection(data, obj):
"""Returns Pgfplots code for a number of patch objects.
"""
content = []
edgecolors = obj.get_edgecolors()
linestyles = obj.get_linestyles()
linewidths = obj.get_linewidths()
paths = obj.get_paths()
for i, path in enumerate(paths):
color = edgecolors[i] if i < len(edgecolors) else edgecolors[0]
style = linestyles[i] if i < len(linestyles) else linestyles[0]
width = linewidths[i] if i < len(linewidths) else linewidths[0]
data, options = mypath.get_draw_options(data, obj, color, None, style, width)
# TODO what about masks?
data, cont, _, _ = mypath.draw_path(
data, path, draw_options=options, simplify=False
)
content.append(cont + "\n")
return data, content | python | def draw_linecollection(data, obj):
content = []
edgecolors = obj.get_edgecolors()
linestyles = obj.get_linestyles()
linewidths = obj.get_linewidths()
paths = obj.get_paths()
for i, path in enumerate(paths):
color = edgecolors[i] if i < len(edgecolors) else edgecolors[0]
style = linestyles[i] if i < len(linestyles) else linestyles[0]
width = linewidths[i] if i < len(linewidths) else linewidths[0]
data, options = mypath.get_draw_options(data, obj, color, None, style, width)
# TODO what about masks?
data, cont, _, _ = mypath.draw_path(
data, path, draw_options=options, simplify=False
)
content.append(cont + "\n")
return data, content | [
"def",
"draw_linecollection",
"(",
"data",
",",
"obj",
")",
":",
"content",
"=",
"[",
"]",
"edgecolors",
"=",
"obj",
".",
"get_edgecolors",
"(",
")",
"linestyles",
"=",
"obj",
".",
"get_linestyles",
"(",
")",
"linewidths",
"=",
"obj",
".",
"get_linewidths",
"(",
")",
"paths",
"=",
"obj",
".",
"get_paths",
"(",
")",
"for",
"i",
",",
"path",
"in",
"enumerate",
"(",
"paths",
")",
":",
"color",
"=",
"edgecolors",
"[",
"i",
"]",
"if",
"i",
"<",
"len",
"(",
"edgecolors",
")",
"else",
"edgecolors",
"[",
"0",
"]",
"style",
"=",
"linestyles",
"[",
"i",
"]",
"if",
"i",
"<",
"len",
"(",
"linestyles",
")",
"else",
"linestyles",
"[",
"0",
"]",
"width",
"=",
"linewidths",
"[",
"i",
"]",
"if",
"i",
"<",
"len",
"(",
"linewidths",
")",
"else",
"linewidths",
"[",
"0",
"]",
"data",
",",
"options",
"=",
"mypath",
".",
"get_draw_options",
"(",
"data",
",",
"obj",
",",
"color",
",",
"None",
",",
"style",
",",
"width",
")",
"# TODO what about masks?",
"data",
",",
"cont",
",",
"_",
",",
"_",
"=",
"mypath",
".",
"draw_path",
"(",
"data",
",",
"path",
",",
"draw_options",
"=",
"options",
",",
"simplify",
"=",
"False",
")",
"content",
".",
"append",
"(",
"cont",
"+",
"\"\\n\"",
")",
"return",
"data",
",",
"content"
] | Returns the PGFPlots code for the paths of a LineCollection. | [
"Returns",
"Pgfplots",
"code",
"for",
"a",
"number",
"of",
"patch",
"objects",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/line2d.py#L88-L111 |
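
The per-path property lookup above falls back to index 0 when matplotlib broadcast a single value across all paths; the pattern in isolation, with placeholder values:

linewidths = [1.5]                  # one width shared by every path
paths = ["p0", "p1", "p2"]          # placeholders for matplotlib Path objects
for i, path in enumerate(paths):
    width = linewidths[i] if i < len(linewidths) else linewidths[0]
    print(path, width)              # every path gets 1.5
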
246,988 | nschloe/matplotlib2tikz | matplotlib2tikz/line2d.py | _mpl_marker2pgfp_marker | def _mpl_marker2pgfp_marker(data, mpl_marker, marker_face_color):
"""Translates a marker style of matplotlib to the corresponding style
in PGFPlots.
"""
# try default list
try:
pgfplots_marker = _MP_MARKER2PGF_MARKER[mpl_marker]
except KeyError:
pass
else:
if (marker_face_color is not None) and pgfplots_marker == "o":
pgfplots_marker = "*"
data["tikz libs"].add("plotmarks")
marker_options = None
return (data, pgfplots_marker, marker_options)
# try plotmarks list
try:
data["tikz libs"].add("plotmarks")
pgfplots_marker, marker_options = _MP_MARKER2PLOTMARKS[mpl_marker]
except KeyError:
# There's no equivalent for the pixel marker (,) in Pgfplots.
pass
else:
if (
marker_face_color is not None
and (
not isinstance(marker_face_color, str)
or marker_face_color.lower() != "none"
)
and pgfplots_marker not in ["|", "-", "asterisk", "star"]
):
pgfplots_marker += "*"
return (data, pgfplots_marker, marker_options)
return data, None, None | python | def _mpl_marker2pgfp_marker(data, mpl_marker, marker_face_color):
# try default list
try:
pgfplots_marker = _MP_MARKER2PGF_MARKER[mpl_marker]
except KeyError:
pass
else:
if (marker_face_color is not None) and pgfplots_marker == "o":
pgfplots_marker = "*"
data["tikz libs"].add("plotmarks")
marker_options = None
return (data, pgfplots_marker, marker_options)
# try plotmarks list
try:
data["tikz libs"].add("plotmarks")
pgfplots_marker, marker_options = _MP_MARKER2PLOTMARKS[mpl_marker]
except KeyError:
# There's no equivalent for the pixel marker (,) in Pgfplots.
pass
else:
if (
marker_face_color is not None
and (
not isinstance(marker_face_color, str)
or marker_face_color.lower() != "none"
)
and pgfplots_marker not in ["|", "-", "asterisk", "star"]
):
pgfplots_marker += "*"
return (data, pgfplots_marker, marker_options)
return data, None, None | [
"def",
"_mpl_marker2pgfp_marker",
"(",
"data",
",",
"mpl_marker",
",",
"marker_face_color",
")",
":",
"# try default list",
"try",
":",
"pgfplots_marker",
"=",
"_MP_MARKER2PGF_MARKER",
"[",
"mpl_marker",
"]",
"except",
"KeyError",
":",
"pass",
"else",
":",
"if",
"(",
"marker_face_color",
"is",
"not",
"None",
")",
"and",
"pgfplots_marker",
"==",
"\"o\"",
":",
"pgfplots_marker",
"=",
"\"*\"",
"data",
"[",
"\"tikz libs\"",
"]",
".",
"add",
"(",
"\"plotmarks\"",
")",
"marker_options",
"=",
"None",
"return",
"(",
"data",
",",
"pgfplots_marker",
",",
"marker_options",
")",
"# try plotmarks list",
"try",
":",
"data",
"[",
"\"tikz libs\"",
"]",
".",
"add",
"(",
"\"plotmarks\"",
")",
"pgfplots_marker",
",",
"marker_options",
"=",
"_MP_MARKER2PLOTMARKS",
"[",
"mpl_marker",
"]",
"except",
"KeyError",
":",
"# There's no equivalent for the pixel marker (,) in Pgfplots.",
"pass",
"else",
":",
"if",
"(",
"marker_face_color",
"is",
"not",
"None",
"and",
"(",
"not",
"isinstance",
"(",
"marker_face_color",
",",
"str",
")",
"or",
"marker_face_color",
".",
"lower",
"(",
")",
"!=",
"\"none\"",
")",
"and",
"pgfplots_marker",
"not",
"in",
"[",
"\"|\"",
",",
"\"-\"",
",",
"\"asterisk\"",
",",
"\"star\"",
"]",
")",
":",
"pgfplots_marker",
"+=",
"\"*\"",
"return",
"(",
"data",
",",
"pgfplots_marker",
",",
"marker_options",
")",
"return",
"data",
",",
"None",
",",
"None"
] | Translates a marker style of matplotlib to the corresponding style
in PGFPlots. | [
"Translates",
"a",
"marker",
"style",
"of",
"matplotlib",
"to",
"the",
"corresponding",
"style",
"in",
"PGFPlots",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/line2d.py#L147-L182 |
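
The try/except/else lookup-table idiom above, demonstrated with a toy two-entry table; this is not the real _MP_MARKER2PGF_MARKER, just the control flow:

TABLE = {"o": "o", "D": "diamond"}  # toy subset, purely illustrative

def translate(mpl_marker, filled):
    try:
        marker = TABLE[mpl_marker]
    except KeyError:
        return None  # e.g. the pixel marker ',' has no PGFPlots equivalent
    else:
        if filled and marker == "o":
            marker = "*"  # PGFPlots uses '*' for the filled circle
        return marker

print(translate("o", filled=True))   # *
print(translate(",", filled=False))  # None
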
246,989 | nschloe/matplotlib2tikz | matplotlib2tikz/text.py | draw_text | def draw_text(data, obj):
"""Paints text on the graph.
"""
content = []
properties = []
style = []
if isinstance(obj, mpl.text.Annotation):
_annotation(obj, data, content)
# 1: coordinates
# 2: properties (shapes, rotation, etc)
# 3: text style
# 4: the text
# -------1--------2---3--4--
pos = obj.get_position()
# from .util import transform_to_data_coordinates
# pos = transform_to_data_coordinates(obj, *pos)
text = obj.get_text()
if text in ["", data["current axis title"]]:
# Text nodes which are direct children of Axes are typically titles. They are
# already captured by the `title` property of pgfplots axes, so skip them here.
return data, content
size = obj.get_size()
bbox = obj.get_bbox_patch()
converter = mpl.colors.ColorConverter()
# without the factor 0.5, the fonts are too big most of the time.
# TODO fix this
scaling = 0.5 * size / data["font size"]
ff = data["float format"]
if scaling != 1.0:
properties.append(("scale=" + ff).format(scaling))
if bbox is not None:
_bbox(bbox, data, properties, scaling)
ha = obj.get_ha()
va = obj.get_va()
anchor = _transform_positioning(ha, va)
if anchor is not None:
properties.append(anchor)
data, col, _ = color.mpl_color2xcolor(data, converter.to_rgb(obj.get_color()))
properties.append("text={}".format(col))
properties.append("rotate={:.1f}".format(obj.get_rotation()))
if obj.get_style() == "italic":
style.append("\\itshape")
else:
assert obj.get_style() == "normal"
# From matplotlib/font_manager.py:
# weight_dict = {
# 'ultralight' : 100,
# 'light' : 200,
# 'normal' : 400,
# 'regular' : 400,
# 'book' : 400,
# 'medium' : 500,
# 'roman' : 500,
# 'semibold' : 600,
# 'demibold' : 600,
# 'demi' : 600,
# 'bold' : 700,
# 'heavy' : 800,
# 'extra bold' : 800,
# 'black' : 900}
#
# get_weights returns a numeric value in the range 0-1000 or one of
# ‘light’, ‘normal’, ‘regular’, ‘book’, ‘medium’, ‘roman’, ‘semibold’,
# ‘demibold’, ‘demi’, ‘bold’, ‘heavy’, ‘extra bold’, ‘black’
weight = obj.get_weight()
if weight in [
"semibold",
"demibold",
"demi",
"bold",
"heavy",
"extra bold",
"black",
] or (isinstance(weight, int) and weight > 550):
style.append("\\bfseries")
# \lfseries isn't that common yet
# elif weight == 'light' or (isinstance(weight, int) and weight < 300):
# style.append('\\lfseries')
if obj.axes:
# If the coordinates are relative to an axis, use `axis cs`.
tikz_pos = ("(axis cs:" + ff + "," + ff + ")").format(*pos)
else:
# relative to the entire figure, it's getting a little harder. See
# <http://tex.stackexchange.com/a/274902/13262> for a solution to the
# problem:
tikz_pos = (
"({{$(current bounding box.south west)!" + ff + "!"
"(current bounding box.south east)$}}"
"|-"
"{{$(current bounding box.south west)!" + ff + "!"
"(current bounding box.north west)$}})"
).format(*pos)
if "\n" in text:
# http://tex.stackexchange.com/a/124114/13262
properties.append("align={}".format(ha))
# Manipulating the text here is actually against mpl2tikz's policy not
# to do that. On the other hand, newlines should translate into
# newlines.
# We might want to remove this here in the future.
text = text.replace("\n ", "\\\\")
content.append(
"\\node at {}[\n {}\n]{{{}}};\n".format(
tikz_pos, ",\n ".join(properties), " ".join(style + [text])
)
)
return data, content | python | def draw_text(data, obj):
content = []
properties = []
style = []
if isinstance(obj, mpl.text.Annotation):
_annotation(obj, data, content)
# 1: coordinates
# 2: properties (shapes, rotation, etc)
# 3: text style
# 4: the text
# -------1--------2---3--4--
pos = obj.get_position()
# from .util import transform_to_data_coordinates
# pos = transform_to_data_coordinates(obj, *pos)
text = obj.get_text()
if text in ["", data["current axis title"]]:
# Text nodes which are direct children of Axes are typically titles. They are
# already captured by the `title` property of pgfplots axes, so skip them here.
return data, content
size = obj.get_size()
bbox = obj.get_bbox_patch()
converter = mpl.colors.ColorConverter()
# without the factor 0.5, the fonts are too big most of the time.
# TODO fix this
scaling = 0.5 * size / data["font size"]
ff = data["float format"]
if scaling != 1.0:
properties.append(("scale=" + ff).format(scaling))
if bbox is not None:
_bbox(bbox, data, properties, scaling)
ha = obj.get_ha()
va = obj.get_va()
anchor = _transform_positioning(ha, va)
if anchor is not None:
properties.append(anchor)
data, col, _ = color.mpl_color2xcolor(data, converter.to_rgb(obj.get_color()))
properties.append("text={}".format(col))
properties.append("rotate={:.1f}".format(obj.get_rotation()))
if obj.get_style() == "italic":
style.append("\\itshape")
else:
assert obj.get_style() == "normal"
# From matplotlib/font_manager.py:
# weight_dict = {
# 'ultralight' : 100,
# 'light' : 200,
# 'normal' : 400,
# 'regular' : 400,
# 'book' : 400,
# 'medium' : 500,
# 'roman' : 500,
# 'semibold' : 600,
# 'demibold' : 600,
# 'demi' : 600,
# 'bold' : 700,
# 'heavy' : 800,
# 'extra bold' : 800,
# 'black' : 900}
#
# get_weights returns a numeric value in the range 0-1000 or one of
# ‘light’, ‘normal’, ‘regular’, ‘book’, ‘medium’, ‘roman’, ‘semibold’,
# ‘demibold’, ‘demi’, ‘bold’, ‘heavy’, ‘extra bold’, ‘black’
weight = obj.get_weight()
if weight in [
"semibold",
"demibold",
"demi",
"bold",
"heavy",
"extra bold",
"black",
] or (isinstance(weight, int) and weight > 550):
style.append("\\bfseries")
# \lfseries isn't that common yet
# elif weight == 'light' or (isinstance(weight, int) and weight < 300):
# style.append('\\lfseries')
if obj.axes:
# If the coordinates are relative to an axis, use `axis cs`.
tikz_pos = ("(axis cs:" + ff + "," + ff + ")").format(*pos)
else:
# relative to the entire figure, it's getting a little harder. See
# <http://tex.stackexchange.com/a/274902/13262> for a solution to the
# problem:
tikz_pos = (
"({{$(current bounding box.south west)!" + ff + "!"
"(current bounding box.south east)$}}"
"|-"
"{{$(current bounding box.south west)!" + ff + "!"
"(current bounding box.north west)$}})"
).format(*pos)
if "\n" in text:
# http://tex.stackexchange.com/a/124114/13262
properties.append("align={}".format(ha))
# Manipulating the text here is actually against mpl2tikz's policy not
# to do that. On the other hand, newlines should translate into
# newlines.
# We might want to remove this here in the future.
text = text.replace("\n ", "\\\\")
content.append(
"\\node at {}[\n {}\n]{{{}}};\n".format(
tikz_pos, ",\n ".join(properties), " ".join(style + [text])
)
)
return data, content | [
"def",
"draw_text",
"(",
"data",
",",
"obj",
")",
":",
"content",
"=",
"[",
"]",
"properties",
"=",
"[",
"]",
"style",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"obj",
",",
"mpl",
".",
"text",
".",
"Annotation",
")",
":",
"_annotation",
"(",
"obj",
",",
"data",
",",
"content",
")",
"# 1: coordinates",
"# 2: properties (shapes, rotation, etc)",
"# 3: text style",
"# 4: the text",
"# -------1--------2---3--4--",
"pos",
"=",
"obj",
".",
"get_position",
"(",
")",
"# from .util import transform_to_data_coordinates",
"# pos = transform_to_data_coordinates(obj, *pos)",
"text",
"=",
"obj",
".",
"get_text",
"(",
")",
"if",
"text",
"in",
"[",
"\"\"",
",",
"data",
"[",
"\"current axis title\"",
"]",
"]",
":",
"# Text nodes which are direct children of Axes are typically titles. They are",
"# already captured by the `title` property of pgfplots axes, so skip them here.",
"return",
"data",
",",
"content",
"size",
"=",
"obj",
".",
"get_size",
"(",
")",
"bbox",
"=",
"obj",
".",
"get_bbox_patch",
"(",
")",
"converter",
"=",
"mpl",
".",
"colors",
".",
"ColorConverter",
"(",
")",
"# without the factor 0.5, the fonts are too big most of the time.",
"# TODO fix this",
"scaling",
"=",
"0.5",
"*",
"size",
"/",
"data",
"[",
"\"font size\"",
"]",
"ff",
"=",
"data",
"[",
"\"float format\"",
"]",
"if",
"scaling",
"!=",
"1.0",
":",
"properties",
".",
"append",
"(",
"(",
"\"scale=\"",
"+",
"ff",
")",
".",
"format",
"(",
"scaling",
")",
")",
"if",
"bbox",
"is",
"not",
"None",
":",
"_bbox",
"(",
"bbox",
",",
"data",
",",
"properties",
",",
"scaling",
")",
"ha",
"=",
"obj",
".",
"get_ha",
"(",
")",
"va",
"=",
"obj",
".",
"get_va",
"(",
")",
"anchor",
"=",
"_transform_positioning",
"(",
"ha",
",",
"va",
")",
"if",
"anchor",
"is",
"not",
"None",
":",
"properties",
".",
"append",
"(",
"anchor",
")",
"data",
",",
"col",
",",
"_",
"=",
"color",
".",
"mpl_color2xcolor",
"(",
"data",
",",
"converter",
".",
"to_rgb",
"(",
"obj",
".",
"get_color",
"(",
")",
")",
")",
"properties",
".",
"append",
"(",
"\"text={}\"",
".",
"format",
"(",
"col",
")",
")",
"properties",
".",
"append",
"(",
"\"rotate={:.1f}\"",
".",
"format",
"(",
"obj",
".",
"get_rotation",
"(",
")",
")",
")",
"if",
"obj",
".",
"get_style",
"(",
")",
"==",
"\"italic\"",
":",
"style",
".",
"append",
"(",
"\"\\\\itshape\"",
")",
"else",
":",
"assert",
"obj",
".",
"get_style",
"(",
")",
"==",
"\"normal\"",
"# From matplotlib/font_manager.py:",
"# weight_dict = {",
"# 'ultralight' : 100,",
"# 'light' : 200,",
"# 'normal' : 400,",
"# 'regular' : 400,",
"# 'book' : 400,",
"# 'medium' : 500,",
"# 'roman' : 500,",
"# 'semibold' : 600,",
"# 'demibold' : 600,",
"# 'demi' : 600,",
"# 'bold' : 700,",
"# 'heavy' : 800,",
"# 'extra bold' : 800,",
"# 'black' : 900}",
"#",
"# get_weights returns a numeric value in the range 0-1000 or one of",
"# ‘light’, ‘normal’, ‘regular’, ‘book’, ‘medium’, ‘roman’, ‘semibold’,",
"# ‘demibold’, ‘demi’, ‘bold’, ‘heavy’, ‘extra bold’, ‘black’",
"weight",
"=",
"obj",
".",
"get_weight",
"(",
")",
"if",
"weight",
"in",
"[",
"\"semibold\"",
",",
"\"demibold\"",
",",
"\"demi\"",
",",
"\"bold\"",
",",
"\"heavy\"",
",",
"\"extra bold\"",
",",
"\"black\"",
",",
"]",
"or",
"(",
"isinstance",
"(",
"weight",
",",
"int",
")",
"and",
"weight",
">",
"550",
")",
":",
"style",
".",
"append",
"(",
"\"\\\\bfseries\"",
")",
"# \\lfseries isn't that common yet",
"# elif weight == 'light' or (isinstance(weight, int) and weight < 300):",
"# style.append('\\\\lfseries')",
"if",
"obj",
".",
"axes",
":",
"# If the coordinates are relative to an axis, use `axis cs`.",
"tikz_pos",
"=",
"(",
"\"(axis cs:\"",
"+",
"ff",
"+",
"\",\"",
"+",
"ff",
"+",
"\")\"",
")",
".",
"format",
"(",
"*",
"pos",
")",
"else",
":",
"# relative to the entire figure, it's a getting a littler harder. See",
"# <http://tex.stackexchange.com/a/274902/13262> for a solution to the",
"# problem:",
"tikz_pos",
"=",
"(",
"\"({{$(current bounding box.south west)!\"",
"+",
"ff",
"+",
"\"!\"",
"\"(current bounding box.south east)$}}\"",
"\"|-\"",
"\"{{$(current bounding box.south west)!\"",
"+",
"ff",
"+",
"\"!\"",
"\"(current bounding box.north west)$}})\"",
")",
".",
"format",
"(",
"*",
"pos",
")",
"if",
"\"\\n\"",
"in",
"text",
":",
"# http://tex.stackexchange.com/a/124114/13262",
"properties",
".",
"append",
"(",
"\"align={}\"",
".",
"format",
"(",
"ha",
")",
")",
"# Manipulating the text here is actually against mpl2tikz's policy not",
"# to do that. On the other hand, newlines should translate into",
"# newlines.",
"# We might want to remove this here in the future.",
"text",
"=",
"text",
".",
"replace",
"(",
"\"\\n \"",
",",
"\"\\\\\\\\\"",
")",
"content",
".",
"append",
"(",
"\"\\\\node at {}[\\n {}\\n]{{{}}};\\n\"",
".",
"format",
"(",
"tikz_pos",
",",
"\",\\n \"",
".",
"join",
"(",
"properties",
")",
",",
"\" \"",
".",
"join",
"(",
"style",
"+",
"[",
"text",
"]",
")",
")",
")",
"return",
"data",
",",
"content"
] | Paints text on the graph. | [
"Paints",
"text",
"on",
"the",
"graph",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/text.py#L8-L126 |
246,990 | nschloe/matplotlib2tikz | matplotlib2tikz/text.py | _transform_positioning | def _transform_positioning(ha, va):
"""Converts matplotlib positioning to pgf node positioning.
Not quite accurate but the results are more or less equivalent."""
if ha == "center" and va == "center":
return None
ha_mpl_to_tikz = {"right": "east", "left": "west", "center": ""}
va_mpl_to_tikz = {
"top": "north",
"bottom": "south",
"center": "",
"baseline": "base",
}
return "anchor={} {}".format(va_mpl_to_tikz[va], ha_mpl_to_tikz[ha]).strip() | python | def _transform_positioning(ha, va):
if ha == "center" and va == "center":
return None
ha_mpl_to_tikz = {"right": "east", "left": "west", "center": ""}
va_mpl_to_tikz = {
"top": "north",
"bottom": "south",
"center": "",
"baseline": "base",
}
return "anchor={} {}".format(va_mpl_to_tikz[va], ha_mpl_to_tikz[ha]).strip() | [
"def",
"_transform_positioning",
"(",
"ha",
",",
"va",
")",
":",
"if",
"ha",
"==",
"\"center\"",
"and",
"va",
"==",
"\"center\"",
":",
"return",
"None",
"ha_mpl_to_tikz",
"=",
"{",
"\"right\"",
":",
"\"east\"",
",",
"\"left\"",
":",
"\"west\"",
",",
"\"center\"",
":",
"\"\"",
"}",
"va_mpl_to_tikz",
"=",
"{",
"\"top\"",
":",
"\"north\"",
",",
"\"bottom\"",
":",
"\"south\"",
",",
"\"center\"",
":",
"\"\"",
",",
"\"baseline\"",
":",
"\"base\"",
",",
"}",
"return",
"\"anchor={} {}\"",
".",
"format",
"(",
"va_mpl_to_tikz",
"[",
"va",
"]",
",",
"ha_mpl_to_tikz",
"[",
"ha",
"]",
")",
".",
"strip",
"(",
")"
] | Converts matplotlib positioning to pgf node positioning.
Not quite accurate but the results are more or less equivalent. | [
"Converts",
"matplotlib",
"positioning",
"to",
"pgf",
"node",
"positioning",
".",
"Not",
"quite",
"accurate",
"but",
"the",
"results",
"are",
"equivalent",
"more",
"or",
"less",
"."
] | ac5daca6f38b834d757f6c6ae6cc34121956f46b | https://github.com/nschloe/matplotlib2tikz/blob/ac5daca6f38b834d757f6c6ae6cc34121956f46b/matplotlib2tikz/text.py#L129-L142 |
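
A quick demonstration of the anchor mapping; a node anchored at its north west corner extends right and down, which matches ha="left", va="top":

ha_map = {"right": "east", "left": "west", "center": ""}
va_map = {"top": "north", "bottom": "south", "center": "", "baseline": "base"}

def anchor(ha, va):
    if ha == "center" and va == "center":
        return None
    return "anchor={} {}".format(va_map[va], ha_map[ha]).strip()

print(anchor("left", "top"))       # anchor=north west
print(anchor("center", "bottom"))  # anchor=south
print(anchor("center", "center"))  # None -- leave placement to pgfplots
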
246,991 | turicas/rows | rows/plugins/plugin_json.py | import_from_json | def import_from_json(filename_or_fobj, encoding="utf-8", *args, **kwargs):
"""Import a JSON file or file-like object into a `rows.Table`.
If a file-like object is provided, it MUST be open in text (non-binary) mode
on Python 3 and may be open in either binary or text mode on Python 2.
"""
source = Source.from_file(filename_or_fobj, mode="rb", plugin_name="json", encoding=encoding)
json_obj = json.load(source.fobj, encoding=source.encoding)
field_names = list(json_obj[0].keys())
table_rows = [[item[key] for key in field_names] for item in json_obj]
meta = {"imported_from": "json", "source": source}
return create_table([field_names] + table_rows, meta=meta, *args, **kwargs) | python | def import_from_json(filename_or_fobj, encoding="utf-8", *args, **kwargs):
source = Source.from_file(filename_or_fobj, mode="rb", plugin_name="json", encoding=encoding)
json_obj = json.load(source.fobj, encoding=source.encoding)
field_names = list(json_obj[0].keys())
table_rows = [[item[key] for key in field_names] for item in json_obj]
meta = {"imported_from": "json", "source": source}
return create_table([field_names] + table_rows, meta=meta, *args, **kwargs) | [
"def",
"import_from_json",
"(",
"filename_or_fobj",
",",
"encoding",
"=",
"\"utf-8\"",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"source",
"=",
"Source",
".",
"from_file",
"(",
"filename_or_fobj",
",",
"mode",
"=",
"\"rb\"",
",",
"plugin_name",
"=",
"\"json\"",
",",
"encoding",
"=",
"encoding",
")",
"json_obj",
"=",
"json",
".",
"load",
"(",
"source",
".",
"fobj",
",",
"encoding",
"=",
"source",
".",
"encoding",
")",
"field_names",
"=",
"list",
"(",
"json_obj",
"[",
"0",
"]",
".",
"keys",
"(",
")",
")",
"table_rows",
"=",
"[",
"[",
"item",
"[",
"key",
"]",
"for",
"key",
"in",
"field_names",
"]",
"for",
"item",
"in",
"json_obj",
"]",
"meta",
"=",
"{",
"\"imported_from\"",
":",
"\"json\"",
",",
"\"source\"",
":",
"source",
"}",
"return",
"create_table",
"(",
"[",
"field_names",
"]",
"+",
"table_rows",
",",
"meta",
"=",
"meta",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Import a JSON file or file-like object into a `rows.Table`.
If a file-like object is provided, it MUST be open in text (non-binary) mode
on Python 3 and may be open in either binary or text mode on Python 2. | [
"Import",
"a",
"JSON",
"file",
"or",
"file",
"-",
"like",
"object",
"into",
"a",
"rows",
".",
"Table",
"."
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/plugins/plugin_json.py#L33-L47 |
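
A hedged usage sketch, assuming a file data.json containing a flat list of objects; the file name and field names are hypothetical:

# data.json (hypothetical): [{"name": "Alice", "age": 30}, {"name": "Bob", "age": 25}]
import rows

table = rows.import_from_json("data.json")
for row in table:
    print(row.name, row.age)  # field types are detected by create_table
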
246,992 | turicas/rows | rows/plugins/plugin_json.py | export_to_json | def export_to_json(
table, filename_or_fobj=None, encoding="utf-8", indent=None, *args, **kwargs
):
"""Export a `rows.Table` to a JSON file or file-like object.
If a file-like object is provided, it MUST be open in binary mode (like in
`open('myfile.json', mode='wb')`).
"""
# TODO: will work only if table.fields is OrderedDict
fields = table.fields
prepared_table = prepare_to_export(table, *args, **kwargs)
field_names = next(prepared_table)
data = [
{
field_name: _convert(value, fields[field_name], *args, **kwargs)
for field_name, value in zip(field_names, row)
}
for row in prepared_table
]
result = json.dumps(data, indent=indent)
if type(result) is six.text_type: # Python 3
result = result.encode(encoding)
if indent is not None:
# clean up empty spaces at the end of lines
result = b"\n".join(line.rstrip() for line in result.splitlines())
return export_data(filename_or_fobj, result, mode="wb") | python | def export_to_json(
table, filename_or_fobj=None, encoding="utf-8", indent=None, *args, **kwargs
):
# TODO: will work only if table.fields is OrderedDict
fields = table.fields
prepared_table = prepare_to_export(table, *args, **kwargs)
field_names = next(prepared_table)
data = [
{
field_name: _convert(value, fields[field_name], *args, **kwargs)
for field_name, value in zip(field_names, row)
}
for row in prepared_table
]
result = json.dumps(data, indent=indent)
if type(result) is six.text_type: # Python 3
result = result.encode(encoding)
if indent is not None:
# clean up empty spaces at the end of lines
result = b"\n".join(line.rstrip() for line in result.splitlines())
return export_data(filename_or_fobj, result, mode="wb") | [
"def",
"export_to_json",
"(",
"table",
",",
"filename_or_fobj",
"=",
"None",
",",
"encoding",
"=",
"\"utf-8\"",
",",
"indent",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# TODO: will work only if table.fields is OrderedDict",
"fields",
"=",
"table",
".",
"fields",
"prepared_table",
"=",
"prepare_to_export",
"(",
"table",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"field_names",
"=",
"next",
"(",
"prepared_table",
")",
"data",
"=",
"[",
"{",
"field_name",
":",
"_convert",
"(",
"value",
",",
"fields",
"[",
"field_name",
"]",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"for",
"field_name",
",",
"value",
"in",
"zip",
"(",
"field_names",
",",
"row",
")",
"}",
"for",
"row",
"in",
"prepared_table",
"]",
"result",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"indent",
"=",
"indent",
")",
"if",
"type",
"(",
"result",
")",
"is",
"six",
".",
"text_type",
":",
"# Python 3",
"result",
"=",
"result",
".",
"encode",
"(",
"encoding",
")",
"if",
"indent",
"is",
"not",
"None",
":",
"# clean up empty spaces at the end of lines",
"result",
"=",
"b\"\\n\"",
".",
"join",
"(",
"line",
".",
"rstrip",
"(",
")",
"for",
"line",
"in",
"result",
".",
"splitlines",
"(",
")",
")",
"return",
"export_data",
"(",
"filename_or_fobj",
",",
"result",
",",
"mode",
"=",
"\"wb\"",
")"
] | Export a `rows.Table` to a JSON file or file-like object.
If a file-like object is provided, it MUST be open in binary mode (like in
`open('myfile.json', mode='wb')`). | [
"Export",
"a",
"rows",
".",
"Table",
"to",
"a",
"JSON",
"file",
"or",
"file",
"-",
"like",
"object",
"."
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/plugins/plugin_json.py#L68-L97 |
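
A companion usage sketch; import_from_dicts is another rows entry point, used here only to build a small table, and the output file name is hypothetical:

import rows

table = rows.import_from_dicts([{"name": "Alice", "age": 30}])
rows.export_to_json(table, "out.json", indent=2)  # writes pretty-printed UTF-8 bytes
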
246,993 | turicas/rows | rows/utils.py | plugin_name_by_uri | def plugin_name_by_uri(uri):
"Return the plugin name based on the URI"
# TODO: parse URIs like 'sqlite://' also
parsed = urlparse(uri)
basename = os.path.basename(parsed.path)
if not basename.strip():
raise RuntimeError("Could not identify file format.")
plugin_name = basename.split(".")[-1].lower()
if plugin_name in FILE_EXTENSIONS:
plugin_name = MIME_TYPE_TO_PLUGIN_NAME[FILE_EXTENSIONS[plugin_name]]
return plugin_name | python | def plugin_name_by_uri(uri):
"Return the plugin name based on the URI"
# TODO: parse URIs like 'sqlite://' also
parsed = urlparse(uri)
basename = os.path.basename(parsed.path)
if not basename.strip():
raise RuntimeError("Could not identify file format.")
plugin_name = basename.split(".")[-1].lower()
if plugin_name in FILE_EXTENSIONS:
plugin_name = MIME_TYPE_TO_PLUGIN_NAME[FILE_EXTENSIONS[plugin_name]]
return plugin_name | [
"def",
"plugin_name_by_uri",
"(",
"uri",
")",
":",
"# TODO: parse URIs like 'sqlite://' also",
"parsed",
"=",
"urlparse",
"(",
"uri",
")",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"parsed",
".",
"path",
")",
"if",
"not",
"basename",
".",
"strip",
"(",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Could not identify file format.\"",
")",
"plugin_name",
"=",
"basename",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
".",
"lower",
"(",
")",
"if",
"plugin_name",
"in",
"FILE_EXTENSIONS",
":",
"plugin_name",
"=",
"MIME_TYPE_TO_PLUGIN_NAME",
"[",
"FILE_EXTENSIONS",
"[",
"plugin_name",
"]",
"]",
"return",
"plugin_name"
] | Return the plugin name based on the URI | [
"Return",
"the",
"plugin",
"name",
"based",
"on",
"the",
"URI"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L249-L263 |
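
The extension extraction in isolation; the FILE_EXTENSIONS / MIME_TYPE_TO_PLUGIN_NAME remapping is elided because those tables live elsewhere in the module, and the plain Python 3 import is an assumption (the library itself also supports Python 2):

import os
from urllib.parse import urlparse  # plain py3 import; the library supports py2 as well

for uri in ("http://example.com/path/data.csv?x=1", "/tmp/report.xlsx"):
    basename = os.path.basename(urlparse(uri).path)
    print(basename.split(".")[-1].lower())  # csv, xlsx
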
246,994 | turicas/rows | rows/utils.py | extension_by_source | def extension_by_source(source, mime_type):
"Return the file extension used by this plugin"
# TODO: should get this information from the plugin
extension = source.plugin_name
if extension:
return extension
if mime_type:
return mime_type.split("/")[-1] | python | def extension_by_source(source, mime_type):
"Return the file extension used by this plugin"
# TODO: should get this information from the plugin
extension = source.plugin_name
if extension:
return extension
if mime_type:
return mime_type.split("/")[-1] | [
"def",
"extension_by_source",
"(",
"source",
",",
"mime_type",
")",
":",
"# TODO: should get this information from the plugin",
"extension",
"=",
"source",
".",
"plugin_name",
"if",
"extension",
":",
"return",
"extension",
"if",
"mime_type",
":",
"return",
"mime_type",
".",
"split",
"(",
"\"/\"",
")",
"[",
"-",
"1",
"]"
] | Return the file extension used by this plugin | [
"Return",
"the",
"file",
"extension",
"used",
"by",
"this",
"plugin"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L266-L275 |
246,995 | turicas/rows | rows/utils.py | plugin_name_by_mime_type | def plugin_name_by_mime_type(mime_type, mime_name, file_extension):
"Return the plugin name based on the MIME type"
return MIME_TYPE_TO_PLUGIN_NAME.get(
normalize_mime_type(mime_type, mime_name, file_extension), None
) | python | def plugin_name_by_mime_type(mime_type, mime_name, file_extension):
"Return the plugin name based on the MIME type"
return MIME_TYPE_TO_PLUGIN_NAME.get(
normalize_mime_type(mime_type, mime_name, file_extension), None
) | [
"def",
"plugin_name_by_mime_type",
"(",
"mime_type",
",",
"mime_name",
",",
"file_extension",
")",
":",
"return",
"MIME_TYPE_TO_PLUGIN_NAME",
".",
"get",
"(",
"normalize_mime_type",
"(",
"mime_type",
",",
"mime_name",
",",
"file_extension",
")",
",",
"None",
")"
] | Return the plugin name based on the MIME type | [
"Return",
"the",
"plugin",
"name",
"based",
"on",
"the",
"MIME",
"type"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L297-L302 |
246,996 | turicas/rows | rows/utils.py | detect_source | def detect_source(uri, verify_ssl, progress, timeout=5):
"""Return a `rows.Source` with information for a given URI
If URI starts with "http" or "https" the file will be downloaded.
This function should only be used if the URI already exists because it's
going to download/open the file to detect its encoding and MIME type.
"""
# TODO: should also support other schemes, like file://, sqlite:// etc.
if uri.lower().startswith("http://") or uri.lower().startswith("https://"):
return download_file(
uri, verify_ssl=verify_ssl, timeout=timeout, progress=progress, detect=True
)
elif uri.startswith("postgres://"):
return Source(
should_delete=False,
encoding=None,
plugin_name="postgresql",
uri=uri,
is_file=False,
local=None,
)
else:
return local_file(uri) | python | def detect_source(uri, verify_ssl, progress, timeout=5):
    # TODO: should also support other schemes, like file://, sqlite:// etc.
if uri.lower().startswith("http://") or uri.lower().startswith("https://"):
return download_file(
uri, verify_ssl=verify_ssl, timeout=timeout, progress=progress, detect=True
)
elif uri.startswith("postgres://"):
return Source(
should_delete=False,
encoding=None,
plugin_name="postgresql",
uri=uri,
is_file=False,
local=None,
)
else:
return local_file(uri) | [
"def",
"detect_source",
"(",
"uri",
",",
"verify_ssl",
",",
"progress",
",",
"timeout",
"=",
"5",
")",
":",
"# TODO: should also supporte other schemes, like file://, sqlite:// etc.",
"if",
"uri",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"\"http://\"",
")",
"or",
"uri",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"\"https://\"",
")",
":",
"return",
"download_file",
"(",
"uri",
",",
"verify_ssl",
"=",
"verify_ssl",
",",
"timeout",
"=",
"timeout",
",",
"progress",
"=",
"progress",
",",
"detect",
"=",
"True",
")",
"elif",
"uri",
".",
"startswith",
"(",
"\"postgres://\"",
")",
":",
"return",
"Source",
"(",
"should_delete",
"=",
"False",
",",
"encoding",
"=",
"None",
",",
"plugin_name",
"=",
"\"postgresql\"",
",",
"uri",
"=",
"uri",
",",
"is_file",
"=",
"False",
",",
"local",
"=",
"None",
",",
")",
"else",
":",
"return",
"local_file",
"(",
"uri",
")"
] | Return a `rows.Source` with information for a given URI
If the URI starts with "http" or "https", the file will be downloaded.
This function should only be used if the URI already exists because it's
going to download/open the file to detect its encoding and MIME type. | [
"Return",
"a",
"rows",
".",
"Source",
"with",
"information",
"for",
"a",
"given",
"URI"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L439-L465 |
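The scheme dispatch inside detect_source, reduced to a self-contained sketch with stubbed handlers; the string returns stand in for the real calls to download_file, Source(...), and local_file:

def dispatch_by_scheme(uri):
    # HTTP(S) URIs get downloaded; postgres:// URIs map straight to the
    # postgresql plugin; everything else is treated as a local file.
    if uri.lower().startswith("http://") or uri.lower().startswith("https://"):
        return "download"      # real code: download_file(uri, ...)
    elif uri.startswith("postgres://"):
        return "postgresql"    # real code: Source(plugin_name="postgresql", ...)
    else:
        return "local"         # real code: local_file(uri)

assert dispatch_by_scheme("HTTPS://example.com/data.csv") == "download"
assert dispatch_by_scheme("postgres://user@host/db") == "postgresql"
assert dispatch_by_scheme("/tmp/data.csv") == "local"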
246,997 | turicas/rows | rows/utils.py | import_from_source | def import_from_source(source, default_encoding, *args, **kwargs):
"Import data described in a `rows.Source` into a `rows.Table`"
# TODO: test open_compressed
plugin_name = source.plugin_name
kwargs["encoding"] = (
kwargs.get("encoding", None) or source.encoding or default_encoding
)
try:
import_function = getattr(rows, "import_from_{}".format(plugin_name))
except AttributeError:
raise ValueError('Plugin (import) "{}" not found'.format(plugin_name))
table = import_function(source.uri, *args, **kwargs)
return table | python | def import_from_source(source, default_encoding, *args, **kwargs):
"Import data described in a `rows.Source` into a `rows.Table`"
# TODO: test open_compressed
plugin_name = source.plugin_name
kwargs["encoding"] = (
kwargs.get("encoding", None) or source.encoding or default_encoding
)
try:
import_function = getattr(rows, "import_from_{}".format(plugin_name))
except AttributeError:
raise ValueError('Plugin (import) "{}" not found'.format(plugin_name))
table = import_function(source.uri, *args, **kwargs)
return table | [
"def",
"import_from_source",
"(",
"source",
",",
"default_encoding",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# TODO: test open_compressed",
"plugin_name",
"=",
"source",
".",
"plugin_name",
"kwargs",
"[",
"\"encoding\"",
"]",
"=",
"(",
"kwargs",
".",
"get",
"(",
"\"encoding\"",
",",
"None",
")",
"or",
"source",
".",
"encoding",
"or",
"default_encoding",
")",
"try",
":",
"import_function",
"=",
"getattr",
"(",
"rows",
",",
"\"import_from_{}\"",
".",
"format",
"(",
"plugin_name",
")",
")",
"except",
"AttributeError",
":",
"raise",
"ValueError",
"(",
"'Plugin (import) \"{}\" not found'",
".",
"format",
"(",
"plugin_name",
")",
")",
"table",
"=",
"import_function",
"(",
"source",
".",
"uri",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"table"
] | Import data described in a `rows.Source` into a `rows.Table` | [
"Import",
"data",
"described",
"in",
"a",
"rows",
".",
"Source",
"into",
"a",
"rows",
".",
"Table"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L468-L484 |
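A sketch of the two moves in import_from_source: the encoding fallback chain and the getattr-based plugin dispatch. FakeRows and import_with_plugin are hypothetical names used against a stand-in namespace instead of the real rows package:

class FakeRows:
    # Hypothetical stand-in for the rows package namespace.
    @staticmethod
    def import_from_csv(uri, **kwargs):
        return "table from {} ({})".format(uri, kwargs["encoding"])

def import_with_plugin(namespace, plugin_name, uri, source_encoding=None,
                       default_encoding="utf-8", **kwargs):
    # Explicit kwarg wins, then the detected source encoding, then the default.
    kwargs["encoding"] = (
        kwargs.get("encoding", None) or source_encoding or default_encoding
    )
    try:
        import_function = getattr(namespace, "import_from_{}".format(plugin_name))
    except AttributeError:
        raise ValueError('Plugin (import) "{}" not found'.format(plugin_name))
    return import_function(uri, **kwargs)

print(import_with_plugin(FakeRows, "csv", "data.csv"))  # table from data.csv (utf-8)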
246,998 | turicas/rows | rows/utils.py | import_from_uri | def import_from_uri(
uri, default_encoding="utf-8", verify_ssl=True, progress=False, *args, **kwargs
):
"Given an URI, detects plugin and encoding and imports into a `rows.Table`"
# TODO: support '-' also
# TODO: (optimization) if `kwargs.get('encoding', None) is not None` we can
# skip encoding detection.
source = detect_source(uri, verify_ssl=verify_ssl, progress=progress)
return import_from_source(source, default_encoding, *args, **kwargs) | python | def import_from_uri(
uri, default_encoding="utf-8", verify_ssl=True, progress=False, *args, **kwargs
):
"Given an URI, detects plugin and encoding and imports into a `rows.Table`"
# TODO: support '-' also
# TODO: (optimization) if `kwargs.get('encoding', None) is not None` we can
# skip encoding detection.
source = detect_source(uri, verify_ssl=verify_ssl, progress=progress)
return import_from_source(source, default_encoding, *args, **kwargs) | [
"def",
"import_from_uri",
"(",
"uri",
",",
"default_encoding",
"=",
"\"utf-8\"",
",",
"verify_ssl",
"=",
"True",
",",
"progress",
"=",
"False",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# TODO: support '-' also",
"# TODO: (optimization) if `kwargs.get('encoding', None) is not None` we can",
"# skip encoding detection.",
"source",
"=",
"detect_source",
"(",
"uri",
",",
"verify_ssl",
"=",
"verify_ssl",
",",
"progress",
"=",
"progress",
")",
"return",
"import_from_source",
"(",
"source",
",",
"default_encoding",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Given a URI, detects plugin and encoding and imports into a `rows.Table` | [
"Given",
"an",
"URI",
"detects",
"plugin",
"and",
"encoding",
"and",
"imports",
"into",
"a",
"rows",
".",
"Table"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L487-L496 |
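A hedged usage sketch of import_from_uri as the public entry point; it assumes the rows package is installed and uses a hypothetical URL:

from rows.utils import import_from_uri

# Detects the plugin (csv here) and encoding, downloads, then imports.
table = import_from_uri(
    "https://example.com/data.csv",  # hypothetical URL
    default_encoding="utf-8",
    verify_ssl=True,
    progress=False,
)
for row in table:
    print(row)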
246,999 | turicas/rows | rows/utils.py | open_compressed | def open_compressed(filename, mode="r", encoding=None):
"Return a text-based file object from a filename, even if compressed"
# TODO: integrate this function in the library itself, using
# get_filename_and_fobj
binary_mode = "b" in mode
extension = str(filename).split(".")[-1].lower()
if binary_mode and encoding:
raise ValueError("encoding should not be specified in binary mode")
if extension == "xz":
if lzma is None:
raise RuntimeError("lzma support is not installed")
fobj = lzma.open(filename, mode=mode)
if binary_mode:
return fobj
else:
return io.TextIOWrapper(fobj, encoding=encoding)
elif extension == "gz":
fobj = gzip.GzipFile(filename, mode=mode)
if binary_mode:
return fobj
else:
return io.TextIOWrapper(fobj, encoding=encoding)
elif extension == "bz2":
if bz2 is None:
raise RuntimeError("bzip2 support is not installed")
if binary_mode: # ignore encoding
return bz2.open(filename, mode=mode)
else:
if "t" not in mode:
# For some reason, passing only mode='r' to bzip2 is equivalent
# to 'rb', not 'rt', so we force it here.
mode += "t"
return bz2.open(filename, mode=mode, encoding=encoding)
else:
if binary_mode:
return open(filename, mode=mode)
else:
return open(filename, mode=mode, encoding=encoding) | python | def open_compressed(filename, mode="r", encoding=None):
"Return a text-based file object from a filename, even if compressed"
# TODO: integrate this function in the library itself, using
# get_filename_and_fobj
binary_mode = "b" in mode
extension = str(filename).split(".")[-1].lower()
if binary_mode and encoding:
raise ValueError("encoding should not be specified in binary mode")
if extension == "xz":
if lzma is None:
raise RuntimeError("lzma support is not installed")
fobj = lzma.open(filename, mode=mode)
if binary_mode:
return fobj
else:
return io.TextIOWrapper(fobj, encoding=encoding)
elif extension == "gz":
fobj = gzip.GzipFile(filename, mode=mode)
if binary_mode:
return fobj
else:
return io.TextIOWrapper(fobj, encoding=encoding)
elif extension == "bz2":
if bz2 is None:
raise RuntimeError("bzip2 support is not installed")
if binary_mode: # ignore encoding
return bz2.open(filename, mode=mode)
else:
if "t" not in mode:
# For some reason, passing only mode='r' to bzip2 is equivalent
# to 'rb', not 'rt', so we force it here.
mode += "t"
return bz2.open(filename, mode=mode, encoding=encoding)
else:
if binary_mode:
return open(filename, mode=mode)
else:
return open(filename, mode=mode, encoding=encoding) | [
"def",
"open_compressed",
"(",
"filename",
",",
"mode",
"=",
"\"r\"",
",",
"encoding",
"=",
"None",
")",
":",
"# TODO: integrate this function in the library itself, using",
"# get_filename_and_fobj",
"binary_mode",
"=",
"\"b\"",
"in",
"mode",
"extension",
"=",
"str",
"(",
"filename",
")",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
".",
"lower",
"(",
")",
"if",
"binary_mode",
"and",
"encoding",
":",
"raise",
"ValueError",
"(",
"\"encoding should not be specified in binary mode\"",
")",
"if",
"extension",
"==",
"\"xz\"",
":",
"if",
"lzma",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"lzma support is not installed\"",
")",
"fobj",
"=",
"lzma",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"mode",
")",
"if",
"binary_mode",
":",
"return",
"fobj",
"else",
":",
"return",
"io",
".",
"TextIOWrapper",
"(",
"fobj",
",",
"encoding",
"=",
"encoding",
")",
"elif",
"extension",
"==",
"\"gz\"",
":",
"fobj",
"=",
"gzip",
".",
"GzipFile",
"(",
"filename",
",",
"mode",
"=",
"mode",
")",
"if",
"binary_mode",
":",
"return",
"fobj",
"else",
":",
"return",
"io",
".",
"TextIOWrapper",
"(",
"fobj",
",",
"encoding",
"=",
"encoding",
")",
"elif",
"extension",
"==",
"\"bz2\"",
":",
"if",
"bz2",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"bzip2 support is not installed\"",
")",
"if",
"binary_mode",
":",
"# ignore encoding",
"return",
"bz2",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"mode",
")",
"else",
":",
"if",
"\"t\"",
"not",
"in",
"mode",
":",
"# For some reason, passing only mode='r' to bzip2 is equivalent",
"# to 'rb', not 'rt', so we force it here.",
"mode",
"+=",
"\"t\"",
"return",
"bz2",
".",
"open",
"(",
"filename",
",",
"mode",
"=",
"mode",
",",
"encoding",
"=",
"encoding",
")",
"else",
":",
"if",
"binary_mode",
":",
"return",
"open",
"(",
"filename",
",",
"mode",
"=",
"mode",
")",
"else",
":",
"return",
"open",
"(",
"filename",
",",
"mode",
"=",
"mode",
",",
"encoding",
"=",
"encoding",
")"
] | Return a text-based file object from a filename, even if compressed | [
"Return",
"a",
"text",
"-",
"based",
"file",
"object",
"from",
"a",
"filename",
"even",
"if",
"compressed"
] | c74da41ae9ed091356b803a64f8a30c641c5fc45 | https://github.com/turicas/rows/blob/c74da41ae9ed091356b803a64f8a30c641c5fc45/rows/utils.py#L513-L557 |
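A hedged usage sketch of open_compressed; it creates its own sample file and assumes the function above is importable from rows.utils. Note that the gz branch passes mode straight to GzipFile, which rejects "t" modes, so plain "r"/"rb" are used here, and that in binary mode the returned object yields decompressed bytes, not text:

import gzip

from rows.utils import open_compressed

# Create a small gzip-compressed sample to read back.
with gzip.open("sample.csv.gz", "wt", encoding="utf-8") as fobj:
    fobj.write("a,b\n1,2\n")

# Text mode: the extension selects the decompressor, TextIOWrapper decodes.
with open_compressed("sample.csv.gz", mode="r", encoding="utf-8") as fobj:
    print(fobj.read())  # a,b / 1,2 as str

# Binary mode: decompressed bytes; passing an encoding here would raise.
with open_compressed("sample.csv.gz", mode="rb") as fobj:
    print(fobj.read())  # b'a,b\n1,2\n'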