repository_name (stringlengths 5-67) | func_path_in_repository (stringlengths 4-234) | func_name (stringlengths 0-314) | whole_func_string (stringlengths 52-3.87M) | language (stringclasses 6 values) | func_code_string (stringlengths 39-1.84M) | func_code_tokens (listlengths 15-672k) | func_documentation_string (stringlengths 1-47.2k) | func_documentation_tokens (listlengths 1-3.92k) | split_name (stringclasses 1 value) | func_code_url (stringlengths 85-339) |
---|---|---|---|---|---|---|---|---|---|---|
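The columns above follow the CodeSearchNet-style schema (function source, its tokenization, its docstring, and a link back to the repository). A minimal sketch of reading rows with this schema from a local JSON-lines export is shown below; the file name `rows.jsonl` is an assumption, not something stated in the dump.

```python
# Minimal sketch: iterate rows that carry the columns listed above.
# Assumption: the rows were exported locally as JSON lines ("rows.jsonl").
import json

with open("rows.jsonl") as handle:
    for line in handle:
        row = json.loads(line)
        # Pair the repository/function identifiers with the first docstring line.
        print(row["repository_name"], row["func_name"])
        print(row["func_documentation_string"].splitlines()[0])
        print(row["func_code_url"])
```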
mosdef-hub/foyer
|
foyer/smarts_graph.py
|
SMARTSGraph._add_edges
|
def _add_edges(self, ast_node, trunk=None):
""""Add all bonds in the SMARTS string as edges in the graph."""
atom_indices = self._atom_indices
for atom in ast_node.tail:
if atom.head == 'atom':
atom_idx = atom_indices[id(atom)]
if atom.is_first_kid and atom.parent().head == 'branch':
trunk_idx = atom_indices[id(trunk)]
self.add_edge(atom_idx, trunk_idx)
if not atom.is_last_kid:
if atom.next_kid.head == 'atom':
next_idx = atom_indices[id(atom.next_kid)]
self.add_edge(atom_idx, next_idx)
elif atom.next_kid.head == 'branch':
trunk = atom
else: # We traveled through the whole branch.
return
elif atom.head == 'branch':
self._add_edges(atom, trunk)
|
python
|
def _add_edges(self, ast_node, trunk=None):
atom_indices = self._atom_indices
for atom in ast_node.tail:
if atom.head == 'atom':
atom_idx = atom_indices[id(atom)]
if atom.is_first_kid and atom.parent().head == 'branch':
trunk_idx = atom_indices[id(trunk)]
self.add_edge(atom_idx, trunk_idx)
if not atom.is_last_kid:
if atom.next_kid.head == 'atom':
next_idx = atom_indices[id(atom.next_kid)]
self.add_edge(atom_idx, next_idx)
elif atom.next_kid.head == 'branch':
trunk = atom
else:
return
elif atom.head == 'branch':
self._add_edges(atom, trunk)
|
[
"def",
"_add_edges",
"(",
"self",
",",
"ast_node",
",",
"trunk",
"=",
"None",
")",
":",
"atom_indices",
"=",
"self",
".",
"_atom_indices",
"for",
"atom",
"in",
"ast_node",
".",
"tail",
":",
"if",
"atom",
".",
"head",
"==",
"'atom'",
":",
"atom_idx",
"=",
"atom_indices",
"[",
"id",
"(",
"atom",
")",
"]",
"if",
"atom",
".",
"is_first_kid",
"and",
"atom",
".",
"parent",
"(",
")",
".",
"head",
"==",
"'branch'",
":",
"trunk_idx",
"=",
"atom_indices",
"[",
"id",
"(",
"trunk",
")",
"]",
"self",
".",
"add_edge",
"(",
"atom_idx",
",",
"trunk_idx",
")",
"if",
"not",
"atom",
".",
"is_last_kid",
":",
"if",
"atom",
".",
"next_kid",
".",
"head",
"==",
"'atom'",
":",
"next_idx",
"=",
"atom_indices",
"[",
"id",
"(",
"atom",
".",
"next_kid",
")",
"]",
"self",
".",
"add_edge",
"(",
"atom_idx",
",",
"next_idx",
")",
"elif",
"atom",
".",
"next_kid",
".",
"head",
"==",
"'branch'",
":",
"trunk",
"=",
"atom",
"else",
":",
"# We traveled through the whole branch.",
"return",
"elif",
"atom",
".",
"head",
"==",
"'branch'",
":",
"self",
".",
"_add_edges",
"(",
"atom",
",",
"trunk",
")"
] |
Add all bonds in the SMARTS string as edges in the graph.
|
[
"Add",
"all",
"bonds",
"in",
"the",
"SMARTS",
"string",
"as",
"edges",
"in",
"the",
"graph",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/smarts_graph.py#L57-L75
|
mosdef-hub/foyer
|
foyer/smarts_graph.py
|
SMARTSGraph._add_label_edges
|
def _add_label_edges(self):
"""Add edges between all atoms with the same atom_label in rings."""
labels = self.ast.select('atom_label')
if not labels:
return
# We need each individual label and atoms with multiple ring labels
# would yield e.g. the string '12' so split those up.
label_digits = defaultdict(list)
for label in labels:
digits = list(label.tail[0])
for digit in digits:
label_digits[digit].append(label.parent())
for label, (atom1, atom2) in label_digits.items():
atom1_idx = self._atom_indices[id(atom1)]
atom2_idx = self._atom_indices[id(atom2)]
self.add_edge(atom1_idx, atom2_idx)
|
python
|
def _add_label_edges(self):
labels = self.ast.select('atom_label')
if not labels:
return
label_digits = defaultdict(list)
for label in labels:
digits = list(label.tail[0])
for digit in digits:
label_digits[digit].append(label.parent())
for label, (atom1, atom2) in label_digits.items():
atom1_idx = self._atom_indices[id(atom1)]
atom2_idx = self._atom_indices[id(atom2)]
self.add_edge(atom1_idx, atom2_idx)
|
[
"def",
"_add_label_edges",
"(",
"self",
")",
":",
"labels",
"=",
"self",
".",
"ast",
".",
"select",
"(",
"'atom_label'",
")",
"if",
"not",
"labels",
":",
"return",
"# We need each individual label and atoms with multiple ring labels",
"# would yield e.g. the string '12' so split those up.",
"label_digits",
"=",
"defaultdict",
"(",
"list",
")",
"for",
"label",
"in",
"labels",
":",
"digits",
"=",
"list",
"(",
"label",
".",
"tail",
"[",
"0",
"]",
")",
"for",
"digit",
"in",
"digits",
":",
"label_digits",
"[",
"digit",
"]",
".",
"append",
"(",
"label",
".",
"parent",
"(",
")",
")",
"for",
"label",
",",
"(",
"atom1",
",",
"atom2",
")",
"in",
"label_digits",
".",
"items",
"(",
")",
":",
"atom1_idx",
"=",
"self",
".",
"_atom_indices",
"[",
"id",
"(",
"atom1",
")",
"]",
"atom2_idx",
"=",
"self",
".",
"_atom_indices",
"[",
"id",
"(",
"atom2",
")",
"]",
"self",
".",
"add_edge",
"(",
"atom1_idx",
",",
"atom2_idx",
")"
] |
Add edges between all atoms with the same atom_label in rings.
|
[
"Add",
"edges",
"between",
"all",
"atoms",
"with",
"the",
"same",
"atom_label",
"in",
"rings",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/smarts_graph.py#L77-L94
|
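The `_add_label_edges` docstring above points out that an atom carrying several ring-closure labels yields a string such as `'12'`, which must be split into individual digits before the paired atoms can be bonded. Here is a standalone sketch of that pairing step, using plain `(atom_index, digits)` tuples instead of the parser's AST nodes (the example data is illustrative, not taken from foyer):

```python
from collections import defaultdict

# Hypothetical fused-ring pattern: atom 0 opens ring closures 1 and 2,
# atom 3 closes ring 1 and atom 5 closes ring 2.
ring_labels = [(0, "12"), (3, "1"), (5, "2")]

label_digits = defaultdict(list)
for atom_idx, digits in ring_labels:
    for digit in digits:                      # '12' -> '1', '2'
        label_digits[digit].append(atom_idx)

# Each digit now maps to exactly the two atoms that share that ring closure.
for digit, (atom1, atom2) in label_digits.items():
    print(f"ring closure {digit}: bond {atom1}-{atom2}")
```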
mosdef-hub/foyer
|
foyer/smarts_graph.py
|
SMARTSGraph.find_matches
|
def find_matches(self, topology):
"""Return sets of atoms that match this SMARTS pattern in a topology.
Notes:
------
When this function gets used in atomtyper.py, we actively modify the
white- and blacklists of the atoms in `topology` after finding a match.
This means that between every successive call of
`subgraph_isomorphisms_iter()`, the topology against which we are
matching may have actually changed. Currently, we take advantage of this
behavior in some edge cases (e.g. see `test_hexa_coordinated` in
`test_smarts.py`).
"""
# Note: Needs to be updated in sync with the grammar in `smarts.py`.
ring_tokens = ['ring_size', 'ring_count']
has_ring_rules = any(self.ast.select(token)
for token in ring_tokens)
_prepare_atoms(topology, compute_cycles=has_ring_rules)
top_graph = nx.Graph()
top_graph.add_nodes_from(((a.index, {'atom': a})
for a in topology.atoms()))
top_graph.add_edges_from(((b[0].index, b[1].index)
for b in topology.bonds()))
if self._graph_matcher is None:
atom = nx.get_node_attributes(self, name='atom')[0]
if len(atom.select('atom_symbol')) == 1 and not atom.select('not_expression'):
try:
element = atom.select('atom_symbol').strees[0].tail[0]
except IndexError:
try:
atomic_num = atom.select('atomic_num').strees[0].tail[0]
element = pt.Element[int(atomic_num)]
except IndexError:
element = None
else:
element = None
self._graph_matcher = SMARTSMatcher(top_graph, self,
node_match=self._node_match,
element=element)
matched_atoms = set()
for mapping in self._graph_matcher.subgraph_isomorphisms_iter():
mapping = {node_id: atom_id for atom_id, node_id in mapping.items()}
# The first node in the smarts graph always corresponds to the atom
# that we are trying to match.
atom_index = mapping[0]
# Don't yield duplicate matches found via matching the pattern in a
# different order.
if atom_index not in matched_atoms:
matched_atoms.add(atom_index)
yield atom_index
|
python
|
def find_matches(self, topology):
ring_tokens = ['ring_size', 'ring_count']
has_ring_rules = any(self.ast.select(token)
for token in ring_tokens)
_prepare_atoms(topology, compute_cycles=has_ring_rules)
top_graph = nx.Graph()
top_graph.add_nodes_from(((a.index, {'atom': a})
for a in topology.atoms()))
top_graph.add_edges_from(((b[0].index, b[1].index)
for b in topology.bonds()))
if self._graph_matcher is None:
atom = nx.get_node_attributes(self, name='atom')[0]
if len(atom.select('atom_symbol')) == 1 and not atom.select('not_expression'):
try:
element = atom.select('atom_symbol').strees[0].tail[0]
except IndexError:
try:
atomic_num = atom.select('atomic_num').strees[0].tail[0]
element = pt.Element[int(atomic_num)]
except IndexError:
element = None
else:
element = None
self._graph_matcher = SMARTSMatcher(top_graph, self,
node_match=self._node_match,
element=element)
matched_atoms = set()
for mapping in self._graph_matcher.subgraph_isomorphisms_iter():
mapping = {node_id: atom_id for atom_id, node_id in mapping.items()}
atom_index = mapping[0]
if atom_index not in matched_atoms:
matched_atoms.add(atom_index)
yield atom_index
|
[
"def",
"find_matches",
"(",
"self",
",",
"topology",
")",
":",
"# Note: Needs to be updated in sync with the grammar in `smarts.py`.",
"ring_tokens",
"=",
"[",
"'ring_size'",
",",
"'ring_count'",
"]",
"has_ring_rules",
"=",
"any",
"(",
"self",
".",
"ast",
".",
"select",
"(",
"token",
")",
"for",
"token",
"in",
"ring_tokens",
")",
"_prepare_atoms",
"(",
"topology",
",",
"compute_cycles",
"=",
"has_ring_rules",
")",
"top_graph",
"=",
"nx",
".",
"Graph",
"(",
")",
"top_graph",
".",
"add_nodes_from",
"(",
"(",
"(",
"a",
".",
"index",
",",
"{",
"'atom'",
":",
"a",
"}",
")",
"for",
"a",
"in",
"topology",
".",
"atoms",
"(",
")",
")",
")",
"top_graph",
".",
"add_edges_from",
"(",
"(",
"(",
"b",
"[",
"0",
"]",
".",
"index",
",",
"b",
"[",
"1",
"]",
".",
"index",
")",
"for",
"b",
"in",
"topology",
".",
"bonds",
"(",
")",
")",
")",
"if",
"self",
".",
"_graph_matcher",
"is",
"None",
":",
"atom",
"=",
"nx",
".",
"get_node_attributes",
"(",
"self",
",",
"name",
"=",
"'atom'",
")",
"[",
"0",
"]",
"if",
"len",
"(",
"atom",
".",
"select",
"(",
"'atom_symbol'",
")",
")",
"==",
"1",
"and",
"not",
"atom",
".",
"select",
"(",
"'not_expression'",
")",
":",
"try",
":",
"element",
"=",
"atom",
".",
"select",
"(",
"'atom_symbol'",
")",
".",
"strees",
"[",
"0",
"]",
".",
"tail",
"[",
"0",
"]",
"except",
"IndexError",
":",
"try",
":",
"atomic_num",
"=",
"atom",
".",
"select",
"(",
"'atomic_num'",
")",
".",
"strees",
"[",
"0",
"]",
".",
"tail",
"[",
"0",
"]",
"element",
"=",
"pt",
".",
"Element",
"[",
"int",
"(",
"atomic_num",
")",
"]",
"except",
"IndexError",
":",
"element",
"=",
"None",
"else",
":",
"element",
"=",
"None",
"self",
".",
"_graph_matcher",
"=",
"SMARTSMatcher",
"(",
"top_graph",
",",
"self",
",",
"node_match",
"=",
"self",
".",
"_node_match",
",",
"element",
"=",
"element",
")",
"matched_atoms",
"=",
"set",
"(",
")",
"for",
"mapping",
"in",
"self",
".",
"_graph_matcher",
".",
"subgraph_isomorphisms_iter",
"(",
")",
":",
"mapping",
"=",
"{",
"node_id",
":",
"atom_id",
"for",
"atom_id",
",",
"node_id",
"in",
"mapping",
".",
"items",
"(",
")",
"}",
"# The first node in the smarts graph always corresponds to the atom",
"# that we are trying to match.",
"atom_index",
"=",
"mapping",
"[",
"0",
"]",
"# Don't yield duplicate matches found via matching the pattern in a",
"# different order.",
"if",
"atom_index",
"not",
"in",
"matched_atoms",
":",
"matched_atoms",
".",
"add",
"(",
"atom_index",
")",
"yield",
"atom_index"
] |
Return sets of atoms that match this SMARTS pattern in a topology.
Notes:
------
When this function gets used in atomtyper.py, we actively modify the
white- and blacklists of the atoms in `topology` after finding a match.
This means that between every successive call of
`subgraph_isomorphisms_iter()`, the topology against which we are
matching may have actually changed. Currently, we take advantage of this
behavior in some edge cases (e.g. see `test_hexa_coordinated` in
`test_smarts.py`).
|
[
"Return",
"sets",
"of",
"atoms",
"that",
"match",
"this",
"SMARTS",
"pattern",
"in",
"a",
"topology",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/smarts_graph.py#L150-L203
|
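`find_matches` above reduces SMARTS matching to subgraph isomorphism on a NetworkX graph of the topology, keeping only the host atom mapped to pattern node 0 and discarding re-orderings of the same match. The same idea in a NetworkX-only sketch with no foyer objects (a 3-node path stands in for the SMARTS pattern):

```python
import networkx as nx
from networkx.algorithms import isomorphism

host = nx.path_graph(5)      # a chain of five bonded atoms: 0-1-2-3-4
pattern = nx.path_graph(3)   # a three-atom pattern playing the role of the SMARTS graph

matcher = isomorphism.GraphMatcher(host, pattern)
seen = set()
for mapping in matcher.subgraph_isomorphisms_iter():
    # Invert the host->pattern mapping and anchor on pattern node 0,
    # mirroring what find_matches does with its first SMARTS atom.
    anchor = {p: h for h, p in mapping.items()}[0]
    if anchor not in seen:
        seen.add(anchor)
        print("match anchored at atom", anchor)
```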
mosdef-hub/foyer
|
foyer/smarts_graph.py
|
SMARTSMatcher.candidate_pairs_iter
|
def candidate_pairs_iter(self):
"""Iterator over candidate pairs of nodes in G1 and G2."""
# All computations are done using the current state!
G2_nodes = self.G2_nodes
# First we compute the inout-terminal sets.
T1_inout = set(self.inout_1.keys()) - set(self.core_1.keys())
T2_inout = set(self.inout_2.keys()) - set(self.core_2.keys())
# If T1_inout and T2_inout are both nonempty.
# P(s) = T1_inout x {min T2_inout}
if T1_inout and T2_inout:
for node in T1_inout:
yield node, min(T2_inout)
else:
# First we determine the candidate node for G2
other_node = min(G2_nodes - set(self.core_2))
host_nodes = self.valid_nodes if other_node == 0 else self.G1.nodes()
for node in host_nodes:
if node not in self.core_1:
yield node, other_node
|
python
|
def candidate_pairs_iter(self):
G2_nodes = self.G2_nodes
T1_inout = set(self.inout_1.keys()) - set(self.core_1.keys())
T2_inout = set(self.inout_2.keys()) - set(self.core_2.keys())
if T1_inout and T2_inout:
for node in T1_inout:
yield node, min(T2_inout)
else:
other_node = min(G2_nodes - set(self.core_2))
host_nodes = self.valid_nodes if other_node == 0 else self.G1.nodes()
for node in host_nodes:
if node not in self.core_1:
yield node, other_node
|
[
"def",
"candidate_pairs_iter",
"(",
"self",
")",
":",
"# All computations are done using the current state!",
"G2_nodes",
"=",
"self",
".",
"G2_nodes",
"# First we compute the inout-terminal sets.",
"T1_inout",
"=",
"set",
"(",
"self",
".",
"inout_1",
".",
"keys",
"(",
")",
")",
"-",
"set",
"(",
"self",
".",
"core_1",
".",
"keys",
"(",
")",
")",
"T2_inout",
"=",
"set",
"(",
"self",
".",
"inout_2",
".",
"keys",
"(",
")",
")",
"-",
"set",
"(",
"self",
".",
"core_2",
".",
"keys",
"(",
")",
")",
"# If T1_inout and T2_inout are both nonempty.",
"# P(s) = T1_inout x {min T2_inout}",
"if",
"T1_inout",
"and",
"T2_inout",
":",
"for",
"node",
"in",
"T1_inout",
":",
"yield",
"node",
",",
"min",
"(",
"T2_inout",
")",
"else",
":",
"# First we determine the candidate node for G2",
"other_node",
"=",
"min",
"(",
"G2_nodes",
"-",
"set",
"(",
"self",
".",
"core_2",
")",
")",
"host_nodes",
"=",
"self",
".",
"valid_nodes",
"if",
"other_node",
"==",
"0",
"else",
"self",
".",
"G1",
".",
"nodes",
"(",
")",
"for",
"node",
"in",
"host_nodes",
":",
"if",
"node",
"not",
"in",
"self",
".",
"core_1",
":",
"yield",
"node",
",",
"other_node"
] |
Iterator over candidate pairs of nodes in G1 and G2.
|
[
"Iterator",
"over",
"candidate",
"pairs",
"of",
"nodes",
"in",
"G1",
"and",
"G2",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/smarts_graph.py#L216-L236
|
mosdef-hub/foyer
|
foyer/atomtyper.py
|
find_atomtypes
|
def find_atomtypes(topology, forcefield, max_iter=10):
"""Determine atomtypes for all atoms.
Parameters
----------
topology : simtk.openmm.app.Topology
The topology that we are trying to atomtype.
forcefield : foyer.Forcefield
The forcefield object.
max_iter : int, optional, default=10
The maximum number of iterations.
"""
rules = _load_rules(forcefield)
# Only consider rules for elements found in topology
subrules = dict()
system_elements = {a.element.symbol for a in topology.atoms()}
for key,val in rules.items():
atom = val.node[0]['atom']
if len(atom.select('atom_symbol')) == 1 and not atom.select('not_expression'):
try:
element = atom.select('atom_symbol').strees[0].tail[0]
except IndexError:
try:
atomic_num = atom.select('atomic_num').strees[0].tail[0]
element = pt.Element[int(atomic_num)]
except IndexError:
element = None
else:
element = None
if element is None or element in system_elements:
subrules[key] = val
rules = subrules
_iterate_rules(rules, topology, max_iter=max_iter)
_resolve_atomtypes(topology)
|
python
|
def find_atomtypes(topology, forcefield, max_iter=10):
rules = _load_rules(forcefield)
subrules = dict()
system_elements = {a.element.symbol for a in topology.atoms()}
for key,val in rules.items():
atom = val.node[0]['atom']
if len(atom.select('atom_symbol')) == 1 and not atom.select('not_expression'):
try:
element = atom.select('atom_symbol').strees[0].tail[0]
except IndexError:
try:
atomic_num = atom.select('atomic_num').strees[0].tail[0]
element = pt.Element[int(atomic_num)]
except IndexError:
element = None
else:
element = None
if element is None or element in system_elements:
subrules[key] = val
rules = subrules
_iterate_rules(rules, topology, max_iter=max_iter)
_resolve_atomtypes(topology)
|
[
"def",
"find_atomtypes",
"(",
"topology",
",",
"forcefield",
",",
"max_iter",
"=",
"10",
")",
":",
"rules",
"=",
"_load_rules",
"(",
"forcefield",
")",
"# Only consider rules for elements found in topology",
"subrules",
"=",
"dict",
"(",
")",
"system_elements",
"=",
"{",
"a",
".",
"element",
".",
"symbol",
"for",
"a",
"in",
"topology",
".",
"atoms",
"(",
")",
"}",
"for",
"key",
",",
"val",
"in",
"rules",
".",
"items",
"(",
")",
":",
"atom",
"=",
"val",
".",
"node",
"[",
"0",
"]",
"[",
"'atom'",
"]",
"if",
"len",
"(",
"atom",
".",
"select",
"(",
"'atom_symbol'",
")",
")",
"==",
"1",
"and",
"not",
"atom",
".",
"select",
"(",
"'not_expression'",
")",
":",
"try",
":",
"element",
"=",
"atom",
".",
"select",
"(",
"'atom_symbol'",
")",
".",
"strees",
"[",
"0",
"]",
".",
"tail",
"[",
"0",
"]",
"except",
"IndexError",
":",
"try",
":",
"atomic_num",
"=",
"atom",
".",
"select",
"(",
"'atomic_num'",
")",
".",
"strees",
"[",
"0",
"]",
".",
"tail",
"[",
"0",
"]",
"element",
"=",
"pt",
".",
"Element",
"[",
"int",
"(",
"atomic_num",
")",
"]",
"except",
"IndexError",
":",
"element",
"=",
"None",
"else",
":",
"element",
"=",
"None",
"if",
"element",
"is",
"None",
"or",
"element",
"in",
"system_elements",
":",
"subrules",
"[",
"key",
"]",
"=",
"val",
"rules",
"=",
"subrules",
"_iterate_rules",
"(",
"rules",
",",
"topology",
",",
"max_iter",
"=",
"max_iter",
")",
"_resolve_atomtypes",
"(",
"topology",
")"
] |
Determine atomtypes for all atoms.
Parameters
----------
topology : simtk.openmm.app.Topology
The topology that we are trying to atomtype.
forcefield : foyer.Forcefield
The forcefield object.
max_iter : int, optional, default=10
The maximum number of iterations.
|
[
"Determine",
"atomtypes",
"for",
"all",
"atoms",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/atomtyper.py#L7-L43
|
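`find_atomtypes` above prunes the rule set before matching: a rule that can only ever match a single element is skipped when that element is absent from the system. A toy version of that filter with made-up rule names (the element assignments below are illustrative only):

```python
# Hypothetical mapping from rule name to the one element it requires;
# None marks a rule that is not tied to a single element and is always kept.
rule_elements = {"opls_135": "C", "opls_140": "H", "opls_154": "O", "wildcard": None}
system_elements = {"C", "H"}              # elements actually present in the topology

applicable = {name for name, element in rule_elements.items()
              if element is None or element in system_elements}
print(applicable)                          # {'opls_135', 'opls_140', 'wildcard'}
```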
mosdef-hub/foyer
|
foyer/atomtyper.py
|
_load_rules
|
def _load_rules(forcefield):
"""Load atomtyping rules from a forcefield into SMARTSGraphs. """
rules = dict()
for rule_name, smarts in forcefield.atomTypeDefinitions.items():
overrides = forcefield.atomTypeOverrides.get(rule_name)
if overrides is not None:
overrides = set(overrides)
else:
overrides = set()
rules[rule_name] = SMARTSGraph(smarts_string=smarts,
parser=forcefield.parser,
name=rule_name,
overrides=overrides)
return rules
|
python
|
def _load_rules(forcefield):
rules = dict()
for rule_name, smarts in forcefield.atomTypeDefinitions.items():
overrides = forcefield.atomTypeOverrides.get(rule_name)
if overrides is not None:
overrides = set(overrides)
else:
overrides = set()
rules[rule_name] = SMARTSGraph(smarts_string=smarts,
parser=forcefield.parser,
name=rule_name,
overrides=overrides)
return rules
|
[
"def",
"_load_rules",
"(",
"forcefield",
")",
":",
"rules",
"=",
"dict",
"(",
")",
"for",
"rule_name",
",",
"smarts",
"in",
"forcefield",
".",
"atomTypeDefinitions",
".",
"items",
"(",
")",
":",
"overrides",
"=",
"forcefield",
".",
"atomTypeOverrides",
".",
"get",
"(",
"rule_name",
")",
"if",
"overrides",
"is",
"not",
"None",
":",
"overrides",
"=",
"set",
"(",
"overrides",
")",
"else",
":",
"overrides",
"=",
"set",
"(",
")",
"rules",
"[",
"rule_name",
"]",
"=",
"SMARTSGraph",
"(",
"smarts_string",
"=",
"smarts",
",",
"parser",
"=",
"forcefield",
".",
"parser",
",",
"name",
"=",
"rule_name",
",",
"overrides",
"=",
"overrides",
")",
"return",
"rules"
] |
Load atomtyping rules from a forcefield into SMARTSGraphs.
|
[
"Load",
"atomtyping",
"rules",
"from",
"a",
"forcefield",
"into",
"SMARTSGraphs",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/atomtyper.py#L46-L59
|
mosdef-hub/foyer
|
foyer/atomtyper.py
|
_iterate_rules
|
def _iterate_rules(rules, topology, max_iter):
"""Iteratively run all the rules until the white- and backlists converge.
Parameters
----------
rules : dict
A dictionary mapping rule names (typically atomtype names) to
SMARTSGraphs that evaluate those rules.
topology : simtk.openmm.app.Topology
The topology that we are trying to atomtype.
max_iter : int
The maximum number of iterations.
"""
atoms = list(topology.atoms())
for _ in range(max_iter):
max_iter -= 1
found_something = False
for rule in rules.values():
for match_index in rule.find_matches(topology):
atom = atoms[match_index]
if rule.name not in atom.whitelist:
atom.whitelist.add(rule.name)
atom.blacklist |= rule.overrides
found_something = True
if not found_something:
break
else:
warn("Reached maximum iterations. Something probably went wrong.")
|
python
|
def _iterate_rules(rules, topology, max_iter):
atoms = list(topology.atoms())
for _ in range(max_iter):
max_iter -= 1
found_something = False
for rule in rules.values():
for match_index in rule.find_matches(topology):
atom = atoms[match_index]
if rule.name not in atom.whitelist:
atom.whitelist.add(rule.name)
atom.blacklist |= rule.overrides
found_something = True
if not found_something:
break
else:
warn("Reached maximum iterations. Something probably went wrong.")
|
[
"def",
"_iterate_rules",
"(",
"rules",
",",
"topology",
",",
"max_iter",
")",
":",
"atoms",
"=",
"list",
"(",
"topology",
".",
"atoms",
"(",
")",
")",
"for",
"_",
"in",
"range",
"(",
"max_iter",
")",
":",
"max_iter",
"-=",
"1",
"found_something",
"=",
"False",
"for",
"rule",
"in",
"rules",
".",
"values",
"(",
")",
":",
"for",
"match_index",
"in",
"rule",
".",
"find_matches",
"(",
"topology",
")",
":",
"atom",
"=",
"atoms",
"[",
"match_index",
"]",
"if",
"rule",
".",
"name",
"not",
"in",
"atom",
".",
"whitelist",
":",
"atom",
".",
"whitelist",
".",
"add",
"(",
"rule",
".",
"name",
")",
"atom",
".",
"blacklist",
"|=",
"rule",
".",
"overrides",
"found_something",
"=",
"True",
"if",
"not",
"found_something",
":",
"break",
"else",
":",
"warn",
"(",
"\"Reached maximum iterations. Something probably went wrong.\"",
")"
] |
Iteratively run all the rules until the white- and blacklists converge.
Parameters
----------
rules : dict
A dictionary mapping rule names (typically atomtype names) to
SMARTSGraphs that evaluate those rules.
topology : simtk.openmm.app.Topology
The topology that we are trying to atomtype.
max_iter : int
The maximum number of iterations.
|
[
"Iteratively",
"run",
"all",
"the",
"rules",
"until",
"the",
"white",
"-",
"and",
"backlists",
"converge",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/atomtyper.py#L62-L90
|
mosdef-hub/foyer
|
foyer/atomtyper.py
|
_resolve_atomtypes
|
def _resolve_atomtypes(topology):
"""Determine the final atomtypes from the white- and blacklists. """
for atom in topology.atoms():
atomtype = [rule_name for rule_name in atom.whitelist - atom.blacklist]
if len(atomtype) == 1:
atom.id = atomtype[0]
elif len(atomtype) > 1:
raise FoyerError("Found multiple types for atom {} ({}): {}.".format(
atom.index, atom.element.name, atomtype))
else:
raise FoyerError("Found no types for atom {} ({}).".format(
atom.index, atom.element.name))
|
python
|
def _resolve_atomtypes(topology):
for atom in topology.atoms():
atomtype = [rule_name for rule_name in atom.whitelist - atom.blacklist]
if len(atomtype) == 1:
atom.id = atomtype[0]
elif len(atomtype) > 1:
raise FoyerError("Found multiple types for atom {} ({}): {}.".format(
atom.index, atom.element.name, atomtype))
else:
raise FoyerError("Found no types for atom {} ({}).".format(
atom.index, atom.element.name))
|
[
"def",
"_resolve_atomtypes",
"(",
"topology",
")",
":",
"for",
"atom",
"in",
"topology",
".",
"atoms",
"(",
")",
":",
"atomtype",
"=",
"[",
"rule_name",
"for",
"rule_name",
"in",
"atom",
".",
"whitelist",
"-",
"atom",
".",
"blacklist",
"]",
"if",
"len",
"(",
"atomtype",
")",
"==",
"1",
":",
"atom",
".",
"id",
"=",
"atomtype",
"[",
"0",
"]",
"elif",
"len",
"(",
"atomtype",
")",
">",
"1",
":",
"raise",
"FoyerError",
"(",
"\"Found multiple types for atom {} ({}): {}.\"",
".",
"format",
"(",
"atom",
".",
"index",
",",
"atom",
".",
"element",
".",
"name",
",",
"atomtype",
")",
")",
"else",
":",
"raise",
"FoyerError",
"(",
"\"Found no types for atom {} ({}).\"",
".",
"format",
"(",
"atom",
".",
"index",
",",
"atom",
".",
"element",
".",
"name",
")",
")"
] |
Determine the final atomtypes from the white- and blacklists.
|
[
"Determine",
"the",
"final",
"atomtypes",
"from",
"the",
"white",
"-",
"and",
"blacklists",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/atomtyper.py#L93-L104
|
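`_iterate_rules` and `_resolve_atomtypes` together implement the whitelist/blacklist scheme: every matching rule adds its name to an atom's whitelist and its `overrides` to the blacklist, and the final atom type is whatever single name survives the set difference. A toy resolution with invented rule names:

```python
# Hypothetical situation: a generic and a more specific hydrogen type both match,
# and the specific one declares that it overrides the generic one.
whitelist = {"opls_140", "opls_144"}
overrides = {"opls_144": {"opls_140"}, "opls_140": set()}

blacklist = set()
for rule_name in whitelist:
    blacklist |= overrides[rule_name]

atomtype = whitelist - blacklist
assert atomtype == {"opls_144"}   # exactly one surviving type, as _resolve_atomtypes requires
```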
mosdef-hub/foyer
|
foyer/forcefield.py
|
generate_topology
|
def generate_topology(non_omm_topology, non_element_types=None,
residues=None):
"""Create an OpenMM Topology from another supported topology structure."""
if non_element_types is None:
non_element_types = set()
if isinstance(non_omm_topology, pmd.Structure):
return _topology_from_parmed(non_omm_topology, non_element_types)
elif has_mbuild:
mb = import_('mbuild')
if isinstance(non_omm_topology, mb.Compound):
pmdCompoundStructure = non_omm_topology.to_parmed(residues=residues)
return _topology_from_parmed(pmdCompoundStructure, non_element_types)
else:
raise FoyerError('Unknown topology format: {}\n'
'Supported formats are: '
'"parmed.Structure", '
'"mbuild.Compound", '
'"openmm.app.Topology"'.format(non_omm_topology))
|
python
|
def generate_topology(non_omm_topology, non_element_types=None,
residues=None):
if non_element_types is None:
non_element_types = set()
if isinstance(non_omm_topology, pmd.Structure):
return _topology_from_parmed(non_omm_topology, non_element_types)
elif has_mbuild:
mb = import_('mbuild')
if isinstance(non_omm_topology, mb.Compound):
pmdCompoundStructure = non_omm_topology.to_parmed(residues=residues)
return _topology_from_parmed(pmdCompoundStructure, non_element_types)
else:
raise FoyerError('Unknown topology format: {}\n'
'Supported formats are: '
'"parmed.Structure", '
'"mbuild.Compound", '
'"openmm.app.Topology"'.format(non_omm_topology))
|
[
"def",
"generate_topology",
"(",
"non_omm_topology",
",",
"non_element_types",
"=",
"None",
",",
"residues",
"=",
"None",
")",
":",
"if",
"non_element_types",
"is",
"None",
":",
"non_element_types",
"=",
"set",
"(",
")",
"if",
"isinstance",
"(",
"non_omm_topology",
",",
"pmd",
".",
"Structure",
")",
":",
"return",
"_topology_from_parmed",
"(",
"non_omm_topology",
",",
"non_element_types",
")",
"elif",
"has_mbuild",
":",
"mb",
"=",
"import_",
"(",
"'mbuild'",
")",
"if",
"(",
"non_omm_topology",
",",
"mb",
".",
"Compound",
")",
":",
"pmdCompoundStructure",
"=",
"non_omm_topology",
".",
"to_parmed",
"(",
"residues",
"=",
"residues",
")",
"return",
"_topology_from_parmed",
"(",
"pmdCompoundStructure",
",",
"non_element_types",
")",
"else",
":",
"raise",
"FoyerError",
"(",
"'Unknown topology format: {}\\n'",
"'Supported formats are: '",
"'\"parmed.Structure\", '",
"'\"mbuild.Compound\", '",
"'\"openmm.app.Topology\"'",
".",
"format",
"(",
"non_omm_topology",
")",
")"
] |
Create an OpenMM Topology from another supported topology structure.
|
[
"Create",
"an",
"OpenMM",
"Topology",
"from",
"another",
"supported",
"topology",
"structure",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L87-L105
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
_topology_from_parmed
|
def _topology_from_parmed(structure, non_element_types):
"""Convert a ParmEd Structure to an OpenMM Topology."""
topology = app.Topology()
residues = dict()
for pmd_residue in structure.residues:
chain = topology.addChain()
omm_residue = topology.addResidue(pmd_residue.name, chain)
residues[pmd_residue] = omm_residue
atoms = dict() # pmd.Atom: omm.Atom
for pmd_atom in structure.atoms:
name = pmd_atom.name
if pmd_atom.name in non_element_types:
element = non_element_types[pmd_atom.name]
else:
if (isinstance(pmd_atom.atomic_number, int) and
pmd_atom.atomic_number != 0):
element = elem.Element.getByAtomicNumber(pmd_atom.atomic_number)
else:
element = elem.Element.getBySymbol(pmd_atom.name)
omm_atom = topology.addAtom(name, element, residues[pmd_atom.residue])
atoms[pmd_atom] = omm_atom
omm_atom.bond_partners = []
for bond in structure.bonds:
atom1 = atoms[bond.atom1]
atom2 = atoms[bond.atom2]
topology.addBond(atom1, atom2)
atom1.bond_partners.append(atom2)
atom2.bond_partners.append(atom1)
if structure.box_vectors and np.any([x._value for x in structure.box_vectors]):
topology.setPeriodicBoxVectors(structure.box_vectors)
positions = structure.positions
return topology, positions
|
python
|
def _topology_from_parmed(structure, non_element_types):
topology = app.Topology()
residues = dict()
for pmd_residue in structure.residues:
chain = topology.addChain()
omm_residue = topology.addResidue(pmd_residue.name, chain)
residues[pmd_residue] = omm_residue
atoms = dict()
for pmd_atom in structure.atoms:
name = pmd_atom.name
if pmd_atom.name in non_element_types:
element = non_element_types[pmd_atom.name]
else:
if (isinstance(pmd_atom.atomic_number, int) and
pmd_atom.atomic_number != 0):
element = elem.Element.getByAtomicNumber(pmd_atom.atomic_number)
else:
element = elem.Element.getBySymbol(pmd_atom.name)
omm_atom = topology.addAtom(name, element, residues[pmd_atom.residue])
atoms[pmd_atom] = omm_atom
omm_atom.bond_partners = []
for bond in structure.bonds:
atom1 = atoms[bond.atom1]
atom2 = atoms[bond.atom2]
topology.addBond(atom1, atom2)
atom1.bond_partners.append(atom2)
atom2.bond_partners.append(atom1)
if structure.box_vectors and np.any([x._value for x in structure.box_vectors]):
topology.setPeriodicBoxVectors(structure.box_vectors)
positions = structure.positions
return topology, positions
|
[
"def",
"_topology_from_parmed",
"(",
"structure",
",",
"non_element_types",
")",
":",
"topology",
"=",
"app",
".",
"Topology",
"(",
")",
"residues",
"=",
"dict",
"(",
")",
"for",
"pmd_residue",
"in",
"structure",
".",
"residues",
":",
"chain",
"=",
"topology",
".",
"addChain",
"(",
")",
"omm_residue",
"=",
"topology",
".",
"addResidue",
"(",
"pmd_residue",
".",
"name",
",",
"chain",
")",
"residues",
"[",
"pmd_residue",
"]",
"=",
"omm_residue",
"atoms",
"=",
"dict",
"(",
")",
"# pmd.Atom: omm.Atom",
"for",
"pmd_atom",
"in",
"structure",
".",
"atoms",
":",
"name",
"=",
"pmd_atom",
".",
"name",
"if",
"pmd_atom",
".",
"name",
"in",
"non_element_types",
":",
"element",
"=",
"non_element_types",
"[",
"pmd_atom",
".",
"name",
"]",
"else",
":",
"if",
"(",
"isinstance",
"(",
"pmd_atom",
".",
"atomic_number",
",",
"int",
")",
"and",
"pmd_atom",
".",
"atomic_number",
"!=",
"0",
")",
":",
"element",
"=",
"elem",
".",
"Element",
".",
"getByAtomicNumber",
"(",
"pmd_atom",
".",
"atomic_number",
")",
"else",
":",
"element",
"=",
"elem",
".",
"Element",
".",
"getBySymbol",
"(",
"pmd_atom",
".",
"name",
")",
"omm_atom",
"=",
"topology",
".",
"addAtom",
"(",
"name",
",",
"element",
",",
"residues",
"[",
"pmd_atom",
".",
"residue",
"]",
")",
"atoms",
"[",
"pmd_atom",
"]",
"=",
"omm_atom",
"omm_atom",
".",
"bond_partners",
"=",
"[",
"]",
"for",
"bond",
"in",
"structure",
".",
"bonds",
":",
"atom1",
"=",
"atoms",
"[",
"bond",
".",
"atom1",
"]",
"atom2",
"=",
"atoms",
"[",
"bond",
".",
"atom2",
"]",
"topology",
".",
"addBond",
"(",
"atom1",
",",
"atom2",
")",
"atom1",
".",
"bond_partners",
".",
"append",
"(",
"atom2",
")",
"atom2",
".",
"bond_partners",
".",
"append",
"(",
"atom1",
")",
"if",
"structure",
".",
"box_vectors",
"and",
"np",
".",
"any",
"(",
"[",
"x",
".",
"_value",
"for",
"x",
"in",
"structure",
".",
"box_vectors",
"]",
")",
":",
"topology",
".",
"setPeriodicBoxVectors",
"(",
"structure",
".",
"box_vectors",
")",
"positions",
"=",
"structure",
".",
"positions",
"return",
"topology",
",",
"positions"
] |
Convert a ParmEd Structure to an OpenMM Topology.
|
[
"Convert",
"a",
"ParmEd",
"Structure",
"to",
"an",
"OpenMM",
"Topology",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L108-L143
|
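A hedged usage sketch for `_topology_from_parmed` above; the function is module-private, and the input file name `molecule.mol2` is assumed rather than taken from the row:

```python
import parmed as pmd
from foyer.forcefield import _topology_from_parmed

# Any ParmEd-readable structure file works here; 'molecule.mol2' is a placeholder.
structure = pmd.load_file("molecule.mol2")
topology, positions = _topology_from_parmed(structure, non_element_types={})

print(topology.getNumAtoms(), "atoms,", topology.getNumBonds(), "bonds")
```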
mosdef-hub/foyer
|
foyer/forcefield.py
|
_topology_from_residue
|
def _topology_from_residue(res):
"""Converts a openmm.app.Topology.Residue to openmm.app.Topology.
Parameters
----------
res : openmm.app.Topology.Residue
An individual residue in an openmm.app.Topology
Returns
-------
topology : openmm.app.Topology
The generated topology
"""
topology = app.Topology()
chain = topology.addChain()
new_res = topology.addResidue(res.name, chain)
atoms = dict() # { omm.Atom in res : omm.Atom in *new* topology }
for res_atom in res.atoms():
topology_atom = topology.addAtom(name=res_atom.name,
element=res_atom.element,
residue=new_res)
atoms[res_atom] = topology_atom
topology_atom.bond_partners = []
for bond in res.bonds():
atom1 = atoms[bond.atom1]
atom2 = atoms[bond.atom2]
topology.addBond(atom1, atom2)
atom1.bond_partners.append(atom2)
atom2.bond_partners.append(atom1)
return topology
|
python
|
def _topology_from_residue(res):
topology = app.Topology()
chain = topology.addChain()
new_res = topology.addResidue(res.name, chain)
atoms = dict()
for res_atom in res.atoms():
topology_atom = topology.addAtom(name=res_atom.name,
element=res_atom.element,
residue=new_res)
atoms[res_atom] = topology_atom
topology_atom.bond_partners = []
for bond in res.bonds():
atom1 = atoms[bond.atom1]
atom2 = atoms[bond.atom2]
topology.addBond(atom1, atom2)
atom1.bond_partners.append(atom2)
atom2.bond_partners.append(atom1)
return topology
|
[
"def",
"_topology_from_residue",
"(",
"res",
")",
":",
"topology",
"=",
"app",
".",
"Topology",
"(",
")",
"chain",
"=",
"topology",
".",
"addChain",
"(",
")",
"new_res",
"=",
"topology",
".",
"addResidue",
"(",
"res",
".",
"name",
",",
"chain",
")",
"atoms",
"=",
"dict",
"(",
")",
"# { omm.Atom in res : omm.Atom in *new* topology }",
"for",
"res_atom",
"in",
"res",
".",
"atoms",
"(",
")",
":",
"topology_atom",
"=",
"topology",
".",
"addAtom",
"(",
"name",
"=",
"res_atom",
".",
"name",
",",
"element",
"=",
"res_atom",
".",
"element",
",",
"residue",
"=",
"new_res",
")",
"atoms",
"[",
"res_atom",
"]",
"=",
"topology_atom",
"topology_atom",
".",
"bond_partners",
"=",
"[",
"]",
"for",
"bond",
"in",
"res",
".",
"bonds",
"(",
")",
":",
"atom1",
"=",
"atoms",
"[",
"bond",
".",
"atom1",
"]",
"atom2",
"=",
"atoms",
"[",
"bond",
".",
"atom2",
"]",
"topology",
".",
"addBond",
"(",
"atom1",
",",
"atom2",
")",
"atom1",
".",
"bond_partners",
".",
"append",
"(",
"atom2",
")",
"atom2",
".",
"bond_partners",
".",
"append",
"(",
"atom1",
")",
"return",
"topology"
] |
Converts an openmm.app.Topology.Residue to openmm.app.Topology.
Parameters
----------
res : openmm.app.Topology.Residue
An individual residue in an openmm.app.Topology
Returns
-------
topology : openmm.app.Topology
The generated topology
|
[
"Converts",
"a",
"openmm",
".",
"app",
".",
"Topology",
".",
"Residue",
"to",
"openmm",
".",
"app",
".",
"Topology",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L146-L180
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
_check_independent_residues
|
def _check_independent_residues(topology):
"""Check to see if residues will constitute independent graphs."""
for res in topology.residues():
atoms_in_residue = set([atom for atom in res.atoms()])
bond_partners_in_residue = [item for sublist in [atom.bond_partners for atom in res.atoms()] for item in sublist]
# Handle the case of a 'residue' with no neighbors
if not bond_partners_in_residue:
continue
if set(atoms_in_residue) != set(bond_partners_in_residue):
return False
return True
|
python
|
def _check_independent_residues(topology):
for res in topology.residues():
atoms_in_residue = set([atom for atom in res.atoms()])
bond_partners_in_residue = [item for sublist in [atom.bond_partners for atom in res.atoms()] for item in sublist]
if not bond_partners_in_residue:
continue
if set(atoms_in_residue) != set(bond_partners_in_residue):
return False
return True
|
[
"def",
"_check_independent_residues",
"(",
"topology",
")",
":",
"for",
"res",
"in",
"topology",
".",
"residues",
"(",
")",
":",
"atoms_in_residue",
"=",
"set",
"(",
"[",
"atom",
"for",
"atom",
"in",
"res",
".",
"atoms",
"(",
")",
"]",
")",
"bond_partners_in_residue",
"=",
"[",
"item",
"for",
"sublist",
"in",
"[",
"atom",
".",
"bond_partners",
"for",
"atom",
"in",
"res",
".",
"atoms",
"(",
")",
"]",
"for",
"item",
"in",
"sublist",
"]",
"# Handle the case of a 'residue' with no neighbors",
"if",
"not",
"bond_partners_in_residue",
":",
"continue",
"if",
"set",
"(",
"atoms_in_residue",
")",
"!=",
"set",
"(",
"bond_partners_in_residue",
")",
":",
"return",
"False",
"return",
"True"
] |
Check to see if residues will constitute independent graphs.
|
[
"Check",
"to",
"see",
"if",
"residues",
"will",
"constitute",
"independent",
"graphs",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L183-L193
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
_update_atomtypes
|
def _update_atomtypes(unatomtyped_topology, res_name, prototype):
"""Update atomtypes in residues in a topology using a prototype topology.
Atomtypes are updated when residues in each topology have matching names.
Parameters
----------
unatomtyped_topology : openmm.app.Topology
Topology lacking atomtypes defined by `find_atomtypes`.
prototype : openmm.app.Topology
Prototype topology with atomtypes defined by `find_atomtypes`.
"""
for res in unatomtyped_topology.residues():
if res.name == res_name:
for old_atom, new_atom_id in zip([atom for atom in res.atoms()], [atom.id for atom in prototype.atoms()]):
old_atom.id = new_atom_id
|
python
|
def _update_atomtypes(unatomtyped_topology, res_name, prototype):
for res in unatomtyped_topology.residues():
if res.name == res_name:
for old_atom, new_atom_id in zip([atom for atom in res.atoms()], [atom.id for atom in prototype.atoms()]):
old_atom.id = new_atom_id
|
[
"def",
"_update_atomtypes",
"(",
"unatomtyped_topology",
",",
"res_name",
",",
"prototype",
")",
":",
"for",
"res",
"in",
"unatomtyped_topology",
".",
"residues",
"(",
")",
":",
"if",
"res",
".",
"name",
"==",
"res_name",
":",
"for",
"old_atom",
",",
"new_atom_id",
"in",
"zip",
"(",
"[",
"atom",
"for",
"atom",
"in",
"res",
".",
"atoms",
"(",
")",
"]",
",",
"[",
"atom",
".",
"id",
"for",
"atom",
"in",
"prototype",
".",
"atoms",
"(",
")",
"]",
")",
":",
"old_atom",
".",
"id",
"=",
"new_atom_id"
] |
Update atomtypes in residues in a topology using a prototype topology.
Atomtypes are updated when residues in each topology have matching names.
Parameters
----------
unatomtyped_topology : openmm.app.Topology
Topology lacking atomtypes defined by `find_atomtypes`.
prototype : openmm.app.Topology
Prototype topology with atomtypes defined by `find_atomtypes`.
|
[
"Update",
"atomtypes",
"in",
"residues",
"in",
"a",
"topology",
"using",
"a",
"prototype",
"topology",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L196-L212
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
Forcefield.registerAtomType
|
def registerAtomType(self, parameters):
"""Register a new atom type. """
name = parameters['name']
if name in self._atomTypes:
raise ValueError('Found multiple definitions for atom type: ' + name)
atom_class = parameters['class']
mass = _convertParameterToNumber(parameters['mass'])
element = None
if 'element' in parameters:
element, custom = self._create_element(parameters['element'], mass)
if custom:
self.non_element_types[element.symbol] = element
self._atomTypes[name] = self.__class__._AtomType(name, atom_class, mass, element)
if atom_class in self._atomClasses:
type_set = self._atomClasses[atom_class]
else:
type_set = set()
self._atomClasses[atom_class] = type_set
type_set.add(name)
self._atomClasses[''].add(name)
name = parameters['name']
if 'def' in parameters:
self.atomTypeDefinitions[name] = parameters['def']
if 'overrides' in parameters:
overrides = set(atype.strip() for atype
in parameters['overrides'].split(","))
if overrides:
self.atomTypeOverrides[name] = overrides
if 'desc' in parameters:
self.atomTypeDesc[name] = parameters['desc']
if 'doi' in parameters:
dois = set(doi.strip() for doi in parameters['doi'].split(','))
self.atomTypeRefs[name] = dois
|
python
|
def registerAtomType(self, parameters):
name = parameters['name']
if name in self._atomTypes:
raise ValueError('Found multiple definitions for atom type: ' + name)
atom_class = parameters['class']
mass = _convertParameterToNumber(parameters['mass'])
element = None
if 'element' in parameters:
element, custom = self._create_element(parameters['element'], mass)
if custom:
self.non_element_types[element.symbol] = element
self._atomTypes[name] = self.__class__._AtomType(name, atom_class, mass, element)
if atom_class in self._atomClasses:
type_set = self._atomClasses[atom_class]
else:
type_set = set()
self._atomClasses[atom_class] = type_set
type_set.add(name)
self._atomClasses[''].add(name)
name = parameters['name']
if 'def' in parameters:
self.atomTypeDefinitions[name] = parameters['def']
if 'overrides' in parameters:
overrides = set(atype.strip() for atype
in parameters['overrides'].split(","))
if overrides:
self.atomTypeOverrides[name] = overrides
if 'desc' in parameters:
self.atomTypeDesc[name] = parameters['desc']
if 'doi' in parameters:
dois = set(doi.strip() for doi in parameters['doi'].split(','))
self.atomTypeRefs[name] = dois
|
[
"def",
"registerAtomType",
"(",
"self",
",",
"parameters",
")",
":",
"name",
"=",
"parameters",
"[",
"'name'",
"]",
"if",
"name",
"in",
"self",
".",
"_atomTypes",
":",
"raise",
"ValueError",
"(",
"'Found multiple definitions for atom type: '",
"+",
"name",
")",
"atom_class",
"=",
"parameters",
"[",
"'class'",
"]",
"mass",
"=",
"_convertParameterToNumber",
"(",
"parameters",
"[",
"'mass'",
"]",
")",
"element",
"=",
"None",
"if",
"'element'",
"in",
"parameters",
":",
"element",
",",
"custom",
"=",
"self",
".",
"_create_element",
"(",
"parameters",
"[",
"'element'",
"]",
",",
"mass",
")",
"if",
"custom",
":",
"self",
".",
"non_element_types",
"[",
"element",
".",
"symbol",
"]",
"=",
"element",
"self",
".",
"_atomTypes",
"[",
"name",
"]",
"=",
"self",
".",
"__class__",
".",
"_AtomType",
"(",
"name",
",",
"atom_class",
",",
"mass",
",",
"element",
")",
"if",
"atom_class",
"in",
"self",
".",
"_atomClasses",
":",
"type_set",
"=",
"self",
".",
"_atomClasses",
"[",
"atom_class",
"]",
"else",
":",
"type_set",
"=",
"set",
"(",
")",
"self",
".",
"_atomClasses",
"[",
"atom_class",
"]",
"=",
"type_set",
"type_set",
".",
"add",
"(",
"name",
")",
"self",
".",
"_atomClasses",
"[",
"''",
"]",
".",
"add",
"(",
"name",
")",
"name",
"=",
"parameters",
"[",
"'name'",
"]",
"if",
"'def'",
"in",
"parameters",
":",
"self",
".",
"atomTypeDefinitions",
"[",
"name",
"]",
"=",
"parameters",
"[",
"'def'",
"]",
"if",
"'overrides'",
"in",
"parameters",
":",
"overrides",
"=",
"set",
"(",
"atype",
".",
"strip",
"(",
")",
"for",
"atype",
"in",
"parameters",
"[",
"'overrides'",
"]",
".",
"split",
"(",
"\",\"",
")",
")",
"if",
"overrides",
":",
"self",
".",
"atomTypeOverrides",
"[",
"name",
"]",
"=",
"overrides",
"if",
"'des'",
"in",
"parameters",
":",
"self",
".",
"atomTypeDesc",
"[",
"name",
"]",
"=",
"parameters",
"[",
"'desc'",
"]",
"if",
"'doi'",
"in",
"parameters",
":",
"dois",
"=",
"set",
"(",
"doi",
".",
"strip",
"(",
")",
"for",
"doi",
"in",
"parameters",
"[",
"'doi'",
"]",
".",
"split",
"(",
"','",
")",
")",
"self",
".",
"atomTypeRefs",
"[",
"name",
"]",
"=",
"dois"
] |
Register a new atom type.
|
[
"Register",
"a",
"new",
"atom",
"type",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L307-L341
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
Forcefield.apply
|
def apply(self, topology, references_file=None, use_residue_map=True,
assert_bond_params=True, assert_angle_params=True,
assert_dihedral_params=True, assert_improper_params=False,
*args, **kwargs):
"""Apply the force field to a molecular structure
Parameters
----------
topology : openmm.app.Topology or parmed.Structure or mbuild.Compound
Molecular structure to apply the force field to
references_file : str, optional, default=None
Name of file where force field references will be written (in Bibtex
format)
use_residue_map : boolean, optional, default=True
Store atomtyped topologies of residues to a dictionary that maps
them to residue names. Each topology, including atomtypes, will be
copied to other residues with the same name. This avoids repeatedly
calling the subgraph isomorphism on identical residues and should
result in better performance for systems with many identical
residues, i.e. a box of water. Note that for this to be applied to
independent molecules, they must each be saved as different
residues in the topology.
assert_bond_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
bonds.
assert_angle_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
angles.
assert_dihedral_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
proper dihedrals.
assert_improper_params : bool, optional, default=False
If True, Foyer will exit if parameters are not found for all system
improper dihedrals.
"""
if self.atomTypeDefinitions == {}:
raise FoyerError('Attempting to atom-type using a force field '
'with no atom type definitions.')
if not isinstance(topology, app.Topology):
residues = kwargs.get('residues')
topology, positions = generate_topology(topology,
self.non_element_types, residues=residues)
else:
positions = np.empty(shape=(topology.getNumAtoms(), 3))
positions[:] = np.nan
box_vectors = topology.getPeriodicBoxVectors()
topology = self.run_atomtyping(topology, use_residue_map=use_residue_map)
system = self.createSystem(topology, *args, **kwargs)
structure = pmd.openmm.load_topology(topology=topology, system=system)
'''
Check that all topology objects (angles, dihedrals, and impropers)
have parameters assigned. OpenMM will generate an error if bond parameters
are not assigned.
'''
data = self._SystemData
if data.bonds:
missing = [b for b in structure.bonds
if b.type is None]
if missing:
nmissing = len(structure.bonds) - len(missing)
msg = ("Parameters have not been assigned to all bonds. "
"Total system bonds: {}, Parametrized bonds: {}"
"".format(len(structure.bonds), nmissing))
_error_or_warn(assert_bond_params, msg)
if data.angles and (len(data.angles) != len(structure.angles)):
msg = ("Parameters have not been assigned to all angles. Total "
"system angles: {}, Parameterized angles: {}"
"".format(len(data.angles), len(structure.angles)))
_error_or_warn(assert_angle_params, msg)
proper_dihedrals = [dihedral for dihedral in structure.dihedrals
if not dihedral.improper]
if data.propers and len(data.propers) != \
len(proper_dihedrals) + len(structure.rb_torsions):
msg = ("Parameters have not been assigned to all proper dihedrals. "
"Total system dihedrals: {}, Parameterized dihedrals: {}. "
"Note that if your system contains torsions of Ryckaert-"
"Bellemans functional form, all of these torsions are "
"processed as propers.".format(len(data.propers),
len(proper_dihedrals) + len(structure.rb_torsions)))
_error_or_warn(assert_dihedral_params, msg)
improper_dihedrals = [dihedral for dihedral in structure.dihedrals
if dihedral.improper]
if data.impropers and len(data.impropers) != \
len(improper_dihedrals) + len(structure.impropers):
msg = ("Parameters have not been assigned to all impropers. Total "
"system impropers: {}, Parameterized impropers: {}. "
"Note that if your system contains torsions of Ryckaert-"
"Bellemans functional form, all of these torsions are "
"processed as propers".format(len(data.impropers),
len(improper_dihedrals) + len(structure.impropers)))
_error_or_warn(assert_improper_params, msg)
structure.bonds.sort(key=lambda x: x.atom1.idx)
structure.positions = positions
if box_vectors is not None:
structure.box_vectors = box_vectors
if references_file:
atom_types = set(atom.type for atom in structure.atoms)
self._write_references_to_file(atom_types, references_file)
return structure
|
python
|
def apply(self, topology, references_file=None, use_residue_map=True,
assert_bond_params=True, assert_angle_params=True,
assert_dihedral_params=True, assert_improper_params=False,
*args, **kwargs):
if self.atomTypeDefinitions == {}:
raise FoyerError('Attempting to atom-type using a force field '
'with no atom type definitions.')
if not isinstance(topology, app.Topology):
residues = kwargs.get('residues')
topology, positions = generate_topology(topology,
self.non_element_types, residues=residues)
else:
positions = np.empty(shape=(topology.getNumAtoms(), 3))
positions[:] = np.nan
box_vectors = topology.getPeriodicBoxVectors()
topology = self.run_atomtyping(topology, use_residue_map=use_residue_map)
system = self.createSystem(topology, *args, **kwargs)
structure = pmd.openmm.load_topology(topology=topology, system=system)
data = self._SystemData
if data.bonds:
missing = [b for b in structure.bonds
if b.type is None]
if missing:
nmissing = len(structure.bonds) - len(missing)
msg = ("Parameters have not been assigned to all bonds. "
"Total system bonds: {}, Parametrized bonds: {}"
"".format(len(structure.bonds), nmissing))
_error_or_warn(assert_bond_params, msg)
if data.angles and (len(data.angles) != len(structure.angles)):
msg = ("Parameters have not been assigned to all angles. Total "
"system angles: {}, Parameterized angles: {}"
"".format(len(data.angles), len(structure.angles)))
_error_or_warn(assert_angle_params, msg)
proper_dihedrals = [dihedral for dihedral in structure.dihedrals
if not dihedral.improper]
if data.propers and len(data.propers) != \
len(proper_dihedrals) + len(structure.rb_torsions):
msg = ("Parameters have not been assigned to all proper dihedrals. "
"Total system dihedrals: {}, Parameterized dihedrals: {}. "
"Note that if your system contains torsions of Ryckaert-"
"Bellemans functional form, all of these torsions are "
"processed as propers.".format(len(data.propers),
len(proper_dihedrals) + len(structure.rb_torsions)))
_error_or_warn(assert_dihedral_params, msg)
improper_dihedrals = [dihedral for dihedral in structure.dihedrals
if dihedral.improper]
if data.impropers and len(data.impropers) != \
len(improper_dihedrals) + len(structure.impropers):
msg = ("Parameters have not been assigned to all impropers. Total "
"system impropers: {}, Parameterized impropers: {}. "
"Note that if your system contains torsions of Ryckaert-"
"Bellemans functional form, all of these torsions are "
"processed as propers".format(len(data.impropers),
len(improper_dihedrals) + len(structure.impropers)))
_error_or_warn(assert_improper_params, msg)
structure.bonds.sort(key=lambda x: x.atom1.idx)
structure.positions = positions
if box_vectors is not None:
structure.box_vectors = box_vectors
if references_file:
atom_types = set(atom.type for atom in structure.atoms)
self._write_references_to_file(atom_types, references_file)
return structure
|
[
"def",
"apply",
"(",
"self",
",",
"topology",
",",
"references_file",
"=",
"None",
",",
"use_residue_map",
"=",
"True",
",",
"assert_bond_params",
"=",
"True",
",",
"assert_angle_params",
"=",
"True",
",",
"assert_dihedral_params",
"=",
"True",
",",
"assert_improper_params",
"=",
"False",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"atomTypeDefinitions",
"==",
"{",
"}",
":",
"raise",
"FoyerError",
"(",
"'Attempting to atom-type using a force field '",
"'with no atom type defitions.'",
")",
"if",
"not",
"isinstance",
"(",
"topology",
",",
"app",
".",
"Topology",
")",
":",
"residues",
"=",
"kwargs",
".",
"get",
"(",
"'residues'",
")",
"topology",
",",
"positions",
"=",
"generate_topology",
"(",
"topology",
",",
"self",
".",
"non_element_types",
",",
"residues",
"=",
"residues",
")",
"else",
":",
"positions",
"=",
"np",
".",
"empty",
"(",
"shape",
"=",
"(",
"topology",
".",
"getNumAtoms",
"(",
")",
",",
"3",
")",
")",
"positions",
"[",
":",
"]",
"=",
"np",
".",
"nan",
"box_vectors",
"=",
"topology",
".",
"getPeriodicBoxVectors",
"(",
")",
"topology",
"=",
"self",
".",
"run_atomtyping",
"(",
"topology",
",",
"use_residue_map",
"=",
"use_residue_map",
")",
"system",
"=",
"self",
".",
"createSystem",
"(",
"topology",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"structure",
"=",
"pmd",
".",
"openmm",
".",
"load_topology",
"(",
"topology",
"=",
"topology",
",",
"system",
"=",
"system",
")",
"'''\n Check that all topology objects (angles, dihedrals, and impropers)\n have parameters assigned. OpenMM will generate an error if bond parameters\n are not assigned.\n '''",
"data",
"=",
"self",
".",
"_SystemData",
"if",
"data",
".",
"bonds",
":",
"missing",
"=",
"[",
"b",
"for",
"b",
"in",
"structure",
".",
"bonds",
"if",
"b",
".",
"type",
"is",
"None",
"]",
"if",
"missing",
":",
"nmissing",
"=",
"len",
"(",
"structure",
".",
"bonds",
")",
"-",
"len",
"(",
"missing",
")",
"msg",
"=",
"(",
"\"Parameters have not been assigned to all bonds. \"",
"\"Total system bonds: {}, Parametrized bonds: {}\"",
"\"\"",
".",
"format",
"(",
"len",
"(",
"structure",
".",
"bonds",
")",
",",
"nmissing",
")",
")",
"_error_or_warn",
"(",
"assert_bond_params",
",",
"msg",
")",
"if",
"data",
".",
"angles",
"and",
"(",
"len",
"(",
"data",
".",
"angles",
")",
"!=",
"len",
"(",
"structure",
".",
"angles",
")",
")",
":",
"msg",
"=",
"(",
"\"Parameters have not been assigned to all angles. Total \"",
"\"system angles: {}, Parameterized angles: {}\"",
"\"\"",
".",
"format",
"(",
"len",
"(",
"data",
".",
"angles",
")",
",",
"len",
"(",
"structure",
".",
"angles",
")",
")",
")",
"_error_or_warn",
"(",
"assert_angle_params",
",",
"msg",
")",
"proper_dihedrals",
"=",
"[",
"dihedral",
"for",
"dihedral",
"in",
"structure",
".",
"dihedrals",
"if",
"not",
"dihedral",
".",
"improper",
"]",
"if",
"data",
".",
"propers",
"and",
"len",
"(",
"data",
".",
"propers",
")",
"!=",
"len",
"(",
"proper_dihedrals",
")",
"+",
"len",
"(",
"structure",
".",
"rb_torsions",
")",
":",
"msg",
"=",
"(",
"\"Parameters have not been assigned to all proper dihedrals. \"",
"\"Total system dihedrals: {}, Parameterized dihedrals: {}. \"",
"\"Note that if your system contains torsions of Ryckaert-\"",
"\"Bellemans functional form, all of these torsions are \"",
"\"processed as propers.\"",
".",
"format",
"(",
"len",
"(",
"data",
".",
"propers",
")",
",",
"len",
"(",
"proper_dihedrals",
")",
"+",
"len",
"(",
"structure",
".",
"rb_torsions",
")",
")",
")",
"_error_or_warn",
"(",
"assert_dihedral_params",
",",
"msg",
")",
"improper_dihedrals",
"=",
"[",
"dihedral",
"for",
"dihedral",
"in",
"structure",
".",
"dihedrals",
"if",
"dihedral",
".",
"improper",
"]",
"if",
"data",
".",
"impropers",
"and",
"len",
"(",
"data",
".",
"impropers",
")",
"!=",
"len",
"(",
"improper_dihedrals",
")",
"+",
"len",
"(",
"structure",
".",
"impropers",
")",
":",
"msg",
"=",
"(",
"\"Parameters have not been assigned to all impropers. Total \"",
"\"system impropers: {}, Parameterized impropers: {}. \"",
"\"Note that if your system contains torsions of Ryckaert-\"",
"\"Bellemans functional form, all of these torsions are \"",
"\"processed as propers\"",
".",
"format",
"(",
"len",
"(",
"data",
".",
"impropers",
")",
",",
"len",
"(",
"improper_dihedrals",
")",
"+",
"len",
"(",
"structure",
".",
"impropers",
")",
")",
")",
"_error_or_warn",
"(",
"assert_improper_params",
",",
"msg",
")",
"structure",
".",
"bonds",
".",
"sort",
"(",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"atom1",
".",
"idx",
")",
"structure",
".",
"positions",
"=",
"positions",
"if",
"box_vectors",
"is",
"not",
"None",
":",
"structure",
".",
"box_vectors",
"=",
"box_vectors",
"if",
"references_file",
":",
"atom_types",
"=",
"set",
"(",
"atom",
".",
"type",
"for",
"atom",
"in",
"structure",
".",
"atoms",
")",
"self",
".",
"_write_references_to_file",
"(",
"atom_types",
",",
"references_file",
")",
"return",
"structure"
] |
Apply the force field to a molecular structure
Parameters
----------
topology : openmm.app.Topology or parmed.Structure or mbuild.Compound
Molecular structure to apply the force field to
references_file : str, optional, default=None
Name of file where force field references will be written (in Bibtex
format)
use_residue_map : boolean, optional, default=True
Store atomtyped topologies of residues to a dictionary that maps
them to residue names. Each topology, including atomtypes, will be
copied to other residues with the same name. This avoids repeatedly
calling the subgraph isomorphism on identical residues and should
result in better performance for systems with many identical
residues, i.e. a box of water. Note that for this to be applied to
independent molecules, they must each be saved as different
residues in the topology.
assert_bond_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
bonds.
assert_angle_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
angles.
assert_dihedral_params : bool, optional, default=True
If True, Foyer will exit if parameters are not found for all system
proper dihedrals.
assert_improper_params : bool, optional, default=False
If True, Foyer will exit if parameters are not found for all system
improper dihedrals.
|
[
"Apply",
"the",
"force",
"field",
"to",
"a",
"molecular",
"structure"
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L343-L450
|
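A minimal usage sketch for the apply() call documented above, assuming mBuild and the bundled OPLS-AA force field are available; the molecule and file names are illustrative:

import mbuild as mb
from foyer import Forcefield

ethane = mb.load('CC', smiles=True)             # hypothetical test molecule; needs mBuild's SMILES backend
ff = Forcefield(name='oplsaa')                  # OPLS-AA XML shipped with foyer
typed = ff.apply(ethane,
                 references_file='ethane.bib',  # write BibTeX citations for the parameters that were used
                 assert_dihedral_params=True)   # raise instead of warn when proper dihedrals are missing
print(len(typed.atoms), len(typed.bonds))       # parametrized parmed.Structure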
mosdef-hub/foyer
|
foyer/forcefield.py
|
Forcefield.run_atomtyping
|
def run_atomtyping(self, topology, use_residue_map=True):
"""Atomtype the topology
Parameters
----------
topology : openmm.app.Topology
Molecular structure to find atom types of
use_residue_map : boolean, optional, default=True
Store atomtyped topologies of residues to a dictionary that maps
them to residue names. Each topology, including atomtypes, will be
copied to other residues with the same name. This avoids repeatedly
calling the subgraph isomorphism on identical residues and should
result in better performance for systems with many identical
residues, i.e. a box of water. Note that for this to be applied to
independent molecules, they must each be saved as different
residues in the topology.
"""
if use_residue_map:
independent_residues = _check_independent_residues(topology)
if independent_residues:
residue_map = dict()
for res in topology.residues():
if res.name not in residue_map.keys():
residue = _topology_from_residue(res)
find_atomtypes(residue, forcefield=self)
residue_map[res.name] = residue
for key, val in residue_map.items():
_update_atomtypes(topology, key, val)
else:
find_atomtypes(topology, forcefield=self)
else:
find_atomtypes(topology, forcefield=self)
if not all([a.id for a in topology.atoms()][0]):
raise ValueError('Not all atoms in topology have atom types')
return topology
|
python
|
def run_atomtyping(self, topology, use_residue_map=True):
if use_residue_map:
independent_residues = _check_independent_residues(topology)
if independent_residues:
residue_map = dict()
for res in topology.residues():
if res.name not in residue_map.keys():
residue = _topology_from_residue(res)
find_atomtypes(residue, forcefield=self)
residue_map[res.name] = residue
for key, val in residue_map.items():
_update_atomtypes(topology, key, val)
else:
find_atomtypes(topology, forcefield=self)
else:
find_atomtypes(topology, forcefield=self)
if not all([a.id for a in topology.atoms()][0]):
raise ValueError('Not all atoms in topology have atom types')
return topology
|
[
"def",
"run_atomtyping",
"(",
"self",
",",
"topology",
",",
"use_residue_map",
"=",
"True",
")",
":",
"if",
"use_residue_map",
":",
"independent_residues",
"=",
"_check_independent_residues",
"(",
"topology",
")",
"if",
"independent_residues",
":",
"residue_map",
"=",
"dict",
"(",
")",
"for",
"res",
"in",
"topology",
".",
"residues",
"(",
")",
":",
"if",
"res",
".",
"name",
"not",
"in",
"residue_map",
".",
"keys",
"(",
")",
":",
"residue",
"=",
"_topology_from_residue",
"(",
"res",
")",
"find_atomtypes",
"(",
"residue",
",",
"forcefield",
"=",
"self",
")",
"residue_map",
"[",
"res",
".",
"name",
"]",
"=",
"residue",
"for",
"key",
",",
"val",
"in",
"residue_map",
".",
"items",
"(",
")",
":",
"_update_atomtypes",
"(",
"topology",
",",
"key",
",",
"val",
")",
"else",
":",
"find_atomtypes",
"(",
"topology",
",",
"forcefield",
"=",
"self",
")",
"else",
":",
"find_atomtypes",
"(",
"topology",
",",
"forcefield",
"=",
"self",
")",
"if",
"not",
"all",
"(",
"[",
"a",
".",
"id",
"for",
"a",
"in",
"topology",
".",
"atoms",
"(",
")",
"]",
"[",
"0",
"]",
")",
":",
"raise",
"ValueError",
"(",
"'Not all atoms in topology have atom types'",
")",
"return",
"topology"
] |
Atomtype the topology
Parameters
----------
topology : openmm.app.Topology
Molecular structure to find atom types of
use_residue_map : boolean, optional, default=True
Store atomtyped topologies of residues to a dictionary that maps
them to residue names. Each topology, including atomtypes, will be
copied to other residues with the same name. This avoids repeatedly
calling the subgraph isomorphism on identical residues and should
result in better performance for systems with many identical
residues, i.e. a box of water. Note that for this to be applied to
independent molecules, they must each be saved as different
residues in the topology.
|
[
"Atomtype",
"the",
"topology"
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L452-L493
|
mosdef-hub/foyer
|
foyer/forcefield.py
|
Forcefield.createSystem
|
def createSystem(self, topology, nonbondedMethod=NoCutoff,
nonbondedCutoff=1.0 * u.nanometer, constraints=None,
rigidWater=True, removeCMMotion=True, hydrogenMass=None,
**args):
"""Construct an OpenMM System representing a Topology with this force field.
Parameters
----------
topology : Topology
The Topology for which to create a System
nonbondedMethod : object=NoCutoff
The method to use for nonbonded interactions. Allowed values are
NoCutoff, CutoffNonPeriodic, CutoffPeriodic, Ewald, or PME.
nonbondedCutoff : distance=1*nanometer
The cutoff distance to use for nonbonded interactions
constraints : object=None
Specifies which bonds and angles should be implemented with constraints.
Allowed values are None, HBonds, AllBonds, or HAngles.
rigidWater : boolean=True
If true, water molecules will be fully rigid regardless of the value
passed for the constraints argument
removeCMMotion : boolean=True
If true, a CMMotionRemover will be added to the System
hydrogenMass : mass=None
The mass to use for hydrogen atoms bound to heavy atoms. Any mass
added to a hydrogen is subtracted from the heavy atom to keep
their total mass the same.
args
Arbitrary additional keyword arguments may also be specified.
This allows extra parameters to be specified that are specific to
particular force fields.
Returns
-------
system
the newly created System
"""
# Overwrite previous _SystemData object
self._SystemData = app.ForceField._SystemData()
data = self._SystemData
data.atoms = list(topology.atoms())
for atom in data.atoms:
data.excludeAtomWith.append([])
# Make a list of all bonds
for bond in topology.bonds():
data.bonds.append(app.ForceField._BondData(bond[0].index, bond[1].index))
# Record which atoms are bonded to each other atom
bonded_to_atom = []
for i in range(len(data.atoms)):
bonded_to_atom.append(set())
data.atomBonds.append([])
for i in range(len(data.bonds)):
bond = data.bonds[i]
bonded_to_atom[bond.atom1].add(bond.atom2)
bonded_to_atom[bond.atom2].add(bond.atom1)
data.atomBonds[bond.atom1].append(i)
data.atomBonds[bond.atom2].append(i)
# TODO: Better way to lookup nonbonded parameters...?
nonbonded_params = None
for generator in self.getGenerators():
if isinstance(generator, NonbondedGenerator):
nonbonded_params = generator.params.paramsForType
break
for chain in topology.chains():
for res in chain.residues():
for atom in res.atoms():
data.atomType[atom] = atom.id
if nonbonded_params:
params = nonbonded_params[atom.id]
data.atomParameters[atom] = params
# Create the System and add atoms
sys = mm.System()
for atom in topology.atoms():
# Look up the atom type name, returning a helpful error message if it cannot be found.
if atom not in data.atomType:
raise Exception("Could not identify atom type for atom '%s'." % str(atom))
typename = data.atomType[atom]
# Look up the type name in the list of registered atom types, returning a helpful error message if it cannot be found.
if typename not in self._atomTypes:
msg = "Could not find typename '%s' for atom '%s' in list of known atom types.\n" % (typename, str(atom))
msg += "Known atom types are: %s" % str(self._atomTypes.keys())
raise Exception(msg)
# Add the particle to the OpenMM system.
mass = self._atomTypes[typename].mass
sys.addParticle(mass)
# Adjust hydrogen masses if requested.
if hydrogenMass is not None:
if not u.is_quantity(hydrogenMass):
hydrogenMass *= u.dalton
for atom1, atom2 in topology.bonds():
if atom1.element == elem.hydrogen:
(atom1, atom2) = (atom2, atom1)
if atom2.element == elem.hydrogen and atom1.element not in (elem.hydrogen, None):
transfer_mass = hydrogenMass - sys.getParticleMass(atom2.index)
sys.setParticleMass(atom2.index, hydrogenMass)
mass = sys.getParticleMass(atom1.index) - transfer_mass
sys.setParticleMass(atom1.index, mass)
# Set periodic boundary conditions.
box_vectors = topology.getPeriodicBoxVectors()
if box_vectors is not None:
sys.setDefaultPeriodicBoxVectors(box_vectors[0],
box_vectors[1],
box_vectors[2])
elif nonbondedMethod not in [NoCutoff, CutoffNonPeriodic]:
raise ValueError('Requested periodic boundary conditions for a '
'Topology that does not specify periodic box '
'dimensions')
# Make a list of all unique angles
unique_angles = set()
for bond in data.bonds:
for atom in bonded_to_atom[bond.atom1]:
if atom != bond.atom2:
if atom < bond.atom2:
unique_angles.add((atom, bond.atom1, bond.atom2))
else:
unique_angles.add((bond.atom2, bond.atom1, atom))
for atom in bonded_to_atom[bond.atom2]:
if atom != bond.atom1:
if atom > bond.atom1:
unique_angles.add((bond.atom1, bond.atom2, atom))
else:
unique_angles.add((atom, bond.atom2, bond.atom1))
data.angles = sorted(list(unique_angles))
# Make a list of all unique proper torsions
unique_propers = set()
for angle in data.angles:
for atom in bonded_to_atom[angle[0]]:
if atom not in angle:
if atom < angle[2]:
unique_propers.add((atom, angle[0], angle[1], angle[2]))
else:
unique_propers.add((angle[2], angle[1], angle[0], atom))
for atom in bonded_to_atom[angle[2]]:
if atom not in angle:
if atom > angle[0]:
unique_propers.add((angle[0], angle[1], angle[2], atom))
else:
unique_propers.add((atom, angle[2], angle[1], angle[0]))
data.propers = sorted(list(unique_propers))
# Make a list of all unique improper torsions
for atom in range(len(bonded_to_atom)):
bonded_to = bonded_to_atom[atom]
if len(bonded_to) > 2:
for subset in itertools.combinations(bonded_to, 3):
data.impropers.append((atom, subset[0], subset[1], subset[2]))
# Identify bonds that should be implemented with constraints
if constraints == AllBonds or constraints == HAngles:
for bond in data.bonds:
bond.isConstrained = True
elif constraints == HBonds:
for bond in data.bonds:
atom1 = data.atoms[bond.atom1]
atom2 = data.atoms[bond.atom2]
bond.isConstrained = atom1.name.startswith('H') or atom2.name.startswith('H')
if rigidWater:
for bond in data.bonds:
atom1 = data.atoms[bond.atom1]
atom2 = data.atoms[bond.atom2]
if atom1.residue.name == 'HOH' and atom2.residue.name == 'HOH':
bond.isConstrained = True
# Identify angles that should be implemented with constraints
if constraints == HAngles:
for angle in data.angles:
atom1 = data.atoms[angle[0]]
atom2 = data.atoms[angle[1]]
atom3 = data.atoms[angle[2]]
numH = 0
if atom1.name.startswith('H'):
numH += 1
if atom3.name.startswith('H'):
numH += 1
data.isAngleConstrained.append(numH == 2 or (numH == 1 and atom2.name.startswith('O')))
else:
data.isAngleConstrained = len(data.angles)*[False]
if rigidWater:
for i in range(len(data.angles)):
angle = data.angles[i]
atom1 = data.atoms[angle[0]]
atom2 = data.atoms[angle[1]]
atom3 = data.atoms[angle[2]]
if atom1.residue.name == 'HOH' and atom2.residue.name == 'HOH' and atom3.residue.name == 'HOH':
data.isAngleConstrained[i] = True
# Add virtual sites
for atom in data.virtualSites:
(site, atoms, excludeWith) = data.virtualSites[atom]
index = atom.index
data.excludeAtomWith[excludeWith].append(index)
if site.type == 'average2':
sys.setVirtualSite(index, mm.TwoParticleAverageSite(
atoms[0], atoms[1], site.weights[0], site.weights[1]))
elif site.type == 'average3':
sys.setVirtualSite(index, mm.ThreeParticleAverageSite(
atoms[0], atoms[1], atoms[2],
site.weights[0], site.weights[1], site.weights[2]))
elif site.type == 'outOfPlane':
sys.setVirtualSite(index, mm.OutOfPlaneSite(
atoms[0], atoms[1], atoms[2],
site.weights[0], site.weights[1], site.weights[2]))
elif site.type == 'localCoords':
local_coord_site = mm.LocalCoordinatesSite(
atoms[0], atoms[1], atoms[2],
mm.Vec3(site.originWeights[0], site.originWeights[1], site.originWeights[2]),
mm.Vec3(site.xWeights[0], site.xWeights[1], site.xWeights[2]),
mm.Vec3(site.yWeights[0], site.yWeights[1], site.yWeights[2]),
mm.Vec3(site.localPos[0], site.localPos[1], site.localPos[2]))
sys.setVirtualSite(index, local_coord_site)
# Add forces to the System
for force in self._forces:
force.createForce(sys, data, nonbondedMethod, nonbondedCutoff, args)
if removeCMMotion:
sys.addForce(mm.CMMotionRemover())
# Let force generators do postprocessing
for force in self._forces:
if 'postprocessSystem' in dir(force):
force.postprocessSystem(sys, data, args)
# Execute scripts found in the XML files.
for script in self._scripts:
exec(script, locals())
return sys
|
python
|
def createSystem(self, topology, nonbondedMethod=NoCutoff,
nonbondedCutoff=1.0 * u.nanometer, constraints=None,
rigidWater=True, removeCMMotion=True, hydrogenMass=None,
**args):
self._SystemData = app.ForceField._SystemData()
data = self._SystemData
data.atoms = list(topology.atoms())
for atom in data.atoms:
data.excludeAtomWith.append([])
for bond in topology.bonds():
data.bonds.append(app.ForceField._BondData(bond[0].index, bond[1].index))
bonded_to_atom = []
for i in range(len(data.atoms)):
bonded_to_atom.append(set())
data.atomBonds.append([])
for i in range(len(data.bonds)):
bond = data.bonds[i]
bonded_to_atom[bond.atom1].add(bond.atom2)
bonded_to_atom[bond.atom2].add(bond.atom1)
data.atomBonds[bond.atom1].append(i)
data.atomBonds[bond.atom2].append(i)
nonbonded_params = None
for generator in self.getGenerators():
if isinstance(generator, NonbondedGenerator):
nonbonded_params = generator.params.paramsForType
break
for chain in topology.chains():
for res in chain.residues():
for atom in res.atoms():
data.atomType[atom] = atom.id
if nonbonded_params:
params = nonbonded_params[atom.id]
data.atomParameters[atom] = params
sys = mm.System()
for atom in topology.atoms():
if atom not in data.atomType:
raise Exception("Could not identify atom type for atom '%s'." % str(atom))
typename = data.atomType[atom]
if typename not in self._atomTypes:
msg = "Could not find typename '%s' for atom '%s' in list of known atom types.\n" % (typename, str(atom))
msg += "Known atom types are: %s" % str(self._atomTypes.keys())
raise Exception(msg)
mass = self._atomTypes[typename].mass
sys.addParticle(mass)
if hydrogenMass is not None:
if not u.is_quantity(hydrogenMass):
hydrogenMass *= u.dalton
for atom1, atom2 in topology.bonds():
if atom1.element == elem.hydrogen:
(atom1, atom2) = (atom2, atom1)
if atom2.element == elem.hydrogen and atom1.element not in (elem.hydrogen, None):
transfer_mass = hydrogenMass - sys.getParticleMass(atom2.index)
sys.setParticleMass(atom2.index, hydrogenMass)
mass = sys.getParticleMass(atom1.index) - transfer_mass
sys.setParticleMass(atom1.index, mass)
box_vectors = topology.getPeriodicBoxVectors()
if box_vectors is not None:
sys.setDefaultPeriodicBoxVectors(box_vectors[0],
box_vectors[1],
box_vectors[2])
elif nonbondedMethod not in [NoCutoff, CutoffNonPeriodic]:
raise ValueError('Requested periodic boundary conditions for a '
'Topology that does not specify periodic box '
'dimensions')
unique_angles = set()
for bond in data.bonds:
for atom in bonded_to_atom[bond.atom1]:
if atom != bond.atom2:
if atom < bond.atom2:
unique_angles.add((atom, bond.atom1, bond.atom2))
else:
unique_angles.add((bond.atom2, bond.atom1, atom))
for atom in bonded_to_atom[bond.atom2]:
if atom != bond.atom1:
if atom > bond.atom1:
unique_angles.add((bond.atom1, bond.atom2, atom))
else:
unique_angles.add((atom, bond.atom2, bond.atom1))
data.angles = sorted(list(unique_angles))
unique_propers = set()
for angle in data.angles:
for atom in bonded_to_atom[angle[0]]:
if atom not in angle:
if atom < angle[2]:
unique_propers.add((atom, angle[0], angle[1], angle[2]))
else:
unique_propers.add((angle[2], angle[1], angle[0], atom))
for atom in bonded_to_atom[angle[2]]:
if atom not in angle:
if atom > angle[0]:
unique_propers.add((angle[0], angle[1], angle[2], atom))
else:
unique_propers.add((atom, angle[2], angle[1], angle[0]))
data.propers = sorted(list(unique_propers))
for atom in range(len(bonded_to_atom)):
bonded_to = bonded_to_atom[atom]
if len(bonded_to) > 2:
for subset in itertools.combinations(bonded_to, 3):
data.impropers.append((atom, subset[0], subset[1], subset[2]))
if constraints == AllBonds or constraints == HAngles:
for bond in data.bonds:
bond.isConstrained = True
elif constraints == HBonds:
for bond in data.bonds:
atom1 = data.atoms[bond.atom1]
atom2 = data.atoms[bond.atom2]
bond.isConstrained = atom1.name.startswith('H') or atom2.name.startswith('H')
if rigidWater:
for bond in data.bonds:
atom1 = data.atoms[bond.atom1]
atom2 = data.atoms[bond.atom2]
if atom1.residue.name == 'HOH' and atom2.residue.name == 'HOH':
bond.isConstrained = True
if constraints == HAngles:
for angle in data.angles:
atom1 = data.atoms[angle[0]]
atom2 = data.atoms[angle[1]]
atom3 = data.atoms[angle[2]]
numH = 0
if atom1.name.startswith('H'):
numH += 1
if atom3.name.startswith('H'):
numH += 1
data.isAngleConstrained.append(numH == 2 or (numH == 1 and atom2.name.startswith('O')))
else:
data.isAngleConstrained = len(data.angles)*[False]
if rigidWater:
for i in range(len(data.angles)):
angle = data.angles[i]
atom1 = data.atoms[angle[0]]
atom2 = data.atoms[angle[1]]
atom3 = data.atoms[angle[2]]
if atom1.residue.name == 'HOH' and atom2.residue.name == 'HOH' and atom3.residue.name == 'HOH':
data.isAngleConstrained[i] = True
for atom in data.virtualSites:
(site, atoms, excludeWith) = data.virtualSites[atom]
index = atom.index
data.excludeAtomWith[excludeWith].append(index)
if site.type == 'average2':
sys.setVirtualSite(index, mm.TwoParticleAverageSite(
atoms[0], atoms[1], site.weights[0], site.weights[1]))
elif site.type == 'average3':
sys.setVirtualSite(index, mm.ThreeParticleAverageSite(
atoms[0], atoms[1], atoms[2],
site.weights[0], site.weights[1], site.weights[2]))
elif site.type == 'outOfPlane':
sys.setVirtualSite(index, mm.OutOfPlaneSite(
atoms[0], atoms[1], atoms[2],
site.weights[0], site.weights[1], site.weights[2]))
elif site.type == 'localCoords':
local_coord_site = mm.LocalCoordinatesSite(
atoms[0], atoms[1], atoms[2],
mm.Vec3(site.originWeights[0], site.originWeights[1], site.originWeights[2]),
mm.Vec3(site.xWeights[0], site.xWeights[1], site.xWeights[2]),
mm.Vec3(site.yWeights[0], site.yWeights[1], site.yWeights[2]),
mm.Vec3(site.localPos[0], site.localPos[1], site.localPos[2]))
sys.setVirtualSite(index, local_coord_site)
for force in self._forces:
force.createForce(sys, data, nonbondedMethod, nonbondedCutoff, args)
if removeCMMotion:
sys.addForce(mm.CMMotionRemover())
for force in self._forces:
if 'postprocessSystem' in dir(force):
force.postprocessSystem(sys, data, args)
for script in self._scripts:
exec(script, locals())
return sys
|
[
"def",
"createSystem",
"(",
"self",
",",
"topology",
",",
"nonbondedMethod",
"=",
"NoCutoff",
",",
"nonbondedCutoff",
"=",
"1.0",
"*",
"u",
".",
"nanometer",
",",
"constraints",
"=",
"None",
",",
"rigidWater",
"=",
"True",
",",
"removeCMMotion",
"=",
"True",
",",
"hydrogenMass",
"=",
"None",
",",
"*",
"*",
"args",
")",
":",
"# Overwrite previous _SystemData object",
"self",
".",
"_SystemData",
"=",
"app",
".",
"ForceField",
".",
"_SystemData",
"(",
")",
"data",
"=",
"self",
".",
"_SystemData",
"data",
".",
"atoms",
"=",
"list",
"(",
"topology",
".",
"atoms",
"(",
")",
")",
"for",
"atom",
"in",
"data",
".",
"atoms",
":",
"data",
".",
"excludeAtomWith",
".",
"append",
"(",
"[",
"]",
")",
"# Make a list of all bonds",
"for",
"bond",
"in",
"topology",
".",
"bonds",
"(",
")",
":",
"data",
".",
"bonds",
".",
"append",
"(",
"app",
".",
"ForceField",
".",
"_BondData",
"(",
"bond",
"[",
"0",
"]",
".",
"index",
",",
"bond",
"[",
"1",
"]",
".",
"index",
")",
")",
"# Record which atoms are bonded to each other atom",
"bonded_to_atom",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
".",
"atoms",
")",
")",
":",
"bonded_to_atom",
".",
"append",
"(",
"set",
"(",
")",
")",
"data",
".",
"atomBonds",
".",
"append",
"(",
"[",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
".",
"bonds",
")",
")",
":",
"bond",
"=",
"data",
".",
"bonds",
"[",
"i",
"]",
"bonded_to_atom",
"[",
"bond",
".",
"atom1",
"]",
".",
"add",
"(",
"bond",
".",
"atom2",
")",
"bonded_to_atom",
"[",
"bond",
".",
"atom2",
"]",
".",
"add",
"(",
"bond",
".",
"atom1",
")",
"data",
".",
"atomBonds",
"[",
"bond",
".",
"atom1",
"]",
".",
"append",
"(",
"i",
")",
"data",
".",
"atomBonds",
"[",
"bond",
".",
"atom2",
"]",
".",
"append",
"(",
"i",
")",
"# TODO: Better way to lookup nonbonded parameters...?",
"nonbonded_params",
"=",
"None",
"for",
"generator",
"in",
"self",
".",
"getGenerators",
"(",
")",
":",
"if",
"isinstance",
"(",
"generator",
",",
"NonbondedGenerator",
")",
":",
"nonbonded_params",
"=",
"generator",
".",
"params",
".",
"paramsForType",
"break",
"for",
"chain",
"in",
"topology",
".",
"chains",
"(",
")",
":",
"for",
"res",
"in",
"chain",
".",
"residues",
"(",
")",
":",
"for",
"atom",
"in",
"res",
".",
"atoms",
"(",
")",
":",
"data",
".",
"atomType",
"[",
"atom",
"]",
"=",
"atom",
".",
"id",
"if",
"nonbonded_params",
":",
"params",
"=",
"nonbonded_params",
"[",
"atom",
".",
"id",
"]",
"data",
".",
"atomParameters",
"[",
"atom",
"]",
"=",
"params",
"# Create the System and add atoms",
"sys",
"=",
"mm",
".",
"System",
"(",
")",
"for",
"atom",
"in",
"topology",
".",
"atoms",
"(",
")",
":",
"# Look up the atom type name, returning a helpful error message if it cannot be found.",
"if",
"atom",
"not",
"in",
"data",
".",
"atomType",
":",
"raise",
"Exception",
"(",
"\"Could not identify atom type for atom '%s'.\"",
"%",
"str",
"(",
"atom",
")",
")",
"typename",
"=",
"data",
".",
"atomType",
"[",
"atom",
"]",
"# Look up the type name in the list of registered atom types, returning a helpful error message if it cannot be found.",
"if",
"typename",
"not",
"in",
"self",
".",
"_atomTypes",
":",
"msg",
"=",
"\"Could not find typename '%s' for atom '%s' in list of known atom types.\\n\"",
"%",
"(",
"typename",
",",
"str",
"(",
"atom",
")",
")",
"msg",
"+=",
"\"Known atom types are: %s\"",
"%",
"str",
"(",
"self",
".",
"_atomTypes",
".",
"keys",
"(",
")",
")",
"raise",
"Exception",
"(",
"msg",
")",
"# Add the particle to the OpenMM system.",
"mass",
"=",
"self",
".",
"_atomTypes",
"[",
"typename",
"]",
".",
"mass",
"sys",
".",
"addParticle",
"(",
"mass",
")",
"# Adjust hydrogen masses if requested.",
"if",
"hydrogenMass",
"is",
"not",
"None",
":",
"if",
"not",
"u",
".",
"is_quantity",
"(",
"hydrogenMass",
")",
":",
"hydrogenMass",
"*=",
"u",
".",
"dalton",
"for",
"atom1",
",",
"atom2",
"in",
"topology",
".",
"bonds",
"(",
")",
":",
"if",
"atom1",
".",
"element",
"==",
"elem",
".",
"hydrogen",
":",
"(",
"atom1",
",",
"atom2",
")",
"=",
"(",
"atom2",
",",
"atom1",
")",
"if",
"atom2",
".",
"element",
"==",
"elem",
".",
"hydrogen",
"and",
"atom1",
".",
"element",
"not",
"in",
"(",
"elem",
".",
"hydrogen",
",",
"None",
")",
":",
"transfer_mass",
"=",
"hydrogenMass",
"-",
"sys",
".",
"getParticleMass",
"(",
"atom2",
".",
"index",
")",
"sys",
".",
"setParticleMass",
"(",
"atom2",
".",
"index",
",",
"hydrogenMass",
")",
"mass",
"=",
"sys",
".",
"getParticleMass",
"(",
"atom1",
".",
"index",
")",
"-",
"transfer_mass",
"sys",
".",
"setParticleMass",
"(",
"atom1",
".",
"index",
",",
"mass",
")",
"# Set periodic boundary conditions.",
"box_vectors",
"=",
"topology",
".",
"getPeriodicBoxVectors",
"(",
")",
"if",
"box_vectors",
"is",
"not",
"None",
":",
"sys",
".",
"setDefaultPeriodicBoxVectors",
"(",
"box_vectors",
"[",
"0",
"]",
",",
"box_vectors",
"[",
"1",
"]",
",",
"box_vectors",
"[",
"2",
"]",
")",
"elif",
"nonbondedMethod",
"not",
"in",
"[",
"NoCutoff",
",",
"CutoffNonPeriodic",
"]",
":",
"raise",
"ValueError",
"(",
"'Requested periodic boundary conditions for a '",
"'Topology that does not specify periodic box '",
"'dimensions'",
")",
"# Make a list of all unique angles",
"unique_angles",
"=",
"set",
"(",
")",
"for",
"bond",
"in",
"data",
".",
"bonds",
":",
"for",
"atom",
"in",
"bonded_to_atom",
"[",
"bond",
".",
"atom1",
"]",
":",
"if",
"atom",
"!=",
"bond",
".",
"atom2",
":",
"if",
"atom",
"<",
"bond",
".",
"atom2",
":",
"unique_angles",
".",
"add",
"(",
"(",
"atom",
",",
"bond",
".",
"atom1",
",",
"bond",
".",
"atom2",
")",
")",
"else",
":",
"unique_angles",
".",
"add",
"(",
"(",
"bond",
".",
"atom2",
",",
"bond",
".",
"atom1",
",",
"atom",
")",
")",
"for",
"atom",
"in",
"bonded_to_atom",
"[",
"bond",
".",
"atom2",
"]",
":",
"if",
"atom",
"!=",
"bond",
".",
"atom1",
":",
"if",
"atom",
">",
"bond",
".",
"atom1",
":",
"unique_angles",
".",
"add",
"(",
"(",
"bond",
".",
"atom1",
",",
"bond",
".",
"atom2",
",",
"atom",
")",
")",
"else",
":",
"unique_angles",
".",
"add",
"(",
"(",
"atom",
",",
"bond",
".",
"atom2",
",",
"bond",
".",
"atom1",
")",
")",
"data",
".",
"angles",
"=",
"sorted",
"(",
"list",
"(",
"unique_angles",
")",
")",
"# Make a list of all unique proper torsions",
"unique_propers",
"=",
"set",
"(",
")",
"for",
"angle",
"in",
"data",
".",
"angles",
":",
"for",
"atom",
"in",
"bonded_to_atom",
"[",
"angle",
"[",
"0",
"]",
"]",
":",
"if",
"atom",
"not",
"in",
"angle",
":",
"if",
"atom",
"<",
"angle",
"[",
"2",
"]",
":",
"unique_propers",
".",
"add",
"(",
"(",
"atom",
",",
"angle",
"[",
"0",
"]",
",",
"angle",
"[",
"1",
"]",
",",
"angle",
"[",
"2",
"]",
")",
")",
"else",
":",
"unique_propers",
".",
"add",
"(",
"(",
"angle",
"[",
"2",
"]",
",",
"angle",
"[",
"1",
"]",
",",
"angle",
"[",
"0",
"]",
",",
"atom",
")",
")",
"for",
"atom",
"in",
"bonded_to_atom",
"[",
"angle",
"[",
"2",
"]",
"]",
":",
"if",
"atom",
"not",
"in",
"angle",
":",
"if",
"atom",
">",
"angle",
"[",
"0",
"]",
":",
"unique_propers",
".",
"add",
"(",
"(",
"angle",
"[",
"0",
"]",
",",
"angle",
"[",
"1",
"]",
",",
"angle",
"[",
"2",
"]",
",",
"atom",
")",
")",
"else",
":",
"unique_propers",
".",
"add",
"(",
"(",
"atom",
",",
"angle",
"[",
"2",
"]",
",",
"angle",
"[",
"1",
"]",
",",
"angle",
"[",
"0",
"]",
")",
")",
"data",
".",
"propers",
"=",
"sorted",
"(",
"list",
"(",
"unique_propers",
")",
")",
"# Make a list of all unique improper torsions",
"for",
"atom",
"in",
"range",
"(",
"len",
"(",
"bonded_to_atom",
")",
")",
":",
"bonded_to",
"=",
"bonded_to_atom",
"[",
"atom",
"]",
"if",
"len",
"(",
"bonded_to",
")",
">",
"2",
":",
"for",
"subset",
"in",
"itertools",
".",
"combinations",
"(",
"bonded_to",
",",
"3",
")",
":",
"data",
".",
"impropers",
".",
"append",
"(",
"(",
"atom",
",",
"subset",
"[",
"0",
"]",
",",
"subset",
"[",
"1",
"]",
",",
"subset",
"[",
"2",
"]",
")",
")",
"# Identify bonds that should be implemented with constraints",
"if",
"constraints",
"==",
"AllBonds",
"or",
"constraints",
"==",
"HAngles",
":",
"for",
"bond",
"in",
"data",
".",
"bonds",
":",
"bond",
".",
"isConstrained",
"=",
"True",
"elif",
"constraints",
"==",
"HBonds",
":",
"for",
"bond",
"in",
"data",
".",
"bonds",
":",
"atom1",
"=",
"data",
".",
"atoms",
"[",
"bond",
".",
"atom1",
"]",
"atom2",
"=",
"data",
".",
"atoms",
"[",
"bond",
".",
"atom2",
"]",
"bond",
".",
"isConstrained",
"=",
"atom1",
".",
"name",
".",
"startswith",
"(",
"'H'",
")",
"or",
"atom2",
".",
"name",
".",
"startswith",
"(",
"'H'",
")",
"if",
"rigidWater",
":",
"for",
"bond",
"in",
"data",
".",
"bonds",
":",
"atom1",
"=",
"data",
".",
"atoms",
"[",
"bond",
".",
"atom1",
"]",
"atom2",
"=",
"data",
".",
"atoms",
"[",
"bond",
".",
"atom2",
"]",
"if",
"atom1",
".",
"residue",
".",
"name",
"==",
"'HOH'",
"and",
"atom2",
".",
"residue",
".",
"name",
"==",
"'HOH'",
":",
"bond",
".",
"isConstrained",
"=",
"True",
"# Identify angles that should be implemented with constraints",
"if",
"constraints",
"==",
"HAngles",
":",
"for",
"angle",
"in",
"data",
".",
"angles",
":",
"atom1",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"0",
"]",
"]",
"atom2",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"1",
"]",
"]",
"atom3",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"2",
"]",
"]",
"numH",
"=",
"0",
"if",
"atom1",
".",
"name",
".",
"startswith",
"(",
"'H'",
")",
":",
"numH",
"+=",
"1",
"if",
"atom3",
".",
"name",
".",
"startswith",
"(",
"'H'",
")",
":",
"numH",
"+=",
"1",
"data",
".",
"isAngleConstrained",
".",
"append",
"(",
"numH",
"==",
"2",
"or",
"(",
"numH",
"==",
"1",
"and",
"atom2",
".",
"name",
".",
"startswith",
"(",
"'O'",
")",
")",
")",
"else",
":",
"data",
".",
"isAngleConstrained",
"=",
"len",
"(",
"data",
".",
"angles",
")",
"*",
"[",
"False",
"]",
"if",
"rigidWater",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"data",
".",
"angles",
")",
")",
":",
"angle",
"=",
"data",
".",
"angles",
"[",
"i",
"]",
"atom1",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"0",
"]",
"]",
"atom2",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"1",
"]",
"]",
"atom3",
"=",
"data",
".",
"atoms",
"[",
"angle",
"[",
"2",
"]",
"]",
"if",
"atom1",
".",
"residue",
".",
"name",
"==",
"'HOH'",
"and",
"atom2",
".",
"residue",
".",
"name",
"==",
"'HOH'",
"and",
"atom3",
".",
"residue",
".",
"name",
"==",
"'HOH'",
":",
"data",
".",
"isAngleConstrained",
"[",
"i",
"]",
"=",
"True",
"# Add virtual sites",
"for",
"atom",
"in",
"data",
".",
"virtualSites",
":",
"(",
"site",
",",
"atoms",
",",
"excludeWith",
")",
"=",
"data",
".",
"virtualSites",
"[",
"atom",
"]",
"index",
"=",
"atom",
".",
"index",
"data",
".",
"excludeAtomWith",
"[",
"excludeWith",
"]",
".",
"append",
"(",
"index",
")",
"if",
"site",
".",
"type",
"==",
"'average2'",
":",
"sys",
".",
"setVirtualSite",
"(",
"index",
",",
"mm",
".",
"TwoParticleAverageSite",
"(",
"atoms",
"[",
"0",
"]",
",",
"atoms",
"[",
"1",
"]",
",",
"site",
".",
"weights",
"[",
"0",
"]",
",",
"site",
".",
"weights",
"[",
"1",
"]",
")",
")",
"elif",
"site",
".",
"type",
"==",
"'average3'",
":",
"sys",
".",
"setVirtualSite",
"(",
"index",
",",
"mm",
".",
"ThreeParticleAverageSite",
"(",
"atoms",
"[",
"0",
"]",
",",
"atoms",
"[",
"1",
"]",
",",
"atoms",
"[",
"2",
"]",
",",
"site",
".",
"weights",
"[",
"0",
"]",
",",
"site",
".",
"weights",
"[",
"1",
"]",
",",
"site",
".",
"weights",
"[",
"2",
"]",
")",
")",
"elif",
"site",
".",
"type",
"==",
"'outOfPlane'",
":",
"sys",
".",
"setVirtualSite",
"(",
"index",
",",
"mm",
".",
"OutOfPlaneSite",
"(",
"atoms",
"[",
"0",
"]",
",",
"atoms",
"[",
"1",
"]",
",",
"atoms",
"[",
"2",
"]",
",",
"site",
".",
"weights",
"[",
"0",
"]",
",",
"site",
".",
"weights",
"[",
"1",
"]",
",",
"site",
".",
"weights",
"[",
"2",
"]",
")",
")",
"elif",
"site",
".",
"type",
"==",
"'localCoords'",
":",
"local_coord_site",
"=",
"mm",
".",
"LocalCoordinatesSite",
"(",
"atoms",
"[",
"0",
"]",
",",
"atoms",
"[",
"1",
"]",
",",
"atoms",
"[",
"2",
"]",
",",
"mm",
".",
"Vec3",
"(",
"site",
".",
"originWeights",
"[",
"0",
"]",
",",
"site",
".",
"originWeights",
"[",
"1",
"]",
",",
"site",
".",
"originWeights",
"[",
"2",
"]",
")",
",",
"mm",
".",
"Vec3",
"(",
"site",
".",
"xWeights",
"[",
"0",
"]",
",",
"site",
".",
"xWeights",
"[",
"1",
"]",
",",
"site",
".",
"xWeights",
"[",
"2",
"]",
")",
",",
"mm",
".",
"Vec3",
"(",
"site",
".",
"yWeights",
"[",
"0",
"]",
",",
"site",
".",
"yWeights",
"[",
"1",
"]",
",",
"site",
".",
"yWeights",
"[",
"2",
"]",
")",
",",
"mm",
".",
"Vec3",
"(",
"site",
".",
"localPos",
"[",
"0",
"]",
",",
"site",
".",
"localPos",
"[",
"1",
"]",
",",
"site",
".",
"localPos",
"[",
"2",
"]",
")",
")",
"sys",
".",
"setVirtualSite",
"(",
"index",
",",
"local_coord_site",
")",
"# Add forces to the System",
"for",
"force",
"in",
"self",
".",
"_forces",
":",
"force",
".",
"createForce",
"(",
"sys",
",",
"data",
",",
"nonbondedMethod",
",",
"nonbondedCutoff",
",",
"args",
")",
"if",
"removeCMMotion",
":",
"sys",
".",
"addForce",
"(",
"mm",
".",
"CMMotionRemover",
"(",
")",
")",
"# Let force generators do postprocessing",
"for",
"force",
"in",
"self",
".",
"_forces",
":",
"if",
"'postprocessSystem'",
"in",
"dir",
"(",
"force",
")",
":",
"force",
".",
"postprocessSystem",
"(",
"sys",
",",
"data",
",",
"args",
")",
"# Execute scripts found in the XML files.",
"for",
"script",
"in",
"self",
".",
"_scripts",
":",
"exec",
"(",
"script",
",",
"locals",
"(",
")",
")",
"return",
"sys"
] |
Construct an OpenMM System representing a Topology with this force field.
Parameters
----------
topology : Topology
The Topology for which to create a System
nonbondedMethod : object=NoCutoff
The method to use for nonbonded interactions. Allowed values are
NoCutoff, CutoffNonPeriodic, CutoffPeriodic, Ewald, or PME.
nonbondedCutoff : distance=1*nanometer
The cutoff distance to use for nonbonded interactions
constraints : object=None
Specifies which bonds and angles should be implemented with constraints.
Allowed values are None, HBonds, AllBonds, or HAngles.
rigidWater : boolean=True
If true, water molecules will be fully rigid regardless of the value
passed for the constraints argument
removeCMMotion : boolean=True
If true, a CMMotionRemover will be added to the System
hydrogenMass : mass=None
The mass to use for hydrogen atoms bound to heavy atoms. Any mass
added to a hydrogen is subtracted from the heavy atom to keep
their total mass the same.
args
Arbitrary additional keyword arguments may also be specified.
This allows extra parameters to be specified that are specific to
particular force fields.
Returns
-------
system
the newly created System
|
[
"Construct",
"an",
"OpenMM",
"System",
"representing",
"a",
"Topology",
"with",
"this",
"force",
"field",
"."
] |
train
|
https://github.com/mosdef-hub/foyer/blob/9e39c71208fc01a6cc7b7cbe5a533c56830681d3/foyer/forcefield.py#L495-L734
|
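A hedged sketch of calling createSystem() directly; it assumes a topology whose atoms already carry foyer atom-type ids in atom.id (for example, the result of run_atomtyping above) and uses the older simtk import path that this module relies on:

from simtk.openmm.app import NoCutoff
from foyer import Forcefield

ff = Forcefield(name='oplsaa')
typed_topology = ff.run_atomtyping(topology)    # `topology` is an existing openmm.app.Topology
system = ff.createSystem(typed_topology,
                         nonbondedMethod=NoCutoff,
                         constraints=None,
                         rigidWater=True)
print(system.getNumParticles())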
mgedmin/check-manifest
|
check_manifest.py
|
run
|
def run(command, encoding=None, decode=True, cwd=None):
"""Run a command [cmd, arg1, arg2, ...].
Returns the output (stdout + stderr).
Raises CommandFailed in cases of error.
"""
if not encoding:
encoding = locale.getpreferredencoding()
try:
with open(os.devnull, 'rb') as devnull:
pipe = subprocess.Popen(command, stdin=devnull,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd=cwd)
except OSError as e:
raise Failure("could not run %s: %s" % (command, e))
output = pipe.communicate()[0]
if decode:
output = output.decode(encoding)
status = pipe.wait()
if status != 0:
raise CommandFailed(command, status, output)
return output
|
python
|
def run(command, encoding=None, decode=True, cwd=None):
if not encoding:
encoding = locale.getpreferredencoding()
try:
with open(os.devnull, 'rb') as devnull:
pipe = subprocess.Popen(command, stdin=devnull,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd=cwd)
except OSError as e:
raise Failure("could not run %s: %s" % (command, e))
output = pipe.communicate()[0]
if decode:
output = output.decode(encoding)
status = pipe.wait()
if status != 0:
raise CommandFailed(command, status, output)
return output
|
[
"def",
"run",
"(",
"command",
",",
"encoding",
"=",
"None",
",",
"decode",
"=",
"True",
",",
"cwd",
"=",
"None",
")",
":",
"if",
"not",
"encoding",
":",
"encoding",
"=",
"locale",
".",
"getpreferredencoding",
"(",
")",
"try",
":",
"with",
"open",
"(",
"os",
".",
"devnull",
",",
"'rb'",
")",
"as",
"devnull",
":",
"pipe",
"=",
"subprocess",
".",
"Popen",
"(",
"command",
",",
"stdin",
"=",
"devnull",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
",",
"cwd",
"=",
"cwd",
")",
"except",
"OSError",
"as",
"e",
":",
"raise",
"Failure",
"(",
"\"could not run %s: %s\"",
"%",
"(",
"command",
",",
"e",
")",
")",
"output",
"=",
"pipe",
".",
"communicate",
"(",
")",
"[",
"0",
"]",
"if",
"decode",
":",
"output",
"=",
"output",
".",
"decode",
"(",
"encoding",
")",
"status",
"=",
"pipe",
".",
"wait",
"(",
")",
"if",
"status",
"!=",
"0",
":",
"raise",
"CommandFailed",
"(",
"command",
",",
"status",
",",
"output",
")",
"return",
"output"
] |
Run a command [cmd, arg1, arg2, ...].
Returns the output (stdout + stderr).
Raises CommandFailed in cases of error.
|
[
"Run",
"a",
"command",
"[",
"cmd",
"arg1",
"arg2",
"...",
"]",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L138-L160
|
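A small usage sketch for the run() helper above (the repository path is hypothetical):

try:
    listing = run(['git', 'ls-files'], cwd='src')   # stdout and stderr, decoded into one string
except CommandFailed as exc:
    print('command failed:', exc)
else:
    print(listing.splitlines()[:5])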
mgedmin/check-manifest
|
check_manifest.py
|
cd
|
def cd(directory):
"""Change the current working directory, temporarily.
Use as a context manager: with cd(d): ...
"""
old_dir = os.getcwd()
try:
os.chdir(directory)
yield
finally:
os.chdir(old_dir)
|
python
|
def cd(directory):
old_dir = os.getcwd()
try:
os.chdir(directory)
yield
finally:
os.chdir(old_dir)
|
[
"def",
"cd",
"(",
"directory",
")",
":",
"old_dir",
"=",
"os",
".",
"getcwd",
"(",
")",
"try",
":",
"os",
".",
"chdir",
"(",
"directory",
")",
"yield",
"finally",
":",
"os",
".",
"chdir",
"(",
"old_dir",
")"
] |
Change the current working directory, temporarily.
Use as a context manager: with cd(d): ...
|
[
"Change",
"the",
"current",
"working",
"directory",
"temporarily",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L164-L174
|
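cd() is written as a generator; in the original module it is wrapped with contextlib.contextmanager (not shown in the extracted snippet), so it is driven with a with-statement:

import os

print(os.getcwd())          # original working directory
with cd('/tmp'):            # any existing directory
    print(os.getcwd())      # now /tmp
print(os.getcwd())          # restored, even if the body had raised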
mgedmin/check-manifest
|
check_manifest.py
|
mkdtemp
|
def mkdtemp(hint=''):
"""Create a temporary directory, then clean it up.
Use as a context manager: with mkdtemp('-purpose'): ...
"""
dirname = tempfile.mkdtemp(prefix='check-manifest-', suffix=hint)
try:
yield dirname
finally:
rmtree(dirname)
|
python
|
def mkdtemp(hint=''):
dirname = tempfile.mkdtemp(prefix='check-manifest-', suffix=hint)
try:
yield dirname
finally:
rmtree(dirname)
|
[
"def",
"mkdtemp",
"(",
"hint",
"=",
"''",
")",
":",
"dirname",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"prefix",
"=",
"'check-manifest-'",
",",
"suffix",
"=",
"hint",
")",
"try",
":",
"yield",
"dirname",
"finally",
":",
"rmtree",
"(",
"dirname",
")"
] |
Create a temporary directory, then clean it up.
Use as a context manager: with mkdtemp('-purpose'): ...
|
[
"Create",
"a",
"temporary",
"directory",
"then",
"clean",
"it",
"up",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L178-L187
|
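Like cd() above, mkdtemp() is used through contextlib.contextmanager in the original module. A usage sketch, with copy_files() from further down this dump and file names that are assumed to exist:

import os

with mkdtemp('-sources') as tmpdir:
    copy_files(['setup.py', 'README.rst'], tmpdir)  # paths relative to the current directory
    print(sorted(os.listdir(tmpdir)))
# the temporary tree is removed on exit, even on error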
mgedmin/check-manifest
|
check_manifest.py
|
chmod_plus
|
def chmod_plus(path, add_bits=stat.S_IWUSR):
"""Change a file's mode by adding a few bits.
Like chmod +<bits> <path> in a Unix shell.
"""
try:
os.chmod(path, stat.S_IMODE(os.stat(path).st_mode) | add_bits)
except OSError: # pragma: nocover
pass
|
python
|
def chmod_plus(path, add_bits=stat.S_IWUSR):
try:
os.chmod(path, stat.S_IMODE(os.stat(path).st_mode) | add_bits)
except OSError:
pass
|
[
"def",
"chmod_plus",
"(",
"path",
",",
"add_bits",
"=",
"stat",
".",
"S_IWUSR",
")",
":",
"try",
":",
"os",
".",
"chmod",
"(",
"path",
",",
"stat",
".",
"S_IMODE",
"(",
"os",
".",
"stat",
"(",
"path",
")",
".",
"st_mode",
")",
"|",
"add_bits",
")",
"except",
"OSError",
":",
"# pragma: nocover",
"pass"
] |
Change a file's mode by adding a few bits.
Like chmod +<bits> <path> in a Unix shell.
|
[
"Change",
"a",
"file",
"s",
"mode",
"by",
"adding",
"a",
"few",
"bits",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L190-L198
|
mgedmin/check-manifest
|
check_manifest.py
|
rmtree
|
def rmtree(path):
"""A version of rmtree that can deal with read-only files and directories.
Needed because the stock shutil.rmtree() fails with an access error
when there are read-only files in the directory on Windows, or when the
directory itself is read-only on Unix.
"""
def onerror(func, path, exc_info):
# Did you know what on Python 3.3 on Windows os.remove() and
# os.unlink() are distinct functions?
if func is os.remove or func is os.unlink or func is os.rmdir:
if sys.platform != 'win32':
chmod_plus(os.path.dirname(path), stat.S_IWUSR | stat.S_IXUSR)
chmod_plus(path)
func(path)
else:
raise
shutil.rmtree(path, onerror=onerror)
|
python
|
def rmtree(path):
def onerror(func, path, exc_info):
if func is os.remove or func is os.unlink or func is os.rmdir:
if sys.platform != 'win32':
chmod_plus(os.path.dirname(path), stat.S_IWUSR | stat.S_IXUSR)
chmod_plus(path)
func(path)
else:
raise
shutil.rmtree(path, onerror=onerror)
|
[
"def",
"rmtree",
"(",
"path",
")",
":",
"def",
"onerror",
"(",
"func",
",",
"path",
",",
"exc_info",
")",
":",
"# Did you know what on Python 3.3 on Windows os.remove() and",
"# os.unlink() are distinct functions?",
"if",
"func",
"is",
"os",
".",
"remove",
"or",
"func",
"is",
"os",
".",
"unlink",
"or",
"func",
"is",
"os",
".",
"rmdir",
":",
"if",
"sys",
".",
"platform",
"!=",
"'win32'",
":",
"chmod_plus",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
",",
"stat",
".",
"S_IWUSR",
"|",
"stat",
".",
"S_IXUSR",
")",
"chmod_plus",
"(",
"path",
")",
"func",
"(",
"path",
")",
"else",
":",
"raise",
"shutil",
".",
"rmtree",
"(",
"path",
",",
"onerror",
"=",
"onerror",
")"
] |
A version of rmtree that can deal with read-only files and directories.
Needed because the stock shutil.rmtree() fails with an access error
when there are read-only files in the directory on Windows, or when the
directory itself is read-only on Unix.
|
[
"A",
"version",
"of",
"rmtree",
"that",
"can",
"deal",
"with",
"read",
"-",
"only",
"files",
"and",
"directories",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L201-L218
|
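A sketch of the situation the onerror handler exists for: removing a tree that contains a read-only entry, which a plain shutil.rmtree() can fail on (notably on Windows):

import os, stat

os.makedirs('scratch/data')
open('scratch/data/readonly.txt', 'w').close()
os.chmod('scratch/data/readonly.txt', stat.S_IRUSR)  # drop the write bit
rmtree('scratch')                                    # write bits are restored on demand and removal is retried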
mgedmin/check-manifest
|
check_manifest.py
|
copy_files
|
def copy_files(filelist, destdir):
"""Copy a list of files to destdir, preserving directory structure.
File names should be relative to the current working directory.
"""
for filename in filelist:
destfile = os.path.join(destdir, filename)
# filename should not be absolute, but let's double-check
assert destfile.startswith(destdir + os.path.sep)
destfiledir = os.path.dirname(destfile)
if not os.path.isdir(destfiledir):
os.makedirs(destfiledir)
if os.path.isdir(filename):
os.mkdir(destfile)
else:
shutil.copy2(filename, destfile)
|
python
|
def copy_files(filelist, destdir):
for filename in filelist:
destfile = os.path.join(destdir, filename)
assert destfile.startswith(destdir + os.path.sep)
destfiledir = os.path.dirname(destfile)
if not os.path.isdir(destfiledir):
os.makedirs(destfiledir)
if os.path.isdir(filename):
os.mkdir(destfile)
else:
shutil.copy2(filename, destfile)
|
[
"def",
"copy_files",
"(",
"filelist",
",",
"destdir",
")",
":",
"for",
"filename",
"in",
"filelist",
":",
"destfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"destdir",
",",
"filename",
")",
"# filename should not be absolute, but let's double-check",
"assert",
"destfile",
".",
"startswith",
"(",
"destdir",
"+",
"os",
".",
"path",
".",
"sep",
")",
"destfiledir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"destfile",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"destfiledir",
")",
":",
"os",
".",
"makedirs",
"(",
"destfiledir",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"filename",
")",
":",
"os",
".",
"mkdir",
"(",
"destfile",
")",
"else",
":",
"shutil",
".",
"copy2",
"(",
"filename",
",",
"destfile",
")"
] |
Copy a list of files to destdir, preserving directory structure.
File names should be relative to the current working directory.
|
[
"Copy",
"a",
"list",
"of",
"files",
"to",
"destdir",
"preserving",
"directory",
"structure",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L221-L236
|
mgedmin/check-manifest
|
check_manifest.py
|
get_one_file_in
|
def get_one_file_in(dirname):
"""Return the pathname of the one file in a directory.
Raises if the directory has no files or more than one file.
"""
files = os.listdir(dirname)
if len(files) > 1:
raise Failure('More than one file exists in %s:\n%s' %
(dirname, '\n'.join(sorted(files))))
elif not files:
raise Failure('No files found in %s' % dirname)
return os.path.join(dirname, files[0])
|
python
|
def get_one_file_in(dirname):
files = os.listdir(dirname)
if len(files) > 1:
raise Failure('More than one file exists in %s:\n%s' %
(dirname, '\n'.join(sorted(files))))
elif not files:
raise Failure('No files found in %s' % dirname)
return os.path.join(dirname, files[0])
|
[
"def",
"get_one_file_in",
"(",
"dirname",
")",
":",
"files",
"=",
"os",
".",
"listdir",
"(",
"dirname",
")",
"if",
"len",
"(",
"files",
")",
">",
"1",
":",
"raise",
"Failure",
"(",
"'More than one file exists in %s:\\n%s'",
"%",
"(",
"dirname",
",",
"'\\n'",
".",
"join",
"(",
"sorted",
"(",
"files",
")",
")",
")",
")",
"elif",
"not",
"files",
":",
"raise",
"Failure",
"(",
"'No files found in %s'",
"%",
"dirname",
")",
"return",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"files",
"[",
"0",
"]",
")"
] |
Return the pathname of the one file in a directory.
Raises if the directory has no files or more than one file.
|
[
"Return",
"the",
"pathname",
"of",
"the",
"one",
"file",
"in",
"a",
"directory",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L239-L250
|
mgedmin/check-manifest
|
check_manifest.py
|
unicodify
|
def unicodify(filename):
"""Make sure filename is Unicode.
Because the tarfile module on Python 2 doesn't return Unicode.
"""
if isinstance(filename, bytes):
return filename.decode(locale.getpreferredencoding())
else:
return filename
|
python
|
def unicodify(filename):
if isinstance(filename, bytes):
return filename.decode(locale.getpreferredencoding())
else:
return filename
|
[
"def",
"unicodify",
"(",
"filename",
")",
":",
"if",
"isinstance",
"(",
"filename",
",",
"bytes",
")",
":",
"return",
"filename",
".",
"decode",
"(",
"locale",
".",
"getpreferredencoding",
"(",
")",
")",
"else",
":",
"return",
"filename"
] |
Make sure filename is Unicode.
Because the tarfile module on Python 2 doesn't return Unicode.
|
[
"Make",
"sure",
"filename",
"is",
"Unicode",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L253-L261
|
mgedmin/check-manifest
|
check_manifest.py
|
get_archive_file_list
|
def get_archive_file_list(archive_filename):
"""Return the list of files in an archive.
Supports .tar.gz and .zip.
"""
if archive_filename.endswith('.zip'):
with closing(zipfile.ZipFile(archive_filename)) as zf:
return add_directories(zf.namelist())
elif archive_filename.endswith(('.tar.gz', '.tar.bz2', '.tar')):
with closing(tarfile.open(archive_filename)) as tf:
return add_directories(list(map(unicodify, tf.getnames())))
else:
ext = os.path.splitext(archive_filename)[-1]
raise Failure('Unrecognized archive type: %s' % ext)
|
python
|
def get_archive_file_list(archive_filename):
if archive_filename.endswith('.zip'):
with closing(zipfile.ZipFile(archive_filename)) as zf:
return add_directories(zf.namelist())
elif archive_filename.endswith(('.tar.gz', '.tar.bz2', '.tar')):
with closing(tarfile.open(archive_filename)) as tf:
return add_directories(list(map(unicodify, tf.getnames())))
else:
ext = os.path.splitext(archive_filename)[-1]
raise Failure('Unrecognized archive type: %s' % ext)
|
[
"def",
"get_archive_file_list",
"(",
"archive_filename",
")",
":",
"if",
"archive_filename",
".",
"endswith",
"(",
"'.zip'",
")",
":",
"with",
"closing",
"(",
"zipfile",
".",
"ZipFile",
"(",
"archive_filename",
")",
")",
"as",
"zf",
":",
"return",
"add_directories",
"(",
"zf",
".",
"namelist",
"(",
")",
")",
"elif",
"archive_filename",
".",
"endswith",
"(",
"(",
"'.tar.gz'",
",",
"'.tar.bz2'",
",",
"'.tar'",
")",
")",
":",
"with",
"closing",
"(",
"tarfile",
".",
"open",
"(",
"archive_filename",
")",
")",
"as",
"tf",
":",
"return",
"add_directories",
"(",
"list",
"(",
"map",
"(",
"unicodify",
",",
"tf",
".",
"getnames",
"(",
")",
")",
")",
")",
"else",
":",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"archive_filename",
")",
"[",
"-",
"1",
"]",
"raise",
"Failure",
"(",
"'Unrecognized archive type: %s'",
"%",
"ext",
")"
] |
Return the list of files in an archive.
Supports .tar.gz and .zip.
|
[
"Return",
"the",
"list",
"of",
"files",
"in",
"an",
"archive",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L264-L277
|
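Usage sketch (the archive path is hypothetical):

sdist_contents = get_archive_file_list('dist/mypkg-1.0.tar.gz')
print('\n'.join(sdist_contents[:10]))   # directories are included thanks to add_directories()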
mgedmin/check-manifest
|
check_manifest.py
|
strip_toplevel_name
|
def strip_toplevel_name(filelist):
"""Strip toplevel name from a file list.
>>> strip_toplevel_name(['a', 'a/b', 'a/c', 'a/c/d'])
['b', 'c', 'c/d']
>>> strip_toplevel_name(['a/b', 'a/c', 'a/c/d'])
['b', 'c', 'c/d']
"""
if not filelist:
return filelist
prefix = filelist[0]
if '/' in prefix:
prefix = prefix.partition('/')[0] + '/'
names = filelist
else:
prefix = prefix + '/'
names = filelist[1:]
for name in names:
if not name.startswith(prefix):
raise Failure("File doesn't have the common prefix (%s): %s"
% (name, prefix))
return [name[len(prefix):] for name in names]
|
python
|
def strip_toplevel_name(filelist):
if not filelist:
return filelist
prefix = filelist[0]
if '/' in prefix:
prefix = prefix.partition('/')[0] + '/'
names = filelist
else:
prefix = prefix + '/'
names = filelist[1:]
for name in names:
if not name.startswith(prefix):
raise Failure("File doesn't have the common prefix (%s): %s"
% (name, prefix))
return [name[len(prefix):] for name in names]
|
[
"def",
"strip_toplevel_name",
"(",
"filelist",
")",
":",
"if",
"not",
"filelist",
":",
"return",
"filelist",
"prefix",
"=",
"filelist",
"[",
"0",
"]",
"if",
"'/'",
"in",
"prefix",
":",
"prefix",
"=",
"prefix",
".",
"partition",
"(",
"'/'",
")",
"[",
"0",
"]",
"+",
"'/'",
"names",
"=",
"filelist",
"else",
":",
"prefix",
"=",
"prefix",
"+",
"'/'",
"names",
"=",
"filelist",
"[",
"1",
":",
"]",
"for",
"name",
"in",
"names",
":",
"if",
"not",
"name",
".",
"startswith",
"(",
"prefix",
")",
":",
"raise",
"Failure",
"(",
"\"File doesn't have the common prefix (%s): %s\"",
"%",
"(",
"name",
",",
"prefix",
")",
")",
"return",
"[",
"name",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"for",
"name",
"in",
"names",
"]"
] |
Strip toplevel name from a file list.
>>> strip_toplevel_name(['a', 'a/b', 'a/c', 'a/c/d'])
['b', 'c', 'c/d']
>>> strip_toplevel_name(['a/b', 'a/c', 'a/c/d'])
['b', 'c', 'c/d']
|
[
"Strip",
"toplevel",
"name",
"from",
"a",
"file",
"list",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L280-L303
|
mgedmin/check-manifest
|
check_manifest.py
|
detect_vcs
|
def detect_vcs():
"""Detect the version control system used for the current directory."""
location = os.path.abspath('.')
while True:
for vcs in Git, Mercurial, Bazaar, Subversion:
if vcs.detect(location):
return vcs
parent = os.path.dirname(location)
if parent == location:
raise Failure("Couldn't find version control data"
" (git/hg/bzr/svn supported)")
location = parent
|
python
|
def detect_vcs():
location = os.path.abspath('.')
while True:
for vcs in Git, Mercurial, Bazaar, Subversion:
if vcs.detect(location):
return vcs
parent = os.path.dirname(location)
if parent == location:
raise Failure("Couldn't find version control data"
" (git/hg/bzr/svn supported)")
location = parent
|
[
"def",
"detect_vcs",
"(",
")",
":",
"location",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"'.'",
")",
"while",
"True",
":",
"for",
"vcs",
"in",
"Git",
",",
"Mercurial",
",",
"Bazaar",
",",
"Subversion",
":",
"if",
"vcs",
".",
"detect",
"(",
"location",
")",
":",
"return",
"vcs",
"parent",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"location",
")",
"if",
"parent",
"==",
"location",
":",
"raise",
"Failure",
"(",
"\"Couldn't find version control data\"",
"\" (git/hg/bzr/svn supported)\"",
")",
"location",
"=",
"parent"
] |
Detect the version control system used for the current directory.
|
[
"Detect",
"the",
"version",
"control",
"system",
"used",
"for",
"the",
"current",
"directory",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L465-L476
|
mgedmin/check-manifest
|
check_manifest.py
|
normalize_name
|
def normalize_name(name):
"""Some VCS print directory names with trailing slashes. Strip them.
Easiest is to normalize the path.
And encodings may trip us up too, especially when comparing lists
of files. Plus maybe lowercase versus uppercase.
"""
name = os.path.normpath(name)
name = unicodify(name)
if sys.platform == 'darwin':
# Mac OSX may have problems comparing non-ascii filenames, so
# we convert them.
name = unicodedata.normalize('NFC', name)
return name
|
python
|
def normalize_name(name):
name = os.path.normpath(name)
name = unicodify(name)
if sys.platform == 'darwin':
name = unicodedata.normalize('NFC', name)
return name
|
[
"def",
"normalize_name",
"(",
"name",
")",
":",
"name",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"name",
")",
"name",
"=",
"unicodify",
"(",
"name",
")",
"if",
"sys",
".",
"platform",
"==",
"'darwin'",
":",
"# Mac OSX may have problems comparing non-ascii filenames, so",
"# we convert them.",
"name",
"=",
"unicodedata",
".",
"normalize",
"(",
"'NFC'",
",",
"name",
")",
"return",
"name"
] |
Some VCS print directory names with trailing slashes. Strip them.
Easiest is to normalize the path.
And encodings may trip us up too, especially when comparing lists
of files. Plus maybe lowercase versus uppercase.
|
[
"Some",
"VCS",
"print",
"directory",
"names",
"with",
"trailing",
"slashes",
".",
"Strip",
"them",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L490-L504
|
mgedmin/check-manifest
|
check_manifest.py
|
add_directories
|
def add_directories(names):
"""Git/Mercurial/zip files omit directories, let's add them back."""
res = list(names)
seen = set(names)
for name in names:
while True:
name = os.path.dirname(name)
if not name or name in seen:
break
res.append(name)
seen.add(name)
return sorted(res)
|
python
|
def add_directories(names):
res = list(names)
seen = set(names)
for name in names:
while True:
name = os.path.dirname(name)
if not name or name in seen:
break
res.append(name)
seen.add(name)
return sorted(res)
|
[
"def",
"add_directories",
"(",
"names",
")",
":",
"res",
"=",
"list",
"(",
"names",
")",
"seen",
"=",
"set",
"(",
"names",
")",
"for",
"name",
"in",
"names",
":",
"while",
"True",
":",
"name",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"name",
")",
"if",
"not",
"name",
"or",
"name",
"in",
"seen",
":",
"break",
"res",
".",
"append",
"(",
"name",
")",
"seen",
".",
"add",
"(",
"name",
")",
"return",
"sorted",
"(",
"res",
")"
] |
Git/Mercurial/zip files omit directories, let's add them back.
|
[
"Git",
"/",
"Mercurial",
"/",
"zip",
"files",
"omit",
"directories",
"let",
"s",
"add",
"them",
"back",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L507-L518
|
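A concrete example of the behaviour described in the docstring:

print(add_directories(['pkg/module.py', 'pkg/sub/data.txt']))
# ['pkg', 'pkg/module.py', 'pkg/sub', 'pkg/sub/data.txt']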
mgedmin/check-manifest
|
check_manifest.py
|
read_config
|
def read_config():
"""Read configuration from file if possible."""
# XXX modifies global state, which is kind of evil
config = _load_config()
if config.get(CFG_IGNORE_DEFAULT_RULES[1], False):
del IGNORE[:]
if CFG_IGNORE[1] in config:
IGNORE.extend(p for p in config[CFG_IGNORE[1]] if p)
if CFG_IGNORE_BAD_IDEAS[1] in config:
IGNORE_BAD_IDEAS.extend(p for p in config[CFG_IGNORE_BAD_IDEAS[1]] if p)
|
python
|
def read_config():
config = _load_config()
if config.get(CFG_IGNORE_DEFAULT_RULES[1], False):
del IGNORE[:]
if CFG_IGNORE[1] in config:
IGNORE.extend(p for p in config[CFG_IGNORE[1]] if p)
if CFG_IGNORE_BAD_IDEAS[1] in config:
IGNORE_BAD_IDEAS.extend(p for p in config[CFG_IGNORE_BAD_IDEAS[1]] if p)
|
[
"def",
"read_config",
"(",
")",
":",
"# XXX modifies global state, which is kind of evil",
"config",
"=",
"_load_config",
"(",
")",
"if",
"config",
".",
"get",
"(",
"CFG_IGNORE_DEFAULT_RULES",
"[",
"1",
"]",
",",
"False",
")",
":",
"del",
"IGNORE",
"[",
":",
"]",
"if",
"CFG_IGNORE",
"[",
"1",
"]",
"in",
"config",
":",
"IGNORE",
".",
"extend",
"(",
"p",
"for",
"p",
"in",
"config",
"[",
"CFG_IGNORE",
"[",
"1",
"]",
"]",
"if",
"p",
")",
"if",
"CFG_IGNORE_BAD_IDEAS",
"[",
"1",
"]",
"in",
"config",
":",
"IGNORE_BAD_IDEAS",
".",
"extend",
"(",
"p",
"for",
"p",
"in",
"config",
"[",
"CFG_IGNORE_BAD_IDEAS",
"[",
"1",
"]",
"]",
"if",
"p",
")"
] |
Read configuration from file if possible.
|
[
"Read",
"configuration",
"from",
"file",
"if",
"possible",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L593-L602
|
mgedmin/check-manifest
|
check_manifest.py
|
_load_config
|
def _load_config():
"""Searches for config files, reads them and returns a dictionary
Looks for a ``check-manifest`` section in ``pyproject.toml``,
``setup.cfg``, and ``tox.ini``, in that order. The first file
that exists and has that section will be loaded and returned as a
dictionary.
"""
if os.path.exists("pyproject.toml"):
config = toml.load("pyproject.toml")
if CFG_SECTION_CHECK_MANIFEST in config.get("tool", {}):
return config["tool"][CFG_SECTION_CHECK_MANIFEST]
search_files = ['setup.cfg', 'tox.ini']
config_parser = ConfigParser.ConfigParser()
for filename in search_files:
if (config_parser.read([filename])
and config_parser.has_section(CFG_SECTION_CHECK_MANIFEST)):
config = {}
if config_parser.has_option(*CFG_IGNORE_DEFAULT_RULES):
ignore_defaults = config_parser.getboolean(*CFG_IGNORE_DEFAULT_RULES)
config[CFG_IGNORE_DEFAULT_RULES[1]] = ignore_defaults
if config_parser.has_option(*CFG_IGNORE):
patterns = [
p.strip()
for p in config_parser.get(*CFG_IGNORE).splitlines()
]
config[CFG_IGNORE[1]] = patterns
if config_parser.has_option(*CFG_IGNORE_BAD_IDEAS):
patterns = [
p.strip()
for p in config_parser.get(*CFG_IGNORE_BAD_IDEAS).splitlines()
]
config[CFG_IGNORE_BAD_IDEAS[1]] = patterns
return config
return {}
|
python
|
def _load_config():
if os.path.exists("pyproject.toml"):
config = toml.load("pyproject.toml")
if CFG_SECTION_CHECK_MANIFEST in config.get("tool", {}):
return config["tool"][CFG_SECTION_CHECK_MANIFEST]
search_files = ['setup.cfg', 'tox.ini']
config_parser = ConfigParser.ConfigParser()
for filename in search_files:
if (config_parser.read([filename])
and config_parser.has_section(CFG_SECTION_CHECK_MANIFEST)):
config = {}
if config_parser.has_option(*CFG_IGNORE_DEFAULT_RULES):
ignore_defaults = config_parser.getboolean(*CFG_IGNORE_DEFAULT_RULES)
config[CFG_IGNORE_DEFAULT_RULES[1]] = ignore_defaults
if config_parser.has_option(*CFG_IGNORE):
patterns = [
p.strip()
for p in config_parser.get(*CFG_IGNORE).splitlines()
]
config[CFG_IGNORE[1]] = patterns
if config_parser.has_option(*CFG_IGNORE_BAD_IDEAS):
patterns = [
p.strip()
for p in config_parser.get(*CFG_IGNORE_BAD_IDEAS).splitlines()
]
config[CFG_IGNORE_BAD_IDEAS[1]] = patterns
return config
return {}
|
[
"def",
"_load_config",
"(",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"\"pyproject.toml\"",
")",
":",
"config",
"=",
"toml",
".",
"load",
"(",
"\"pyproject.toml\"",
")",
"if",
"CFG_SECTION_CHECK_MANIFEST",
"in",
"config",
".",
"get",
"(",
"\"tool\"",
",",
"{",
"}",
")",
":",
"return",
"config",
"[",
"\"tool\"",
"]",
"[",
"CFG_SECTION_CHECK_MANIFEST",
"]",
"search_files",
"=",
"[",
"'setup.cfg'",
",",
"'tox.ini'",
"]",
"config_parser",
"=",
"ConfigParser",
".",
"ConfigParser",
"(",
")",
"for",
"filename",
"in",
"search_files",
":",
"if",
"(",
"config_parser",
".",
"read",
"(",
"[",
"filename",
"]",
")",
"and",
"config_parser",
".",
"has_section",
"(",
"CFG_SECTION_CHECK_MANIFEST",
")",
")",
":",
"config",
"=",
"{",
"}",
"if",
"config_parser",
".",
"has_option",
"(",
"*",
"CFG_IGNORE_DEFAULT_RULES",
")",
":",
"ignore_defaults",
"=",
"config_parser",
".",
"getboolean",
"(",
"*",
"CFG_IGNORE_DEFAULT_RULES",
")",
"config",
"[",
"CFG_IGNORE_DEFAULT_RULES",
"[",
"1",
"]",
"]",
"=",
"ignore_defaults",
"if",
"config_parser",
".",
"has_option",
"(",
"*",
"CFG_IGNORE",
")",
":",
"patterns",
"=",
"[",
"p",
".",
"strip",
"(",
")",
"for",
"p",
"in",
"config_parser",
".",
"get",
"(",
"*",
"CFG_IGNORE",
")",
".",
"splitlines",
"(",
")",
"]",
"config",
"[",
"CFG_IGNORE",
"[",
"1",
"]",
"]",
"=",
"patterns",
"if",
"config_parser",
".",
"has_option",
"(",
"*",
"CFG_IGNORE_BAD_IDEAS",
")",
":",
"patterns",
"=",
"[",
"p",
".",
"strip",
"(",
")",
"for",
"p",
"in",
"config_parser",
".",
"get",
"(",
"*",
"CFG_IGNORE_BAD_IDEAS",
")",
".",
"splitlines",
"(",
")",
"]",
"config",
"[",
"CFG_IGNORE_BAD_IDEAS",
"[",
"1",
"]",
"]",
"=",
"patterns",
"return",
"config",
"return",
"{",
"}"
] |
Searches for config files, reads them and returns a dictionary
Looks for a ``check-manifest`` section in ``pyproject.toml``,
``setup.cfg``, and ``tox.ini``, in that order. The first file
that exists and has that section will be loaded and returned as a
dictionary.
|
[
"Searches",
"for",
"config",
"files",
"reads",
"them",
"and",
"returns",
"a",
"dictionary"
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L605-L646
|
mgedmin/check-manifest
|
check_manifest.py
|
read_manifest
|
def read_manifest():
"""Read existing configuration from MANIFEST.in.
We use that to ignore anything the MANIFEST.in ignores.
"""
# XXX modifies global state, which is kind of evil
if not os.path.isfile('MANIFEST.in'):
return
ignore, ignore_regexps = _get_ignore_from_manifest('MANIFEST.in')
IGNORE.extend(ignore)
IGNORE_REGEXPS.extend(ignore_regexps)
|
python
|
def read_manifest():
if not os.path.isfile('MANIFEST.in'):
return
ignore, ignore_regexps = _get_ignore_from_manifest('MANIFEST.in')
IGNORE.extend(ignore)
IGNORE_REGEXPS.extend(ignore_regexps)
|
[
"def",
"read_manifest",
"(",
")",
":",
"# XXX modifies global state, which is kind of evil",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"'MANIFEST.in'",
")",
":",
"return",
"ignore",
",",
"ignore_regexps",
"=",
"_get_ignore_from_manifest",
"(",
"'MANIFEST.in'",
")",
"IGNORE",
".",
"extend",
"(",
"ignore",
")",
"IGNORE_REGEXPS",
".",
"extend",
"(",
"ignore_regexps",
")"
] |
Read existing configuration from MANIFEST.in.
We use that to ignore anything the MANIFEST.in ignores.
|
[
"Read",
"existing",
"configuration",
"from",
"MANIFEST",
".",
"in",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L649-L659
|
mgedmin/check-manifest
|
check_manifest.py
|
_get_ignore_from_manifest
|
def _get_ignore_from_manifest(filename):
"""Gather the various ignore patterns from a MANIFEST.in.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
"""
class MyTextFile(TextFile):
def error(self, msg, line=None): # pragma: nocover
# (this is never called by TextFile in current versions of CPython)
raise Failure(self.gen_error(msg, line))
def warn(self, msg, line=None):
warning(self.gen_error(msg, line))
template = MyTextFile(filename,
strip_comments=True,
skip_blanks=True,
join_lines=True,
lstrip_ws=True,
rstrip_ws=True,
collapse_join=True)
try:
lines = template.readlines()
finally:
template.close()
return _get_ignore_from_manifest_lines(lines)
|
python
|
def _get_ignore_from_manifest(filename):
class MyTextFile(TextFile):
def error(self, msg, line=None):
raise Failure(self.gen_error(msg, line))
def warn(self, msg, line=None):
warning(self.gen_error(msg, line))
template = MyTextFile(filename,
strip_comments=True,
skip_blanks=True,
join_lines=True,
lstrip_ws=True,
rstrip_ws=True,
collapse_join=True)
try:
lines = template.readlines()
finally:
template.close()
return _get_ignore_from_manifest_lines(lines)
|
[
"def",
"_get_ignore_from_manifest",
"(",
"filename",
")",
":",
"class",
"MyTextFile",
"(",
"TextFile",
")",
":",
"def",
"error",
"(",
"self",
",",
"msg",
",",
"line",
"=",
"None",
")",
":",
"# pragma: nocover",
"# (this is never called by TextFile in current versions of CPython)",
"raise",
"Failure",
"(",
"self",
".",
"gen_error",
"(",
"msg",
",",
"line",
")",
")",
"def",
"warn",
"(",
"self",
",",
"msg",
",",
"line",
"=",
"None",
")",
":",
"warning",
"(",
"self",
".",
"gen_error",
"(",
"msg",
",",
"line",
")",
")",
"template",
"=",
"MyTextFile",
"(",
"filename",
",",
"strip_comments",
"=",
"True",
",",
"skip_blanks",
"=",
"True",
",",
"join_lines",
"=",
"True",
",",
"lstrip_ws",
"=",
"True",
",",
"rstrip_ws",
"=",
"True",
",",
"collapse_join",
"=",
"True",
")",
"try",
":",
"lines",
"=",
"template",
".",
"readlines",
"(",
")",
"finally",
":",
"template",
".",
"close",
"(",
")",
"return",
"_get_ignore_from_manifest_lines",
"(",
"lines",
")"
] |
Gather the various ignore patterns from a MANIFEST.in.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
|
[
"Gather",
"the",
"various",
"ignore",
"patterns",
"from",
"a",
"MANIFEST",
".",
"in",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L672-L698
|
mgedmin/check-manifest
|
check_manifest.py
|
_get_ignore_from_manifest_lines
|
def _get_ignore_from_manifest_lines(lines):
"""Gather the various ignore patterns from a MANIFEST.in.
'lines' should be a list of strings with comments removed
and continuation lines joined.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
"""
ignore = []
ignore_regexps = []
for line in lines:
try:
cmd, rest = line.split(None, 1)
except ValueError:
# no whitespace, so not interesting
continue
for part in rest.split():
# distutils enforces these warnings on Windows only
if part.startswith('/'):
warning("ERROR: Leading slashes are not allowed in MANIFEST.in on Windows: %s" % part)
if part.endswith('/'):
warning("ERROR: Trailing slashes are not allowed in MANIFEST.in on Windows: %s" % part)
if cmd == 'exclude':
# An exclude of 'dirname/*css' can match 'dirname/foo.css'
# but not 'dirname/subdir/bar.css'. We need a regular
# expression for that, since fnmatch doesn't pay attention to
# directory separators.
for pat in rest.split():
if '*' in pat or '?' in pat or '[!' in pat:
ignore_regexps.append(_glob_to_regexp(pat))
else:
# No need for special handling.
ignore.append(pat)
elif cmd == 'global-exclude':
ignore.extend(rest.split())
elif cmd == 'recursive-exclude':
try:
dirname, patterns = rest.split(None, 1)
except ValueError:
# Wrong MANIFEST.in line.
warning("You have a wrong line in MANIFEST.in: %r\n"
"'recursive-exclude' expects <dir> <pattern1> "
"<pattern2> ..." % line)
continue
# Strip path separator for clarity.
dirname = dirname.rstrip(os.path.sep)
for pattern in patterns.split():
if pattern.startswith('*'):
ignore.append(dirname + os.path.sep + pattern)
else:
# 'recursive-exclude plone metadata.xml' should
# exclude plone/metadata.xml and
# plone/*/metadata.xml, where * can be any number
# of sub directories. We could use a regexp, but
# two ignores seems easier.
ignore.append(dirname + os.path.sep + pattern)
ignore.append(
dirname + os.path.sep + '*' + os.path.sep + pattern)
elif cmd == 'prune':
# rest is considered to be a directory name. It should
# not contain a path separator, as it actually has no
# effect in that case, but that could differ per python
# version. We strip it here to avoid double separators.
# XXX: mg: I'm not 100% sure the above is correct, AFAICS
# all pythons from 2.6 complain if the path has a leading or
# trailing slash -- on Windows, that is.
rest = rest.rstrip('/\\')
ignore.append(rest)
ignore.append(rest + os.path.sep + '*')
return ignore, ignore_regexps
|
python
|
def _get_ignore_from_manifest_lines(lines):
ignore = []
ignore_regexps = []
for line in lines:
try:
cmd, rest = line.split(None, 1)
except ValueError:
continue
for part in rest.split():
if part.startswith('/'):
warning("ERROR: Leading slashes are not allowed in MANIFEST.in on Windows: %s" % part)
if part.endswith('/'):
warning("ERROR: Trailing slashes are not allowed in MANIFEST.in on Windows: %s" % part)
if cmd == 'exclude':
for pat in rest.split():
if '*' in pat or '?' in pat or '[!' in pat:
ignore_regexps.append(_glob_to_regexp(pat))
else:
ignore.append(pat)
elif cmd == 'global-exclude':
ignore.extend(rest.split())
elif cmd == 'recursive-exclude':
try:
dirname, patterns = rest.split(None, 1)
except ValueError:
warning("You have a wrong line in MANIFEST.in: %r\n"
"'recursive-exclude' expects <dir> <pattern1> "
"<pattern2> ..." % line)
continue
dirname = dirname.rstrip(os.path.sep)
for pattern in patterns.split():
if pattern.startswith('*'):
ignore.append(dirname + os.path.sep + pattern)
else:
ignore.append(dirname + os.path.sep + pattern)
ignore.append(
dirname + os.path.sep + '*' + os.path.sep + pattern)
elif cmd == 'prune':
rest = rest.rstrip('/\\')
ignore.append(rest)
ignore.append(rest + os.path.sep + '*')
return ignore, ignore_regexps
|
[
"def",
"_get_ignore_from_manifest_lines",
"(",
"lines",
")",
":",
"ignore",
"=",
"[",
"]",
"ignore_regexps",
"=",
"[",
"]",
"for",
"line",
"in",
"lines",
":",
"try",
":",
"cmd",
",",
"rest",
"=",
"line",
".",
"split",
"(",
"None",
",",
"1",
")",
"except",
"ValueError",
":",
"# no whitespace, so not interesting",
"continue",
"for",
"part",
"in",
"rest",
".",
"split",
"(",
")",
":",
"# distutils enforces these warnings on Windows only",
"if",
"part",
".",
"startswith",
"(",
"'/'",
")",
":",
"warning",
"(",
"\"ERROR: Leading slashes are not allowed in MANIFEST.in on Windows: %s\"",
"%",
"part",
")",
"if",
"part",
".",
"endswith",
"(",
"'/'",
")",
":",
"warning",
"(",
"\"ERROR: Trailing slashes are not allowed in MANIFEST.in on Windows: %s\"",
"%",
"part",
")",
"if",
"cmd",
"==",
"'exclude'",
":",
"# An exclude of 'dirname/*css' can match 'dirname/foo.css'",
"# but not 'dirname/subdir/bar.css'. We need a regular",
"# expression for that, since fnmatch doesn't pay attention to",
"# directory separators.",
"for",
"pat",
"in",
"rest",
".",
"split",
"(",
")",
":",
"if",
"'*'",
"in",
"pat",
"or",
"'?'",
"in",
"pat",
"or",
"'[!'",
"in",
"pat",
":",
"ignore_regexps",
".",
"append",
"(",
"_glob_to_regexp",
"(",
"pat",
")",
")",
"else",
":",
"# No need for special handling.",
"ignore",
".",
"append",
"(",
"pat",
")",
"elif",
"cmd",
"==",
"'global-exclude'",
":",
"ignore",
".",
"extend",
"(",
"rest",
".",
"split",
"(",
")",
")",
"elif",
"cmd",
"==",
"'recursive-exclude'",
":",
"try",
":",
"dirname",
",",
"patterns",
"=",
"rest",
".",
"split",
"(",
"None",
",",
"1",
")",
"except",
"ValueError",
":",
"# Wrong MANIFEST.in line.",
"warning",
"(",
"\"You have a wrong line in MANIFEST.in: %r\\n\"",
"\"'recursive-exclude' expects <dir> <pattern1> \"",
"\"<pattern2> ...\"",
"%",
"line",
")",
"continue",
"# Strip path separator for clarity.",
"dirname",
"=",
"dirname",
".",
"rstrip",
"(",
"os",
".",
"path",
".",
"sep",
")",
"for",
"pattern",
"in",
"patterns",
".",
"split",
"(",
")",
":",
"if",
"pattern",
".",
"startswith",
"(",
"'*'",
")",
":",
"ignore",
".",
"append",
"(",
"dirname",
"+",
"os",
".",
"path",
".",
"sep",
"+",
"pattern",
")",
"else",
":",
"# 'recursive-exclude plone metadata.xml' should",
"# exclude plone/metadata.xml and",
"# plone/*/metadata.xml, where * can be any number",
"# of sub directories. We could use a regexp, but",
"# two ignores seems easier.",
"ignore",
".",
"append",
"(",
"dirname",
"+",
"os",
".",
"path",
".",
"sep",
"+",
"pattern",
")",
"ignore",
".",
"append",
"(",
"dirname",
"+",
"os",
".",
"path",
".",
"sep",
"+",
"'*'",
"+",
"os",
".",
"path",
".",
"sep",
"+",
"pattern",
")",
"elif",
"cmd",
"==",
"'prune'",
":",
"# rest is considered to be a directory name. It should",
"# not contain a path separator, as it actually has no",
"# effect in that case, but that could differ per python",
"# version. We strip it here to avoid double separators.",
"# XXX: mg: I'm not 100% sure the above is correct, AFAICS",
"# all pythons from 2.6 complain if the path has a leading or",
"# trailing slash -- on Windows, that is.",
"rest",
"=",
"rest",
".",
"rstrip",
"(",
"'/\\\\'",
")",
"ignore",
".",
"append",
"(",
"rest",
")",
"ignore",
".",
"append",
"(",
"rest",
"+",
"os",
".",
"path",
".",
"sep",
"+",
"'*'",
")",
"return",
"ignore",
",",
"ignore_regexps"
] |
Gather the various ignore patterns from a MANIFEST.in.
'lines' should be a list of strings with comments removed
and continuation lines joined.
Returns a list of standard ignore patterns and a list of regular
expressions to ignore.
|
[
"Gather",
"the",
"various",
"ignore",
"patterns",
"from",
"a",
"MANIFEST",
".",
"in",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L701-L771
|
mgedmin/check-manifest
|
check_manifest.py
|
file_matches
|
def file_matches(filename, patterns):
"""Does this filename match any of the patterns?"""
return any(fnmatch.fnmatch(filename, pat)
or fnmatch.fnmatch(os.path.basename(filename), pat)
for pat in patterns)
|
python
|
def file_matches(filename, patterns):
return any(fnmatch.fnmatch(filename, pat)
or fnmatch.fnmatch(os.path.basename(filename), pat)
for pat in patterns)
|
[
"def",
"file_matches",
"(",
"filename",
",",
"patterns",
")",
":",
"return",
"any",
"(",
"fnmatch",
".",
"fnmatch",
"(",
"filename",
",",
"pat",
")",
"or",
"fnmatch",
".",
"fnmatch",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
",",
"pat",
")",
"for",
"pat",
"in",
"patterns",
")"
] |
Does this filename match any of the patterns?
|
[
"Does",
"this",
"filename",
"match",
"any",
"of",
"the",
"patterns?"
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L774-L778
|
mgedmin/check-manifest
|
check_manifest.py
|
file_matches_regexps
|
def file_matches_regexps(filename, patterns):
"""Does this filename match any of the regular expressions?"""
return any(re.match(pat, filename) for pat in patterns)
|
python
|
def file_matches_regexps(filename, patterns):
return any(re.match(pat, filename) for pat in patterns)
|
[
"def",
"file_matches_regexps",
"(",
"filename",
",",
"patterns",
")",
":",
"return",
"any",
"(",
"re",
".",
"match",
"(",
"pat",
",",
"filename",
")",
"for",
"pat",
"in",
"patterns",
")"
] |
Does this filename match any of the regular expressions?
|
[
"Does",
"this",
"filename",
"match",
"any",
"of",
"the",
"regular",
"expressions?"
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L781-L783
|
mgedmin/check-manifest
|
check_manifest.py
|
strip_sdist_extras
|
def strip_sdist_extras(filelist):
"""Strip generated files that are only present in source distributions.
We also strip files that are ignored for other reasons, like
command line arguments, setup.cfg rules or MANIFEST.in rules.
"""
return [name for name in filelist
if not file_matches(name, IGNORE)
and not file_matches_regexps(name, IGNORE_REGEXPS)]
|
python
|
def strip_sdist_extras(filelist):
return [name for name in filelist
if not file_matches(name, IGNORE)
and not file_matches_regexps(name, IGNORE_REGEXPS)]
|
[
"def",
"strip_sdist_extras",
"(",
"filelist",
")",
":",
"return",
"[",
"name",
"for",
"name",
"in",
"filelist",
"if",
"not",
"file_matches",
"(",
"name",
",",
"IGNORE",
")",
"and",
"not",
"file_matches_regexps",
"(",
"name",
",",
"IGNORE_REGEXPS",
")",
"]"
] |
Strip generated files that are only present in source distributions.
We also strip files that are ignored for other reasons, like
command line arguments, setup.cfg rules or MANIFEST.in rules.
|
[
"Strip",
"generated",
"files",
"that",
"are",
"only",
"present",
"in",
"source",
"distributions",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L786-L794
|
mgedmin/check-manifest
|
check_manifest.py
|
find_suggestions
|
def find_suggestions(filelist):
"""Suggest MANIFEST.in patterns for missing files."""
suggestions = set()
unknowns = []
for filename in filelist:
if os.path.isdir(filename):
# it's impossible to add empty directories via MANIFEST.in anyway,
# and non-empty directories will be added automatically when we
# specify patterns for files inside them
continue
for pattern, suggestion in SUGGESTIONS:
m = pattern.match(filename)
if m is not None:
suggestions.add(pattern.sub(suggestion, filename))
break
else:
unknowns.append(filename)
return sorted(suggestions), unknowns
|
python
|
def find_suggestions(filelist):
suggestions = set()
unknowns = []
for filename in filelist:
if os.path.isdir(filename):
continue
for pattern, suggestion in SUGGESTIONS:
m = pattern.match(filename)
if m is not None:
suggestions.add(pattern.sub(suggestion, filename))
break
else:
unknowns.append(filename)
return sorted(suggestions), unknowns
|
[
"def",
"find_suggestions",
"(",
"filelist",
")",
":",
"suggestions",
"=",
"set",
"(",
")",
"unknowns",
"=",
"[",
"]",
"for",
"filename",
"in",
"filelist",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"filename",
")",
":",
"# it's impossible to add empty directories via MANIFEST.in anyway,",
"# and non-empty directories will be added automatically when we",
"# specify patterns for files inside them",
"continue",
"for",
"pattern",
",",
"suggestion",
"in",
"SUGGESTIONS",
":",
"m",
"=",
"pattern",
".",
"match",
"(",
"filename",
")",
"if",
"m",
"is",
"not",
"None",
":",
"suggestions",
".",
"add",
"(",
"pattern",
".",
"sub",
"(",
"suggestion",
",",
"filename",
")",
")",
"break",
"else",
":",
"unknowns",
".",
"append",
"(",
"filename",
")",
"return",
"sorted",
"(",
"suggestions",
")",
",",
"unknowns"
] |
Suggest MANIFEST.in patterns for missing files.
|
[
"Suggest",
"MANIFEST",
".",
"in",
"patterns",
"for",
"missing",
"files",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L803-L820
|
mgedmin/check-manifest
|
check_manifest.py
|
extract_version_from_filename
|
def extract_version_from_filename(filename):
"""Extract version number from sdist filename."""
filename = os.path.splitext(os.path.basename(filename))[0]
if filename.endswith('.tar'):
filename = os.path.splitext(filename)[0]
return filename.partition('-')[2]
|
python
|
def extract_version_from_filename(filename):
filename = os.path.splitext(os.path.basename(filename))[0]
if filename.endswith('.tar'):
filename = os.path.splitext(filename)[0]
return filename.partition('-')[2]
|
[
"def",
"extract_version_from_filename",
"(",
"filename",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
")",
"[",
"0",
"]",
"if",
"filename",
".",
"endswith",
"(",
"'.tar'",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"0",
"]",
"return",
"filename",
".",
"partition",
"(",
"'-'",
")",
"[",
"2",
"]"
] |
Extract version number from sdist filename.
|
[
"Extract",
"version",
"number",
"from",
"sdist",
"filename",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L832-L837
|
mgedmin/check-manifest
|
check_manifest.py
|
check_manifest
|
def check_manifest(source_tree='.', create=False, update=False,
python=sys.executable):
"""Compare a generated source distribution with list of files in a VCS.
Returns True if the manifest is fine.
"""
all_ok = True
if os.path.sep in python:
python = os.path.abspath(python)
with cd(source_tree):
if not is_package():
raise Failure('This is not a Python project (no setup.py).')
read_config()
read_manifest()
info_begin("listing source files under version control")
all_source_files = sorted(get_vcs_files())
source_files = strip_sdist_extras(all_source_files)
info_continue(": %d files and directories" % len(source_files))
if not all_source_files:
raise Failure('There are no files added to version control!')
info_begin("building an sdist")
with mkdtemp('-sdist') as tempdir:
run([python, 'setup.py', 'sdist', '-d', tempdir])
sdist_filename = get_one_file_in(tempdir)
info_continue(": %s" % os.path.basename(sdist_filename))
sdist_files = sorted(normalize_names(strip_sdist_extras(
strip_toplevel_name(get_archive_file_list(sdist_filename)))))
info_continue(": %d files and directories" % len(sdist_files))
version = extract_version_from_filename(sdist_filename)
existing_source_files = list(filter(os.path.exists, all_source_files))
missing_source_files = sorted(set(all_source_files) - set(existing_source_files))
if missing_source_files:
warning("some files listed as being under source control are missing:\n%s"
% format_list(missing_source_files))
info_begin("copying source files to a temporary directory")
with mkdtemp('-sources') as tempsourcedir:
copy_files(existing_source_files, tempsourcedir)
if os.path.exists('MANIFEST.in') and 'MANIFEST.in' not in source_files:
# See https://github.com/mgedmin/check-manifest/issues/7
# if do this, we will emit a warning about MANIFEST.in not
# being in source control, if we don't do this, the user
# gets confused about their new manifest rules being
# ignored.
copy_files(['MANIFEST.in'], tempsourcedir)
if 'setup.py' not in source_files:
# See https://github.com/mgedmin/check-manifest/issues/46
# if do this, we will emit a warning about setup.py not
# being in source control, if we don't do this, the user
# gets a scary error
copy_files(['setup.py'], tempsourcedir)
info_begin("building a clean sdist")
with cd(tempsourcedir):
with mkdtemp('-sdist') as tempdir:
os.environ['SETUPTOOLS_SCM_PRETEND_VERSION'] = version
run([python, 'setup.py', 'sdist', '-d', tempdir])
sdist_filename = get_one_file_in(tempdir)
info_continue(": %s" % os.path.basename(sdist_filename))
clean_sdist_files = sorted(normalize_names(strip_sdist_extras(
strip_toplevel_name(get_archive_file_list(sdist_filename)))))
info_continue(": %d files and directories" % len(clean_sdist_files))
missing_from_manifest = set(source_files) - set(clean_sdist_files)
missing_from_VCS = set(sdist_files + clean_sdist_files) - set(source_files)
if not missing_from_manifest and not missing_from_VCS:
info("lists of files in version control and sdist match")
else:
error("lists of files in version control and sdist do not match!\n%s"
% format_missing(missing_from_VCS, missing_from_manifest,
"VCS", "sdist"))
suggestions, unknowns = find_suggestions(missing_from_manifest)
user_asked_for_help = update or (create and not
os.path.exists('MANIFEST.in'))
if 'MANIFEST.in' not in existing_source_files:
if suggestions and not user_asked_for_help:
info("no MANIFEST.in found; you can run 'check-manifest -c' to create one")
else:
info("no MANIFEST.in found")
if suggestions:
info("suggested MANIFEST.in rules:\n%s"
% format_list(suggestions))
if user_asked_for_help:
existed = os.path.exists('MANIFEST.in')
with open('MANIFEST.in', 'a') as f:
if not existed:
info("creating MANIFEST.in")
else:
info("updating MANIFEST.in")
f.write('\n# added by check_manifest.py\n')
f.write('\n'.join(suggestions) + '\n')
if unknowns:
info("don't know how to come up with rules matching\n%s"
% format_list(unknowns))
elif user_asked_for_help:
info("don't know how to come up with rules"
" matching any of the files, sorry!")
all_ok = False
bad_ideas = find_bad_ideas(all_source_files)
filtered_bad_ideas = [bad_idea for bad_idea in bad_ideas
if not file_matches(bad_idea, IGNORE_BAD_IDEAS)]
if filtered_bad_ideas:
warning("you have %s in source control!\nthat's a bad idea:"
" auto-generated files should not be versioned"
% filtered_bad_ideas[0])
if len(filtered_bad_ideas) > 1:
warning("this also applies to the following:\n%s"
% format_list(filtered_bad_ideas[1:]))
all_ok = False
return all_ok
|
python
|
def check_manifest(source_tree='.', create=False, update=False,
python=sys.executable):
all_ok = True
if os.path.sep in python:
python = os.path.abspath(python)
with cd(source_tree):
if not is_package():
raise Failure('This is not a Python project (no setup.py).')
read_config()
read_manifest()
info_begin("listing source files under version control")
all_source_files = sorted(get_vcs_files())
source_files = strip_sdist_extras(all_source_files)
info_continue(": %d files and directories" % len(source_files))
if not all_source_files:
raise Failure('There are no files added to version control!')
info_begin("building an sdist")
with mkdtemp('-sdist') as tempdir:
run([python, 'setup.py', 'sdist', '-d', tempdir])
sdist_filename = get_one_file_in(tempdir)
info_continue(": %s" % os.path.basename(sdist_filename))
sdist_files = sorted(normalize_names(strip_sdist_extras(
strip_toplevel_name(get_archive_file_list(sdist_filename)))))
info_continue(": %d files and directories" % len(sdist_files))
version = extract_version_from_filename(sdist_filename)
existing_source_files = list(filter(os.path.exists, all_source_files))
missing_source_files = sorted(set(all_source_files) - set(existing_source_files))
if missing_source_files:
warning("some files listed as being under source control are missing:\n%s"
% format_list(missing_source_files))
info_begin("copying source files to a temporary directory")
with mkdtemp('-sources') as tempsourcedir:
copy_files(existing_source_files, tempsourcedir)
if os.path.exists('MANIFEST.in') and 'MANIFEST.in' not in source_files:
copy_files(['MANIFEST.in'], tempsourcedir)
if 'setup.py' not in source_files:
copy_files(['setup.py'], tempsourcedir)
info_begin("building a clean sdist")
with cd(tempsourcedir):
with mkdtemp('-sdist') as tempdir:
os.environ['SETUPTOOLS_SCM_PRETEND_VERSION'] = version
run([python, 'setup.py', 'sdist', '-d', tempdir])
sdist_filename = get_one_file_in(tempdir)
info_continue(": %s" % os.path.basename(sdist_filename))
clean_sdist_files = sorted(normalize_names(strip_sdist_extras(
strip_toplevel_name(get_archive_file_list(sdist_filename)))))
info_continue(": %d files and directories" % len(clean_sdist_files))
missing_from_manifest = set(source_files) - set(clean_sdist_files)
missing_from_VCS = set(sdist_files + clean_sdist_files) - set(source_files)
if not missing_from_manifest and not missing_from_VCS:
info("lists of files in version control and sdist match")
else:
error("lists of files in version control and sdist do not match!\n%s"
% format_missing(missing_from_VCS, missing_from_manifest,
"VCS", "sdist"))
suggestions, unknowns = find_suggestions(missing_from_manifest)
user_asked_for_help = update or (create and not
os.path.exists('MANIFEST.in'))
if 'MANIFEST.in' not in existing_source_files:
if suggestions and not user_asked_for_help:
info("no MANIFEST.in found; you can run 'check-manifest -c' to create one")
else:
info("no MANIFEST.in found")
if suggestions:
info("suggested MANIFEST.in rules:\n%s"
% format_list(suggestions))
if user_asked_for_help:
existed = os.path.exists('MANIFEST.in')
with open('MANIFEST.in', 'a') as f:
if not existed:
info("creating MANIFEST.in")
else:
info("updating MANIFEST.in")
f.write('\n# added by check_manifest.py\n')
f.write('\n'.join(suggestions) + '\n')
if unknowns:
info("don't know how to come up with rules matching\n%s"
% format_list(unknowns))
elif user_asked_for_help:
info("don't know how to come up with rules"
" matching any of the files, sorry!")
all_ok = False
bad_ideas = find_bad_ideas(all_source_files)
filtered_bad_ideas = [bad_idea for bad_idea in bad_ideas
if not file_matches(bad_idea, IGNORE_BAD_IDEAS)]
if filtered_bad_ideas:
warning("you have %s in source control!\nthat's a bad idea:"
" auto-generated files should not be versioned"
% filtered_bad_ideas[0])
if len(filtered_bad_ideas) > 1:
warning("this also applies to the following:\n%s"
% format_list(filtered_bad_ideas[1:]))
all_ok = False
return all_ok
|
[
"def",
"check_manifest",
"(",
"source_tree",
"=",
"'.'",
",",
"create",
"=",
"False",
",",
"update",
"=",
"False",
",",
"python",
"=",
"sys",
".",
"executable",
")",
":",
"all_ok",
"=",
"True",
"if",
"os",
".",
"path",
".",
"sep",
"in",
"python",
":",
"python",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"python",
")",
"with",
"cd",
"(",
"source_tree",
")",
":",
"if",
"not",
"is_package",
"(",
")",
":",
"raise",
"Failure",
"(",
"'This is not a Python project (no setup.py).'",
")",
"read_config",
"(",
")",
"read_manifest",
"(",
")",
"info_begin",
"(",
"\"listing source files under version control\"",
")",
"all_source_files",
"=",
"sorted",
"(",
"get_vcs_files",
"(",
")",
")",
"source_files",
"=",
"strip_sdist_extras",
"(",
"all_source_files",
")",
"info_continue",
"(",
"\": %d files and directories\"",
"%",
"len",
"(",
"source_files",
")",
")",
"if",
"not",
"all_source_files",
":",
"raise",
"Failure",
"(",
"'There are no files added to version control!'",
")",
"info_begin",
"(",
"\"building an sdist\"",
")",
"with",
"mkdtemp",
"(",
"'-sdist'",
")",
"as",
"tempdir",
":",
"run",
"(",
"[",
"python",
",",
"'setup.py'",
",",
"'sdist'",
",",
"'-d'",
",",
"tempdir",
"]",
")",
"sdist_filename",
"=",
"get_one_file_in",
"(",
"tempdir",
")",
"info_continue",
"(",
"\": %s\"",
"%",
"os",
".",
"path",
".",
"basename",
"(",
"sdist_filename",
")",
")",
"sdist_files",
"=",
"sorted",
"(",
"normalize_names",
"(",
"strip_sdist_extras",
"(",
"strip_toplevel_name",
"(",
"get_archive_file_list",
"(",
"sdist_filename",
")",
")",
")",
")",
")",
"info_continue",
"(",
"\": %d files and directories\"",
"%",
"len",
"(",
"sdist_files",
")",
")",
"version",
"=",
"extract_version_from_filename",
"(",
"sdist_filename",
")",
"existing_source_files",
"=",
"list",
"(",
"filter",
"(",
"os",
".",
"path",
".",
"exists",
",",
"all_source_files",
")",
")",
"missing_source_files",
"=",
"sorted",
"(",
"set",
"(",
"all_source_files",
")",
"-",
"set",
"(",
"existing_source_files",
")",
")",
"if",
"missing_source_files",
":",
"warning",
"(",
"\"some files listed as being under source control are missing:\\n%s\"",
"%",
"format_list",
"(",
"missing_source_files",
")",
")",
"info_begin",
"(",
"\"copying source files to a temporary directory\"",
")",
"with",
"mkdtemp",
"(",
"'-sources'",
")",
"as",
"tempsourcedir",
":",
"copy_files",
"(",
"existing_source_files",
",",
"tempsourcedir",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"'MANIFEST.in'",
")",
"and",
"'MANIFEST.in'",
"not",
"in",
"source_files",
":",
"# See https://github.com/mgedmin/check-manifest/issues/7",
"# if do this, we will emit a warning about MANIFEST.in not",
"# being in source control, if we don't do this, the user",
"# gets confused about their new manifest rules being",
"# ignored.",
"copy_files",
"(",
"[",
"'MANIFEST.in'",
"]",
",",
"tempsourcedir",
")",
"if",
"'setup.py'",
"not",
"in",
"source_files",
":",
"# See https://github.com/mgedmin/check-manifest/issues/46",
"# if do this, we will emit a warning about setup.py not",
"# being in source control, if we don't do this, the user",
"# gets a scary error",
"copy_files",
"(",
"[",
"'setup.py'",
"]",
",",
"tempsourcedir",
")",
"info_begin",
"(",
"\"building a clean sdist\"",
")",
"with",
"cd",
"(",
"tempsourcedir",
")",
":",
"with",
"mkdtemp",
"(",
"'-sdist'",
")",
"as",
"tempdir",
":",
"os",
".",
"environ",
"[",
"'SETUPTOOLS_SCM_PRETEND_VERSION'",
"]",
"=",
"version",
"run",
"(",
"[",
"python",
",",
"'setup.py'",
",",
"'sdist'",
",",
"'-d'",
",",
"tempdir",
"]",
")",
"sdist_filename",
"=",
"get_one_file_in",
"(",
"tempdir",
")",
"info_continue",
"(",
"\": %s\"",
"%",
"os",
".",
"path",
".",
"basename",
"(",
"sdist_filename",
")",
")",
"clean_sdist_files",
"=",
"sorted",
"(",
"normalize_names",
"(",
"strip_sdist_extras",
"(",
"strip_toplevel_name",
"(",
"get_archive_file_list",
"(",
"sdist_filename",
")",
")",
")",
")",
")",
"info_continue",
"(",
"\": %d files and directories\"",
"%",
"len",
"(",
"clean_sdist_files",
")",
")",
"missing_from_manifest",
"=",
"set",
"(",
"source_files",
")",
"-",
"set",
"(",
"clean_sdist_files",
")",
"missing_from_VCS",
"=",
"set",
"(",
"sdist_files",
"+",
"clean_sdist_files",
")",
"-",
"set",
"(",
"source_files",
")",
"if",
"not",
"missing_from_manifest",
"and",
"not",
"missing_from_VCS",
":",
"info",
"(",
"\"lists of files in version control and sdist match\"",
")",
"else",
":",
"error",
"(",
"\"lists of files in version control and sdist do not match!\\n%s\"",
"%",
"format_missing",
"(",
"missing_from_VCS",
",",
"missing_from_manifest",
",",
"\"VCS\"",
",",
"\"sdist\"",
")",
")",
"suggestions",
",",
"unknowns",
"=",
"find_suggestions",
"(",
"missing_from_manifest",
")",
"user_asked_for_help",
"=",
"update",
"or",
"(",
"create",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"'MANIFEST.in'",
")",
")",
"if",
"'MANIFEST.in'",
"not",
"in",
"existing_source_files",
":",
"if",
"suggestions",
"and",
"not",
"user_asked_for_help",
":",
"info",
"(",
"\"no MANIFEST.in found; you can run 'check-manifest -c' to create one\"",
")",
"else",
":",
"info",
"(",
"\"no MANIFEST.in found\"",
")",
"if",
"suggestions",
":",
"info",
"(",
"\"suggested MANIFEST.in rules:\\n%s\"",
"%",
"format_list",
"(",
"suggestions",
")",
")",
"if",
"user_asked_for_help",
":",
"existed",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"'MANIFEST.in'",
")",
"with",
"open",
"(",
"'MANIFEST.in'",
",",
"'a'",
")",
"as",
"f",
":",
"if",
"not",
"existed",
":",
"info",
"(",
"\"creating MANIFEST.in\"",
")",
"else",
":",
"info",
"(",
"\"updating MANIFEST.in\"",
")",
"f",
".",
"write",
"(",
"'\\n# added by check_manifest.py\\n'",
")",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"suggestions",
")",
"+",
"'\\n'",
")",
"if",
"unknowns",
":",
"info",
"(",
"\"don't know how to come up with rules matching\\n%s\"",
"%",
"format_list",
"(",
"unknowns",
")",
")",
"elif",
"user_asked_for_help",
":",
"info",
"(",
"\"don't know how to come up with rules\"",
"\" matching any of the files, sorry!\"",
")",
"all_ok",
"=",
"False",
"bad_ideas",
"=",
"find_bad_ideas",
"(",
"all_source_files",
")",
"filtered_bad_ideas",
"=",
"[",
"bad_idea",
"for",
"bad_idea",
"in",
"bad_ideas",
"if",
"not",
"file_matches",
"(",
"bad_idea",
",",
"IGNORE_BAD_IDEAS",
")",
"]",
"if",
"filtered_bad_ideas",
":",
"warning",
"(",
"\"you have %s in source control!\\nthat's a bad idea:\"",
"\" auto-generated files should not be versioned\"",
"%",
"filtered_bad_ideas",
"[",
"0",
"]",
")",
"if",
"len",
"(",
"filtered_bad_ideas",
")",
">",
"1",
":",
"warning",
"(",
"\"this also applies to the following:\\n%s\"",
"%",
"format_list",
"(",
"filtered_bad_ideas",
"[",
"1",
":",
"]",
")",
")",
"all_ok",
"=",
"False",
"return",
"all_ok"
] |
Compare a generated source distribution with list of files in a VCS.
Returns True if the manifest is fine.
|
[
"Compare",
"a",
"generated",
"source",
"distribution",
"with",
"list",
"of",
"files",
"in",
"a",
"VCS",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L840-L946
|
mgedmin/check-manifest
|
check_manifest.py
|
zest_releaser_check
|
def zest_releaser_check(data):
"""Check the completeness of MANIFEST.in before the release.
This is an entry point for zest.releaser. See the documentation at
https://zestreleaser.readthedocs.io/en/latest/entrypoints.html
"""
from zest.releaser.utils import ask
source_tree = data['workingdir']
if not is_package(source_tree):
# You can use zest.releaser on things that are not Python packages.
# It's pointless to run check-manifest in those circumstances.
# See https://github.com/mgedmin/check-manifest/issues/9 for details.
return
if not ask("Do you want to run check-manifest?"):
return
try:
if not check_manifest(source_tree):
if not ask("MANIFEST.in has problems. "
" Do you want to continue despite that?", default=False):
sys.exit(1)
except Failure as e:
error(str(e))
if not ask("Something bad happened. "
" Do you want to continue despite that?", default=False):
sys.exit(2)
|
python
|
def zest_releaser_check(data):
from zest.releaser.utils import ask
source_tree = data['workingdir']
if not is_package(source_tree):
return
if not ask("Do you want to run check-manifest?"):
return
try:
if not check_manifest(source_tree):
if not ask("MANIFEST.in has problems. "
" Do you want to continue despite that?", default=False):
sys.exit(1)
except Failure as e:
error(str(e))
if not ask("Something bad happened. "
" Do you want to continue despite that?", default=False):
sys.exit(2)
|
[
"def",
"zest_releaser_check",
"(",
"data",
")",
":",
"from",
"zest",
".",
"releaser",
".",
"utils",
"import",
"ask",
"source_tree",
"=",
"data",
"[",
"'workingdir'",
"]",
"if",
"not",
"is_package",
"(",
"source_tree",
")",
":",
"# You can use zest.releaser on things that are not Python packages.",
"# It's pointless to run check-manifest in those circumstances.",
"# See https://github.com/mgedmin/check-manifest/issues/9 for details.",
"return",
"if",
"not",
"ask",
"(",
"\"Do you want to run check-manifest?\"",
")",
":",
"return",
"try",
":",
"if",
"not",
"check_manifest",
"(",
"source_tree",
")",
":",
"if",
"not",
"ask",
"(",
"\"MANIFEST.in has problems. \"",
"\" Do you want to continue despite that?\"",
",",
"default",
"=",
"False",
")",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"Failure",
"as",
"e",
":",
"error",
"(",
"str",
"(",
"e",
")",
")",
"if",
"not",
"ask",
"(",
"\"Something bad happened. \"",
"\" Do you want to continue despite that?\"",
",",
"default",
"=",
"False",
")",
":",
"sys",
".",
"exit",
"(",
"2",
")"
] |
Check the completeness of MANIFEST.in before the release.
This is an entry point for zest.releaser. See the documentation at
https://zestreleaser.readthedocs.io/en/latest/entrypoints.html
|
[
"Check",
"the",
"completeness",
"of",
"MANIFEST",
".",
"in",
"before",
"the",
"release",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L1000-L1024
|
mgedmin/check-manifest
|
check_manifest.py
|
Git.get_versioned_files
|
def get_versioned_files(cls):
"""List all files versioned by git in the current directory."""
files = cls._git_ls_files()
submodules = cls._list_submodules()
for subdir in submodules:
subdir = os.path.relpath(subdir).replace(os.path.sep, '/')
files += add_prefix_to_each(subdir, cls._git_ls_files(subdir))
return add_directories(files)
|
python
|
def get_versioned_files(cls):
files = cls._git_ls_files()
submodules = cls._list_submodules()
for subdir in submodules:
subdir = os.path.relpath(subdir).replace(os.path.sep, '/')
files += add_prefix_to_each(subdir, cls._git_ls_files(subdir))
return add_directories(files)
|
[
"def",
"get_versioned_files",
"(",
"cls",
")",
":",
"files",
"=",
"cls",
".",
"_git_ls_files",
"(",
")",
"submodules",
"=",
"cls",
".",
"_list_submodules",
"(",
")",
"for",
"subdir",
"in",
"submodules",
":",
"subdir",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"subdir",
")",
".",
"replace",
"(",
"os",
".",
"path",
".",
"sep",
",",
"'/'",
")",
"files",
"+=",
"add_prefix_to_each",
"(",
"subdir",
",",
"cls",
".",
"_git_ls_files",
"(",
"subdir",
")",
")",
"return",
"add_directories",
"(",
"files",
")"
] |
List all files versioned by git in the current directory.
|
[
"List",
"all",
"files",
"versioned",
"by",
"git",
"in",
"the",
"current",
"directory",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L337-L344
|
mgedmin/check-manifest
|
check_manifest.py
|
Bazaar.get_versioned_files
|
def get_versioned_files(cls):
"""List all files versioned in Bazaar in the current directory."""
encoding = cls._get_terminal_encoding()
output = run(['bzr', 'ls', '-VR'], encoding=encoding)
return output.splitlines()
|
python
|
def get_versioned_files(cls):
encoding = cls._get_terminal_encoding()
output = run(['bzr', 'ls', '-VR'], encoding=encoding)
return output.splitlines()
|
[
"def",
"get_versioned_files",
"(",
"cls",
")",
":",
"encoding",
"=",
"cls",
".",
"_get_terminal_encoding",
"(",
")",
"output",
"=",
"run",
"(",
"[",
"'bzr'",
",",
"'ls'",
",",
"'-VR'",
"]",
",",
"encoding",
"=",
"encoding",
")",
"return",
"output",
".",
"splitlines",
"(",
")"
] |
List all files versioned in Bazaar in the current directory.
|
[
"List",
"all",
"files",
"versioned",
"in",
"Bazaar",
"in",
"the",
"current",
"directory",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L404-L408
|
mgedmin/check-manifest
|
check_manifest.py
|
Subversion.get_versioned_files
|
def get_versioned_files(cls):
"""List all files under SVN control in the current directory."""
output = run(['svn', 'st', '-vq', '--xml'], decode=False)
tree = ET.XML(output)
return sorted(entry.get('path') for entry in tree.findall('.//entry')
if cls.is_interesting(entry))
|
python
|
def get_versioned_files(cls):
output = run(['svn', 'st', '-vq', '--xml'], decode=False)
tree = ET.XML(output)
return sorted(entry.get('path') for entry in tree.findall('.//entry')
if cls.is_interesting(entry))
|
[
"def",
"get_versioned_files",
"(",
"cls",
")",
":",
"output",
"=",
"run",
"(",
"[",
"'svn'",
",",
"'st'",
",",
"'-vq'",
",",
"'--xml'",
"]",
",",
"decode",
"=",
"False",
")",
"tree",
"=",
"ET",
".",
"XML",
"(",
"output",
")",
"return",
"sorted",
"(",
"entry",
".",
"get",
"(",
"'path'",
")",
"for",
"entry",
"in",
"tree",
".",
"findall",
"(",
"'.//entry'",
")",
"if",
"cls",
".",
"is_interesting",
"(",
"entry",
")",
")"
] |
List all files under SVN control in the current directory.
|
[
"List",
"all",
"files",
"under",
"SVN",
"control",
"in",
"the",
"current",
"directory",
"."
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L415-L420
|
mgedmin/check-manifest
|
check_manifest.py
|
Subversion.is_interesting
|
def is_interesting(entry):
"""Is this entry interesting?
``entry`` is an XML node representing one entry of the svn status
XML output. It looks like this::
<entry path="unchanged.txt">
<wc-status item="normal" revision="1" props="none">
<commit revision="1">
<author>mg</author>
<date>2015-02-06T07:52:38.163516Z</date>
</commit>
</wc-status>
</entry>
<entry path="added-but-not-committed.txt">
<wc-status item="added" revision="-1" props="none"></wc-status>
</entry>
<entry path="ext">
<wc-status item="external" props="none"></wc-status>
</entry>
<entry path="unknown.txt">
<wc-status props="none" item="unversioned"></wc-status>
</entry>
"""
if entry.get('path') == '.':
return False
status = entry.find('wc-status')
if status is None:
warning('svn status --xml parse error: <entry path="%s"> without'
' <wc-status>' % entry.get('path'))
return False
# For SVN externals we get two entries: one mentioning the
# existence of the external, and one about the status of the external.
if status.get('item') in ('unversioned', 'external'):
return False
return True
|
python
|
def is_interesting(entry):
if entry.get('path') == '.':
return False
status = entry.find('wc-status')
if status is None:
warning('svn status --xml parse error: <entry path="%s"> without'
' <wc-status>' % entry.get('path'))
return False
if status.get('item') in ('unversioned', 'external'):
return False
return True
|
[
"def",
"is_interesting",
"(",
"entry",
")",
":",
"if",
"entry",
".",
"get",
"(",
"'path'",
")",
"==",
"'.'",
":",
"return",
"False",
"status",
"=",
"entry",
".",
"find",
"(",
"'wc-status'",
")",
"if",
"status",
"is",
"None",
":",
"warning",
"(",
"'svn status --xml parse error: <entry path=\"%s\"> without'",
"' <wc-status>'",
"%",
"entry",
".",
"get",
"(",
"'path'",
")",
")",
"return",
"False",
"# For SVN externals we get two entries: one mentioning the",
"# existence of the external, and one about the status of the external.",
"if",
"status",
".",
"get",
"(",
"'item'",
")",
"in",
"(",
"'unversioned'",
",",
"'external'",
")",
":",
"return",
"False",
"return",
"True"
] |
Is this entry interesting?
``entry`` is an XML node representing one entry of the svn status
XML output. It looks like this::
<entry path="unchanged.txt">
<wc-status item="normal" revision="1" props="none">
<commit revision="1">
<author>mg</author>
<date>2015-02-06T07:52:38.163516Z</date>
</commit>
</wc-status>
</entry>
<entry path="added-but-not-committed.txt">
<wc-status item="added" revision="-1" props="none"></wc-status>
</entry>
<entry path="ext">
<wc-status item="external" props="none"></wc-status>
</entry>
<entry path="unknown.txt">
<wc-status props="none" item="unversioned"></wc-status>
</entry>
|
[
"Is",
"this",
"entry",
"interesting?"
] |
train
|
https://github.com/mgedmin/check-manifest/blob/7f787e8272f56c5750670bfb3223509e0df72708/check_manifest.py#L423-L462
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/application_policy.py
|
ApplicationPolicy.validate
|
def validate(self):
"""
Check if the formats of principals and actions are valid.
:return: True, if the policy is valid
:raises: InvalidApplicationPolicyError
"""
if not self.principals:
raise InvalidApplicationPolicyError(error_message='principals not provided')
if not self.actions:
raise InvalidApplicationPolicyError(error_message='actions not provided')
if any(not self._PRINCIPAL_PATTERN.match(p) for p in self.principals):
raise InvalidApplicationPolicyError(
error_message='principal should be 12-digit AWS account ID or "*"')
unsupported_actions = sorted(set(self.actions) - set(self.SUPPORTED_ACTIONS))
if unsupported_actions:
raise InvalidApplicationPolicyError(
error_message='{} not supported'.format(', '.join(unsupported_actions)))
return True
|
python
|
def validate(self):
if not self.principals:
raise InvalidApplicationPolicyError(error_message='principals not provided')
if not self.actions:
raise InvalidApplicationPolicyError(error_message='actions not provided')
if any(not self._PRINCIPAL_PATTERN.match(p) for p in self.principals):
raise InvalidApplicationPolicyError(
error_message='principal should be 12-digit AWS account ID or "*"')
unsupported_actions = sorted(set(self.actions) - set(self.SUPPORTED_ACTIONS))
if unsupported_actions:
raise InvalidApplicationPolicyError(
error_message='{} not supported'.format(', '.join(unsupported_actions)))
return True
|
[
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"principals",
":",
"raise",
"InvalidApplicationPolicyError",
"(",
"error_message",
"=",
"'principals not provided'",
")",
"if",
"not",
"self",
".",
"actions",
":",
"raise",
"InvalidApplicationPolicyError",
"(",
"error_message",
"=",
"'actions not provided'",
")",
"if",
"any",
"(",
"not",
"self",
".",
"_PRINCIPAL_PATTERN",
".",
"match",
"(",
"p",
")",
"for",
"p",
"in",
"self",
".",
"principals",
")",
":",
"raise",
"InvalidApplicationPolicyError",
"(",
"error_message",
"=",
"'principal should be 12-digit AWS account ID or \"*\"'",
")",
"unsupported_actions",
"=",
"sorted",
"(",
"set",
"(",
"self",
".",
"actions",
")",
"-",
"set",
"(",
"self",
".",
"SUPPORTED_ACTIONS",
")",
")",
"if",
"unsupported_actions",
":",
"raise",
"InvalidApplicationPolicyError",
"(",
"error_message",
"=",
"'{} not supported'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"unsupported_actions",
")",
")",
")",
"return",
"True"
] |
Check if the formats of principals and actions are valid.
:return: True, if the policy is valid
:raises: InvalidApplicationPolicyError
|
[
"Check",
"if",
"the",
"formats",
"of",
"principals",
"and",
"actions",
"are",
"valid",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/application_policy.py#L44-L66
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
publish_application
|
def publish_application(template, sar_client=None):
"""
Create a new application or new application version in SAR.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:return: Dictionary containing application id, actions taken, and updated details
:rtype: dict
:raises ValueError
"""
if not template:
raise ValueError('Require SAM template to publish the application')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
template_dict = _get_template_dict(template)
app_metadata = get_app_metadata(template_dict)
stripped_template_dict = strip_app_metadata(template_dict)
stripped_template = yaml_dump(stripped_template_dict)
try:
request = _create_application_request(app_metadata, stripped_template)
response = sar_client.create_application(**request)
application_id = response['ApplicationId']
actions = [CREATE_APPLICATION]
except ClientError as e:
if not _is_conflict_exception(e):
raise _wrap_client_error(e)
# Update the application if it already exists
error_message = e.response['Error']['Message']
application_id = parse_application_id(error_message)
try:
request = _update_application_request(app_metadata, application_id)
sar_client.update_application(**request)
actions = [UPDATE_APPLICATION]
except ClientError as e:
raise _wrap_client_error(e)
# Create application version if semantic version is specified
if app_metadata.semantic_version:
try:
request = _create_application_version_request(app_metadata, application_id, stripped_template)
sar_client.create_application_version(**request)
actions.append(CREATE_APPLICATION_VERSION)
except ClientError as e:
if not _is_conflict_exception(e):
raise _wrap_client_error(e)
return {
'application_id': application_id,
'actions': actions,
'details': _get_publish_details(actions, app_metadata.template_dict)
}
|
python
|
def publish_application(template, sar_client=None):
if not template:
raise ValueError('Require SAM template to publish the application')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
template_dict = _get_template_dict(template)
app_metadata = get_app_metadata(template_dict)
stripped_template_dict = strip_app_metadata(template_dict)
stripped_template = yaml_dump(stripped_template_dict)
try:
request = _create_application_request(app_metadata, stripped_template)
response = sar_client.create_application(**request)
application_id = response['ApplicationId']
actions = [CREATE_APPLICATION]
except ClientError as e:
if not _is_conflict_exception(e):
raise _wrap_client_error(e)
error_message = e.response['Error']['Message']
application_id = parse_application_id(error_message)
try:
request = _update_application_request(app_metadata, application_id)
sar_client.update_application(**request)
actions = [UPDATE_APPLICATION]
except ClientError as e:
raise _wrap_client_error(e)
if app_metadata.semantic_version:
try:
request = _create_application_version_request(app_metadata, application_id, stripped_template)
sar_client.create_application_version(**request)
actions.append(CREATE_APPLICATION_VERSION)
except ClientError as e:
if not _is_conflict_exception(e):
raise _wrap_client_error(e)
return {
'application_id': application_id,
'actions': actions,
'details': _get_publish_details(actions, app_metadata.template_dict)
}
|
[
"def",
"publish_application",
"(",
"template",
",",
"sar_client",
"=",
"None",
")",
":",
"if",
"not",
"template",
":",
"raise",
"ValueError",
"(",
"'Require SAM template to publish the application'",
")",
"if",
"not",
"sar_client",
":",
"sar_client",
"=",
"boto3",
".",
"client",
"(",
"'serverlessrepo'",
")",
"template_dict",
"=",
"_get_template_dict",
"(",
"template",
")",
"app_metadata",
"=",
"get_app_metadata",
"(",
"template_dict",
")",
"stripped_template_dict",
"=",
"strip_app_metadata",
"(",
"template_dict",
")",
"stripped_template",
"=",
"yaml_dump",
"(",
"stripped_template_dict",
")",
"try",
":",
"request",
"=",
"_create_application_request",
"(",
"app_metadata",
",",
"stripped_template",
")",
"response",
"=",
"sar_client",
".",
"create_application",
"(",
"*",
"*",
"request",
")",
"application_id",
"=",
"response",
"[",
"'ApplicationId'",
"]",
"actions",
"=",
"[",
"CREATE_APPLICATION",
"]",
"except",
"ClientError",
"as",
"e",
":",
"if",
"not",
"_is_conflict_exception",
"(",
"e",
")",
":",
"raise",
"_wrap_client_error",
"(",
"e",
")",
"# Update the application if it already exists",
"error_message",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
"application_id",
"=",
"parse_application_id",
"(",
"error_message",
")",
"try",
":",
"request",
"=",
"_update_application_request",
"(",
"app_metadata",
",",
"application_id",
")",
"sar_client",
".",
"update_application",
"(",
"*",
"*",
"request",
")",
"actions",
"=",
"[",
"UPDATE_APPLICATION",
"]",
"except",
"ClientError",
"as",
"e",
":",
"raise",
"_wrap_client_error",
"(",
"e",
")",
"# Create application version if semantic version is specified",
"if",
"app_metadata",
".",
"semantic_version",
":",
"try",
":",
"request",
"=",
"_create_application_version_request",
"(",
"app_metadata",
",",
"application_id",
",",
"stripped_template",
")",
"sar_client",
".",
"create_application_version",
"(",
"*",
"*",
"request",
")",
"actions",
".",
"append",
"(",
"CREATE_APPLICATION_VERSION",
")",
"except",
"ClientError",
"as",
"e",
":",
"if",
"not",
"_is_conflict_exception",
"(",
"e",
")",
":",
"raise",
"_wrap_client_error",
"(",
"e",
")",
"return",
"{",
"'application_id'",
":",
"application_id",
",",
"'actions'",
":",
"actions",
",",
"'details'",
":",
"_get_publish_details",
"(",
"actions",
",",
"app_metadata",
".",
"template_dict",
")",
"}"
] |
Create a new application or new application version in SAR.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:return: Dictionary containing application id, actions taken, and updated details
:rtype: dict
:raises ValueError
|
[
"Create",
"a",
"new",
"application",
"or",
"new",
"application",
"version",
"in",
"SAR",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L21-L76
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
update_application_metadata
|
def update_application_metadata(template, application_id, sar_client=None):
"""
Update the application metadata.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
"""
if not template or not application_id:
raise ValueError('Require SAM template and application ID to update application metadata')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
template_dict = _get_template_dict(template)
app_metadata = get_app_metadata(template_dict)
request = _update_application_request(app_metadata, application_id)
sar_client.update_application(**request)
|
python
|
def update_application_metadata(template, application_id, sar_client=None):
if not template or not application_id:
raise ValueError('Require SAM template and application ID to update application metadata')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
template_dict = _get_template_dict(template)
app_metadata = get_app_metadata(template_dict)
request = _update_application_request(app_metadata, application_id)
sar_client.update_application(**request)
|
[
"def",
"update_application_metadata",
"(",
"template",
",",
"application_id",
",",
"sar_client",
"=",
"None",
")",
":",
"if",
"not",
"template",
"or",
"not",
"application_id",
":",
"raise",
"ValueError",
"(",
"'Require SAM template and application ID to update application metadata'",
")",
"if",
"not",
"sar_client",
":",
"sar_client",
"=",
"boto3",
".",
"client",
"(",
"'serverlessrepo'",
")",
"template_dict",
"=",
"_get_template_dict",
"(",
"template",
")",
"app_metadata",
"=",
"get_app_metadata",
"(",
"template_dict",
")",
"request",
"=",
"_update_application_request",
"(",
"app_metadata",
",",
"application_id",
")",
"sar_client",
".",
"update_application",
"(",
"*",
"*",
"request",
")"
] |
Update the application metadata.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
|
[
"Update",
"the",
"application",
"metadata",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L79-L100
|
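A short sketch of refreshing only the mutable metadata of an existing application, assuming the same top-level export style as above and a known application ARN (the ARN below is a placeholder):

from serverlessrepo import update_application_metadata  # assumed export name

app_arn = 'arn:aws:serverlessrepo:us-east-1:123456789012:applications/demo'  # placeholder
with open('packaged.yaml') as f:
    update_application_metadata(f.read(), app_arn)  # builds a default boto3 client internally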
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_get_template_dict
|
def _get_template_dict(template):
"""
Parse string template and or copy dictionary template.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:return: Template as a dictionary
:rtype: dict
:raises ValueError
"""
if isinstance(template, str):
return parse_template(template)
if isinstance(template, dict):
return copy.deepcopy(template)
raise ValueError('Input template should be a string or dictionary')
|
python
|
def _get_template_dict(template):
if isinstance(template, str):
return parse_template(template)
if isinstance(template, dict):
return copy.deepcopy(template)
raise ValueError('Input template should be a string or dictionary')
|
[
"def",
"_get_template_dict",
"(",
"template",
")",
":",
"if",
"isinstance",
"(",
"template",
",",
"str",
")",
":",
"return",
"parse_template",
"(",
"template",
")",
"if",
"isinstance",
"(",
"template",
",",
"dict",
")",
":",
"return",
"copy",
".",
"deepcopy",
"(",
"template",
")",
"raise",
"ValueError",
"(",
"'Input template should be a string or dictionary'",
")"
] |
Parse string template and or copy dictionary template.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:return: Template as a dictionary
:rtype: dict
:raises ValueError
|
[
"Parse",
"string",
"template",
"and",
"or",
"copy",
"dictionary",
"template",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L103-L119
|
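The helper accepts either a template string or an already-parsed dict and rejects anything else; a quick illustration with toy input (import path taken from this record, the underscore prefix marks it as module-private):

from serverlessrepo.publish import _get_template_dict  # private helper, per this record

_get_template_dict('{"Resources": {}}')   # str  -> parsed via parse_template
_get_template_dict({'Resources': {}})     # dict -> deep copy, caller's dict is untouched
# _get_template_dict(42)                  # anything else raises ValueError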
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_create_application_request
|
def _create_application_request(app_metadata, template):
"""
Construct the request body to create application.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param template: A packaged YAML or JSON SAM template
:type template: str
:return: SAR CreateApplication request body
:rtype: dict
"""
app_metadata.validate(['author', 'description', 'name'])
request = {
'Author': app_metadata.author,
'Description': app_metadata.description,
'HomePageUrl': app_metadata.home_page_url,
'Labels': app_metadata.labels,
'LicenseUrl': app_metadata.license_url,
'Name': app_metadata.name,
'ReadmeUrl': app_metadata.readme_url,
'SemanticVersion': app_metadata.semantic_version,
'SourceCodeUrl': app_metadata.source_code_url,
'SpdxLicenseId': app_metadata.spdx_license_id,
'TemplateBody': template
}
# Remove None values
return {k: v for k, v in request.items() if v}
|
python
|
def _create_application_request(app_metadata, template):
app_metadata.validate(['author', 'description', 'name'])
request = {
'Author': app_metadata.author,
'Description': app_metadata.description,
'HomePageUrl': app_metadata.home_page_url,
'Labels': app_metadata.labels,
'LicenseUrl': app_metadata.license_url,
'Name': app_metadata.name,
'ReadmeUrl': app_metadata.readme_url,
'SemanticVersion': app_metadata.semantic_version,
'SourceCodeUrl': app_metadata.source_code_url,
'SpdxLicenseId': app_metadata.spdx_license_id,
'TemplateBody': template
}
return {k: v for k, v in request.items() if v}
|
[
"def",
"_create_application_request",
"(",
"app_metadata",
",",
"template",
")",
":",
"app_metadata",
".",
"validate",
"(",
"[",
"'author'",
",",
"'description'",
",",
"'name'",
"]",
")",
"request",
"=",
"{",
"'Author'",
":",
"app_metadata",
".",
"author",
",",
"'Description'",
":",
"app_metadata",
".",
"description",
",",
"'HomePageUrl'",
":",
"app_metadata",
".",
"home_page_url",
",",
"'Labels'",
":",
"app_metadata",
".",
"labels",
",",
"'LicenseUrl'",
":",
"app_metadata",
".",
"license_url",
",",
"'Name'",
":",
"app_metadata",
".",
"name",
",",
"'ReadmeUrl'",
":",
"app_metadata",
".",
"readme_url",
",",
"'SemanticVersion'",
":",
"app_metadata",
".",
"semantic_version",
",",
"'SourceCodeUrl'",
":",
"app_metadata",
".",
"source_code_url",
",",
"'SpdxLicenseId'",
":",
"app_metadata",
".",
"spdx_license_id",
",",
"'TemplateBody'",
":",
"template",
"}",
"# Remove None values",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"request",
".",
"items",
"(",
")",
"if",
"v",
"}"
] |
Construct the request body to create application.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param template: A packaged YAML or JSON SAM template
:type template: str
:return: SAR CreateApplication request body
:rtype: dict
|
[
"Construct",
"the",
"request",
"body",
"to",
"create",
"application",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L122-L148
|
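The closing dict comprehension drops every falsy field, so optional metadata that was never set (None, empty string, empty list) simply never reaches the CreateApplication call. The same idiom in isolation:

request = {'Author': 'me', 'HomePageUrl': None, 'Labels': [], 'Name': 'demo'}
{k: v for k, v in request.items() if v}   # -> {'Author': 'me', 'Name': 'demo'}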
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_update_application_request
|
def _update_application_request(app_metadata, application_id):
"""
Construct the request body to update application.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:return: SAR UpdateApplication request body
:rtype: dict
"""
request = {
'ApplicationId': application_id,
'Author': app_metadata.author,
'Description': app_metadata.description,
'HomePageUrl': app_metadata.home_page_url,
'Labels': app_metadata.labels,
'ReadmeUrl': app_metadata.readme_url
}
return {k: v for k, v in request.items() if v}
|
python
|
def _update_application_request(app_metadata, application_id):
request = {
'ApplicationId': application_id,
'Author': app_metadata.author,
'Description': app_metadata.description,
'HomePageUrl': app_metadata.home_page_url,
'Labels': app_metadata.labels,
'ReadmeUrl': app_metadata.readme_url
}
return {k: v for k, v in request.items() if v}
|
[
"def",
"_update_application_request",
"(",
"app_metadata",
",",
"application_id",
")",
":",
"request",
"=",
"{",
"'ApplicationId'",
":",
"application_id",
",",
"'Author'",
":",
"app_metadata",
".",
"author",
",",
"'Description'",
":",
"app_metadata",
".",
"description",
",",
"'HomePageUrl'",
":",
"app_metadata",
".",
"home_page_url",
",",
"'Labels'",
":",
"app_metadata",
".",
"labels",
",",
"'ReadmeUrl'",
":",
"app_metadata",
".",
"readme_url",
"}",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"request",
".",
"items",
"(",
")",
"if",
"v",
"}"
] |
Construct the request body to update application.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:return: SAR UpdateApplication request body
:rtype: dict
|
[
"Construct",
"the",
"request",
"body",
"to",
"update",
"application",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L151-L170
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_create_application_version_request
|
def _create_application_version_request(app_metadata, application_id, template):
"""
Construct the request body to create application version.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param template: A packaged YAML or JSON SAM template
:type template: str
:return: SAR CreateApplicationVersion request body
:rtype: dict
"""
app_metadata.validate(['semantic_version'])
request = {
'ApplicationId': application_id,
'SemanticVersion': app_metadata.semantic_version,
'SourceCodeUrl': app_metadata.source_code_url,
'TemplateBody': template
}
return {k: v for k, v in request.items() if v}
|
python
|
def _create_application_version_request(app_metadata, application_id, template):
app_metadata.validate(['semantic_version'])
request = {
'ApplicationId': application_id,
'SemanticVersion': app_metadata.semantic_version,
'SourceCodeUrl': app_metadata.source_code_url,
'TemplateBody': template
}
return {k: v for k, v in request.items() if v}
|
[
"def",
"_create_application_version_request",
"(",
"app_metadata",
",",
"application_id",
",",
"template",
")",
":",
"app_metadata",
".",
"validate",
"(",
"[",
"'semantic_version'",
"]",
")",
"request",
"=",
"{",
"'ApplicationId'",
":",
"application_id",
",",
"'SemanticVersion'",
":",
"app_metadata",
".",
"semantic_version",
",",
"'SourceCodeUrl'",
":",
"app_metadata",
".",
"source_code_url",
",",
"'TemplateBody'",
":",
"template",
"}",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"request",
".",
"items",
"(",
")",
"if",
"v",
"}"
] |
Construct the request body to create application version.
:param app_metadata: Object containing app metadata
:type app_metadata: ApplicationMetadata
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param template: A packaged YAML or JSON SAM template
:type template: str
:return: SAR CreateApplicationVersion request body
:rtype: dict
|
[
"Construct",
"the",
"request",
"body",
"to",
"create",
"application",
"version",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L173-L193
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_wrap_client_error
|
def _wrap_client_error(e):
"""
Wrap botocore ClientError exception into ServerlessRepoClientError.
:param e: botocore exception
:type e: ClientError
:return: S3PermissionsRequired or InvalidS3UriError or general ServerlessRepoClientError
"""
error_code = e.response['Error']['Code']
message = e.response['Error']['Message']
if error_code == 'BadRequestException':
if "Failed to copy S3 object. Access denied:" in message:
match = re.search('bucket=(.+?), key=(.+?)$', message)
if match:
return S3PermissionsRequired(bucket=match.group(1), key=match.group(2))
if "Invalid S3 URI" in message:
return InvalidS3UriError(message=message)
return ServerlessRepoClientError(message=message)
|
python
|
def _wrap_client_error(e):
error_code = e.response['Error']['Code']
message = e.response['Error']['Message']
if error_code == 'BadRequestException':
if "Failed to copy S3 object. Access denied:" in message:
match = re.search('bucket=(.+?), key=(.+?)$', message)
if match:
return S3PermissionsRequired(bucket=match.group(1), key=match.group(2))
if "Invalid S3 URI" in message:
return InvalidS3UriError(message=message)
return ServerlessRepoClientError(message=message)
|
[
"def",
"_wrap_client_error",
"(",
"e",
")",
":",
"error_code",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"message",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
"if",
"error_code",
"==",
"'BadRequestException'",
":",
"if",
"\"Failed to copy S3 object. Access denied:\"",
"in",
"message",
":",
"match",
"=",
"re",
".",
"search",
"(",
"'bucket=(.+?), key=(.+?)$'",
",",
"message",
")",
"if",
"match",
":",
"return",
"S3PermissionsRequired",
"(",
"bucket",
"=",
"match",
".",
"group",
"(",
"1",
")",
",",
"key",
"=",
"match",
".",
"group",
"(",
"2",
")",
")",
"if",
"\"Invalid S3 URI\"",
"in",
"message",
":",
"return",
"InvalidS3UriError",
"(",
"message",
"=",
"message",
")",
"return",
"ServerlessRepoClientError",
"(",
"message",
"=",
"message",
")"
] |
Wrap botocore ClientError exception into ServerlessRepoClientError.
:param e: botocore exception
:type e: ClientError
:return: S3PermissionsRequired or InvalidS3UriError or general ServerlessRepoClientError
|
[
"Wrap",
"botocore",
"ClientError",
"exception",
"into",
"ServerlessRepoClientError",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L208-L227
|
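The bucket/key extraction depends entirely on the wording of the SAR error message; a hypothetical message shows what the regex captures:

import re

msg = 'Failed to copy S3 object. Access denied: bucket=my-bucket, key=readme.md'  # example text
match = re.search('bucket=(.+?), key=(.+?)$', msg)
match.group(1), match.group(2)   # -> ('my-bucket', 'readme.md')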
awslabs/aws-serverlessrepo-python
|
serverlessrepo/publish.py
|
_get_publish_details
|
def _get_publish_details(actions, app_metadata_template):
"""
Get the changed application details after publishing.
:param actions: Actions taken during publishing
:type actions: list of str
:param app_metadata_template: Original template definitions of app metadata
:type app_metadata_template: dict
:return: Updated fields and values of the application
:rtype: dict
"""
if actions == [CREATE_APPLICATION]:
return {k: v for k, v in app_metadata_template.items() if v}
include_keys = [
ApplicationMetadata.AUTHOR,
ApplicationMetadata.DESCRIPTION,
ApplicationMetadata.HOME_PAGE_URL,
ApplicationMetadata.LABELS,
ApplicationMetadata.README_URL
]
if CREATE_APPLICATION_VERSION in actions:
# SemanticVersion and SourceCodeUrl can only be updated by creating a new version
additional_keys = [ApplicationMetadata.SEMANTIC_VERSION, ApplicationMetadata.SOURCE_CODE_URL]
include_keys.extend(additional_keys)
return {k: v for k, v in app_metadata_template.items() if k in include_keys and v}
|
python
|
def _get_publish_details(actions, app_metadata_template):
if actions == [CREATE_APPLICATION]:
return {k: v for k, v in app_metadata_template.items() if v}
include_keys = [
ApplicationMetadata.AUTHOR,
ApplicationMetadata.DESCRIPTION,
ApplicationMetadata.HOME_PAGE_URL,
ApplicationMetadata.LABELS,
ApplicationMetadata.README_URL
]
if CREATE_APPLICATION_VERSION in actions:
additional_keys = [ApplicationMetadata.SEMANTIC_VERSION, ApplicationMetadata.SOURCE_CODE_URL]
include_keys.extend(additional_keys)
return {k: v for k, v in app_metadata_template.items() if k in include_keys and v}
|
[
"def",
"_get_publish_details",
"(",
"actions",
",",
"app_metadata_template",
")",
":",
"if",
"actions",
"==",
"[",
"CREATE_APPLICATION",
"]",
":",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"app_metadata_template",
".",
"items",
"(",
")",
"if",
"v",
"}",
"include_keys",
"=",
"[",
"ApplicationMetadata",
".",
"AUTHOR",
",",
"ApplicationMetadata",
".",
"DESCRIPTION",
",",
"ApplicationMetadata",
".",
"HOME_PAGE_URL",
",",
"ApplicationMetadata",
".",
"LABELS",
",",
"ApplicationMetadata",
".",
"README_URL",
"]",
"if",
"CREATE_APPLICATION_VERSION",
"in",
"actions",
":",
"# SemanticVersion and SourceCodeUrl can only be updated by creating a new version",
"additional_keys",
"=",
"[",
"ApplicationMetadata",
".",
"SEMANTIC_VERSION",
",",
"ApplicationMetadata",
".",
"SOURCE_CODE_URL",
"]",
"include_keys",
".",
"extend",
"(",
"additional_keys",
")",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"app_metadata_template",
".",
"items",
"(",
")",
"if",
"k",
"in",
"include_keys",
"and",
"v",
"}"
] |
Get the changed application details after publishing.
:param actions: Actions taken during publishing
:type actions: list of str
:param app_metadata_template: Original template definitions of app metadata
:type app_metadata_template: dict
:return: Updated fields and values of the application
:rtype: dict
|
[
"Get",
"the",
"changed",
"application",
"details",
"after",
"publishing",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/publish.py#L230-L256
|
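So a fresh create echoes back every populated metadata field, while an update reports only the fields SAR allows to change in place (plus the version fields when a new version was created). A sketch, assuming the ApplicationMetadata.* constants resolve to the CamelCase metadata keys ('Author', 'SemanticVersion', ...) and that the action constants are importable from the same module:

from serverlessrepo.publish import _get_publish_details, CREATE_APPLICATION, UPDATE_APPLICATION

metadata = {'Name': 'demo', 'Author': 'me', 'SemanticVersion': '1.0.0', 'LicenseUrl': ''}
_get_publish_details([CREATE_APPLICATION], metadata)
# -> {'Name': 'demo', 'Author': 'me', 'SemanticVersion': '1.0.0'}   (empty values dropped)
_get_publish_details([UPDATE_APPLICATION], metadata)
# -> {'Author': 'me'}   (Name and SemanticVersion are not in the updatable key list)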
awslabs/aws-serverlessrepo-python
|
serverlessrepo/application_metadata.py
|
ApplicationMetadata.validate
|
def validate(self, required_props):
"""
Check if the required application metadata properties have been populated.
:param required_props: List of required properties
:type required_props: list
:return: True, if the metadata is valid
:raises: InvalidApplicationMetadataError
"""
missing_props = [p for p in required_props if not getattr(self, p)]
if missing_props:
missing_props_str = ', '.join(sorted(missing_props))
raise InvalidApplicationMetadataError(properties=missing_props_str)
return True
|
python
|
def validate(self, required_props):
missing_props = [p for p in required_props if not getattr(self, p)]
if missing_props:
missing_props_str = ', '.join(sorted(missing_props))
raise InvalidApplicationMetadataError(properties=missing_props_str)
return True
|
[
"def",
"validate",
"(",
"self",
",",
"required_props",
")",
":",
"missing_props",
"=",
"[",
"p",
"for",
"p",
"in",
"required_props",
"if",
"not",
"getattr",
"(",
"self",
",",
"p",
")",
"]",
"if",
"missing_props",
":",
"missing_props_str",
"=",
"', '",
".",
"join",
"(",
"sorted",
"(",
"missing_props",
")",
")",
"raise",
"InvalidApplicationMetadataError",
"(",
"properties",
"=",
"missing_props_str",
")",
"return",
"True"
] |
Check if the required application metadata properties have been populated.
:param required_props: List of required properties
:type required_props: list
:return: True, if the metadata is valid
:raises: InvalidApplicationMetadataError
|
[
"Check",
"if",
"the",
"required",
"application",
"metadata",
"properties",
"have",
"been",
"populated",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/application_metadata.py#L44-L57
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/parser.py
|
yaml_dump
|
def yaml_dump(dict_to_dump):
"""
Dump the dictionary as a YAML document.
:param dict_to_dump: Data to be serialized as YAML
:type dict_to_dump: dict
:return: YAML document
:rtype: str
"""
yaml.SafeDumper.add_representer(OrderedDict, _dict_representer)
return yaml.safe_dump(dict_to_dump, default_flow_style=False)
|
python
|
def yaml_dump(dict_to_dump):
yaml.SafeDumper.add_representer(OrderedDict, _dict_representer)
return yaml.safe_dump(dict_to_dump, default_flow_style=False)
|
[
"def",
"yaml_dump",
"(",
"dict_to_dump",
")",
":",
"yaml",
".",
"SafeDumper",
".",
"add_representer",
"(",
"OrderedDict",
",",
"_dict_representer",
")",
"return",
"yaml",
".",
"safe_dump",
"(",
"dict_to_dump",
",",
"default_flow_style",
"=",
"False",
")"
] |
Dump the dictionary as a YAML document.
:param dict_to_dump: Data to be serialized as YAML
:type dict_to_dump: dict
:return: YAML document
:rtype: str
|
[
"Dump",
"the",
"dictionary",
"as",
"a",
"YAML",
"document",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/parser.py#L61-L71
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/parser.py
|
parse_template
|
def parse_template(template_str):
"""
Parse the SAM template.
:param template_str: A packaged YAML or json CloudFormation template
:type template_str: str
:return: Dictionary with keys defined in the template
:rtype: dict
"""
try:
# PyYAML doesn't support json as well as it should, so if the input
# is actually just json it is better to parse it with the standard
# json parser.
return json.loads(template_str, object_pairs_hook=OrderedDict)
except ValueError:
yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor)
yaml.SafeLoader.add_multi_constructor('!', intrinsics_multi_constructor)
return yaml.safe_load(template_str)
|
python
|
def parse_template(template_str):
try:
return json.loads(template_str, object_pairs_hook=OrderedDict)
except ValueError:
yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _dict_constructor)
yaml.SafeLoader.add_multi_constructor('!', intrinsics_multi_constructor)
return yaml.safe_load(template_str)
|
[
"def",
"parse_template",
"(",
"template_str",
")",
":",
"try",
":",
"# PyYAML doesn't support json as well as it should, so if the input",
"# is actually just json it is better to parse it with the standard",
"# json parser.",
"return",
"json",
".",
"loads",
"(",
"template_str",
",",
"object_pairs_hook",
"=",
"OrderedDict",
")",
"except",
"ValueError",
":",
"yaml",
".",
"SafeLoader",
".",
"add_constructor",
"(",
"yaml",
".",
"resolver",
".",
"BaseResolver",
".",
"DEFAULT_MAPPING_TAG",
",",
"_dict_constructor",
")",
"yaml",
".",
"SafeLoader",
".",
"add_multi_constructor",
"(",
"'!'",
",",
"intrinsics_multi_constructor",
")",
"return",
"yaml",
".",
"safe_load",
"(",
"template_str",
")"
] |
Parse the SAM template.
:param template_str: A packaged YAML or json CloudFormation template
:type template_str: str
:return: Dictionary with keys defined in the template
:rtype: dict
|
[
"Parse",
"the",
"SAM",
"template",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/parser.py#L78-L95
|
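Because the JSON parse is attempted first and YAML is only the fallback, the same entry point handles both packaged formats; CloudFormation '!' short-form tags are handled on the YAML path by intrinsics_multi_constructor. A brief sketch with toy templates:

from serverlessrepo.parser import parse_template   # module path per this record

parse_template('{"Resources": {"Fn": {"Type": "AWS::Serverless::Function"}}}')  # JSON branch
parse_template('Resources:\n  Fn:\n    Type: AWS::Serverless::Function\n')      # YAML fallback
# both calls return a mapping with a 'Resources' key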
awslabs/aws-serverlessrepo-python
|
serverlessrepo/parser.py
|
get_app_metadata
|
def get_app_metadata(template_dict):
"""
Get the application metadata from a SAM template.
:param template_dict: SAM template as a dictionary
:type template_dict: dict
:return: Application metadata as defined in the template
:rtype: ApplicationMetadata
:raises ApplicationMetadataNotFoundError
"""
if SERVERLESS_REPO_APPLICATION in template_dict.get(METADATA, {}):
app_metadata_dict = template_dict.get(METADATA).get(SERVERLESS_REPO_APPLICATION)
return ApplicationMetadata(app_metadata_dict)
raise ApplicationMetadataNotFoundError(
error_message='missing {} section in template Metadata'.format(SERVERLESS_REPO_APPLICATION))
|
python
|
def get_app_metadata(template_dict):
if SERVERLESS_REPO_APPLICATION in template_dict.get(METADATA, {}):
app_metadata_dict = template_dict.get(METADATA).get(SERVERLESS_REPO_APPLICATION)
return ApplicationMetadata(app_metadata_dict)
raise ApplicationMetadataNotFoundError(
error_message='missing {} section in template Metadata'.format(SERVERLESS_REPO_APPLICATION))
|
[
"def",
"get_app_metadata",
"(",
"template_dict",
")",
":",
"if",
"SERVERLESS_REPO_APPLICATION",
"in",
"template_dict",
".",
"get",
"(",
"METADATA",
",",
"{",
"}",
")",
":",
"app_metadata_dict",
"=",
"template_dict",
".",
"get",
"(",
"METADATA",
")",
".",
"get",
"(",
"SERVERLESS_REPO_APPLICATION",
")",
"return",
"ApplicationMetadata",
"(",
"app_metadata_dict",
")",
"raise",
"ApplicationMetadataNotFoundError",
"(",
"error_message",
"=",
"'missing {} section in template Metadata'",
".",
"format",
"(",
"SERVERLESS_REPO_APPLICATION",
")",
")"
] |
Get the application metadata from a SAM template.
:param template_dict: SAM template as a dictionary
:type template_dict: dict
:return: Application metadata as defined in the template
:rtype: ApplicationMetadata
:raises ApplicationMetadataNotFoundError
|
[
"Get",
"the",
"application",
"metadata",
"from",
"a",
"SAM",
"template",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/parser.py#L98-L113
|
awslabs/aws-serverlessrepo-python
|
serverlessrepo/parser.py
|
parse_application_id
|
def parse_application_id(text):
"""
Extract the application id from input text.
:param text: text to parse
:type text: str
:return: application id if found in the input
:rtype: str
"""
result = re.search(APPLICATION_ID_PATTERN, text)
return result.group(0) if result else None
|
python
|
def parse_application_id(text):
result = re.search(APPLICATION_ID_PATTERN, text)
return result.group(0) if result else None
|
[
"def",
"parse_application_id",
"(",
"text",
")",
":",
"result",
"=",
"re",
".",
"search",
"(",
"APPLICATION_ID_PATTERN",
",",
"text",
")",
"return",
"result",
".",
"group",
"(",
"0",
")",
"if",
"result",
"else",
"None"
] |
Extract the application id from input text.
:param text: text to parse
:type text: str
:return: application id if found in the input
:rtype: str
|
[
"Extract",
"the",
"application",
"id",
"from",
"input",
"text",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/parser.py#L116-L126
|
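APPLICATION_ID_PATTERN is defined elsewhere in parser.py and is not shown in this record; assuming it matches SAR application ARNs, the helper pulls the ARN out of a conflict error message:

from serverlessrepo.parser import parse_application_id   # module path per this record

text = ('Application with name demo already exists: '
        'arn:aws:serverlessrepo:us-east-1:123456789012:applications/demo')  # example message
parse_application_id(text)   # -> the ARN substring, or None when nothing matches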
awslabs/aws-serverlessrepo-python
|
serverlessrepo/parser.py
|
strip_app_metadata
|
def strip_app_metadata(template_dict):
"""
Strip the "AWS::ServerlessRepo::Application" metadata section from template.
:param template_dict: SAM template as a dictionary
:type template_dict: dict
:return: stripped template content
:rtype: dict
"""
if SERVERLESS_REPO_APPLICATION not in template_dict.get(METADATA, {}):
return template_dict
template_dict_copy = copy.deepcopy(template_dict)
# strip the whole metadata section if SERVERLESS_REPO_APPLICATION is the only key in it
if not [k for k in template_dict_copy.get(METADATA) if k != SERVERLESS_REPO_APPLICATION]:
template_dict_copy.pop(METADATA, None)
else:
template_dict_copy.get(METADATA).pop(SERVERLESS_REPO_APPLICATION, None)
return template_dict_copy
|
python
|
def strip_app_metadata(template_dict):
if SERVERLESS_REPO_APPLICATION not in template_dict.get(METADATA, {}):
return template_dict
template_dict_copy = copy.deepcopy(template_dict)
if not [k for k in template_dict_copy.get(METADATA) if k != SERVERLESS_REPO_APPLICATION]:
template_dict_copy.pop(METADATA, None)
else:
template_dict_copy.get(METADATA).pop(SERVERLESS_REPO_APPLICATION, None)
return template_dict_copy
|
[
"def",
"strip_app_metadata",
"(",
"template_dict",
")",
":",
"if",
"SERVERLESS_REPO_APPLICATION",
"not",
"in",
"template_dict",
".",
"get",
"(",
"METADATA",
",",
"{",
"}",
")",
":",
"return",
"template_dict",
"template_dict_copy",
"=",
"copy",
".",
"deepcopy",
"(",
"template_dict",
")",
"# strip the whole metadata section if SERVERLESS_REPO_APPLICATION is the only key in it",
"if",
"not",
"[",
"k",
"for",
"k",
"in",
"template_dict_copy",
".",
"get",
"(",
"METADATA",
")",
"if",
"k",
"!=",
"SERVERLESS_REPO_APPLICATION",
"]",
":",
"template_dict_copy",
".",
"pop",
"(",
"METADATA",
",",
"None",
")",
"else",
":",
"template_dict_copy",
".",
"get",
"(",
"METADATA",
")",
".",
"pop",
"(",
"SERVERLESS_REPO_APPLICATION",
",",
"None",
")",
"return",
"template_dict_copy"
] |
Strip the "AWS::ServerlessRepo::Application" metadata section from template.
:param template_dict: SAM template as a dictionary
:type template_dict: dict
:return: stripped template content
:rtype: dict
|
[
"Strip",
"the",
"AWS",
"::",
"ServerlessRepo",
"::",
"Application",
"metadata",
"section",
"from",
"template",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/parser.py#L129-L149
|
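The caller's dict is never mutated: the copy either loses the whole Metadata block (when AWS::ServerlessRepo::Application is its only key) or just that one section. A small illustration, assuming METADATA and SERVERLESS_REPO_APPLICATION are the literal 'Metadata' and 'AWS::ServerlessRepo::Application' keys:

from serverlessrepo.parser import strip_app_metadata   # module path per this record

only_sar = {'Metadata': {'AWS::ServerlessRepo::Application': {'Name': 'demo'}}, 'Resources': {}}
strip_app_metadata(only_sar)   # -> {'Resources': {}}   (Metadata dropped entirely)

mixed = {'Metadata': {'AWS::ServerlessRepo::Application': {}, 'Other': 1}, 'Resources': {}}
strip_app_metadata(mixed)      # -> {'Metadata': {'Other': 1}, 'Resources': {}}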
awslabs/aws-serverlessrepo-python
|
serverlessrepo/permission_helper.py
|
make_application_private
|
def make_application_private(application_id, sar_client=None):
"""
Set the application to be private.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
"""
if not application_id:
raise ValueError('Require application id to make the app private')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
sar_client.put_application_policy(
ApplicationId=application_id,
Statements=[]
)
|
python
|
def make_application_private(application_id, sar_client=None):
if not application_id:
raise ValueError('Require application id to make the app private')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
sar_client.put_application_policy(
ApplicationId=application_id,
Statements=[]
)
|
[
"def",
"make_application_private",
"(",
"application_id",
",",
"sar_client",
"=",
"None",
")",
":",
"if",
"not",
"application_id",
":",
"raise",
"ValueError",
"(",
"'Require application id to make the app private'",
")",
"if",
"not",
"sar_client",
":",
"sar_client",
"=",
"boto3",
".",
"client",
"(",
"'serverlessrepo'",
")",
"sar_client",
".",
"put_application_policy",
"(",
"ApplicationId",
"=",
"application_id",
",",
"Statements",
"=",
"[",
"]",
")"
] |
Set the application to be private.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
|
[
"Set",
"the",
"application",
"to",
"be",
"private",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/permission_helper.py#L32-L51
|
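Making an application private is just a policy reset to an empty statement list; a one-line sketch with a placeholder ARN:

from serverlessrepo.permission_helper import make_application_private   # path per this record

make_application_private('arn:aws:serverlessrepo:us-east-1:123456789012:applications/demo')
# equivalent to put_application_policy(ApplicationId=..., Statements=[])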
awslabs/aws-serverlessrepo-python
|
serverlessrepo/permission_helper.py
|
share_application_with_accounts
|
def share_application_with_accounts(application_id, account_ids, sar_client=None):
"""
Share the application privately with given AWS account IDs.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param account_ids: List of AWS account IDs, or *
:type account_ids: list of str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
"""
if not application_id or not account_ids:
raise ValueError('Require application id and list of AWS account IDs to share the app')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
application_policy = ApplicationPolicy(account_ids, [ApplicationPolicy.DEPLOY])
application_policy.validate()
sar_client.put_application_policy(
ApplicationId=application_id,
Statements=[application_policy.to_statement()]
)
|
python
|
def share_application_with_accounts(application_id, account_ids, sar_client=None):
if not application_id or not account_ids:
raise ValueError('Require application id and list of AWS account IDs to share the app')
if not sar_client:
sar_client = boto3.client('serverlessrepo')
application_policy = ApplicationPolicy(account_ids, [ApplicationPolicy.DEPLOY])
application_policy.validate()
sar_client.put_application_policy(
ApplicationId=application_id,
Statements=[application_policy.to_statement()]
)
|
[
"def",
"share_application_with_accounts",
"(",
"application_id",
",",
"account_ids",
",",
"sar_client",
"=",
"None",
")",
":",
"if",
"not",
"application_id",
"or",
"not",
"account_ids",
":",
"raise",
"ValueError",
"(",
"'Require application id and list of AWS account IDs to share the app'",
")",
"if",
"not",
"sar_client",
":",
"sar_client",
"=",
"boto3",
".",
"client",
"(",
"'serverlessrepo'",
")",
"application_policy",
"=",
"ApplicationPolicy",
"(",
"account_ids",
",",
"[",
"ApplicationPolicy",
".",
"DEPLOY",
"]",
")",
"application_policy",
".",
"validate",
"(",
")",
"sar_client",
".",
"put_application_policy",
"(",
"ApplicationId",
"=",
"application_id",
",",
"Statements",
"=",
"[",
"application_policy",
".",
"to_statement",
"(",
")",
"]",
")"
] |
Share the application privately with given AWS account IDs.
:param application_id: The Amazon Resource Name (ARN) of the application
:type application_id: str
:param account_ids: List of AWS account IDs, or *
:type account_ids: list of str
:param sar_client: The boto3 client used to access SAR
:type sar_client: boto3.client
:raises ValueError
|
[
"Share",
"the",
"application",
"privately",
"with",
"given",
"AWS",
"account",
"IDs",
"."
] |
train
|
https://github.com/awslabs/aws-serverlessrepo-python/blob/e2126cee0191266cfb8a3a2bc3270bf50330907c/serverlessrepo/permission_helper.py#L54-L77
|
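Sharing builds a single Deploy statement for the given accounts and validates it before calling the API; a short sketch with placeholder account IDs:

from serverlessrepo.permission_helper import share_application_with_accounts  # path per this record

share_application_with_accounts(
    'arn:aws:serverlessrepo:us-east-1:123456789012:applications/demo',   # placeholder ARN
    ['111111111111', '222222222222'],   # per the docstring, '*' is also accepted
)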
asifpy/django-crudbuilder
|
crudbuilder/registry.py
|
CrudBuilderRegistry.extract_args
|
def extract_args(cls, *args):
"""
Takes any arguments like a model and crud, or just one of
those, in any order, and return a model and crud.
"""
model = None
crudbuilder = None
for arg in args:
if issubclass(arg, models.Model):
model = arg
else:
crudbuilder = arg
return [model, crudbuilder]
|
python
|
def extract_args(cls, *args):
model = None
crudbuilder = None
for arg in args:
if issubclass(arg, models.Model):
model = arg
else:
crudbuilder = arg
return [model, crudbuilder]
|
[
"def",
"extract_args",
"(",
"cls",
",",
"*",
"args",
")",
":",
"model",
"=",
"None",
"crudbuilder",
"=",
"None",
"for",
"arg",
"in",
"args",
":",
"if",
"issubclass",
"(",
"arg",
",",
"models",
".",
"Model",
")",
":",
"model",
"=",
"arg",
"else",
":",
"crudbuilder",
"=",
"arg",
"return",
"[",
"model",
",",
"crudbuilder",
"]"
] |
Takes any arguments like a model and crud, or just one of
those, in any order, and return a model and crud.
|
[
"Takes",
"any",
"arguments",
"like",
"a",
"model",
"and",
"crud",
"or",
"just",
"one",
"of",
"those",
"in",
"any",
"order",
"and",
"return",
"a",
"model",
"and",
"crud",
"."
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/registry.py#L18-L32
|
asifpy/django-crudbuilder
|
crudbuilder/registry.py
|
CrudBuilderRegistry.register
|
def register(self, *args, **kwargs):
"""
Register a crud.
Two unordered arguments are accepted, at least one should be passed:
- a model,
- a crudbuilder class
"""
assert len(args) <= 2, 'register takes at most 2 args'
assert len(args) > 0, 'register takes at least 1 arg'
model, crudbuilder = self.__class__.extract_args(*args)
if not issubclass(model, models.Model):
msg = "First argument should be Django Model"
raise NotModelException(msg)
key = self._model_key(model, crudbuilder)
if key in self:
msg = "Key '{key}' has already been registered.".format(
key=key
)
raise AlreadyRegistered(msg)
self.__setitem__(key, crudbuilder)
return crudbuilder
|
python
|
def register(self, *args, **kwargs):
assert len(args) <= 2, 'register takes at most 2 args'
assert len(args) > 0, 'register takes at least 1 arg'
model, crudbuilder = self.__class__.extract_args(*args)
if not issubclass(model, models.Model):
msg = "First argument should be Django Model"
raise NotModelException(msg)
key = self._model_key(model, crudbuilder)
if key in self:
msg = "Key '{key}' has already been registered.".format(
key=key
)
raise AlreadyRegistered(msg)
self.__setitem__(key, crudbuilder)
return crudbuilder
|
[
"def",
"register",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"len",
"(",
"args",
")",
"<=",
"2",
",",
"'register takes at most 2 args'",
"assert",
"len",
"(",
"args",
")",
">",
"0",
",",
"'register takes at least 1 arg'",
"model",
",",
"crudbuilder",
"=",
"self",
".",
"__class__",
".",
"extract_args",
"(",
"*",
"args",
")",
"if",
"not",
"issubclass",
"(",
"model",
",",
"models",
".",
"Model",
")",
":",
"msg",
"=",
"\"First argument should be Django Model\"",
"raise",
"NotModelException",
"(",
"msg",
")",
"key",
"=",
"self",
".",
"_model_key",
"(",
"model",
",",
"crudbuilder",
")",
"if",
"key",
"in",
"self",
":",
"msg",
"=",
"\"Key '{key}' has already been registered.\"",
".",
"format",
"(",
"key",
"=",
"key",
")",
"raise",
"AlreadyRegistered",
"(",
"msg",
")",
"self",
".",
"__setitem__",
"(",
"key",
",",
"crudbuilder",
")",
"return",
"crudbuilder"
] |
Register a crud.
Two unordered arguments are accepted, at least one should be passed:
- a model,
- a crudbuilder class
|
[
"Register",
"a",
"crud",
"."
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/registry.py#L34-L61
|
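A hedged sketch of exercising register() directly; Person and PersonCrud below are made-up stand-ins, and crudbuilder normally discovers and registers cruds for you, so manual registration like this is illustrative only:

from crudbuilder.registry import CrudBuilderRegistry   # module path per this record

registry = CrudBuilderRegistry()
registry.register(Person, PersonCrud)   # a Django model and a crud class, in either order
# passing a non-model as the model argument raises NotModelException,
# and a second register() call for the same key raises AlreadyRegistered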
asifpy/django-crudbuilder
|
crudbuilder/formset.py
|
BaseInlineFormset.construct_formset
|
def construct_formset(self):
"""
Returns an instance of the inline formset
"""
if not self.inline_model or not self.parent_model:
msg = "Parent and Inline models are required in {}".format(self.__class__.__name__)
raise NotModelException(msg)
return inlineformset_factory(
self.parent_model,
self.inline_model,
**self.get_factory_kwargs())
|
python
|
def construct_formset(self):
if not self.inline_model or not self.parent_model:
msg = "Parent and Inline models are required in {}".format(self.__class__.__name__)
raise NotModelException(msg)
return inlineformset_factory(
self.parent_model,
self.inline_model,
**self.get_factory_kwargs())
|
[
"def",
"construct_formset",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"inline_model",
"or",
"not",
"self",
".",
"parent_model",
":",
"msg",
"=",
"\"Parent and Inline models are required in {}\"",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
"raise",
"NotModelException",
"(",
"msg",
")",
"return",
"inlineformset_factory",
"(",
"self",
".",
"parent_model",
",",
"self",
".",
"inline_model",
",",
"*",
"*",
"self",
".",
"get_factory_kwargs",
"(",
")",
")"
] |
Returns an instance of the inline formset
|
[
"Returns",
"an",
"instance",
"of",
"the",
"inline",
"formset"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/formset.py#L18-L29
|
asifpy/django-crudbuilder
|
crudbuilder/formset.py
|
BaseInlineFormset.get_factory_kwargs
|
def get_factory_kwargs(self):
"""
Returns the keyword arguments for calling the formset factory
"""
kwargs = {}
kwargs.update({
'can_delete': self.can_delete,
'extra': self.extra,
'exclude': self.exclude,
'fields': self.fields,
'formfield_callback': self.formfield_callback,
'fk_name': self.fk_name,
})
if self.formset_class:
kwargs['formset'] = self.formset_class
if self.child_form:
kwargs['form'] = self.child_form
return kwargs
|
python
|
def get_factory_kwargs(self):
kwargs = {}
kwargs.update({
'can_delete': self.can_delete,
'extra': self.extra,
'exclude': self.exclude,
'fields': self.fields,
'formfield_callback': self.formfield_callback,
'fk_name': self.fk_name,
})
if self.formset_class:
kwargs['formset'] = self.formset_class
if self.child_form:
kwargs['form'] = self.child_form
return kwargs
|
[
"def",
"get_factory_kwargs",
"(",
"self",
")",
":",
"kwargs",
"=",
"{",
"}",
"kwargs",
".",
"update",
"(",
"{",
"'can_delete'",
":",
"self",
".",
"can_delete",
",",
"'extra'",
":",
"self",
".",
"extra",
",",
"'exclude'",
":",
"self",
".",
"exclude",
",",
"'fields'",
":",
"self",
".",
"fields",
",",
"'formfield_callback'",
":",
"self",
".",
"formfield_callback",
",",
"'fk_name'",
":",
"self",
".",
"fk_name",
",",
"}",
")",
"if",
"self",
".",
"formset_class",
":",
"kwargs",
"[",
"'formset'",
"]",
"=",
"self",
".",
"formset_class",
"if",
"self",
".",
"child_form",
":",
"kwargs",
"[",
"'form'",
"]",
"=",
"self",
".",
"child_form",
"return",
"kwargs"
] |
Returns the keyword arguments for calling the formset factory
|
[
"Returns",
"the",
"keyword",
"arguments",
"for",
"calling",
"the",
"formset",
"factory"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/formset.py#L31-L49
|
asifpy/django-crudbuilder
|
crudbuilder/helpers.py
|
plural
|
def plural(text):
"""
>>> plural('activity')
'activities'
"""
aberrant = {
'knife': 'knives',
'self': 'selves',
'elf': 'elves',
'life': 'lives',
'hoof': 'hooves',
'leaf': 'leaves',
'echo': 'echoes',
'embargo': 'embargoes',
'hero': 'heroes',
'potato': 'potatoes',
'tomato': 'tomatoes',
'torpedo': 'torpedoes',
'veto': 'vetoes',
'child': 'children',
'woman': 'women',
'man': 'men',
'person': 'people',
'goose': 'geese',
'mouse': 'mice',
'barracks': 'barracks',
'deer': 'deer',
'nucleus': 'nuclei',
'syllabus': 'syllabi',
'focus': 'foci',
'fungus': 'fungi',
'cactus': 'cacti',
'phenomenon': 'phenomena',
'index': 'indices',
'appendix': 'appendices',
'criterion': 'criteria',
}
if text in aberrant:
result = '%s' % aberrant[text]
else:
postfix = 's'
if len(text) > 2:
vowels = 'aeiou'
if text[-2:] in ('ch', 'sh'):
postfix = 'es'
elif text[-1:] == 'y':
if (text[-2:-1] in vowels) or (text[0] in string.ascii_uppercase):
postfix = 's'
else:
postfix = 'ies'
text = text[:-1]
elif text[-2:] == 'is':
postfix = 'es'
text = text[:-2]
elif text[-1:] in ('s', 'z', 'x'):
postfix = 'es'
result = '%s%s' % (text, postfix)
return result
|
python
|
def plural(text):
aberrant = {
'knife': 'knives',
'self': 'selves',
'elf': 'elves',
'life': 'lives',
'hoof': 'hooves',
'leaf': 'leaves',
'echo': 'echoes',
'embargo': 'embargoes',
'hero': 'heroes',
'potato': 'potatoes',
'tomato': 'tomatoes',
'torpedo': 'torpedoes',
'veto': 'vetoes',
'child': 'children',
'woman': 'women',
'man': 'men',
'person': 'people',
'goose': 'geese',
'mouse': 'mice',
'barracks': 'barracks',
'deer': 'deer',
'nucleus': 'nuclei',
'syllabus': 'syllabi',
'focus': 'foci',
'fungus': 'fungi',
'cactus': 'cacti',
'phenomenon': 'phenomena',
'index': 'indices',
'appendix': 'appendices',
'criterion': 'criteria',
}
if text in aberrant:
result = '%s' % aberrant[text]
else:
postfix = 's'
if len(text) > 2:
vowels = 'aeiou'
if text[-2:] in ('ch', 'sh'):
postfix = 'es'
elif text[-1:] == 'y':
if (text[-2:-1] in vowels) or (text[0] in string.ascii_uppercase):
postfix = 's'
else:
postfix = 'ies'
text = text[:-1]
elif text[-2:] == 'is':
postfix = 'es'
text = text[:-2]
elif text[-1:] in ('s', 'z', 'x'):
postfix = 'es'
result = '%s%s' % (text, postfix)
return result
|
[
"def",
"plural",
"(",
"text",
")",
":",
"aberrant",
"=",
"{",
"'knife'",
":",
"'knives'",
",",
"'self'",
":",
"'selves'",
",",
"'elf'",
":",
"'elves'",
",",
"'life'",
":",
"'lives'",
",",
"'hoof'",
":",
"'hooves'",
",",
"'leaf'",
":",
"'leaves'",
",",
"'echo'",
":",
"'echoes'",
",",
"'embargo'",
":",
"'embargoes'",
",",
"'hero'",
":",
"'heroes'",
",",
"'potato'",
":",
"'potatoes'",
",",
"'tomato'",
":",
"'tomatoes'",
",",
"'torpedo'",
":",
"'torpedoes'",
",",
"'veto'",
":",
"'vetoes'",
",",
"'child'",
":",
"'children'",
",",
"'woman'",
":",
"'women'",
",",
"'man'",
":",
"'men'",
",",
"'person'",
":",
"'people'",
",",
"'goose'",
":",
"'geese'",
",",
"'mouse'",
":",
"'mice'",
",",
"'barracks'",
":",
"'barracks'",
",",
"'deer'",
":",
"'deer'",
",",
"'nucleus'",
":",
"'nuclei'",
",",
"'syllabus'",
":",
"'syllabi'",
",",
"'focus'",
":",
"'foci'",
",",
"'fungus'",
":",
"'fungi'",
",",
"'cactus'",
":",
"'cacti'",
",",
"'phenomenon'",
":",
"'phenomena'",
",",
"'index'",
":",
"'indices'",
",",
"'appendix'",
":",
"'appendices'",
",",
"'criterion'",
":",
"'criteria'",
",",
"}",
"if",
"text",
"in",
"aberrant",
":",
"result",
"=",
"'%s'",
"%",
"aberrant",
"[",
"text",
"]",
"else",
":",
"postfix",
"=",
"'s'",
"if",
"len",
"(",
"text",
")",
">",
"2",
":",
"vowels",
"=",
"'aeiou'",
"if",
"text",
"[",
"-",
"2",
":",
"]",
"in",
"(",
"'ch'",
",",
"'sh'",
")",
":",
"postfix",
"=",
"'es'",
"elif",
"text",
"[",
"-",
"1",
":",
"]",
"==",
"'y'",
":",
"if",
"(",
"text",
"[",
"-",
"2",
":",
"-",
"1",
"]",
"in",
"vowels",
")",
"or",
"(",
"text",
"[",
"0",
"]",
"in",
"string",
".",
"ascii_uppercase",
")",
":",
"postfix",
"=",
"'s'",
"else",
":",
"postfix",
"=",
"'ies'",
"text",
"=",
"text",
"[",
":",
"-",
"1",
"]",
"elif",
"text",
"[",
"-",
"2",
":",
"]",
"==",
"'is'",
":",
"postfix",
"=",
"'es'",
"text",
"=",
"text",
"[",
":",
"-",
"2",
"]",
"elif",
"text",
"[",
"-",
"1",
":",
"]",
"in",
"(",
"'s'",
",",
"'z'",
",",
"'x'",
")",
":",
"postfix",
"=",
"'es'",
"result",
"=",
"'%s%s'",
"%",
"(",
"text",
",",
"postfix",
")",
"return",
"result"
] |
>>> plural('activity')
'activities'
|
[
">>>",
"plural",
"(",
"activity",
")",
"activities"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/helpers.py#L19-L80
|
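Beyond the doctest in the docstring, the suffix rules cover the usual English patterns; a few more illustrative calls (module path per this record):

from crudbuilder.helpers import plural

plural('church')    # 'churches'   (-ch -> -ches)
plural('box')       # 'boxes'      (final s/z/x -> -es)
plural('analysis')  # 'analyses'   (-is -> -es)
plural('child')     # 'children'   (irregular, from the aberrant table)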
asifpy/django-crudbuilder
|
crudbuilder/helpers.py
|
mixedToUnder
|
def mixedToUnder(s): # pragma: no cover
"""
Sample:
>>> mixedToUnder("FooBarBaz")
'foo_bar_baz'
Special case for ID:
>>> mixedToUnder("FooBarID")
'foo_bar_id'
"""
if s.endswith('ID'):
return mixedToUnder(s[:-2] + "_id")
trans = _mixedToUnderRE.sub(mixedToUnderSub, s)
if trans.startswith('_'):
trans = trans[1:]
return trans
|
python
|
def mixedToUnder(s):
if s.endswith('ID'):
return mixedToUnder(s[:-2] + "_id")
trans = _mixedToUnderRE.sub(mixedToUnderSub, s)
if trans.startswith('_'):
trans = trans[1:]
return trans
|
[
"def",
"mixedToUnder",
"(",
"s",
")",
":",
"# pragma: no cover",
"if",
"s",
".",
"endswith",
"(",
"'ID'",
")",
":",
"return",
"mixedToUnder",
"(",
"s",
"[",
":",
"-",
"2",
"]",
"+",
"\"_id\"",
")",
"trans",
"=",
"_mixedToUnderRE",
".",
"sub",
"(",
"mixedToUnderSub",
",",
"s",
")",
"if",
"trans",
".",
"startswith",
"(",
"'_'",
")",
":",
"trans",
"=",
"trans",
"[",
"1",
":",
"]",
"return",
"trans"
] |
Sample:
>>> mixedToUnder("FooBarBaz")
'foo_bar_baz'
Special case for ID:
>>> mixedToUnder("FooBarID")
'foo_bar_id'
|
[
"Sample",
":",
">>>",
"mixedToUnder",
"(",
"FooBarBaz",
")",
"foo_bar_baz"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/helpers.py#L88-L103
|
asifpy/django-crudbuilder
|
crudbuilder/helpers.py
|
underToMixed
|
def underToMixed(name):
"""
>>> underToMixed('some_large_model_name_perhaps')
'someLargeModelNamePerhaps'
>>> underToMixed('exception_for_id')
'exceptionForID'
"""
if name.endswith('_id'):
return underToMixed(name[:-3] + "ID")
return _underToMixedRE.sub(lambda m: m.group(0)[1].upper(),
name)
|
python
|
def underToMixed(name):
if name.endswith('_id'):
return underToMixed(name[:-3] + "ID")
return _underToMixedRE.sub(lambda m: m.group(0)[1].upper(),
name)
|
[
"def",
"underToMixed",
"(",
"name",
")",
":",
"if",
"name",
".",
"endswith",
"(",
"'_id'",
")",
":",
"return",
"underToMixed",
"(",
"name",
"[",
":",
"-",
"3",
"]",
"+",
"\"ID\"",
")",
"return",
"_underToMixedRE",
".",
"sub",
"(",
"lambda",
"m",
":",
"m",
".",
"group",
"(",
"0",
")",
"[",
"1",
"]",
".",
"upper",
"(",
")",
",",
"name",
")"
] |
>>> underToMixed('some_large_model_name_perhaps')
'someLargeModelNamePerhaps'
>>> underToMixed('exception_for_id')
'exceptionForID'
|
[
">>>",
"underToMixed",
"(",
"some_large_model_name_perhaps",
")",
"someLargeModelNamePerhaps"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/helpers.py#L132-L143
|
asifpy/django-crudbuilder
|
crudbuilder/helpers.py
|
import_crud
|
def import_crud(app):
'''
Import crud module and register all model cruds which it contains
'''
try:
app_path = import_module(app).__path__
except (AttributeError, ImportError):
return None
try:
imp.find_module('crud', app_path)
except ImportError:
return None
module = import_module("%s.crud" % app)
return module
|
python
|
def import_crud(app):
try:
app_path = import_module(app).__path__
except (AttributeError, ImportError):
return None
try:
imp.find_module('crud', app_path)
except ImportError:
return None
module = import_module("%s.crud" % app)
return module
|
[
"def",
"import_crud",
"(",
"app",
")",
":",
"try",
":",
"app_path",
"=",
"import_module",
"(",
"app",
")",
".",
"__path__",
"except",
"(",
"AttributeError",
",",
"ImportError",
")",
":",
"return",
"None",
"try",
":",
"imp",
".",
"find_module",
"(",
"'crud'",
",",
"app_path",
")",
"except",
"ImportError",
":",
"return",
"None",
"module",
"=",
"import_module",
"(",
"\"%s.crud\"",
"%",
"app",
")",
"return",
"module"
] |
Import crud module and register all model cruds which it contains
|
[
"Import",
"crud",
"module",
"and",
"register",
"all",
"model",
"cruds",
"which",
"it",
"contains"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/helpers.py#L162-L179
|
asifpy/django-crudbuilder
|
crudbuilder/abstract.py
|
BaseBuilder.get_model_class
|
def get_model_class(self):
"""Returns model class"""
try:
c = ContentType.objects.get(app_label=self.app, model=self.model)
except ContentType.DoesNotExist:
# try another kind of resolution
# fixes a situation where a proxy model is defined in some external app.
if django.VERSION >= (1, 7):
return apps.get_model(self.app, self.model)
else:
return c.model_class()
|
python
|
def get_model_class(self):
try:
c = ContentType.objects.get(app_label=self.app, model=self.model)
except ContentType.DoesNotExist:
if django.VERSION >= (1, 7):
return apps.get_model(self.app, self.model)
else:
return c.model_class()
|
[
"def",
"get_model_class",
"(",
"self",
")",
":",
"try",
":",
"c",
"=",
"ContentType",
".",
"objects",
".",
"get",
"(",
"app_label",
"=",
"self",
".",
"app",
",",
"model",
"=",
"self",
".",
"model",
")",
"except",
"ContentType",
".",
"DoesNotExist",
":",
"# try another kind of resolution",
"# fixes a situation where a proxy model is defined in some external app.",
"if",
"django",
".",
"VERSION",
">=",
"(",
"1",
",",
"7",
")",
":",
"return",
"apps",
".",
"get_model",
"(",
"self",
".",
"app",
",",
"self",
".",
"model",
")",
"else",
":",
"return",
"c",
".",
"model_class",
"(",
")"
] |
Returns model class
|
[
"Returns",
"model",
"class"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/abstract.py#L53-L63
|
asifpy/django-crudbuilder
|
crudbuilder/templatetags/crudbuilder.py
|
get_verbose_field_name
|
def get_verbose_field_name(instance, field_name):
"""
Returns verbose_name for a field.
"""
fields = [field.name for field in instance._meta.fields]
if field_name in fields:
return instance._meta.get_field(field_name).verbose_name
else:
return field_name
|
python
|
def get_verbose_field_name(instance, field_name):
fields = [field.name for field in instance._meta.fields]
if field_name in fields:
return instance._meta.get_field(field_name).verbose_name
else:
return field_name
|
[
"def",
"get_verbose_field_name",
"(",
"instance",
",",
"field_name",
")",
":",
"fields",
"=",
"[",
"field",
".",
"name",
"for",
"field",
"in",
"instance",
".",
"_meta",
".",
"fields",
"]",
"if",
"field_name",
"in",
"fields",
":",
"return",
"instance",
".",
"_meta",
".",
"get_field",
"(",
"field_name",
")",
".",
"verbose_name",
"else",
":",
"return",
"field_name"
] |
Returns verbose_name for a field.
|
[
"Returns",
"verbose_name",
"for",
"a",
"field",
"."
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/templatetags/crudbuilder.py#L63-L71
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_modelform
|
def generate_modelform(self):
"""Generate modelform from Django modelform_factory"""
model_class = self.get_model_class
excludes = self.modelform_excludes if self.modelform_excludes else []
_ObjectForm = modelform_factory(model_class, exclude=excludes)
return _ObjectForm
|
python
|
def generate_modelform(self):
model_class = self.get_model_class
excludes = self.modelform_excludes if self.modelform_excludes else []
_ObjectForm = modelform_factory(model_class, exclude=excludes)
return _ObjectForm
|
[
"def",
"generate_modelform",
"(",
"self",
")",
":",
"model_class",
"=",
"self",
".",
"get_model_class",
"excludes",
"=",
"self",
".",
"modelform_excludes",
"if",
"self",
".",
"modelform_excludes",
"else",
"[",
"]",
"_ObjectForm",
"=",
"modelform_factory",
"(",
"model_class",
",",
"exclude",
"=",
"excludes",
")",
"return",
"_ObjectForm"
] |
Generate modelform from Django modelform_factory
|
[
"Generate",
"modelform",
"from",
"Django",
"modelform_factory"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L58-L64
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.get_template
|
def get_template(self, tname):
"""
- Get custom template from CRUD class, if it is defined in it
- No custom template in CRUD class, then use the default template
"""
if self.custom_templates and self.custom_templates.get(tname, None):
return self.custom_templates.get(tname)
elif self.inlineformset:
return 'crudbuilder/inline/{}.html'.format(tname)
else:
return 'crudbuilder/instance/{}.html'.format(tname)
|
python
|
def get_template(self, tname):
if self.custom_templates and self.custom_templates.get(tname, None):
return self.custom_templates.get(tname)
elif self.inlineformset:
return 'crudbuilder/inline/{}.html'.format(tname)
else:
return 'crudbuilder/instance/{}.html'.format(tname)
|
[
"def",
"get_template",
"(",
"self",
",",
"tname",
")",
":",
"if",
"self",
".",
"custom_templates",
"and",
"self",
".",
"custom_templates",
".",
"get",
"(",
"tname",
",",
"None",
")",
":",
"return",
"self",
".",
"custom_templates",
".",
"get",
"(",
"tname",
")",
"elif",
"self",
".",
"inlineformset",
":",
"return",
"'crudbuilder/inline/{}.html'",
".",
"format",
"(",
"tname",
")",
"else",
":",
"return",
"'crudbuilder/instance/{}.html'",
".",
"format",
"(",
"tname",
")"
] |
- Get custom template from CRUD class, if it is defined in it
- No custom template in CRUD class, then use the default template
|
[
"-",
"Get",
"custom",
"template",
"from",
"CRUD",
"class",
"if",
"it",
"is",
"defined",
"in",
"it",
"-",
"No",
"custom",
"template",
"in",
"CRUD",
"class",
"then",
"use",
"the",
"default",
"template"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L66-L77
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_list_view
|
def generate_list_view(self):
"""Generate class based view for ListView"""
name = model_class_form(self.model + 'ListView')
list_args = dict(
model=self.get_model_class,
context_object_name=plural(self.model),
template_name=self.get_template('list'),
table_class=self.get_actual_table(),
context_table_name='table_objects',
crud=self.crud,
permissions=self.view_permission('list'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
table_pagination={'per_page': self.tables2_pagination or 10},
custom_queryset=self.custom_queryset,
custom_context=self.custom_context,
custom_postfix_url=self.custom_postfix_url
)
list_class = type(
name,
(BaseListViewMixin, SingleTableView),
list_args
)
self.classes[name] = list_class
return list_class
|
python
|
def generate_list_view(self):
name = model_class_form(self.model + 'ListView')
list_args = dict(
model=self.get_model_class,
context_object_name=plural(self.model),
template_name=self.get_template('list'),
table_class=self.get_actual_table(),
context_table_name='table_objects',
crud=self.crud,
permissions=self.view_permission('list'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
table_pagination={'per_page': self.tables2_pagination or 10},
custom_queryset=self.custom_queryset,
custom_context=self.custom_context,
custom_postfix_url=self.custom_postfix_url
)
list_class = type(
name,
(BaseListViewMixin, SingleTableView),
list_args
)
self.classes[name] = list_class
return list_class
|
[
"def",
"generate_list_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'ListView'",
")",
"list_args",
"=",
"dict",
"(",
"model",
"=",
"self",
".",
"get_model_class",
",",
"context_object_name",
"=",
"plural",
"(",
"self",
".",
"model",
")",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'list'",
")",
",",
"table_class",
"=",
"self",
".",
"get_actual_table",
"(",
")",
",",
"context_table_name",
"=",
"'table_objects'",
",",
"crud",
"=",
"self",
".",
"crud",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'list'",
")",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"table_pagination",
"=",
"{",
"'per_page'",
":",
"self",
".",
"tables2_pagination",
"or",
"10",
"}",
",",
"custom_queryset",
"=",
"self",
".",
"custom_queryset",
",",
"custom_context",
"=",
"self",
".",
"custom_context",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"list_class",
"=",
"type",
"(",
"name",
",",
"(",
"BaseListViewMixin",
",",
"SingleTableView",
")",
",",
"list_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"list_class",
"return",
"list_class"
] |
Generate class based view for ListView
|
[
"Generate",
"class",
"based",
"view",
"for",
"ListView"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L85-L111
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_create_view
|
def generate_create_view(self):
"""Generate class based view for CreateView"""
name = model_class_form(self.model + 'CreateView')
create_args = dict(
form_class=self.get_actual_form('create'),
model=self.get_model_class,
template_name=self.get_template('create'),
permissions=self.view_permission('create'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_form=self.createupdate_forms or self.custom_modelform,
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), CreateView]
if self.custom_create_view_mixin:
parent_classes.insert(0, self.custom_create_view_mixin)
create_class = type(
name,
tuple(parent_classes),
create_args
)
self.classes[name] = create_class
return create_class
|
python
|
def generate_create_view(self):
name = model_class_form(self.model + 'CreateView')
create_args = dict(
form_class=self.get_actual_form('create'),
model=self.get_model_class,
template_name=self.get_template('create'),
permissions=self.view_permission('create'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_form=self.createupdate_forms or self.custom_modelform,
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), CreateView]
if self.custom_create_view_mixin:
parent_classes.insert(0, self.custom_create_view_mixin)
create_class = type(
name,
tuple(parent_classes),
create_args
)
self.classes[name] = create_class
return create_class
|
[
"def",
"generate_create_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'CreateView'",
")",
"create_args",
"=",
"dict",
"(",
"form_class",
"=",
"self",
".",
"get_actual_form",
"(",
"'create'",
")",
",",
"model",
"=",
"self",
".",
"get_model_class",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'create'",
")",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'create'",
")",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"inlineformset",
"=",
"self",
".",
"inlineformset",
",",
"success_url",
"=",
"reverse_lazy",
"(",
"'{}-{}-list'",
".",
"format",
"(",
"self",
".",
"app",
",",
"self",
".",
"custom_postfix_url",
")",
")",
",",
"custom_form",
"=",
"self",
".",
"createupdate_forms",
"or",
"self",
".",
"custom_modelform",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"parent_classes",
"=",
"[",
"self",
".",
"get_createupdate_mixin",
"(",
")",
",",
"CreateView",
"]",
"if",
"self",
".",
"custom_create_view_mixin",
":",
"parent_classes",
".",
"insert",
"(",
"0",
",",
"self",
".",
"custom_create_view_mixin",
")",
"create_class",
"=",
"type",
"(",
"name",
",",
"tuple",
"(",
"parent_classes",
")",
",",
"create_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"create_class",
"return",
"create_class"
] |
Generate class based view for CreateView
|
[
"Generate",
"class",
"based",
"view",
"for",
"CreateView"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L113-L141
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_detail_view
|
def generate_detail_view(self):
"""Generate class based view for DetailView"""
name = model_class_form(self.model + 'DetailView')
detail_args = dict(
detailview_excludes=self.detailview_excludes,
model=self.get_model_class,
template_name=self.get_template('detail'),
login_required=self.check_login_required,
permissions=self.view_permission('detail'),
inlineformset=self.inlineformset,
permission_required=self.check_permission_required,
custom_postfix_url=self.custom_postfix_url
)
detail_class = type(name, (BaseDetailViewMixin, DetailView), detail_args)
self.classes[name] = detail_class
return detail_class
|
python
|
def generate_detail_view(self):
name = model_class_form(self.model + 'DetailView')
detail_args = dict(
detailview_excludes=self.detailview_excludes,
model=self.get_model_class,
template_name=self.get_template('detail'),
login_required=self.check_login_required,
permissions=self.view_permission('detail'),
inlineformset=self.inlineformset,
permission_required=self.check_permission_required,
custom_postfix_url=self.custom_postfix_url
)
detail_class = type(name, (BaseDetailViewMixin, DetailView), detail_args)
self.classes[name] = detail_class
return detail_class
|
[
"def",
"generate_detail_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'DetailView'",
")",
"detail_args",
"=",
"dict",
"(",
"detailview_excludes",
"=",
"self",
".",
"detailview_excludes",
",",
"model",
"=",
"self",
".",
"get_model_class",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'detail'",
")",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'detail'",
")",
",",
"inlineformset",
"=",
"self",
".",
"inlineformset",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"detail_class",
"=",
"type",
"(",
"name",
",",
"(",
"BaseDetailViewMixin",
",",
"DetailView",
")",
",",
"detail_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"detail_class",
"return",
"detail_class"
] |
Generate class based view for DetailView
|
[
"Generate",
"class",
"based",
"view",
"for",
"DetailView"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L143-L160
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_update_view
|
def generate_update_view(self):
"""Generate class based view for UpdateView"""
name = model_class_form(self.model + 'UpdateView')
update_args = dict(
form_class=self.get_actual_form('update'),
model=self.get_model_class,
template_name=self.get_template('update'),
permissions=self.view_permission('update'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
custom_form=self.createupdate_forms or self.custom_modelform,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), UpdateView]
if self.custom_update_view_mixin:
parent_classes.insert(0, self.custom_update_view_mixin)
update_class = type(
name,
tuple(parent_classes),
update_args
)
self.classes[name] = update_class
return update_class
|
python
|
def generate_update_view(self):
name = model_class_form(self.model + 'UpdateView')
update_args = dict(
form_class=self.get_actual_form('update'),
model=self.get_model_class,
template_name=self.get_template('update'),
permissions=self.view_permission('update'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
inlineformset=self.inlineformset,
custom_form=self.createupdate_forms or self.custom_modelform,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
parent_classes = [self.get_createupdate_mixin(), UpdateView]
if self.custom_update_view_mixin:
parent_classes.insert(0, self.custom_update_view_mixin)
update_class = type(
name,
tuple(parent_classes),
update_args
)
self.classes[name] = update_class
return update_class
|
[
"def",
"generate_update_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'UpdateView'",
")",
"update_args",
"=",
"dict",
"(",
"form_class",
"=",
"self",
".",
"get_actual_form",
"(",
"'update'",
")",
",",
"model",
"=",
"self",
".",
"get_model_class",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'update'",
")",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'update'",
")",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"inlineformset",
"=",
"self",
".",
"inlineformset",
",",
"custom_form",
"=",
"self",
".",
"createupdate_forms",
"or",
"self",
".",
"custom_modelform",
",",
"success_url",
"=",
"reverse_lazy",
"(",
"'{}-{}-list'",
".",
"format",
"(",
"self",
".",
"app",
",",
"self",
".",
"custom_postfix_url",
")",
")",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"parent_classes",
"=",
"[",
"self",
".",
"get_createupdate_mixin",
"(",
")",
",",
"UpdateView",
"]",
"if",
"self",
".",
"custom_update_view_mixin",
":",
"parent_classes",
".",
"insert",
"(",
"0",
",",
"self",
".",
"custom_update_view_mixin",
")",
"update_class",
"=",
"type",
"(",
"name",
",",
"tuple",
"(",
"parent_classes",
")",
",",
"update_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"update_class",
"return",
"update_class"
] |
Generate class based view for UpdateView
|
[
"Generate",
"class",
"based",
"view",
"for",
"UpdateView"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L162-L189
|
asifpy/django-crudbuilder
|
crudbuilder/views.py
|
ViewBuilder.generate_delete_view
|
def generate_delete_view(self):
"""Generate class based view for DeleteView"""
name = model_class_form(self.model + 'DeleteView')
delete_args = dict(
model=self.get_model_class,
template_name=self.get_template('delete'),
permissions=self.view_permission('delete'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
delete_class = type(name, (CrudBuilderMixin, DeleteView), delete_args)
self.classes[name] = delete_class
return delete_class
|
python
|
def generate_delete_view(self):
name = model_class_form(self.model + 'DeleteView')
delete_args = dict(
model=self.get_model_class,
template_name=self.get_template('delete'),
permissions=self.view_permission('delete'),
permission_required=self.check_permission_required,
login_required=self.check_login_required,
success_url=reverse_lazy('{}-{}-list'.format(self.app, self.custom_postfix_url)),
custom_postfix_url=self.custom_postfix_url
)
delete_class = type(name, (CrudBuilderMixin, DeleteView), delete_args)
self.classes[name] = delete_class
return delete_class
|
[
"def",
"generate_delete_view",
"(",
"self",
")",
":",
"name",
"=",
"model_class_form",
"(",
"self",
".",
"model",
"+",
"'DeleteView'",
")",
"delete_args",
"=",
"dict",
"(",
"model",
"=",
"self",
".",
"get_model_class",
",",
"template_name",
"=",
"self",
".",
"get_template",
"(",
"'delete'",
")",
",",
"permissions",
"=",
"self",
".",
"view_permission",
"(",
"'delete'",
")",
",",
"permission_required",
"=",
"self",
".",
"check_permission_required",
",",
"login_required",
"=",
"self",
".",
"check_login_required",
",",
"success_url",
"=",
"reverse_lazy",
"(",
"'{}-{}-list'",
".",
"format",
"(",
"self",
".",
"app",
",",
"self",
".",
"custom_postfix_url",
")",
")",
",",
"custom_postfix_url",
"=",
"self",
".",
"custom_postfix_url",
")",
"delete_class",
"=",
"type",
"(",
"name",
",",
"(",
"CrudBuilderMixin",
",",
"DeleteView",
")",
",",
"delete_args",
")",
"self",
".",
"classes",
"[",
"name",
"]",
"=",
"delete_class",
"return",
"delete_class"
] |
Generate class based view for DeleteView
|
[
"Generate",
"class",
"based",
"view",
"for",
"DeleteView"
] |
train
|
https://github.com/asifpy/django-crudbuilder/blob/9de1c6fa555086673dd7ccc351d4b771c6192489/crudbuilder/views.py#L191-L207
|
contentful/contentful.py
|
contentful/entry.py
|
Entry.incoming_references
|
def incoming_references(self, client=None, query={}):
"""Fetches all entries referencing the entry
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/search-parameters/links-to-asset
:param client Client instance
:param query: (optional) Dict with API options.
:return: List of :class:`Entry <contentful.entry.Entry>` objects.
:rtype: List of contentful.entry.Entry
Usage:
>>> entries = entry.incoming_references(client)
[<Entry[cat] id='happycat'>]
"""
if client is None:
return False
query.update({'links_to_entry': self.id})
return client.entries(query)
|
python
|
def incoming_references(self, client=None, query={}):
if client is None:
return False
query.update({'links_to_entry': self.id})
return client.entries(query)
|
[
"def",
"incoming_references",
"(",
"self",
",",
"client",
"=",
"None",
",",
"query",
"=",
"{",
"}",
")",
":",
"if",
"client",
"is",
"None",
":",
"return",
"False",
"query",
".",
"update",
"(",
"{",
"'links_to_entry'",
":",
"self",
".",
"id",
"}",
")",
"return",
"client",
".",
"entries",
"(",
"query",
")"
] |
Fetches all entries referencing the entry
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/search-parameters/links-to-asset
:param client Client instance
:param query: (optional) Dict with API options.
:return: List of :class:`Entry <contentful.entry.Entry>` objects.
:rtype: List of contentful.entry.Entry
Usage:
>>> entries = entry.incoming_references(client)
[<Entry[cat] id='happycat'>]
|
[
"Fetches",
"all",
"entries",
"referencing",
"the",
"entry"
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/entry.py#L118-L137
|
contentful/contentful.py
|
contentful/resource_builder.py
|
ResourceBuilder.build
|
def build(self):
"""Creates the objects from the JSON response"""
if self.json['sys']['type'] == 'Array':
if any(k in self.json for k in ['nextSyncUrl', 'nextPageUrl']):
return SyncPage(
self.json,
default_locale=self.default_locale,
localized=True
)
return self._build_array()
return self._build_single()
|
python
|
def build(self):
if self.json['sys']['type'] == 'Array':
if any(k in self.json for k in ['nextSyncUrl', 'nextPageUrl']):
return SyncPage(
self.json,
default_locale=self.default_locale,
localized=True
)
return self._build_array()
return self._build_single()
|
[
"def",
"build",
"(",
"self",
")",
":",
"if",
"self",
".",
"json",
"[",
"'sys'",
"]",
"[",
"'type'",
"]",
"==",
"'Array'",
":",
"if",
"any",
"(",
"k",
"in",
"self",
".",
"json",
"for",
"k",
"in",
"[",
"'nextSyncUrl'",
",",
"'nextPageUrl'",
"]",
")",
":",
"return",
"SyncPage",
"(",
"self",
".",
"json",
",",
"default_locale",
"=",
"self",
".",
"default_locale",
",",
"localized",
"=",
"True",
")",
"return",
"self",
".",
"_build_array",
"(",
")",
"return",
"self",
".",
"_build_single",
"(",
")"
] |
Creates the objects from the JSON response
|
[
"Creates",
"the",
"objects",
"from",
"the",
"JSON",
"response"
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/resource_builder.py#L51-L62
|
contentful/contentful.py
|
contentful/content_type_cache.py
|
ContentTypeCache.get
|
def get(cls, content_type_id):
"""
Fetches a Content Type from the Cache.
"""
for content_type in cls.__CACHE__:
if content_type.sys.get('id') == content_type_id:
return content_type
return None
|
python
|
def get(cls, content_type_id):
for content_type in cls.__CACHE__:
if content_type.sys.get('id') == content_type_id:
return content_type
return None
|
[
"def",
"get",
"(",
"cls",
",",
"content_type_id",
")",
":",
"for",
"content_type",
"in",
"cls",
".",
"__CACHE__",
":",
"if",
"content_type",
".",
"sys",
".",
"get",
"(",
"'id'",
")",
"==",
"content_type_id",
":",
"return",
"content_type",
"return",
"None"
] |
Fetches a Content Type from the Cache.
|
[
"Fetches",
"a",
"Content",
"Type",
"from",
"the",
"Cache",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type_cache.py#L22-L30
|
contentful/contentful.py
|
contentful/errors.py
|
get_error
|
def get_error(response):
"""Gets Error by HTTP Status Code"""
errors = {
400: BadRequestError,
401: UnauthorizedError,
403: AccessDeniedError,
404: NotFoundError,
429: RateLimitExceededError,
500: ServerError,
502: BadGatewayError,
503: ServiceUnavailableError
}
error_class = HTTPError
if response.status_code in errors:
error_class = errors[response.status_code]
return error_class(response)
|
python
|
def get_error(response):
errors = {
400: BadRequestError,
401: UnauthorizedError,
403: AccessDeniedError,
404: NotFoundError,
429: RateLimitExceededError,
500: ServerError,
502: BadGatewayError,
503: ServiceUnavailableError
}
error_class = HTTPError
if response.status_code in errors:
error_class = errors[response.status_code]
return error_class(response)
|
[
"def",
"get_error",
"(",
"response",
")",
":",
"errors",
"=",
"{",
"400",
":",
"BadRequestError",
",",
"401",
":",
"UnauthorizedError",
",",
"403",
":",
"AccessDeniedError",
",",
"404",
":",
"NotFoundError",
",",
"429",
":",
"RateLimitExceededError",
",",
"500",
":",
"ServerError",
",",
"502",
":",
"BadGatewayError",
",",
"503",
":",
"ServiceUnavailableError",
"}",
"error_class",
"=",
"HTTPError",
"if",
"response",
".",
"status_code",
"in",
"errors",
":",
"error_class",
"=",
"errors",
"[",
"response",
".",
"status_code",
"]",
"return",
"error_class",
"(",
"response",
")"
] |
Gets Error by HTTP Status Code
|
[
"Gets",
"Error",
"by",
"HTTP",
"Status",
"Code"
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/errors.py#L203-L221
|
contentful/contentful.py
|
contentful/content_type.py
|
ContentType.field_for
|
def field_for(self, field_id):
"""Fetches the field for the given Field ID.
:param field_id: ID for Field to fetch.
:return: :class:`ContentTypeField <ContentTypeField>` object.
:rtype: contentful.ContentTypeField
"""
for field in self.fields:
if field.id == field_id:
return field
return None
|
python
|
def field_for(self, field_id):
for field in self.fields:
if field.id == field_id:
return field
return None
|
[
"def",
"field_for",
"(",
"self",
",",
"field_id",
")",
":",
"for",
"field",
"in",
"self",
".",
"fields",
":",
"if",
"field",
".",
"id",
"==",
"field_id",
":",
"return",
"field",
"return",
"None"
] |
Fetches the field for the given Field ID.
:param field_id: ID for Field to fetch.
:return: :class:`ContentTypeField <ContentTypeField>` object.
:rtype: contentful.ContentTypeField
|
[
"Fetches",
"the",
"field",
"for",
"the",
"given",
"Field",
"ID",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type.py#L31-L42
|
contentful/contentful.py
|
contentful/content_type_field_types.py
|
LocationField.coerce
|
def coerce(self, value, **kwargs):
"""Coerces value to Location object"""
Location = namedtuple('Location', ['lat', 'lon'])
return Location(float(value.get('lat')), float(value.get('lon')))
|
python
|
def coerce(self, value, **kwargs):
Location = namedtuple('Location', ['lat', 'lon'])
return Location(float(value.get('lat')), float(value.get('lon')))
|
[
"def",
"coerce",
"(",
"self",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"Location",
"=",
"namedtuple",
"(",
"'Location'",
",",
"[",
"'lat'",
",",
"'lon'",
"]",
")",
"return",
"Location",
"(",
"float",
"(",
"value",
".",
"get",
"(",
"'lat'",
")",
")",
",",
"float",
"(",
"value",
".",
"get",
"(",
"'lon'",
")",
")",
")"
] |
Coerces value to Location object
|
[
"Coerces",
"value",
"to",
"Location",
"object"
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type_field_types.py#L96-L100
|
contentful/contentful.py
|
contentful/content_type_field_types.py
|
ArrayField.coerce
|
def coerce(self, value, **kwargs):
"""Coerces array items with proper coercion."""
result = []
for v in value:
result.append(self._coercion.coerce(v, **kwargs))
return result
|
python
|
def coerce(self, value, **kwargs):
result = []
for v in value:
result.append(self._coercion.coerce(v, **kwargs))
return result
|
[
"def",
"coerce",
"(",
"self",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"[",
"]",
"for",
"v",
"in",
"value",
":",
"result",
".",
"append",
"(",
"self",
".",
"_coercion",
".",
"coerce",
"(",
"v",
",",
"*",
"*",
"kwargs",
")",
")",
"return",
"result"
] |
Coerces array items with proper coercion.
|
[
"Coerces",
"array",
"items",
"with",
"proper",
"coercion",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type_field_types.py#L124-L130
|
contentful/contentful.py
|
contentful/content_type_field_types.py
|
RichTextField.coerce
|
def coerce(self, value, includes=None, errors=None, resources=None, default_locale='en-US', locale=None):
"""Coerces Rich Text properly."""
if includes is None:
includes = []
if errors is None:
errors = []
return self._coerce_block(
value,
includes=includes,
errors=errors,
resources=resources,
default_locale=default_locale,
locale=locale
)
|
python
|
def coerce(self, value, includes=None, errors=None, resources=None, default_locale='en-US', locale=None):
if includes is None:
includes = []
if errors is None:
errors = []
return self._coerce_block(
value,
includes=includes,
errors=errors,
resources=resources,
default_locale=default_locale,
locale=locale
)
|
[
"def",
"coerce",
"(",
"self",
",",
"value",
",",
"includes",
"=",
"None",
",",
"errors",
"=",
"None",
",",
"resources",
"=",
"None",
",",
"default_locale",
"=",
"'en-US'",
",",
"locale",
"=",
"None",
")",
":",
"if",
"includes",
"is",
"None",
":",
"includes",
"=",
"[",
"]",
"if",
"errors",
"is",
"None",
":",
"errors",
"=",
"[",
"]",
"return",
"self",
".",
"_coerce_block",
"(",
"value",
",",
"includes",
"=",
"includes",
",",
"errors",
"=",
"errors",
",",
"resources",
"=",
"resources",
",",
"default_locale",
"=",
"default_locale",
",",
"locale",
"=",
"locale",
")"
] |
Coerces Rich Text properly.
|
[
"Coerces",
"Rich",
"Text",
"properly",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type_field_types.py#L225-L240
|
contentful/contentful.py
|
contentful/content_type_field.py
|
ContentTypeField.coerce
|
def coerce(self, value, **kwargs):
"""Coerces the value to the proper type."""
if value is None:
return None
return self._coercion.coerce(value, **kwargs)
|
python
|
def coerce(self, value, **kwargs):
if value is None:
return None
return self._coercion.coerce(value, **kwargs)
|
[
"def",
"coerce",
"(",
"self",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"return",
"self",
".",
"_coercion",
".",
"coerce",
"(",
"value",
",",
"*",
"*",
"kwargs",
")"
] |
Coerces the value to the proper type.
|
[
"Coerces",
"the",
"value",
"to",
"the",
"proper",
"type",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/content_type_field.py#L34-L39
|
contentful/contentful.py
|
contentful/client.py
|
Client.content_type
|
def content_type(self, content_type_id, query=None):
"""Fetches a Content Type by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-type/get-a-single-content-type
:param content_type_id: The ID of the target Content Type.
:param query: (optional) Dict with API options.
:return: :class:`ContentType <contentful.content_type.ContentType>` object.
:rtype: contentful.content_type.ContentType
Usage:
>>> cat_content_type = client.content_type('cat')
<ContentType[Cat] id='cat'>
"""
return self._get(
self.environment_url(
'/content_types/{0}'.format(content_type_id)
),
query
)
|
python
|
def content_type(self, content_type_id, query=None):
return self._get(
self.environment_url(
'/content_types/{0}'.format(content_type_id)
),
query
)
|
[
"def",
"content_type",
"(",
"self",
",",
"content_type_id",
",",
"query",
"=",
"None",
")",
":",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/content_types/{0}'",
".",
"format",
"(",
"content_type_id",
")",
")",
",",
"query",
")"
] |
Fetches a Content Type by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/content-types/content-type/get-a-single-content-type
:param content_type_id: The ID of the target Content Type.
:param query: (optional) Dict with API options.
:return: :class:`ContentType <contentful.content_type.ContentType>` object.
:rtype: contentful.content_type.ContentType
Usage:
>>> cat_content_type = client.content_type('cat')
<ContentType[Cat] id='cat'>
|
[
"Fetches",
"a",
"Content",
"Type",
"by",
"ID",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L153-L173
|
contentful/contentful.py
|
contentful/client.py
|
Client.entry
|
def entry(self, entry_id, query=None):
"""Fetches an Entry by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entry/get-a-single-entry
:param entry_id: The ID of the target Entry.
:param query: (optional) Dict with API options.
:return: :class:`Entry <contentful.entry.Entry>` object.
:rtype: contentful.entry.Entry
Usage:
>>> nyancat_entry = client.entry('nyancat')
<Entry[cat] id='nyancat'>
"""
if query is None:
query = {}
self._normalize_select(query)
try:
query.update({'sys.id': entry_id})
return self._get(
self.environment_url('/entries'),
query
)[0]
except IndexError:
raise EntryNotFoundError(
"Entry not found for ID: '{0}'".format(entry_id)
)
|
python
|
def entry(self, entry_id, query=None):
if query is None:
query = {}
self._normalize_select(query)
try:
query.update({'sys.id': entry_id})
return self._get(
self.environment_url('/entries'),
query
)[0]
except IndexError:
raise EntryNotFoundError(
"Entry not found for ID: '{0}'".format(entry_id)
)
|
[
"def",
"entry",
"(",
"self",
",",
"entry_id",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"self",
".",
"_normalize_select",
"(",
"query",
")",
"try",
":",
"query",
".",
"update",
"(",
"{",
"'sys.id'",
":",
"entry_id",
"}",
")",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/entries'",
")",
",",
"query",
")",
"[",
"0",
"]",
"except",
"IndexError",
":",
"raise",
"EntryNotFoundError",
"(",
"\"Entry not found for ID: '{0}'\"",
".",
"format",
"(",
"entry_id",
")",
")"
] |
Fetches an Entry by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entry/get-a-single-entry
:param entry_id: The ID of the target Entry.
:param query: (optional) Dict with API options.
:return: :class:`Entry <contentful.entry.Entry>` object.
:rtype: contentful.entry.Entry
Usage:
>>> nyancat_entry = client.entry('nyancat')
<Entry[cat] id='nyancat'>
|
[
"Fetches",
"an",
"Entry",
"by",
"ID",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L197-L225
|
contentful/contentful.py
|
contentful/client.py
|
Client.entries
|
def entries(self, query=None):
"""Fetches all Entries from the Space (up to the set limit, can be modified in `query`).
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entries-collection/get-all-entries-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Entry <contentful.entry.Entry>` objects.
:rtype: List of contentful.entry.Entry
Usage:
>>> entries = client.entries()
[<Entry[cat] id='happycat'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='5ETMRzkl9KM4omyMwKAOki'>,
<Entry[dog] id='6KntaYXaHSyIw8M6eo26OK'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='7qVBlCjpWE86Oseo40gAEY'>,
<Entry[cat] id='garfield'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='4MU1s3potiUEM2G4okYOqw'>,
<Entry[cat] id='nyancat'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='ge1xHyH3QOWucKWCCAgIG'>,
<Entry[human] id='finn'>,
<Entry[dog] id='jake'>]
"""
if query is None:
query = {}
self._normalize_select(query)
return self._get(
self.environment_url('/entries'),
query
)
|
python
|
def entries(self, query=None):
if query is None:
query = {}
self._normalize_select(query)
return self._get(
self.environment_url('/entries'),
query
)
|
[
"def",
"entries",
"(",
"self",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"self",
".",
"_normalize_select",
"(",
"query",
")",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/entries'",
")",
",",
"query",
")"
] |
Fetches all Entries from the Space (up to the set limit, can be modified in `query`).
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/entries/entries-collection/get-all-entries-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Entry <contentful.entry.Entry>` objects.
:rtype: List of contentful.entry.Entry
Usage:
>>> entries = client.entries()
[<Entry[cat] id='happycat'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='5ETMRzkl9KM4omyMwKAOki'>,
<Entry[dog] id='6KntaYXaHSyIw8M6eo26OK'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='7qVBlCjpWE86Oseo40gAEY'>,
<Entry[cat] id='garfield'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='4MU1s3potiUEM2G4okYOqw'>,
<Entry[cat] id='nyancat'>,
<Entry[1t9IbcfdCk6m04uISSsaIK] id='ge1xHyH3QOWucKWCCAgIG'>,
<Entry[human] id='finn'>,
<Entry[dog] id='jake'>]
|
[
"Fetches",
"all",
"Entries",
"from",
"the",
"Space",
"(",
"up",
"to",
"the",
"set",
"limit",
"can",
"be",
"modified",
"in",
"query",
")",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L227-L257
|
contentful/contentful.py
|
contentful/client.py
|
Client.asset
|
def asset(self, asset_id, query=None):
"""Fetches an Asset by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/asset/get-a-single-asset
:param asset_id: The ID of the target Asset.
:param query: (optional) Dict with API options.
:return: :class:`Asset <Asset>` object.
:rtype: contentful.asset.Asset
Usage:
>>> nyancat_asset = client.asset('nyancat')
<Asset id='nyancat' url='//images.contentful.com/cfex...'>
"""
return self._get(
self.environment_url(
'/assets/{0}'.format(asset_id)
),
query
)
|
python
|
def asset(self, asset_id, query=None):
return self._get(
self.environment_url(
'/assets/{0}'.format(asset_id)
),
query
)
|
[
"def",
"asset",
"(",
"self",
",",
"asset_id",
",",
"query",
"=",
"None",
")",
":",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/assets/{0}'",
".",
"format",
"(",
"asset_id",
")",
")",
",",
"query",
")"
] |
Fetches an Asset by ID.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/asset/get-a-single-asset
:param asset_id: The ID of the target Asset.
:param query: (optional) Dict with API options.
:return: :class:`Asset <Asset>` object.
:rtype: contentful.asset.Asset
Usage:
>>> nyancat_asset = client.asset('nyancat')
<Asset id='nyancat' url='//images.contentful.com/cfex...'>
|
[
"Fetches",
"an",
"Asset",
"by",
"ID",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L259-L279
|
contentful/contentful.py
|
contentful/client.py
|
Client.assets
|
def assets(self, query=None):
"""Fetches all Assets from the Space (up to the set limit, can be modified in `query`).
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Asset <contentful.asset.Asset>` objects.
:rtype: List of contentful.asset.Asset
Usage:
>>> assets = client.assets()
[<Asset id='1x0xpXu4pSGS4OukSyWGUK' url='//images.content...'>,
<Asset id='happycat' url='//images.contentful.com/cfexam...'>,
<Asset id='nyancat' url='//images.contentful.com/cfexamp...'>,
<Asset id='jake' url='//images.contentful.com/cfexamplea...'>]
"""
if query is None:
query = {}
self._normalize_select(query)
return self._get(
self.environment_url('/assets'),
query
)
|
python
|
def assets(self, query=None):
if query is None:
query = {}
self._normalize_select(query)
return self._get(
self.environment_url('/assets'),
query
)
|
[
"def",
"assets",
"(",
"self",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"self",
".",
"_normalize_select",
"(",
"query",
")",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/assets'",
")",
",",
"query",
")"
] |
Fetches all Assets from the Space (up to the set limit, can be modified in `query`).
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Asset <contentful.asset.Asset>` objects.
:rtype: List of contentful.asset.Asset
Usage:
>>> assets = client.assets()
[<Asset id='1x0xpXu4pSGS4OukSyWGUK' url='//images.content...'>,
<Asset id='happycat' url='//images.contentful.com/cfexam...'>,
<Asset id='nyancat' url='//images.contentful.com/cfexamp...'>,
<Asset id='jake' url='//images.contentful.com/cfexamplea...'>]
|
[
"Fetches",
"all",
"Assets",
"from",
"the",
"Space",
"(",
"up",
"to",
"the",
"set",
"limit",
"can",
"be",
"modified",
"in",
"query",
")",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L281-L305
|
contentful/contentful.py
|
contentful/client.py
|
Client.locales
|
def locales(self, query=None):
"""Fetches all Locales from the Environment (up to the set limit, can be modified in `query`).
# TODO: fix url
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Locale <contentful.locale.Locale>` objects.
:rtype: List of contentful.locale.Locale
Usage:
>>> locales = client.locales()
[<Locale[English (United States)] code='en-US' default=True fallback_code=None optional=False>]
"""
if query is None:
query = {}
return self._get(
self.environment_url('/locales'),
query
)
|
python
|
def locales(self, query=None):
if query is None:
query = {}
return self._get(
self.environment_url('/locales'),
query
)
|
[
"def",
"locales",
"(",
"self",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/locales'",
")",
",",
"query",
")"
] |
Fetches all Locales from the Environment (up to the set limit, can be modified in `query`).
# TODO: fix url
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Locale <contentful.locale.Locale>` objects.
:rtype: List of contentful.locale.Locale
Usage:
>>> locales = client.locales()
[<Locale[English (United States)] code='en-US' default=True fallback_code=None optional=False>]
|
[
"Fetches",
"all",
"Locales",
"from",
"the",
"Environment",
"(",
"up",
"to",
"the",
"set",
"limit",
"can",
"be",
"modified",
"in",
"query",
")",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L307-L328
|
contentful/contentful.py
|
contentful/client.py
|
Client.sync
|
def sync(self, query=None):
"""Fetches content from the Sync API.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/synchronization/initial-synchronization/query-entries
:param query: (optional) Dict with API options.
:return: :class:`SyncPage <contentful.sync_page.SyncPage>` object.
:rtype: contentful.sync_page.SyncPage
Usage:
>>> sync_page = client.sync({'initial': True})
<SyncPage next_sync_token='w5ZGw6JFwqZmVcKsE8Kow4grw45QdybC...'>
"""
if query is None:
query = {}
self._normalize_sync(query)
return self._get(
self.environment_url('/sync'),
query
)
|
python
|
def sync(self, query=None):
if query is None:
query = {}
self._normalize_sync(query)
return self._get(
self.environment_url('/sync'),
query
)
|
[
"def",
"sync",
"(",
"self",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"self",
".",
"_normalize_sync",
"(",
"query",
")",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/sync'",
")",
",",
"query",
")"
] |
Fetches content from the Sync API.
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/synchronization/initial-synchronization/query-entries
:param query: (optional) Dict with API options.
:return: :class:`SyncPage <contentful.sync_page.SyncPage>` object.
:rtype: contentful.sync_page.SyncPage
Usage:
>>> sync_page = client.sync({'initial': True})
<SyncPage next_sync_token='w5ZGw6JFwqZmVcKsE8Kow4grw45QdybC...'>
|
[
"Fetches",
"content",
"from",
"the",
"Sync",
"API",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L330-L351
|
contentful/contentful.py
|
contentful/client.py
|
Client._normalize_select
|
def _normalize_select(self, query):
"""
If the query contains the :select operator, we enforce :sys properties.
The SDK requires sys.type to function properly, but as other of our
SDKs require more parts of the :sys properties, we decided that every
SDK should include the complete :sys block to provide consistency
accross our SDKs.
"""
if 'select' not in query:
return
if isinstance(
query['select'],
string_class()):
query['select'] = [s.strip() for s in query['select'].split(',')]
query['select'] = [s for s
in query['select']
if not s.startswith('sys.')]
if 'sys' not in query['select']:
query['select'].append('sys')
|
python
|
def _normalize_select(self, query):
if 'select' not in query:
return
if isinstance(
query['select'],
string_class()):
query['select'] = [s.strip() for s in query['select'].split(',')]
query['select'] = [s for s
in query['select']
if not s.startswith('sys.')]
if 'sys' not in query['select']:
query['select'].append('sys')
|
[
"def",
"_normalize_select",
"(",
"self",
",",
"query",
")",
":",
"if",
"'select'",
"not",
"in",
"query",
":",
"return",
"if",
"isinstance",
"(",
"query",
"[",
"'select'",
"]",
",",
"string_class",
"(",
")",
")",
":",
"query",
"[",
"'select'",
"]",
"=",
"[",
"s",
".",
"strip",
"(",
")",
"for",
"s",
"in",
"query",
"[",
"'select'",
"]",
".",
"split",
"(",
"','",
")",
"]",
"query",
"[",
"'select'",
"]",
"=",
"[",
"s",
"for",
"s",
"in",
"query",
"[",
"'select'",
"]",
"if",
"not",
"s",
".",
"startswith",
"(",
"'sys.'",
")",
"]",
"if",
"'sys'",
"not",
"in",
"query",
"[",
"'select'",
"]",
":",
"query",
"[",
"'select'",
"]",
".",
"append",
"(",
"'sys'",
")"
] |
If the query contains the :select operator, we enforce :sys properties.
The SDK requires sys.type to function properly, but as other of our
SDKs require more parts of the :sys properties, we decided that every
SDK should include the complete :sys block to provide consistency
accross our SDKs.
|
[
"If",
"the",
"query",
"contains",
"the",
":",
"select",
"operator",
"we",
"enforce",
":",
"sys",
"properties",
".",
"The",
"SDK",
"requires",
"sys",
".",
"type",
"to",
"function",
"properly",
"but",
"as",
"other",
"of",
"our",
"SDKs",
"require",
"more",
"parts",
"of",
"the",
":",
"sys",
"properties",
"we",
"decided",
"that",
"every",
"SDK",
"should",
"include",
"the",
"complete",
":",
"sys",
"block",
"to",
"provide",
"consistency",
"accross",
"our",
"SDKs",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L361-L383
|
contentful/contentful.py
|
contentful/client.py
|
Client._request_headers
|
def _request_headers(self):
"""
Sets the default Request Headers.
"""
headers = {
'X-Contentful-User-Agent': self._contentful_user_agent(),
'Content-Type': 'application/vnd.contentful.delivery.v{0}+json'.format( # noqa: E501
self.api_version
)
}
if self.authorization_as_header:
headers['Authorization'] = 'Bearer {0}'.format(self.access_token)
headers['Accept-Encoding'] = 'gzip' if self.gzip_encoded else 'identity'
return headers
|
python
|
def _request_headers(self):
headers = {
'X-Contentful-User-Agent': self._contentful_user_agent(),
'Content-Type': 'application/vnd.contentful.delivery.v{0}+json'.format(
self.api_version
)
}
if self.authorization_as_header:
headers['Authorization'] = 'Bearer {0}'.format(self.access_token)
headers['Accept-Encoding'] = 'gzip' if self.gzip_encoded else 'identity'
return headers
|
[
"def",
"_request_headers",
"(",
"self",
")",
":",
"headers",
"=",
"{",
"'X-Contentful-User-Agent'",
":",
"self",
".",
"_contentful_user_agent",
"(",
")",
",",
"'Content-Type'",
":",
"'application/vnd.contentful.delivery.v{0}+json'",
".",
"format",
"(",
"# noqa: E501",
"self",
".",
"api_version",
")",
"}",
"if",
"self",
".",
"authorization_as_header",
":",
"headers",
"[",
"'Authorization'",
"]",
"=",
"'Bearer {0}'",
".",
"format",
"(",
"self",
".",
"access_token",
")",
"headers",
"[",
"'Accept-Encoding'",
"]",
"=",
"'gzip'",
"if",
"self",
".",
"gzip_encoded",
"else",
"'identity'",
"return",
"headers"
] |
Sets the default Request Headers.
|
[
"Sets",
"the",
"default",
"Request",
"Headers",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L476-L493
|
contentful/contentful.py
|
contentful/client.py
|
Client._url
|
def _url(self, url):
"""
Creates the Request URL.
"""
protocol = 'https' if self.https else 'http'
return '{0}://{1}/spaces/{2}{3}'.format(
protocol,
self.api_url,
self.space_id,
url
)
|
python
|
def _url(self, url):
protocol = 'https' if self.https else 'http'
return '{0}://{1}/spaces/{2}{3}'.format(
protocol,
self.api_url,
self.space_id,
url
)
|
[
"def",
"_url",
"(",
"self",
",",
"url",
")",
":",
"protocol",
"=",
"'https'",
"if",
"self",
".",
"https",
"else",
"'http'",
"return",
"'{0}://{1}/spaces/{2}{3}'",
".",
"format",
"(",
"protocol",
",",
"self",
".",
"api_url",
",",
"self",
".",
"space_id",
",",
"url",
")"
] |
Creates the Request URL.
|
[
"Creates",
"the",
"Request",
"URL",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L495-L506
|
contentful/contentful.py
|
contentful/client.py
|
Client._normalize_query
|
def _normalize_query(self, query):
"""
Converts Arrays in the query to comma
separaters lists for proper API handling.
"""
for k, v in query.items():
if isinstance(v, list):
query[k] = ','.join([str(e) for e in v])
|
python
|
def _normalize_query(self, query):
for k, v in query.items():
if isinstance(v, list):
query[k] = ','.join([str(e) for e in v])
|
[
"def",
"_normalize_query",
"(",
"self",
",",
"query",
")",
":",
"for",
"k",
",",
"v",
"in",
"query",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"list",
")",
":",
"query",
"[",
"k",
"]",
"=",
"','",
".",
"join",
"(",
"[",
"str",
"(",
"e",
")",
"for",
"e",
"in",
"v",
"]",
")"
] |
Converts Arrays in the query to comma
separaters lists for proper API handling.
|
[
"Converts",
"Arrays",
"in",
"the",
"query",
"to",
"comma",
"separaters",
"lists",
"for",
"proper",
"API",
"handling",
"."
] |
train
|
https://github.com/contentful/contentful.py/blob/73fe01d6ae5a1f8818880da65199107b584681dd/contentful/client.py#L508-L516
|