Dataset columns:
identifier: string (length 1 to 155)
parameters: string (length 2 to 6.09k)
docstring: string (length 11 to 63.4k)
docstring_summary: string (length 0 to 63.4k)
function: string (length 29 to 99.8k)
function_tokens: sequence
start_point: sequence
end_point: sequence
language: string (1 distinct value)
docstring_language: string (length 2 to 7)
docstring_language_predictions: string (length 18 to 23)
is_langid_reliable: string (2 distinct values)
now
(parser, token)
Display the date, formatted according to the given string. Use the same format as PHP's ``date()`` function; see https://php.net/date for all the possible values. Sample usage:: It is {% now "jS F Y H:i" %}
Display the date, formatted according to the given string.
def now(parser, token):
    """
    Display the date, formatted according to the given string.

    Use the same format as PHP's ``date()`` function; see https://php.net/date
    for all the possible values.

    Sample usage::

        It is {% now "jS F Y H:i" %}
    """
    bits = token.split_contents()
    asvar = None
    if len(bits) == 4 and bits[-2] == 'as':
        asvar = bits[-1]
        bits = bits[:-2]
    if len(bits) != 2:
        raise TemplateSyntaxError("'now' statement takes one argument")
    format_string = bits[1][1:-1]
    return NowNode(format_string, asvar)
[ 1143, 0 ]
[ 1162, 40 ]
python
en
['en', 'error', 'th']
False
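A minimal sketch of driving the `now` tag through a standalone template engine. It assumes Django is installed; the bare `settings.configure()` call is just one way to get a default engine for demonstration:

import django
from django.conf import settings

settings.configure(TEMPLATES=[
    {"BACKEND": "django.template.backends.django.DjangoTemplates"}
])
django.setup()

from django.template import Context, Template

# Renders the current time with the PHP-style format string parsed above.
print(Template('It is {% now "jS F Y H:i" %}').render(Context()))
# The 'as' form stores the value in the context instead of printing it:
print(Template('{% now "Y" as cur_year %}Copyright {{ cur_year }}').render(Context()))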
regroup
(parser, token)
Regroup a list of alike objects by a common attribute. This complex tag is best illustrated by use of an example: say that ``musicians`` is a list of ``Musician`` objects that have ``name`` and ``instrument`` attributes, and you'd like to display a list that looks like: * Guitar: * Django Reinhardt * Emily Remler * Piano: * Lovie Austin * Bud Powell * Trumpet: * Duke Ellington The following snippet of template code would accomplish this dubious task:: {% regroup musicians by instrument as grouped %} <ul> {% for group in grouped %} <li>{{ group.grouper }} <ul> {% for musician in group.list %} <li>{{ musician.name }}</li> {% endfor %} </ul> {% endfor %} </ul> As you can see, ``{% regroup %}`` populates a variable with a list of objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the item that was grouped by; ``list`` contains the list of objects that share that ``grouper``. In this case, ``grouper`` would be ``Guitar``, ``Piano`` and ``Trumpet``, and ``list`` is the list of musicians who play this instrument. Note that ``{% regroup %}`` does not work when the list to be grouped is not sorted by the key you are grouping by! This means that if your list of musicians was not sorted by instrument, you'd need to make sure it is sorted before using it, i.e.:: {% regroup musicians|dictsort:"instrument" by instrument as grouped %}
Regroup a list of alike objects by a common attribute.
def regroup(parser, token):
    """
    Regroup a list of alike objects by a common attribute.

    This complex tag is best illustrated by use of an example: say that
    ``musicians`` is a list of ``Musician`` objects that have ``name`` and
    ``instrument`` attributes, and you'd like to display a list that looks
    like:

        * Guitar:
            * Django Reinhardt
            * Emily Remler
        * Piano:
            * Lovie Austin
            * Bud Powell
        * Trumpet:
            * Duke Ellington

    The following snippet of template code would accomplish this dubious
    task::

        {% regroup musicians by instrument as grouped %}
        <ul>
        {% for group in grouped %}
            <li>{{ group.grouper }}
            <ul>
                {% for musician in group.list %}
                <li>{{ musician.name }}</li>
                {% endfor %}
            </ul>
        {% endfor %}
        </ul>

    As you can see, ``{% regroup %}`` populates a variable with a list of
    objects with ``grouper`` and ``list`` attributes. ``grouper`` contains the
    item that was grouped by; ``list`` contains the list of objects that share
    that ``grouper``. In this case, ``grouper`` would be ``Guitar``, ``Piano``
    and ``Trumpet``, and ``list`` is the list of musicians who play this
    instrument.

    Note that ``{% regroup %}`` does not work when the list to be grouped is
    not sorted by the key you are grouping by! This means that if your list of
    musicians was not sorted by instrument, you'd need to make sure it is
    sorted before using it, i.e.::

        {% regroup musicians|dictsort:"instrument" by instrument as grouped %}
    """
    bits = token.split_contents()
    if len(bits) != 6:
        raise TemplateSyntaxError("'regroup' tag takes five arguments")
    target = parser.compile_filter(bits[1])
    if bits[2] != 'by':
        raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
    if bits[4] != 'as':
        raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
                                  " be 'as'")
    var_name = bits[5]
    # RegroupNode will take each item in 'target', put it in the context under
    # 'var_name', evaluate 'var_name'.'expression' in the current context, and
    # group by the resulting value. After all items are processed, it will
    # save the final result in the context under 'var_name', thus clearing the
    # temporary values. This hack is necessary because the template engine
    # doesn't provide a context-aware equivalent of Python's getattr.
    expression = parser.compile_filter(var_name +
                                       VARIABLE_ATTRIBUTE_SEPARATOR +
                                       bits[3])
    return RegroupNode(target, expression, var_name)
[ 1166, 0 ]
[ 1231, 52 ]
python
en
['en', 'error', 'th']
False
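A hedged sketch of `regroup` fed through `dictsort` first, reusing the engine configured in the earlier `now` sketch; the musician dicts are hypothetical sample data:

tpl = Template(
    '{% regroup musicians|dictsort:"instrument" by instrument as grouped %}'
    '{% for group in grouped %}{{ group.grouper }}:'
    '{% for m in group.list %} {{ m.name }}{% endfor %}; {% endfor %}'
)
print(tpl.render(Context({"musicians": [
    {"name": "Emily Remler", "instrument": "Guitar"},
    {"name": "Bud Powell", "instrument": "Piano"},
    {"name": "Django Reinhardt", "instrument": "Guitar"},
]})))
# -> Guitar: Emily Remler Django Reinhardt; Piano: Bud Powell;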
resetcycle
(parser, token)
Reset a cycle tag. If an argument is given, reset the last rendered cycle tag whose name matches the argument, else reset the last rendered cycle tag (named or unnamed).
Reset a cycle tag.
def resetcycle(parser, token):
    """
    Reset a cycle tag.

    If an argument is given, reset the last rendered cycle tag whose name
    matches the argument, else reset the last rendered cycle tag (named or
    unnamed).
    """
    args = token.split_contents()

    if len(args) > 2:
        raise TemplateSyntaxError("%r tag accepts at most one argument." % args[0])

    if len(args) == 2:
        name = args[1]
        try:
            return ResetCycleNode(parser._named_cycle_nodes[name])
        except (AttributeError, KeyError):
            raise TemplateSyntaxError("Named cycle '%s' does not exist." % name)
    try:
        return ResetCycleNode(parser._last_cycle_node)
    except AttributeError:
        raise TemplateSyntaxError("No cycles in template.")
[ 1235, 0 ]
[ 1257, 59 ]
python
en
['en', 'error', 'th']
False
spaceless
(parser, token)
Remove whitespace between HTML tags, including tab and newline characters. Example usage:: {% spaceless %} <p> <a href="foo/">Foo</a> </p> {% endspaceless %} This example returns this HTML:: <p><a href="foo/">Foo</a></p> Only space between *tags* is normalized -- not space between tags and text. In this example, the space around ``Hello`` isn't stripped:: {% spaceless %} <strong> Hello </strong> {% endspaceless %}
Remove whitespace between HTML tags, including tab and newline characters.
def spaceless(parser, token):
    """
    Remove whitespace between HTML tags, including tab and newline characters.

    Example usage::

        {% spaceless %}
            <p>
                <a href="foo/">Foo</a>
            </p>
        {% endspaceless %}

    This example returns this HTML::

        <p><a href="foo/">Foo</a></p>

    Only space between *tags* is normalized -- not space between tags and
    text. In this example, the space around ``Hello`` isn't stripped::

        {% spaceless %}
            <strong>
                Hello
            </strong>
        {% endspaceless %}
    """
    nodelist = parser.parse(('endspaceless',))
    parser.delete_first_token()
    return SpacelessNode(nodelist)
[ 1261, 0 ]
[ 1288, 34 ]
python
en
['en', 'error', 'th']
False
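A quick sketch of `spaceless`, reusing the engine configured in the first sketch:

out = Template(
    '{% spaceless %} <p> <a href="foo/">Foo</a> </p> {% endspaceless %}'
).render(Context())
print(out)  # <p><a href="foo/">Foo</a></p>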
templatetag
(parser, token)
Output one of the bits used to compose template tags. Since the template system has no concept of "escaping", to display one of the bits used in template tags, you must use the ``{% templatetag %}`` tag. The argument tells which template bit to output: ================== ======= Argument Outputs ================== ======= ``openblock`` ``{%`` ``closeblock`` ``%}`` ``openvariable`` ``{{`` ``closevariable`` ``}}`` ``openbrace`` ``{`` ``closebrace`` ``}`` ``opencomment`` ``{#`` ``closecomment`` ``#}`` ================== =======
Output one of the bits used to compose template tags.
def templatetag(parser, token):
    """
    Output one of the bits used to compose template tags.

    Since the template system has no concept of "escaping", to display one of
    the bits used in template tags, you must use the ``{% templatetag %}``
    tag.

    The argument tells which template bit to output:

        ==================  =======
        Argument            Outputs
        ==================  =======
        ``openblock``       ``{%``
        ``closeblock``      ``%}``
        ``openvariable``    ``{{``
        ``closevariable``   ``}}``
        ``openbrace``       ``{``
        ``closebrace``      ``}``
        ``opencomment``     ``{#``
        ``closecomment``    ``#}``
        ==================  =======
    """
    # token.split_contents() isn't useful here because this tag doesn't
    # accept variables as arguments.
    bits = token.contents.split()
    if len(bits) != 2:
        raise TemplateSyntaxError("'templatetag' statement takes one argument")
    tag = bits[1]
    if tag not in TemplateTagNode.mapping:
        raise TemplateSyntaxError("Invalid templatetag argument: '%s'."
                                  " Must be one of: %s" %
                                  (tag, list(TemplateTagNode.mapping)))
    return TemplateTagNode(tag)
[ 1292, 0 ]
[ 1323, 31 ]
python
en
['en', 'error', 'th']
False
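A sketch of `templatetag` emitting the brace bits, same engine setup as above:

print(Template(
    "{% templatetag openvariable %} title {% templatetag closevariable %}"
).render(Context()))
# -> {{ title }}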
url
(parser, token)
r""" Return an absolute URL matching the given view with its parameters. This is a way to define links that aren't tied to a particular URL configuration:: {% url "url_name" arg1 arg2 %} or {% url "url_name" name1=value1 name2=value2 %} The first argument is a URL pattern name. Other arguments are space-separated values that will be filled in place of positional and keyword arguments in the URL. Don't mix positional and keyword arguments. All arguments for the URL must be present. For example, if you have a view ``app_name.views.client_details`` taking the client's id and the corresponding line in a URLconf looks like this:: path('client/<int:id>/', views.client_details, name='client-detail-view') and this app's URLconf is included into the project's URLconf under some path:: path('clients/', include('app_name.urls')) then in a template you can create a link for a certain client like this:: {% url "client-detail-view" client.id %} The URL will look like ``/clients/client/123/``. The first argument may also be the name of a template variable that will be evaluated to obtain the view name or the URL name, e.g.:: {% with url_name="client-detail-view" %} {% url url_name client.id %} {% endwith %}
r""" Return an absolute URL matching the given view with its parameters.
def url(parser, token):
    r"""
    Return an absolute URL matching the given view with its parameters.

    This is a way to define links that aren't tied to a particular URL
    configuration::

        {% url "url_name" arg1 arg2 %}

    or

        {% url "url_name" name1=value1 name2=value2 %}

    The first argument is a URL pattern name. Other arguments are
    space-separated values that will be filled in place of positional and
    keyword arguments in the URL. Don't mix positional and keyword arguments.
    All arguments for the URL must be present.

    For example, if you have a view ``app_name.views.client_details`` taking
    the client's id and the corresponding line in a URLconf looks like this::

        path('client/<int:id>/', views.client_details, name='client-detail-view')

    and this app's URLconf is included into the project's URLconf under some
    path::

        path('clients/', include('app_name.urls'))

    then in a template you can create a link for a certain client like this::

        {% url "client-detail-view" client.id %}

    The URL will look like ``/clients/client/123/``.

    The first argument may also be the name of a template variable that will
    be evaluated to obtain the view name or the URL name, e.g.::

        {% with url_name="client-detail-view" %}
        {% url url_name client.id %}
        {% endwith %}
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument, a URL pattern name." % bits[0])
    viewname = parser.compile_filter(bits[1])
    args = []
    kwargs = {}
    asvar = None
    bits = bits[2:]
    if len(bits) >= 2 and bits[-2] == 'as':
        asvar = bits[-1]
        bits = bits[:-2]

    for bit in bits:
        match = kwarg_re.match(bit)
        if not match:
            raise TemplateSyntaxError("Malformed arguments to url tag")
        name, value = match.groups()
        if name:
            kwargs[name] = parser.compile_filter(value)
        else:
            args.append(parser.compile_filter(value))

    return URLNode(viewname, args, kwargs, asvar)
[ 1327, 0 ]
[ 1390, 49 ]
python
cy
['en', 'cy', 'hi']
False
verbatim
(parser, token)
Stop the template engine from rendering the contents of this block tag. Usage:: {% verbatim %} {% don't process this %} {% endverbatim %} You can also designate a specific closing tag block (allowing the unrendered use of ``{% endverbatim %}``):: {% verbatim myblock %} ... {% endverbatim myblock %}
Stop the template engine from rendering the contents of this block tag.
def verbatim(parser, token):
    """
    Stop the template engine from rendering the contents of this block tag.

    Usage::

        {% verbatim %}
            {% don't process this %}
        {% endverbatim %}

    You can also designate a specific closing tag block (allowing the
    unrendered use of ``{% endverbatim %}``)::

        {% verbatim myblock %}
            ...
        {% endverbatim myblock %}
    """
    nodelist = parser.parse(('endverbatim',))
    parser.delete_first_token()
    return VerbatimNode(nodelist.render(Context()))
[ 1394, 0 ]
[ 1413, 51 ]
python
en
['en', 'error', 'th']
False
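A sketch of `verbatim` leaving template syntax unrendered, same engine setup as above; the output shown in the comment is what I'd expect, not a captured run:

print(Template(
    "{% verbatim %}{% don't process this %}{% endverbatim %}"
).render(Context()))
# -> {% don't process this %}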
widthratio
(parser, token)
For creating bar charts and such. Calculate the ratio of a given value to a maximum value, and then apply that ratio to a constant. For example:: <img src="bar.png" alt="Bar" height="10" width="{% widthratio this_value max_value max_width %}"> If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100, the image in the above example will be 88 pixels wide (because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88). In some cases you might want to capture the result of widthratio in a variable. It can be useful for instance in a blocktranslate like this:: {% widthratio this_value max_value max_width as width %} {% blocktranslate %}The width is: {{ width }}{% endblocktranslate %}
For creating bar charts and such. Calculate the ratio of a given value to a maximum value, and then apply that ratio to a constant.
def widthratio(parser, token):
    """
    For creating bar charts and such. Calculate the ratio of a given value to
    a maximum value, and then apply that ratio to a constant.

    For example::

        <img src="bar.png" alt="Bar"
             height="10" width="{% widthratio this_value max_value max_width %}">

    If ``this_value`` is 175, ``max_value`` is 200, and ``max_width`` is 100,
    the image in the above example will be 88 pixels wide
    (because 175/200 = .875; .875 * 100 = 87.5 which is rounded up to 88).

    In some cases you might want to capture the result of widthratio in a
    variable. It can be useful for instance in a blocktranslate like this::

        {% widthratio this_value max_value max_width as width %}
        {% blocktranslate %}The width is: {{ width }}{% endblocktranslate %}
    """
    bits = token.split_contents()
    if len(bits) == 4:
        tag, this_value_expr, max_value_expr, max_width = bits
        asvar = None
    elif len(bits) == 6:
        tag, this_value_expr, max_value_expr, max_width, as_, asvar = bits
        if as_ != 'as':
            raise TemplateSyntaxError("Invalid syntax in widthratio tag. Expecting 'as' keyword")
    else:
        raise TemplateSyntaxError("widthratio takes at least three arguments")

    return WidthRatioNode(parser.compile_filter(this_value_expr),
                          parser.compile_filter(max_value_expr),
                          parser.compile_filter(max_width),
                          asvar=asvar)
[ 1417, 0 ]
[ 1451, 38 ]
python
en
['en', 'error', 'th']
False
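A sketch reproducing the 175/200 * 100 = 87.5 -> 88 arithmetic from the docstring, same engine setup as above:

print(Template(
    "{% widthratio this_value max_value max_width %}"
).render(Context({"this_value": 175, "max_value": 200, "max_width": 100})))
# -> 88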
do_with
(parser, token)
Add one or more values to the context (inside of this block) for caching and easy access. For example:: {% with total=person.some_sql_method %} {{ total }} object{{ total|pluralize }} {% endwith %} Multiple values can be added to the context:: {% with foo=1 bar=2 %} ... {% endwith %} The legacy format of ``{% with person.some_sql_method as total %}`` is still accepted.
Add one or more values to the context (inside of this block) for caching and easy access.
def do_with(parser, token):
    """
    Add one or more values to the context (inside of this block) for caching
    and easy access.

    For example::

        {% with total=person.some_sql_method %}
            {{ total }} object{{ total|pluralize }}
        {% endwith %}

    Multiple values can be added to the context::

        {% with foo=1 bar=2 %}
            ...
        {% endwith %}

    The legacy format of ``{% with person.some_sql_method as total %}`` is
    still accepted.
    """
    bits = token.split_contents()
    remaining_bits = bits[1:]
    extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
    if not extra_context:
        raise TemplateSyntaxError("%r expected at least one variable "
                                  "assignment" % bits[0])
    if remaining_bits:
        raise TemplateSyntaxError("%r received an invalid token: %r" %
                                  (bits[0], remaining_bits[0]))
    nodelist = parser.parse(('endwith',))
    parser.delete_first_token()
    return WithNode(None, None, nodelist, extra_context=extra_context)
[ 1455, 0 ]
[ 1486, 70 ]
python
en
['en', 'error', 'th']
False
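A sketch of the keyword form of `with`, same engine setup as above:

print(Template(
    "{% with total=items|length %}{{ total }} item{{ total|pluralize }}{% endwith %}"
).render(Context({"items": ["a", "b", "c"]})))
# -> 3 items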
CycleNode.reset
(self, context)
Reset the cycle iteration back to the beginning.
Reset the cycle iteration back to the beginning.
def reset(self, context):
    """
    Reset the cycle iteration back to the beginning.
    """
    context.render_context[self] = itertools_cycle(self.cyclevars)
[ 89, 4 ]
[ 93, 70 ]
python
en
['en', 'error', 'th']
False
Agent.__init__
(self, actions, height=80, width=80, channels=1, discount=0.95, loss="huber", env="Breakout-v0", model_dir=None)
Initializes the parameters of the model. Args: height: Height of the image width: Width of the image channels: Number of channels, history of past frame discount: Discount_Factor for Q Learning update
Initializes the parameters of the model.
def __init__(self,
             actions,
             height=80,
             width=80,
             channels=1,
             discount=0.95,
             loss="huber",
             env="Breakout-v0",
             model_dir=None):
  """
  Initializes the parameters of the model.

  Args:
    actions: Number of discrete actions available in the environment
    height: Height of the image
    width: Width of the image
    channels: Number of channels, history of past frame
    discount: Discount_Factor for Q Learning update
    loss: Loss function to train on ("huber" by default)
    env: Name of the gym environment
    model_dir: Directory for TensorBoard logs, if any
  """
  self.height = height
  self.width = width
  self.channels = channels
  self.discount = discount
  self.actions = actions
  self.env = env
  self.loss = loss
  self.epoch_num = 0
  self.model_dir = model_dir
  self.max_reward = 0
  self.cur_reward = 0
  self.reward_tensor = K.variable(value=0)
  if model_dir is not None:
    self.tbCallBack = TensorBoard(
        log_dir=model_dir,
        histogram_freq=0,
        write_graph=True,
        write_images=True)
[ 47, 2 ]
[ 81, 28 ]
python
en
['en', 'en', 'en']
True
Agent.create_model
( self, lr, type="vanilla", rescale_value=255.0, )
Builds the DQN Agent architecture. Source:https://cs.corp.google.com/piper///depot/google3/third_party/py/ dopamine/agents/dqn/dqn_agent.py?q=DQN&dr=CSs&l=15 This initializes the model as per the specifications mentioned in the DQN paper by Deepmind. This is a sequential model implemention of tf.keras. The compiled model is returned by the Method. Args: Returns: Model: Compiled Model
Builds the DQN Agent architecture.
def create_model(
    self,
    lr,
    type="vanilla",
    rescale_value=255.0,
):
  """
  Builds the DQN Agent architecture.

  Source:https://cs.corp.google.com/piper///depot/google3/third_party/py/
  dopamine/agents/dqn/dqn_agent.py?q=DQN&dr=CSs&l=15

  This initializes the model as per the specifications mentioned in the
  DQN paper by Deepmind. This is a functional-API implementation in
  tf.keras. The compiled model is returned by the method.

  Args:
    lr: Learning rate for the Adam optimizer
    type: Agent variant (unused here)
    rescale_value: Divisor used to normalize input pixels

  Returns:
    Model: Compiled Model
  """
  # with tf.device('/gpu:0'):
  self.image_frames = Input(shape=(self.height, self.width, self.channels))
  # self.normalize = Lambda(lambda input: input/255.0)
  self.conv1 = Conv2D(
      filters=32,
      kernel_size=(8, 8),
      strides=(4, 4),
      activation="relu",
      name="conv1")(
          Lambda(lambda input: input / float(rescale_value))(
              self.image_frames))
  self.conv2 = Conv2D(
      filters=64,
      kernel_size=(4, 4),
      strides=(2, 2),
      activation="relu",
      name="conv2")(
          self.conv1)
  self.conv3 = Conv2D(
      filters=64,
      kernel_size=(3, 3),
      strides=(1, 1),
      activation="relu",
      name="conv3")(
          self.conv2)
  self.flattened = Flatten(name="flattened")(self.conv3)
  self.fully_connected_1 = Dense(
      units=512,
      activation="relu",
      name="fully_connected_1")(
          self.flattened)
  self.q_values = Dense(
      units=self.actions, activation="linear", name="q_values")(
          self.fully_connected_1)
  self.model = Model(inputs=[self.image_frames], outputs=[self.q_values])

  self.optimizer = Adam(lr=lr)
  if self.loss == "huber":
    self.loss = huber_loss

  K.get_session().run(tf.global_variables_initializer())

  def reward(y_true, y_pred):
    return self.reward_tensor

  self.model.compile(
      optimizer=self.optimizer, loss=self.loss, metrics=["mse", reward])

  return self.model
[ 83, 2 ]
[ 152, 21 ]
python
en
['en', 'en', 'en']
True
Agent.batch_train
(self, curr_state, next_state, immediate_reward, action, done, target, type="Double")
Computes the TD Error for a given batch of tuples. Here, we randomly sample episodes from the Experience buffer and use this to train our model. This method computes this for a batch and trains the model. Args: curr_state(array): Numpy array representing an array of current states of game next_state(array): Numpy array for immediate next state of the game action(array): List of actions taken to go from current state to the next reward(array): List of rewards for the given transition done(bool): if this is a terminal state or not. target(keras.model object): Target network for computing TD error
Computes the TD Error for a given batch of tuples.
def batch_train(self,
                curr_state,
                next_state,
                immediate_reward,
                action,
                done,
                target,
                type="Double"):
  """
  Computes the TD Error for a given batch of tuples.

  Here, we randomly sample episodes from the Experience buffer and use this
  to train our model. This method computes this for a batch and trains the
  model.

  Args:
    curr_state(array): Numpy array representing an array of current states
      of game
    next_state(array): Numpy array for immediate next state of the game
    action(array): List of actions taken to go from current state to the next
    immediate_reward(array): List of rewards for the given transition
    done(array): Whether each transition is terminal (1) or not (0)
    target(keras.model object): Target network for computing TD error
    type(str): "Double" or "Vanilla" DQN update
  """
  if type == "Double":
    forward_action = np.argmax(self.model.predict(next_state), axis=1)
    predicted_qvalue = target.predict(next_state)  # BxN matrix
    B = forward_action.size
    forward_qvalue = predicted_qvalue[np.arange(B), forward_action]  # Bx1 vec

  elif type == "Vanilla":
    forward_qvalue = np.max(target.predict(next_state), axis=1)

  discounted_reward = (self.discount * forward_qvalue * (1 - done))
  Q_value = immediate_reward + discounted_reward
  target_values = self.model.predict(curr_state)
  target_values[range(target_values.shape[0]), action] = Q_value
  """
  for i, target in enumerate(target_values):
    target_values[i, action[i]] = Q_value[i]
  """
  callbacks = []
  # Update epoch number for TensorBoard.
  K.set_value(self.reward_tensor, self.cur_reward)
  if self.model_dir is not None and self.epoch_num % TB_LOGGING_EPOCHS == 0:
    callbacks.append(self.tbCallBack)
  self.model.fit(
      curr_state,
      target_values,
      verbose=0,
      initial_epoch=self.epoch_num,
      callbacks=callbacks,
      epochs=self.epoch_num + 1)
  self.epoch_num += 1
[ 154, 2 ]
[ 209, 23 ]
python
en
['en', 'en', 'en']
True
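A small numpy sketch of the Double-DQN target computed in `batch_train`, with hypothetical Q-value arrays standing in for the online and target network predictions:

import numpy as np

online_q_next = np.array([[0.1, 0.9], [0.5, 0.2]])  # stand-in for self.model.predict(next_state)
target_q_next = np.array([[1.0, 2.0], [3.0, 4.0]])  # stand-in for target.predict(next_state)
immediate_reward = np.array([1.0, 0.0])
done = np.array([0, 1])  # second transition is terminal
discount = 0.95

forward_action = np.argmax(online_q_next, axis=1)             # online net picks the action
forward_qvalue = target_q_next[np.arange(2), forward_action]  # target net scores it
q_value = immediate_reward + discount * forward_qvalue * (1 - done)
print(q_value)  # -> [2.9 0. ]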
Agent.predict_action
(self, state)
Predict the action for a given state. Args: state(float): Numpy array Return: action(int): Discrete action to sample
Predict the action for a given state.
def predict_action(self, state):
  """
  Predict the action for a given state.

  Args:
    state(array): Numpy array representing the current game state
  Return:
    action(int): Discrete action to sample
  """
  # state = downsample_state(convert_greyscale(state))
  # state = np.expand_dims(state, axis=0)
  if np.ndim(state) == 3:
    state = np.expand_dims(state, axis=0)
  return np.argmax(self.model.predict(state))
[ 211, 2 ]
[ 224, 47 ]
python
en
['en', 'en', 'en']
True
Agent.play
(self, env, directory, mode)
Returns the total reward for an episode of the game.
Returns the total reward for an episode of the game.
def play(self, env, directory, mode):
  """ Returns the total reward for an episode of the game."""
  steps = []
  state = env.reset()
  done = False
  tot_reward = 0
  actions = [0] * self.actions
  while not done:
    if mode != "Train":
      s = env.render("rgb_array")
      steps.append(s)

    action = self.predict_action(state)
    actions[action] += 1
    state, reward, done, _ = env.step(action)
    tot_reward += reward
  self.cur_reward = tot_reward
  if mode != "Train" and tot_reward > self.max_reward:
    print("New high reward: ", tot_reward)
    clip = ImageSequenceClip(steps, fps=30)
    clip.write_gif("~/breakout.gif", fps=30)
    self.max_reward = tot_reward
  print("ACTIONS TAKEN", actions)
  return tot_reward
[ 226, 2 ]
[ 250, 21 ]
python
en
['en', 'en', 'en']
True
WsgiToAsgi.__call__
(self, scope, receive, send)
ASGI application instantiation point. We return a new WsgiToAsgiInstance here with the WSGI app and the scope, ready to respond when it is __call__ed.
ASGI application instantiation point. We return a new WsgiToAsgiInstance here with the WSGI app and the scope, ready to respond when it is __call__ed.
async def __call__(self, scope, receive, send):
    """
    ASGI application instantiation point.
    We return a new WsgiToAsgiInstance here with the WSGI app
    and the scope, ready to respond when it is __call__ed.
    """
    await WsgiToAsgiInstance(self.wsgi_application)(scope, receive, send)
[ 14, 4 ]
[ 20, 77 ]
python
en
['en', 'error', 'th']
False
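A hedged sketch of wrapping a plain WSGI callable so an ASGI server can serve it; it assumes asgiref is installed, and the server command in the comment is illustrative:

from asgiref.wsgi import WsgiToAsgi

def wsgi_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello from WSGI\n"]

asgi_app = WsgiToAsgi(wsgi_app)
# Serve with any ASGI server, e.g.: uvicorn module_name:asgi_app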
WsgiToAsgiInstance.build_environ
(self, scope, body)
Builds a scope and request body into a WSGI environ object.
Builds a scope and request body into a WSGI environ object.
def build_environ(self, scope, body):
    """
    Builds a scope and request body into a WSGI environ object.
    """
    environ = {
        "REQUEST_METHOD": scope["method"],
        "SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"),
        "PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
        "QUERY_STRING": scope["query_string"].decode("ascii"),
        "SERVER_PROTOCOL": "HTTP/%s" % scope["http_version"],
        "wsgi.version": (1, 0),
        "wsgi.url_scheme": scope.get("scheme", "http"),
        "wsgi.input": body,
        "wsgi.errors": BytesIO(),
        "wsgi.multithread": True,
        "wsgi.multiprocess": True,
        "wsgi.run_once": False,
    }
    # Get server name and port - required in WSGI, not in ASGI
    if "server" in scope:
        environ["SERVER_NAME"] = scope["server"][0]
        environ["SERVER_PORT"] = str(scope["server"][1])
    else:
        environ["SERVER_NAME"] = "localhost"
        environ["SERVER_PORT"] = "80"

    if "client" in scope:
        environ["REMOTE_ADDR"] = scope["client"][0]

    # Go through headers and make them into environ entries
    for name, value in self.scope.get("headers", []):
        name = name.decode("latin1")
        if name == "content-length":
            corrected_name = "CONTENT_LENGTH"
        elif name == "content-type":
            corrected_name = "CONTENT_TYPE"
        else:
            corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
        # HTTPbis say only ASCII chars are allowed in headers, but we latin1 just in case
        value = value.decode("latin1")
        if corrected_name in environ:
            value = environ[corrected_name] + "," + value
        environ[corrected_name] = value
    return environ
[ 52, 4 ]
[ 95, 22 ]
python
en
['en', 'error', 'th']
False
WsgiToAsgiInstance.start_response
(self, status, response_headers, exc_info=None)
WSGI start_response callable.
WSGI start_response callable.
def start_response(self, status, response_headers, exc_info=None):
    """
    WSGI start_response callable.
    """
    # Don't allow re-calling once response has begun
    if self.response_started:
        raise exc_info[1].with_traceback(exc_info[2])
    # Don't allow re-calling without exc_info
    if hasattr(self, "response_start") and exc_info is None:
        raise ValueError(
            "You cannot call start_response a second time without exc_info"
        )
    # Extract status code
    status_code, _ = status.split(" ", 1)
    status_code = int(status_code)
    # Extract headers
    headers = [
        (name.lower().encode("ascii"), value.encode("ascii"))
        for name, value in response_headers
    ]
    # Extract content-length
    self.response_content_length = None
    for name, value in response_headers:
        if name.lower() == "content-length":
            self.response_content_length = int(value)
    # Build and send response start message.
    self.response_start = {
        "type": "http.response.start",
        "status": status_code,
        "headers": headers,
    }
[ 97, 4 ]
[ 127, 9 ]
python
en
['en', 'error', 'th']
False
WsgiToAsgiInstance.run_wsgi_app
(self, body)
Called in a subthread to run the WSGI app. We encapsulate like this so that the start_response callable is called in the same thread.
Called in a subthread to run the WSGI app. We encapsulate like this so that the start_response callable is called in the same thread.
def run_wsgi_app(self, body):
    """
    Called in a subthread to run the WSGI app. We encapsulate like
    this so that the start_response callable is called in the same thread.
    """
    # Translate the scope and incoming request body into a WSGI environ
    environ = self.build_environ(self.scope, body)
    # Run the WSGI app
    bytes_sent = 0
    for output in self.wsgi_application(environ, self.start_response):
        # If this is the first response, include the response headers
        if not self.response_started:
            self.response_started = True
            self.sync_send(self.response_start)
        # If the application supplies a Content-Length header
        if self.response_content_length is not None:
            # The server should not transmit more bytes to the client than the header allows
            bytes_allowed = self.response_content_length - bytes_sent
            if len(output) > bytes_allowed:
                output = output[:bytes_allowed]
        self.sync_send(
            {"type": "http.response.body", "body": output, "more_body": True}
        )
        bytes_sent += len(output)
        # The server should stop iterating over the response when enough data has been sent
        if bytes_sent == self.response_content_length:
            break
    # Close connection
    if not self.response_started:
        self.response_started = True
        self.sync_send(self.response_start)
    self.sync_send({"type": "http.response.body"})
[ 130, 4 ]
[ 161, 54 ]
python
en
['en', 'error', 'th']
False
warn_if_run_as_root
()
Output a warning for sudo users on Unix. In a virtual environment, sudo pip still writes to virtualenv. On Windows, users may run pip as Administrator without issues. This warning only applies to Unix root users outside of virtualenv.
Output a warning for sudo users on Unix.
def warn_if_run_as_root() -> None:
    """Output a warning for sudo users on Unix.

    In a virtual environment, sudo pip still writes to virtualenv.
    On Windows, users may run pip as Administrator without issues.
    This warning only applies to Unix root users outside of virtualenv.
    """
    if running_under_virtualenv():
        return
    if not hasattr(os, "getuid"):
        return
    # On Windows, there are no "system managed" Python packages. Installing as
    # Administrator via pip is the correct way of updating system environments.
    #
    # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
    # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
    if sys.platform == "win32" or sys.platform == "cygwin":
        return
    if sys.platform == "darwin" or sys.platform == "linux":
        if os.getuid() != 0:
            return
    logger.warning(
        "Running pip as the 'root' user can result in broken permissions and "
        "conflicting behaviour with the system package manager. "
        "It is recommended to use a virtual environment instead: "
        "https://pip.pypa.io/warnings/venv"
    )
[ 156, 0 ]
[ 182, 5 ]
python
en
['en', 'en', 'en']
True
with_cleanup
(func: Any)
Decorator for common logic related to managing temporary directories.
Decorator for common logic related to managing temporary directories.
def with_cleanup(func: Any) -> Any:
    """Decorator for common logic related to managing temporary
    directories.
    """

    def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
        for t in KEEPABLE_TEMPDIR_TYPES:
            registry.set_delete(t, False)

    def wrapper(
        self: RequirementCommand, options: Values, args: List[Any]
    ) -> Optional[int]:
        assert self.tempdir_registry is not None
        if options.no_clean:
            configure_tempdir_registry(self.tempdir_registry)

        try:
            return func(self, options, args)
        except PreviousBuildDirError:
            # This kind of conflict can occur when the user passes an explicit
            # build directory with a pre-existing folder. In that case we do
            # not want to accidentally remove it.
            configure_tempdir_registry(self.tempdir_registry)
            raise

    return wrapper
[ 185, 0 ]
[ 210, 18 ]
python
en
['en', 'en', 'en']
True
SessionCommandMixin._get_index_urls
(cls, options: Values)
Return a list of index urls from user-provided options.
Return a list of index urls from user-provided options.
def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
    """Return a list of index urls from user-provided options."""
    index_urls = []
    if not getattr(options, "no_index", False):
        url = getattr(options, "index_url", None)
        if url:
            index_urls.append(url)
    urls = getattr(options, "extra_index_urls", None)
    if urls:
        index_urls.extend(urls)
    # Return None rather than an empty list
    return index_urls or None
[ 57, 4 ]
[ 68, 33 ]
python
en
['en', 'en', 'en']
True
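A small sketch of the option handling in `_get_index_urls`, using a bare optparse `Values` object with hypothetical URLs:

from optparse import Values

opts = Values({
    "no_index": False,
    "index_url": "https://pypi.org/simple",
    "extra_index_urls": ["https://mirror.example.org/simple"],
})
# SessionCommandMixin._get_index_urls(opts) would collect both URLs here;
# when nothing is collected it returns None rather than an empty list.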
SessionCommandMixin.get_default_session
(self, options: Values)
Get a default-managed session.
Get a default-managed session.
def get_default_session(self, options: Values) -> PipSession:
    """Get a default-managed session."""
    if self._session is None:
        self._session = self.enter_context(self._build_session(options))
        # there's no type annotation on requests.Session, so it's
        # automatically ContextManager[Any] and self._session becomes Any,
        # then https://github.com/python/mypy/issues/7696 kicks in
        assert self._session is not None
    return self._session
[ 70, 4 ]
[ 78, 28 ]
python
en
['en', 'da', 'en']
True
IndexGroupCommand.handle_pip_version_check
(self, options: Values)
Do the pip version check if not disabled. This overrides the default behavior of not doing the check.
Do the pip version check if not disabled.
def handle_pip_version_check(self, options: Values) -> None:
    """
    Do the pip version check if not disabled.

    This overrides the default behavior of not doing the check.
    """
    # Make sure the index_group options are present.
    assert hasattr(options, "no_index")

    if options.disable_pip_version_check or options.no_index:
        return

    # Otherwise, check if we're using the latest version of pip available.
    session = self._build_session(
        options, retries=0, timeout=min(5, options.timeout)
    )
    with session:
        pip_self_version_check(session, options)
[ "def", "handle_pip_version_check", "(", "self", ",", "options", ":", "Values", ")", "->", "None", ":", "# Make sure the index_group options are present.", "assert", "hasattr", "(", "options", ",", "\"no_index\"", ")", "if", "options", ".", "disable_pip_version_check", "or", "options", ".", "no_index", ":", "return", "# Otherwise, check if we're using the latest version of pip available.", "session", "=", "self", ".", "_build_session", "(", "options", ",", "retries", "=", "0", ",", "timeout", "=", "min", "(", "5", ",", "options", ".", "timeout", ")", ")", "with", "session", ":", "pip_self_version_check", "(", "session", ",", "options", ")" ]
[ 129, 4 ]
[ 146, 52 ]
python
en
['en', 'error', 'th']
False
RequirementCommand.determine_resolver_variant
(options: Values)
Determines which resolver should be used, based on the given options.
Determines which resolver should be used, based on the given options.
def determine_resolver_variant(options: Values) -> str:
    """Determines which resolver should be used, based on the given options."""
    if "legacy-resolver" in options.deprecated_features_enabled:
        return "legacy"

    return "2020-resolver"
[ "def", "determine_resolver_variant", "(", "options", ":", "Values", ")", "->", "str", ":", "if", "\"legacy-resolver\"", "in", "options", ".", "deprecated_features_enabled", ":", "return", "\"legacy\"", "return", "\"2020-resolver\"" ]
[ 220, 4 ]
[ 225, 30 ]
python
en
['en', 'en', 'en']
True
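A quick sketch of how the CLI flag maps to the variant string; again a bare optparse.Values stands in for pip's parsed options, and the attribute name deprecated_features_enabled is taken from the record above.

from optparse import Values

def determine_resolver_variant(options) -> str:
    # Same logic as RequirementCommand.determine_resolver_variant above.
    if "legacy-resolver" in options.deprecated_features_enabled:
        return "legacy"
    return "2020-resolver"

print(determine_resolver_variant(Values({"deprecated_features_enabled": []})))
# 2020-resolver
print(determine_resolver_variant(
    Values({"deprecated_features_enabled": ["legacy-resolver"]})))
# legacy  (what `pip install --use-deprecated=legacy-resolver ...` selects)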
RequirementCommand.make_requirement_preparer
( cls, temp_build_dir: TempDirectory, options: Values, req_tracker: RequirementTracker, session: PipSession, finder: PackageFinder, use_user_site: bool, download_dir: Optional[str] = None, )
Create a RequirementPreparer instance for the given parameters.
Create a RequirementPreparer instance for the given parameters.
def make_requirement_preparer(
    cls,
    temp_build_dir: TempDirectory,
    options: Values,
    req_tracker: RequirementTracker,
    session: PipSession,
    finder: PackageFinder,
    use_user_site: bool,
    download_dir: Optional[str] = None,
) -> RequirementPreparer:
    """
    Create a RequirementPreparer instance for the given parameters.
    """
    temp_build_dir_path = temp_build_dir.path
    assert temp_build_dir_path is not None

    resolver_variant = cls.determine_resolver_variant(options)
    if resolver_variant == "2020-resolver":
        lazy_wheel = "fast-deps" in options.features_enabled
        if lazy_wheel:
            logger.warning(
                "pip is using lazily downloaded wheels using HTTP "
                "range requests to obtain dependency information. "
                "This experimental feature is enabled through "
                "--use-feature=fast-deps and it is not ready for "
                "production."
            )
    else:
        lazy_wheel = False
        if "fast-deps" in options.features_enabled:
            logger.warning(
                "fast-deps has no effect when used with the legacy resolver."
            )

    return RequirementPreparer(
        build_dir=temp_build_dir_path,
        src_dir=options.src_dir,
        download_dir=download_dir,
        build_isolation=options.build_isolation,
        req_tracker=req_tracker,
        session=session,
        progress_bar=options.progress_bar,
        finder=finder,
        require_hashes=options.require_hashes,
        use_user_site=use_user_site,
        lazy_wheel=lazy_wheel,
        in_tree_build="in-tree-build" in options.features_enabled,
    )
[ "def", "make_requirement_preparer", "(", "cls", ",", "temp_build_dir", ":", "TempDirectory", ",", "options", ":", "Values", ",", "req_tracker", ":", "RequirementTracker", ",", "session", ":", "PipSession", ",", "finder", ":", "PackageFinder", ",", "use_user_site", ":", "bool", ",", "download_dir", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "RequirementPreparer", ":", "temp_build_dir_path", "=", "temp_build_dir", ".", "path", "assert", "temp_build_dir_path", "is", "not", "None", "resolver_variant", "=", "cls", ".", "determine_resolver_variant", "(", "options", ")", "if", "resolver_variant", "==", "\"2020-resolver\"", ":", "lazy_wheel", "=", "\"fast-deps\"", "in", "options", ".", "features_enabled", "if", "lazy_wheel", ":", "logger", ".", "warning", "(", "\"pip is using lazily downloaded wheels using HTTP \"", "\"range requests to obtain dependency information. \"", "\"This experimental feature is enabled through \"", "\"--use-feature=fast-deps and it is not ready for \"", "\"production.\"", ")", "else", ":", "lazy_wheel", "=", "False", "if", "\"fast-deps\"", "in", "options", ".", "features_enabled", ":", "logger", ".", "warning", "(", "\"fast-deps has no effect when used with the legacy resolver.\"", ")", "return", "RequirementPreparer", "(", "build_dir", "=", "temp_build_dir_path", ",", "src_dir", "=", "options", ".", "src_dir", ",", "download_dir", "=", "download_dir", ",", "build_isolation", "=", "options", ".", "build_isolation", ",", "req_tracker", "=", "req_tracker", ",", "session", "=", "session", ",", "progress_bar", "=", "options", ".", "progress_bar", ",", "finder", "=", "finder", ",", "require_hashes", "=", "options", ".", "require_hashes", ",", "use_user_site", "=", "use_user_site", ",", "lazy_wheel", "=", "lazy_wheel", ",", "in_tree_build", "=", "\"in-tree-build\"", "in", "options", ".", "features_enabled", ",", ")" ]
[ 228, 4 ]
[ 275, 9 ]
python
en
['en', 'error', 'th']
False
RequirementCommand.make_resolver
( cls, preparer: RequirementPreparer, finder: PackageFinder, options: Values, wheel_cache: Optional[WheelCache] = None, use_user_site: bool = False, ignore_installed: bool = True, ignore_requires_python: bool = False, force_reinstall: bool = False, upgrade_strategy: str = "to-satisfy-only", use_pep517: Optional[bool] = None, py_version_info: Optional[Tuple[int, ...]] = None, )
Create a Resolver instance for the given parameters.
Create a Resolver instance for the given parameters.
def make_resolver(
    cls,
    preparer: RequirementPreparer,
    finder: PackageFinder,
    options: Values,
    wheel_cache: Optional[WheelCache] = None,
    use_user_site: bool = False,
    ignore_installed: bool = True,
    ignore_requires_python: bool = False,
    force_reinstall: bool = False,
    upgrade_strategy: str = "to-satisfy-only",
    use_pep517: Optional[bool] = None,
    py_version_info: Optional[Tuple[int, ...]] = None,
) -> BaseResolver:
    """
    Create a Resolver instance for the given parameters.
    """
    make_install_req = partial(
        install_req_from_req_string,
        isolated=options.isolated_mode,
        use_pep517=use_pep517,
    )
    resolver_variant = cls.determine_resolver_variant(options)
    # The long import name and duplicated invocation is needed to convince
    # Mypy into correctly typechecking. Otherwise it would complain the
    # "Resolver" class being redefined.
    if resolver_variant == "2020-resolver":
        import pip._internal.resolution.resolvelib.resolver

        return pip._internal.resolution.resolvelib.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )
    import pip._internal.resolution.legacy.resolver

    return pip._internal.resolution.legacy.resolver.Resolver(
        preparer=preparer,
        finder=finder,
        wheel_cache=wheel_cache,
        make_install_req=make_install_req,
        use_user_site=use_user_site,
        ignore_dependencies=options.ignore_dependencies,
        ignore_installed=ignore_installed,
        ignore_requires_python=ignore_requires_python,
        force_reinstall=force_reinstall,
        upgrade_strategy=upgrade_strategy,
        py_version_info=py_version_info,
    )
[ "def", "make_resolver", "(", "cls", ",", "preparer", ":", "RequirementPreparer", ",", "finder", ":", "PackageFinder", ",", "options", ":", "Values", ",", "wheel_cache", ":", "Optional", "[", "WheelCache", "]", "=", "None", ",", "use_user_site", ":", "bool", "=", "False", ",", "ignore_installed", ":", "bool", "=", "True", ",", "ignore_requires_python", ":", "bool", "=", "False", ",", "force_reinstall", ":", "bool", "=", "False", ",", "upgrade_strategy", ":", "str", "=", "\"to-satisfy-only\"", ",", "use_pep517", ":", "Optional", "[", "bool", "]", "=", "None", ",", "py_version_info", ":", "Optional", "[", "Tuple", "[", "int", ",", "...", "]", "]", "=", "None", ",", ")", "->", "BaseResolver", ":", "make_install_req", "=", "partial", "(", "install_req_from_req_string", ",", "isolated", "=", "options", ".", "isolated_mode", ",", "use_pep517", "=", "use_pep517", ",", ")", "resolver_variant", "=", "cls", ".", "determine_resolver_variant", "(", "options", ")", "# The long import name and duplicated invocation is needed to convince", "# Mypy into correctly typechecking. Otherwise it would complain the", "# \"Resolver\" class being redefined.", "if", "resolver_variant", "==", "\"2020-resolver\"", ":", "import", "pip", ".", "_internal", ".", "resolution", ".", "resolvelib", ".", "resolver", "return", "pip", ".", "_internal", ".", "resolution", ".", "resolvelib", ".", "resolver", ".", "Resolver", "(", "preparer", "=", "preparer", ",", "finder", "=", "finder", ",", "wheel_cache", "=", "wheel_cache", ",", "make_install_req", "=", "make_install_req", ",", "use_user_site", "=", "use_user_site", ",", "ignore_dependencies", "=", "options", ".", "ignore_dependencies", ",", "ignore_installed", "=", "ignore_installed", ",", "ignore_requires_python", "=", "ignore_requires_python", ",", "force_reinstall", "=", "force_reinstall", ",", "upgrade_strategy", "=", "upgrade_strategy", ",", "py_version_info", "=", "py_version_info", ",", ")", "import", "pip", ".", "_internal", ".", "resolution", ".", "legacy", ".", "resolver", "return", "pip", ".", "_internal", ".", "resolution", ".", "legacy", ".", "resolver", ".", "Resolver", "(", "preparer", "=", "preparer", ",", "finder", "=", "finder", ",", "wheel_cache", "=", "wheel_cache", ",", "make_install_req", "=", "make_install_req", ",", "use_user_site", "=", "use_user_site", ",", "ignore_dependencies", "=", "options", ".", "ignore_dependencies", ",", "ignore_installed", "=", "ignore_installed", ",", "ignore_requires_python", "=", "ignore_requires_python", ",", "force_reinstall", "=", "force_reinstall", ",", "upgrade_strategy", "=", "upgrade_strategy", ",", "py_version_info", "=", "py_version_info", ",", ")" ]
[ 278, 4 ]
[ 334, 9 ]
python
en
['en', 'error', 'th']
False
RequirementCommand.get_requirements
( self, args: List[str], options: Values, finder: PackageFinder, session: PipSession, )
Parse command-line arguments into the corresponding requirements.
Parse command-line arguments into the corresponding requirements.
def get_requirements(
    self,
    args: List[str],
    options: Values,
    finder: PackageFinder,
    session: PipSession,
) -> List[InstallRequirement]:
    """
    Parse command-line arguments into the corresponding requirements.
    """
    requirements: List[InstallRequirement] = []
    for filename in options.constraints:
        for parsed_req in parse_requirements(
            filename,
            constraint=True,
            finder=finder,
            options=options,
            session=session,
        ):
            req_to_add = install_req_from_parsed_requirement(
                parsed_req,
                isolated=options.isolated_mode,
                user_supplied=False,
            )
            requirements.append(req_to_add)

    for req in args:
        req_to_add = install_req_from_line(
            req,
            None,
            isolated=options.isolated_mode,
            use_pep517=options.use_pep517,
            user_supplied=True,
        )
        requirements.append(req_to_add)

    for req in options.editables:
        req_to_add = install_req_from_editable(
            req,
            user_supplied=True,
            isolated=options.isolated_mode,
            use_pep517=options.use_pep517,
        )
        requirements.append(req_to_add)

    # NOTE: options.require_hashes may be set if --require-hashes is True
    for filename in options.requirements:
        for parsed_req in parse_requirements(
            filename, finder=finder, options=options, session=session
        ):
            req_to_add = install_req_from_parsed_requirement(
                parsed_req,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
            )
            requirements.append(req_to_add)

    # If any requirement has hash options, enable hash checking.
    if any(req.has_hash_options for req in requirements):
        options.require_hashes = True

    if not (args or options.editables or options.requirements):
        opts = {"name": self.name}
        if options.find_links:
            raise CommandError(
                "You must give at least one requirement to {name} "
                '(maybe you meant "pip {name} {links}"?)'.format(
                    **dict(opts, links=" ".join(options.find_links))
                )
            )
        else:
            raise CommandError(
                "You must give at least one requirement to {name} "
                '(see "pip help {name}")'.format(**opts)
            )

    return requirements
[ "def", "get_requirements", "(", "self", ",", "args", ":", "List", "[", "str", "]", ",", "options", ":", "Values", ",", "finder", ":", "PackageFinder", ",", "session", ":", "PipSession", ",", ")", "->", "List", "[", "InstallRequirement", "]", ":", "requirements", ":", "List", "[", "InstallRequirement", "]", "=", "[", "]", "for", "filename", "in", "options", ".", "constraints", ":", "for", "parsed_req", "in", "parse_requirements", "(", "filename", ",", "constraint", "=", "True", ",", "finder", "=", "finder", ",", "options", "=", "options", ",", "session", "=", "session", ",", ")", ":", "req_to_add", "=", "install_req_from_parsed_requirement", "(", "parsed_req", ",", "isolated", "=", "options", ".", "isolated_mode", ",", "user_supplied", "=", "False", ",", ")", "requirements", ".", "append", "(", "req_to_add", ")", "for", "req", "in", "args", ":", "req_to_add", "=", "install_req_from_line", "(", "req", ",", "None", ",", "isolated", "=", "options", ".", "isolated_mode", ",", "use_pep517", "=", "options", ".", "use_pep517", ",", "user_supplied", "=", "True", ",", ")", "requirements", ".", "append", "(", "req_to_add", ")", "for", "req", "in", "options", ".", "editables", ":", "req_to_add", "=", "install_req_from_editable", "(", "req", ",", "user_supplied", "=", "True", ",", "isolated", "=", "options", ".", "isolated_mode", ",", "use_pep517", "=", "options", ".", "use_pep517", ",", ")", "requirements", ".", "append", "(", "req_to_add", ")", "# NOTE: options.require_hashes may be set if --require-hashes is True", "for", "filename", "in", "options", ".", "requirements", ":", "for", "parsed_req", "in", "parse_requirements", "(", "filename", ",", "finder", "=", "finder", ",", "options", "=", "options", ",", "session", "=", "session", ")", ":", "req_to_add", "=", "install_req_from_parsed_requirement", "(", "parsed_req", ",", "isolated", "=", "options", ".", "isolated_mode", ",", "use_pep517", "=", "options", ".", "use_pep517", ",", "user_supplied", "=", "True", ",", ")", "requirements", ".", "append", "(", "req_to_add", ")", "# If any requirement has hash options, enable hash checking.", "if", "any", "(", "req", ".", "has_hash_options", "for", "req", "in", "requirements", ")", ":", "options", ".", "require_hashes", "=", "True", "if", "not", "(", "args", "or", "options", ".", "editables", "or", "options", ".", "requirements", ")", ":", "opts", "=", "{", "\"name\"", ":", "self", ".", "name", "}", "if", "options", ".", "find_links", ":", "raise", "CommandError", "(", "\"You must give at least one requirement to {name} \"", "'(maybe you meant \"pip {name} {links}\"?)'", ".", "format", "(", "*", "*", "dict", "(", "opts", ",", "links", "=", "\" \"", ".", "join", "(", "options", ".", "find_links", ")", ")", ")", ")", "else", ":", "raise", "CommandError", "(", "\"You must give at least one requirement to {name} \"", "'(see \"pip help {name}\")'", ".", "format", "(", "*", "*", "opts", ")", ")", "return", "requirements" ]
[ 336, 4 ]
[ 413, 27 ]
python
en
['en', 'error', 'th']
False
RequirementCommand.trace_basic_info
(finder: PackageFinder)
Trace basic information about the provided objects.
Trace basic information about the provided objects.
def trace_basic_info(finder: PackageFinder) -> None:
    """
    Trace basic information about the provided objects.
    """
    # Display where finder is looking for packages
    search_scope = finder.search_scope
    locations = search_scope.get_formatted_locations()
    if locations:
        logger.info(locations)
[ "def", "trace_basic_info", "(", "finder", ":", "PackageFinder", ")", "->", "None", ":", "# Display where finder is looking for packages", "search_scope", "=", "finder", ".", "search_scope", "locations", "=", "search_scope", ".", "get_formatted_locations", "(", ")", "if", "locations", ":", "logger", ".", "info", "(", "locations", ")" ]
[ 416, 4 ]
[ 424, 34 ]
python
en
['en', 'error', 'th']
False
RequirementCommand._build_package_finder
( self, options: Values, session: PipSession, target_python: Optional[TargetPython] = None, ignore_requires_python: Optional[bool] = None, )
Create a package finder appropriate to this requirement command. :param ignore_requires_python: Whether to ignore incompatible "Requires-Python" values in links. Defaults to False.
Create a package finder appropriate to this requirement command.
def _build_package_finder(
    self,
    options: Values,
    session: PipSession,
    target_python: Optional[TargetPython] = None,
    ignore_requires_python: Optional[bool] = None,
) -> PackageFinder:
    """
    Create a package finder appropriate to this requirement command.

    :param ignore_requires_python: Whether to ignore incompatible
        "Requires-Python" values in links. Defaults to False.
    """
    link_collector = LinkCollector.create(session, options=options)
    selection_prefs = SelectionPreferences(
        allow_yanked=True,
        format_control=options.format_control,
        allow_all_prereleases=options.pre,
        prefer_binary=options.prefer_binary,
        ignore_requires_python=ignore_requires_python,
    )

    return PackageFinder.create(
        link_collector=link_collector,
        selection_prefs=selection_prefs,
        target_python=target_python,
    )
[ "def", "_build_package_finder", "(", "self", ",", "options", ":", "Values", ",", "session", ":", "PipSession", ",", "target_python", ":", "Optional", "[", "TargetPython", "]", "=", "None", ",", "ignore_requires_python", ":", "Optional", "[", "bool", "]", "=", "None", ",", ")", "->", "PackageFinder", ":", "link_collector", "=", "LinkCollector", ".", "create", "(", "session", ",", "options", "=", "options", ")", "selection_prefs", "=", "SelectionPreferences", "(", "allow_yanked", "=", "True", ",", "format_control", "=", "options", ".", "format_control", ",", "allow_all_prereleases", "=", "options", ".", "pre", ",", "prefer_binary", "=", "options", ".", "prefer_binary", ",", "ignore_requires_python", "=", "ignore_requires_python", ",", ")", "return", "PackageFinder", ".", "create", "(", "link_collector", "=", "link_collector", ",", "selection_prefs", "=", "selection_prefs", ",", "target_python", "=", "target_python", ",", ")" ]
[ 426, 4 ]
[ 452, 9 ]
python
en
['en', 'error', 'th']
False
_ConfigName
(context)
Return the short config name.
Return the short config name.
def _ConfigName(context):
  """Return the short config name."""
  return '{}-config'.format(context.env['deployment'])
[ "def", "_ConfigName", "(", "context", ")", ":", "return", "'{}-config'", ".", "format", "(", "context", ".", "env", "[", "'deployment'", "]", ")" ]
[ 63, 0 ]
[ 65, 54 ]
python
en
['en', 'en', 'en']
True
_ConfigUrl
(context)
Returns the full URL to the config, including hostname.
Returns the full URL to the config, including hostname.
def _ConfigUrl(context):
  """Returns the full URL to the config, including hostname."""
  return '{endpoint}/projects/{project}/configs/{config}'.format(
      endpoint=RTC_ENDPOINT,
      project=context.env['project'],
      config=_ConfigName(context))
[ "def", "_ConfigUrl", "(", "context", ")", ":", "return", "'{endpoint}/projects/{project}/configs/{config}'", ".", "format", "(", "endpoint", "=", "RTC_ENDPOINT", ",", "project", "=", "context", ".", "env", "[", "'project'", "]", ",", "config", "=", "_ConfigName", "(", "context", ")", ")" ]
[ 68, 0 ]
[ 73, 34 ]
python
en
['en', 'en', 'en']
True
_WaiterName
(context)
Returns the short waiter name.
Returns the short waiter name.
def _WaiterName(context):
  """Returns the short waiter name."""
  # This name is only used for the DM manifest entry. The actual waiter name
  # within RuntimeConfig is static, as it is scoped to the config resource.
  return '{}-software'.format(context.env['deployment'])
[ "def", "_WaiterName", "(", "context", ")", ":", "# This name is only used for the DM manifest entry. The actual waiter name", "# within RuntimeConfig is static, as it is scoped to the config resource.", "return", "'{}-software'", ".", "format", "(", "context", ".", "env", "[", "'deployment'", "]", ")" ]
[ 76, 0 ]
[ 80, 56 ]
python
en
['en', 'no', 'en']
True
_Timeout
(context)
Returns the timeout property or a default value if unspecified.
Returns the timeout property or a default value if unspecified.
def _Timeout(context):
  """Returns the timeout property or a default value if unspecified."""
  timeout = context.properties.get('timeout', DEFAULT_TIMEOUT)
  try:
    return str(int(timeout))
  except ValueError:
    raise PropertyError('Invalid timeout value: {}'.format(timeout))
[ "def", "_Timeout", "(", "context", ")", ":", "timeout", "=", "context", ".", "properties", ".", "get", "(", "'timeout'", ",", "DEFAULT_TIMEOUT", ")", "try", ":", "return", "str", "(", "int", "(", "timeout", ")", ")", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid timeout value: {}'", ".", "format", "(", "timeout", ")", ")" ]
[ 83, 0 ]
[ 89, 68 ]
python
en
['en', 'en', 'en']
True
_SuccessNumber
(context)
Returns the successNumber property or a default value if unspecified.
Returns the successNumber property or a default value if unspecified.
def _SuccessNumber(context):
  """Returns the successNumber property or a default value if unspecified."""
  number = context.properties.get('successNumber', DEFAULT_SUCCESS_NUMBER)
  try:
    number = int(number)
    if number < 1:
      raise PropertyError('successNumber value must be greater than 0.')
    return number
  except ValueError:
    raise PropertyError('Invalid successNumber value: {}'.format(number))
[ "def", "_SuccessNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'successNumber'", ",", "DEFAULT_SUCCESS_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "raise", "PropertyError", "(", "'successNumber value must be greater than 0.'", ")", "return", "number", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid successNumber value: {}'", ".", "format", "(", "number", ")", ")" ]
[ 92, 0 ]
[ 101, 73 ]
python
en
['en', 'en', 'en']
True
_FailureNumber
(context)
Returns the failureNumber property or a default value if unspecified.
Returns the failureNumber property or a default value if unspecified.
def _FailureNumber(context):
  """Returns the failureNumber property or a default value if unspecified."""
  number = context.properties.get('failureNumber', DEFAULT_FAILURE_NUMBER)
  try:
    number = int(number)
    if number < 1:
      raise PropertyError('failureNumber value must be greater than 0.')
    return number
  except ValueError:
    raise PropertyError('Invalid failureNumber value: {}'.format(number))
[ "def", "_FailureNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'failureNumber'", ",", "DEFAULT_FAILURE_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "raise", "PropertyError", "(", "'failureNumber value must be greater than 0.'", ")", "return", "number", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid failureNumber value: {}'", ".", "format", "(", "number", ")", ")" ]
[ 104, 0 ]
[ 113, 73 ]
python
en
['en', 'en', 'en']
True
_WaiterDependsOn
(context)
Returns the waiterDependsOn property or an empty list if unspecified.
Returns the waiterDependsOn property or an empty list if unspecified.
def _WaiterDependsOn(context):
  """Returns the waiterDependsOn property or an empty list if unspecified."""
  depends_on = context.properties.get('waiterDependsOn', [])
  if not isinstance(depends_on, list):
    raise PropertyError('waiterDependsOn must be a list: {}'.format(depends_on))

  for item in depends_on:
    if not isinstance(item, (str,)):
      raise PropertyError(
          'waiterDependsOn must be a list of strings: {}'.format(depends_on))

  return depends_on
[ "def", "_WaiterDependsOn", "(", "context", ")", ":", "depends_on", "=", "context", ".", "properties", ".", "get", "(", "'waiterDependsOn'", ",", "[", "]", ")", "if", "not", "isinstance", "(", "depends_on", ",", "list", ")", ":", "raise", "PropertyError", "(", "'waiterDependsOn must be a list: {}'", ".", "format", "(", "depends_on", ")", ")", "for", "item", "in", "depends_on", ":", "if", "not", "isinstance", "(", "item", ",", "(", "str", ",", ")", ")", ":", "raise", "PropertyError", "(", "'waiterDependsOn must be a list of strings: {}'", ".", "format", "(", "depends_on", ")", ")", "return", "depends_on" ]
[ 116, 0 ]
[ 127, 19 ]
python
en
['en', 'en', 'en']
True
_RuntimeConfig
(context)
Constructs a RuntimeConfig resource.
Constructs a RuntimeConfig resource.
def _RuntimeConfig(context):
  """Constructs a RuntimeConfig resource."""
  deployment_name = context.env['deployment']
  return {
      'name': _ConfigName(context),
      'type': 'runtimeconfig.v1beta1.config',
      'properties': {
          'config': _ConfigName(context),
          'description': ('Holds software readiness status '
                          'for deployment {}').format(deployment_name),
      },
  }
[ "def", "_RuntimeConfig", "(", "context", ")", ":", "deployment_name", "=", "context", ".", "env", "[", "'deployment'", "]", "return", "{", "'name'", ":", "_ConfigName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.config'", ",", "'properties'", ":", "{", "'config'", ":", "_ConfigName", "(", "context", ")", ",", "'description'", ":", "(", "'Holds software readiness status '", "'for deployment {}'", ")", ".", "format", "(", "deployment_name", ")", ",", "}", ",", "}" ]
[ 130, 0 ]
[ 142, 3 ]
python
en
['en', 'en', 'en']
True
_Waiter
(context)
Constructs a waiter resource.
Constructs a waiter resource.
def _Waiter(context):
  """Constructs a waiter resource."""
  waiter_timeout = _Timeout(context)
  return {
      'name': _WaiterName(context),
      'type': 'runtimeconfig.v1beta1.waiter',
      'metadata': {
          'dependsOn': _WaiterDependsOn(context),
      },
      'properties': {
          'parent': '$(ref.{}.name)'.format(_ConfigName(context)),
          'waiter': 'software',
          'timeout': '{}s'.format(waiter_timeout),
          'success': {
              'cardinality': {
                  'number': _SuccessNumber(context),
                  'path': '{}/success'.format(STATUS_PATH),
              },
          },
          'failure': {
              'cardinality': {
                  'number': _FailureNumber(context),
                  'path': '{}/failure'.format(STATUS_PATH),
              },
          },
      },
  }
[ "def", "_Waiter", "(", "context", ")", ":", "waiter_timeout", "=", "_Timeout", "(", "context", ")", "return", "{", "'name'", ":", "_WaiterName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.waiter'", ",", "'metadata'", ":", "{", "'dependsOn'", ":", "_WaiterDependsOn", "(", "context", ")", ",", "}", ",", "'properties'", ":", "{", "'parent'", ":", "'$(ref.{}.name)'", ".", "format", "(", "_ConfigName", "(", "context", ")", ")", ",", "'waiter'", ":", "'software'", ",", "'timeout'", ":", "'{}s'", ".", "format", "(", "waiter_timeout", ")", ",", "'success'", ":", "{", "'cardinality'", ":", "{", "'number'", ":", "_SuccessNumber", "(", "context", ")", ",", "'path'", ":", "'{}/success'", ".", "format", "(", "STATUS_PATH", ")", ",", "}", ",", "}", ",", "'failure'", ":", "{", "'cardinality'", ":", "{", "'number'", ":", "_FailureNumber", "(", "context", ")", ",", "'path'", ":", "'{}/failure'", ".", "format", "(", "STATUS_PATH", ")", ",", "}", ",", "}", ",", "}", ",", "}" ]
[ 145, 0 ]
[ 172, 3 ]
python
en
['en', 'en', 'en']
True
GenerateConfig
(context)
Entry function to generate the DM config.
Entry function to generate the DM config.
def GenerateConfig(context):
  """Entry function to generate the DM config."""
  content = {
      'resources': [
          _RuntimeConfig(context),
          _Waiter(context),
      ],
      'outputs': [
          {
              'name': 'config-url',
              'value': _ConfigUrl(context)
          },
          {
              'name': 'variable-path',
              'value': STATUS_PATH
          },
      ]
  }
  return yaml.safe_dump(content)
[ "def", "GenerateConfig", "(", "context", ")", ":", "content", "=", "{", "'resources'", ":", "[", "_RuntimeConfig", "(", "context", ")", ",", "_Waiter", "(", "context", ")", ",", "]", ",", "'outputs'", ":", "[", "{", "'name'", ":", "'config-url'", ",", "'value'", ":", "_ConfigUrl", "(", "context", ")", "}", ",", "{", "'name'", ":", "'variable-path'", ",", "'value'", ":", "STATUS_PATH", "}", ",", "]", "}", "return", "yaml", ".", "safe_dump", "(", "content", ")" ]
[ 175, 0 ]
[ 193, 32 ]
python
en
['en', 'en', 'en']
True
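The records above (_ConfigName through GenerateConfig, plus the waiter helpers) form a single Deployment Manager template. A hedged sketch of how it would be driven: FakeContext is a hypothetical stand-in for the context object Deployment Manager injects, and every constant value shown is an illustrative guess at the module-level definitions (RTC_ENDPOINT, STATUS_PATH, DEFAULT_*, PropertyError) that sit above these functions in the real template.

import yaml

RTC_ENDPOINT = 'https://runtimeconfig.googleapis.com/v1beta1'  # assumed value
STATUS_PATH = '/success_fail'                                  # assumed value
DEFAULT_TIMEOUT = 300                                          # assumed value
DEFAULT_SUCCESS_NUMBER = 1                                     # assumed value
DEFAULT_FAILURE_NUMBER = 1                                     # assumed value

class PropertyError(Exception):
  """Raised on bad template properties (assumed definition)."""

class FakeContext(object):
  # Deployment Manager populates env and properties for real deployments.
  env = {'deployment': 'my-app', 'project': 'my-project'}
  properties = {'timeout': 600, 'successNumber': 2}

# With the template's functions in scope:
# print(GenerateConfig(FakeContext()))
# -> YAML declaring a 'my-app-config' RuntimeConfig and a waiter that trips
#    after 2 successes or 1 failure, or after 600s.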
save_isolate_table
(tables: Dict[str, pandas.DataFrame], filename: Path)
Saves the parsed table as an Excel spreadsheet. Parameters ---------- tables: Dict[str,pandas.DataFrame] A mapping of sheet names to dataframes. filename: str, pathlib.Path The output file. Returns -------
Saves the parsed table as an Excel spreadsheet. Parameters ---------- tables: Dict[str,pandas.DataFrame] A mapping of sheet names to dataframes. filename: str, pathlib.Path The output file.
def save_isolate_table(tables: Dict[str, pandas.DataFrame], filename: Path) -> Path:
	"""
	Saves the parsed table as an Excel spreadsheet.
	Parameters
	----------
	tables: Dict[str,pandas.DataFrame]
		A mapping of sheet names to dataframes.
	filename: str, pathlib.Path
		The output file.
	Returns
	-------
	"""
	writer = pandas.ExcelWriter(str(filename))
	include_index = False
	# python 3.5 or 3.6 made all dicts ordered by default, so the sheets will be ordered in the same order they were defined in `tables`
	for sheet_label, df in tables.items():
		if df is None: continue
		df.to_excel(writer, sheet_label, index = include_index)
	writer.save()  # otherwise color_table_cells will not be able to load the file

	# Color in the spreadsheet cells based on whether the sequence differs from the reference.
	if openpyxl is not None:
		try:
			color_table_cells(filename)
		except:
			pass
	return filename
[ "def", "save_isolate_table", "(", "tables", ":", "Dict", "[", "str", ",", "pandas", ".", "DataFrame", "]", ",", "filename", ":", "Path", ")", "->", "Path", ":", "writer", "=", "pandas", ".", "ExcelWriter", "(", "str", "(", "filename", ")", ")", "include_index", "=", "False", "# python 3.5 or 3.6 made all dicts ordered by default, so the sheets will be ordered in the same order they were defined in `tables`", "for", "sheet_label", ",", "df", "in", "tables", ".", "items", "(", ")", ":", "if", "df", "is", "None", ":", "continue", "df", ".", "to_excel", "(", "writer", ",", "sheet_label", ",", "index", "=", "include_index", ")", "writer", ".", "save", "(", ")", "# otherwise color_table_cells will not be able to load the file", "# Color in the spreadsheet cells based on whether the sequence differs from the reference.", "if", "openpyxl", "is", "not", "None", ":", "try", ":", "color_table_cells", "(", "filename", ")", "except", ":", "pass", "return", "filename" ]
[ 13, 0 ]
[ 40, 16 ]
python
en
['en', 'error', 'th']
False
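A usage sketch for the two records in this file; the DataFrame column names are invented for illustration, and save_isolate_table / color_table_cells are assumed importable from their module. The 'variant comparison' sheet name matters: color_table_cells looks it up by that exact key.

from pathlib import Path
import pandas

tables = {
    'variant comparison': pandas.DataFrame({
        'reference': ['A', 'C', 'G'],
        'sample1':   ['A', 'T', 'G'],   # the mismatching 'T' would be filled
    }),
    'summary': pandas.DataFrame({'isolate': ['sample1'], 'variants': [1]}),
}
# save_isolate_table(tables, Path('isolates.xlsx'))
# -> writes isolates.xlsx, then (when openpyxl is importable) a highlighted
#    copy isolates.edited.xlsx via color_table_cells().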
color_table_cells
(filename: Path)
Colors in the cells of the comparison table to highlight differences between samples. Parameters ---------- filename: Path Path to the excel file. The sheet containing the comparison table should be named 'variant comparison'.
Colors in the cells of the comparison table to highlight differences between samples. Parameters ---------- filename: Path Path to the excel file. The sheet containing the comparison table should be named 'variant comparison'.
def color_table_cells(filename: Path):
	"""
	Colors in the cells of the comparison table to highlight differences between samples.
	Parameters
	----------
	filename: Path
		Path to the excel file. The sheet containing the comparison table should be named 'variant comparison'.
	"""
	workbook = openpyxl.load_workbook(filename = str(filename))
	worksheet = workbook['variant comparison']
	# There is an issue with libreoffice when 'bgColor' is used instead of 'fgColor' where cells are rendered with a black background.
	variant_pattern = openpyxl.styles.PatternFill(fgColor = "FC8D62", fill_type = "solid")

	reference_column_label, sample_column_labels = _get_relevant_columns(worksheet)

	for sample_column_label in sample_column_labels:
		sample_column = worksheet[sample_column_label]
		for cell in sample_column:
			cell_row = cell.row
			if cell_row == 1: continue  # Is the header column
			reference_cell = worksheet[f"{reference_column_label}{cell_row}"]
			is_variant = cell.value != reference_cell.value
			if is_variant:
				cell.fill = variant_pattern
	workbook.save(str(filename.with_suffix('.edited.xlsx')))
[ "def", "color_table_cells", "(", "filename", ":", "Path", ")", ":", "workbook", "=", "openpyxl", ".", "load_workbook", "(", "filename", "=", "str", "(", "filename", ")", ")", "worksheet", "=", "workbook", "[", "'variant comparison'", "]", "# There is an issue with libreoffice when 'bgColor' is used instead of 'fgColor' where cells are rendered with a black background.", "variant_pattern", "=", "openpyxl", ".", "styles", ".", "PatternFill", "(", "fgColor", "=", "\"FC8D62\"", ",", "fill_type", "=", "\"solid\"", ")", "reference_column_label", ",", "sample_column_labels", "=", "_get_relevant_columns", "(", "worksheet", ")", "for", "sample_column_label", "in", "sample_column_labels", ":", "sample_column", "=", "worksheet", "[", "sample_column_label", "]", "for", "cell", "in", "sample_column", ":", "cell_row", "=", "cell", ".", "row", "if", "cell_row", "==", "1", ":", "continue", "# Is the header column", "reference_cell", "=", "worksheet", "[", "f\"{reference_column_label}{cell_row}\"", "]", "is_variant", "=", "cell", ".", "value", "!=", "reference_cell", ".", "value", "if", "is_variant", ":", "cell", ".", "fill", "=", "variant_pattern", "workbook", ".", "save", "(", "str", "(", "filename", ".", "with_suffix", "(", "'.edited.xlsx'", ")", ")", ")" ]
[ 53, 0 ]
[ 77, 57 ]
python
en
['en', 'en', 'en']
True
get_accessible_tenants
(os_conf, auth_url, region_name=None, insecure=True, cacert=None, timeout=5)
:param os_conf: openstack config :param auth_url: auth url :param region_name: region name :param insecure: insecure allowed for https :param cacert: :param timeout: :return: tenants, client list of tenants scoped or unscoped client -- v3 client
:param os_conf: openstack config :param auth_url: auth url :param region_name: region name :param insecure: insecure allowed for https :param cacert: :param timeout: :return: tenants, client list of tenants scoped or unscoped client -- v3 client
def get_accessible_tenants(os_conf, auth_url, region_name=None, insecure=True,
                           cacert=None, timeout=5):
    """
    :param os_conf: openstack config
    :param auth_url: auth url
    :param region_name: region name
    :param insecure: insecure allowed for https
    :param cacert:
    :param timeout:
    :return: tenants, client
             list of tenants scoped or unscoped
             client -- v3 client
    """
    keystone_client = get_keystone_client(auth_url.replace("v2.0", "v3"))
    ks_args = {
        "password": os_conf.password,
        "auth_url": auth_url.replace("v2.0", "v3"),
        "region_name": region_name,
        "insecure": insecure,
        "cacert": cacert,
        "timeout": timeout,
        "debug": settings.DEBUG
    }
    (user, user_domain) = (os_conf.username, None)
    if get_keystone_version(auth_url) >= 3:
        (user, user_domain) = os_utils.getUserAndDomainFromConfig(os_conf)
        # keep the behavior similar to how access is done when configuring
        # via UI
        if not user_domain:
            user_domain = "Default"
    ks_args["username"] = user
    ks_args["user_domain_name"] = user_domain

    (tenant, tenant_domain) = os_utils.getTenantAndDomainFromConfig(os_conf)
    if get_keystone_version(auth_url) < 3:
        # ks_args["tenant_name"] = tenant
        ks_args["project_name"] = tenant
        ks_args["project_domain_name"] = None
    else:
        ks_args["project_name"] = tenant
        if tenant_domain:
            ks_args["project_domain_name"] = tenant_domain
        else:
            ks_args["project_domain_name"] = "Default"

    keystone_host = os_utils.getHostFromAuthURL(auth_url)

    # try with a scoped client first
    try:
        client = keystone_client.Client(**ks_args)
        os_utils.patchKeyStoneManagementURL(
            client, keystone_host,
            use_admin_url=os_conf.use_admin_url, logger=logger)
    except:  # noqa
        logger.exception("Failed in connecting to keystone "
                         "with configured credentials")
        return [], None

    # ensure that the configured user has access to all tenants
    # of the user logging in
    ctenants = None
    try:
        ctenants = os_utils.getTenants(client, logger=logger,
                                       ks_args=ks_args.copy())
    except:  # noqa
        logger.warn("Could not get tenants list with scoped credentials")

    if ctenants:
        return ctenants, client

    # try unscoped now
    ks_args.pop("project_name", None)
    ks_args.pop("project_domain_name", None)
    ks_args.pop("tenant_name", None)
    try:
        tclient = keystone_client.Client(**ks_args)
        os_utils.patchKeyStoneManagementURL(
            tclient, keystone_host,
            use_admin_url=os_conf.use_admin_url, logger=logger)
        ctenants = os_utils.getTenants(tclient, logger=logger,
                                       ks_args=ks_args.copy())
    except Exception as e:
        logger.exception("Could not connect to openstack using "
                         "configured credentials: %s", e)
        ctenants = []

    # return tenant list and the scoped keystone client
    return ctenants, client
[ "def", "get_accessible_tenants", "(", "os_conf", ",", "auth_url", ",", "region_name", "=", "None", ",", "insecure", "=", "True", ",", "cacert", "=", "None", ",", "timeout", "=", "5", ")", ":", "keystone_client", "=", "get_keystone_client", "(", "auth_url", ".", "replace", "(", "\"v2.0\"", ",", "\"v3\"", ")", ")", "ks_args", "=", "{", "\"password\"", ":", "os_conf", ".", "password", ",", "\"auth_url\"", ":", "auth_url", ".", "replace", "(", "\"v2.0\"", ",", "\"v3\"", ")", ",", "\"region_name\"", ":", "region_name", ",", "\"insecure\"", ":", "insecure", ",", "\"cacert\"", ":", "cacert", ",", "\"timeout\"", ":", "timeout", ",", "\"debug\"", ":", "settings", ".", "DEBUG", "}", "(", "user", ",", "user_domain", ")", "=", "(", "os_conf", ".", "username", ",", "None", ")", "if", "get_keystone_version", "(", "auth_url", ")", ">=", "3", ":", "(", "user", ",", "user_domain", ")", "=", "os_utils", ".", "getUserAndDomainFromConfig", "(", "os_conf", ")", "# keep the behavior similar to how access is done when configuring", "# via UI", "if", "not", "user_domain", ":", "user_domain", "=", "\"Default\"", "ks_args", "[", "\"username\"", "]", "=", "user", "ks_args", "[", "\"user_domain_name\"", "]", "=", "user_domain", "(", "tenant", ",", "tenant_domain", ")", "=", "os_utils", ".", "getTenantAndDomainFromConfig", "(", "os_conf", ")", "if", "get_keystone_version", "(", "auth_url", ")", "<", "3", ":", "# ks_args[\"tenant_name\"] = tenant", "ks_args", "[", "\"project_name\"", "]", "=", "tenant", "ks_args", "[", "\"project_domain_name\"", "]", "=", "None", "else", ":", "ks_args", "[", "\"project_name\"", "]", "=", "tenant", "if", "tenant_domain", ":", "ks_args", "[", "\"project_domain_name\"", "]", "=", "tenant_domain", "else", ":", "ks_args", "[", "\"project_domain_name\"", "]", "=", "\"Default\"", "keystone_host", "=", "os_utils", ".", "getHostFromAuthURL", "(", "auth_url", ")", "# try with a scoped client first", "try", ":", "client", "=", "keystone_client", ".", "Client", "(", "*", "*", "ks_args", ")", "os_utils", ".", "patchKeyStoneManagementURL", "(", "client", ",", "keystone_host", ",", "use_admin_url", "=", "os_conf", ".", "use_admin_url", ",", "logger", "=", "logger", ")", "except", ":", "# noqa", "logger", ".", "exception", "(", "\"Failed in connecting to keystone \"", "\"with configured credentials\"", ")", "return", "[", "]", ",", "None", "# ensure that the configured user has access to all tenants", "# of the user logging in", "ctenants", "=", "None", "try", ":", "ctenants", "=", "os_utils", ".", "getTenants", "(", "client", ",", "logger", "=", "logger", ",", "ks_args", "=", "ks_args", ".", "copy", "(", ")", ")", "except", ":", "# noqa", "logger", ".", "warn", "(", "\"Could not get tenants list with scoped credentials\"", ")", "if", "ctenants", ":", "return", "ctenants", ",", "client", "# try unscoped now", "ks_args", ".", "pop", "(", "\"project_name\"", ",", "None", ")", "ks_args", ".", "pop", "(", "\"project_domain_name\"", ",", "None", ")", "ks_args", ".", "pop", "(", "\"tenant_name\"", ",", "None", ")", "try", ":", "tclient", "=", "keystone_client", ".", "Client", "(", "*", "*", "ks_args", ")", "os_utils", ".", "patchKeyStoneManagementURL", "(", "tclient", ",", "keystone_host", ",", "use_admin_url", "=", "os_conf", ".", "use_admin_url", ",", "logger", "=", "logger", ")", "ctenants", "=", "os_utils", ".", "getTenants", "(", "tclient", ",", "logger", "=", "logger", ",", "ks_args", "=", "ks_args", ".", "copy", "(", ")", ")", "except", "Exception", "as", "e", ":", "logger", ".", "exception", "(", 
"\"Could not connect to openstack using \"", "\"configured credentials: %s\"", ",", "e", ")", "ctenants", "=", "[", "]", "# return tenant list and the scoped keystone client", "return", "ctenants", ",", "client" ]
[ 68, 0 ]
[ 158, 27 ]
python
en
['en', 'error', 'th']
False
cleanup_users_tenants
(users=None, tenants=None, dry_run=False)
For each remote user, if not in keystone, remove locally For each remote tenant, if not in keystone, remove corresponding roles and the local tenant object
For each remote user, if not in keystone, remove locally For each remote tenant, if not in keystone, remove corresponding roles and the local tenant object
def cleanup_users_tenants(users=None, tenants=None, dry_run=False):
    """
    For each remote user, if not in keystone, remove locally
    For each remote tenant, if not in keystone, remove corresponding
    roles and the local tenant object
    """
    # logger.debug("Cleaning up non-existent non-local users and tenants")
    logger.warning("Starting cleanup")
    openstack_config = get_openstack_config()
    if(not openstack_config or not openstack_config.use_keystone_auth):
        logger.warning("Not using keystone auth, "
                       "removing all non-local tenants and user")
        remove_nonlocal_users_tenants(dry_run=dry_run)
        return False

    auth_url = os_utils.getAuthURLFromConfig(openstack_config)
    insecure = openstack_config.insecure
    region_name = None
    if openstack_config.region:
        region_name = openstack_config.region

    del_tenants = []
    (atenants, client) = get_accessible_tenants(openstack_config,
                                                auth_url=auth_url,
                                                insecure=insecure,
                                                region_name=region_name)
    if not client:
        logger.error("Failed to initialize keystone client, "
                     "tenant/user cleanup failed")
        return False

    # keystone V2 requires access to admin URL to delete users and
    # tenants
    if (type(client) == client_v2.Client and
            not openstack_config.use_admin_url):
        logger.info("use_admin_url is set to False, "
                    "not deleting keystone users and tenants")
        return False

    if not atenants:
        logger.warn("Unexpected empty list of tenants from keystone, "
                    "tenant/user cleanup aborted")
        return False

    if tenants is None:
        tenants = Tenant.objects.filter(
            json_data__local=False).select_for_update()

    atenant_map = {}
    for atenant in atenants:
        atenant_map[atenant.id] = atenant

    # check if all local tenants still exist in keystone
    for tenant in tenants:
        uuid = tenant.uuid.split("tenant-")[1].replace('-', '')
        if uuid not in atenant_map:
            print("Tenant %s (%s) not found in OpenStack, marking for deletion" % (tenant.name, uuid))  # noqa
            del_tenants.append(tenant)
        else:
            print("Tenant %s (%s) present in OpenStack, not deleting" % (tenant.name, uuid))  # noqa

    for dtenant in del_tenants:
        # delete all roles with this tenant
        # UserRole.objects.filter(tenant_ref=dtenant).delete()
        # Role.objects.filter(tenant_ref__uuid=dtenant.slug).delete()
        try:
            if not dry_run:
                delete_tenant(dtenant.uuid)
            print("Deleted non-existent tenant %s" % dtenant.name)
        except:  # noqa
            print("Tenant deletion failed")
            continue

    if users is None:
        users = get_user_model().objects.filter(
            local=False).select_for_update()

    del_users = []
    for user in users:
        uuid = user.uuid.split("user-")[1].replace('-', '')
        if not user.access.count():
            del_users.append(user)
            continue
        ruser = None
        try:
            ruser = client.users.get(user=uuid)
        except Exception as e:
            logger.info("Error checking user %s in keystone: %s" % (uuid, e))
        if not ruser:
            del_users.append(user)
            print("User %s (%s) not found in OpenStack, marking for deletion" % (user, uuid))  # noqa
        else:
            print("User %s (%s) present in OpenStack, not deleting" % (user, uuid))  # noqa

    for duser in del_users:
        username = duser.username
        if not dry_run:
            user_act = UserActivity.objects.get(name=duser.name)
            user_act.delete()
            duser.delete()
        print("Deleted non-existent user %s" % username)

    return True
[ "def", "cleanup_users_tenants", "(", "users", "=", "None", ",", "tenants", "=", "None", ",", "dry_run", "=", "False", ")", ":", "# logger.debug(\"Cleaning up non-existent non-local users and tenants\")", "logger", ".", "warning", "(", "\"Starting cleanup\"", ")", "openstack_config", "=", "get_openstack_config", "(", ")", "if", "(", "not", "openstack_config", "or", "not", "openstack_config", ".", "use_keystone_auth", ")", ":", "logger", ".", "warning", "(", "\"Not using keystone auth, \"", "\"removing all non-local tenants and user\"", ")", "remove_nonlocal_users_tenants", "(", "dry_run", "=", "dry_run", ")", "return", "False", "auth_url", "=", "os_utils", ".", "getAuthURLFromConfig", "(", "openstack_config", ")", "insecure", "=", "openstack_config", ".", "insecure", "region_name", "=", "None", "if", "openstack_config", ".", "region", ":", "region_name", "=", "openstack_config", ".", "region", "del_tenants", "=", "[", "]", "(", "atenants", ",", "client", ")", "=", "get_accessible_tenants", "(", "openstack_config", ",", "auth_url", "=", "auth_url", ",", "insecure", "=", "insecure", ",", "region_name", "=", "region_name", ")", "if", "not", "client", ":", "logger", ".", "error", "(", "\"Failed to initialize keystone client, \"", "\"tenant/user cleanup failed\"", ")", "return", "False", "# keystone V2 requires access to admin URL to delete users and", "# tenants", "if", "(", "type", "(", "client", ")", "==", "client_v2", ".", "Client", "and", "not", "openstack_config", ".", "use_admin_url", ")", ":", "logger", ".", "info", "(", "\"use_admin_url is set to False, \"", "\"not deleting keystone users and tenants\"", ")", "return", "False", "if", "not", "atenants", ":", "logger", ".", "warn", "(", "\"Unexpected empty list of tenants from keystone, \"", "\"tenant/user cleanup aborted\"", ")", "return", "False", "if", "tenants", "is", "None", ":", "tenants", "=", "Tenant", ".", "objects", ".", "filter", "(", "json_data__local", "=", "False", ")", ".", "select_for_update", "(", ")", "atenant_map", "=", "{", "}", "for", "atenant", "in", "atenants", ":", "atenant_map", "[", "atenant", ".", "id", "]", "=", "atenant", "# check if all local tenants still exist in keystone", "for", "tenant", "in", "tenants", ":", "uuid", "=", "tenant", ".", "uuid", ".", "split", "(", "\"tenant-\"", ")", "[", "1", "]", ".", "replace", "(", "'-'", ",", "''", ")", "if", "uuid", "not", "in", "atenant_map", ":", "print", "(", "\"Tenant %s (%s) not found in OpenStack, marking for deletion\"", "%", "(", "tenant", ".", "name", ",", "uuid", ")", ")", "# noqa", "del_tenants", ".", "append", "(", "tenant", ")", "else", ":", "print", "(", "\"Tenant %s (%s) present in OpenStack, not deleting\"", "%", "(", "tenant", ".", "name", ",", "uuid", ")", ")", "# noqa", "for", "dtenant", "in", "del_tenants", ":", "# delete all roles with this tenant", "# UserRole.objects.filter(tenant_ref=dtenant).delete()", "# Role.objects.filter(tenant_ref__uuid=dtenant.slug).delete()", "try", ":", "if", "not", "dry_run", ":", "delete_tenant", "(", "dtenant", ".", "uuid", ")", "print", "(", "\"Deleted non-existent tenant %s\"", "%", "dtenant", ".", "name", ")", "except", ":", "# noqa", "print", "(", "\"Tenant deletion failed\"", ")", "continue", "if", "users", "is", "None", ":", "users", "=", "get_user_model", "(", ")", ".", "objects", ".", "filter", "(", "local", "=", "False", ")", ".", "select_for_update", "(", ")", "del_users", "=", "[", "]", "for", "user", "in", "users", ":", "uuid", "=", "user", ".", "uuid", ".", "split", "(", "\"user-\"", ")", "[", "1", 
"]", ".", "replace", "(", "'-'", ",", "''", ")", "if", "not", "user", ".", "access", ".", "count", "(", ")", ":", "del_users", ".", "append", "(", "user", ")", "continue", "ruser", "=", "None", "try", ":", "ruser", "=", "client", ".", "users", ".", "get", "(", "user", "=", "uuid", ")", "except", "Exception", "as", "e", ":", "logger", ".", "info", "(", "\"Error checking user %s in keystone: %s\"", "%", "(", "uuid", ",", "e", ")", ")", "if", "not", "ruser", ":", "del_users", ".", "append", "(", "user", ")", "print", "(", "\"User %s (%s) not found in OpenStack, marking for deletion\"", "%", "(", "user", ",", "uuid", ")", ")", "# noqa", "else", ":", "print", "(", "\"User %s (%s) present in OpenStack, not deleting\"", "%", "(", "user", ",", "uuid", ")", ")", "# noqa", "for", "duser", "in", "del_users", ":", "username", "=", "duser", ".", "username", "if", "not", "dry_run", ":", "user_act", "=", "UserActivity", ".", "objects", ".", "get", "(", "name", "=", "duser", ".", "name", ")", "user_act", ".", "delete", "(", ")", "duser", ".", "delete", "(", ")", "print", "(", "\"Deleted non-existent user %s\"", "%", "username", ")", "return", "True" ]
[ 251, 0 ]
[ 356, 15 ]
python
en
['en', 'error', 'th']
False
DatabaseFeatures._mysql_storage_engine
(self)
Internal method used in Django tests. Don't rely on this from your code
Internal method used in Django tests. Don't rely on this from your code
def _mysql_storage_engine(self):
    "Internal method used in Django tests. Don't rely on this from your code"
    return self.connection.mysql_server_data['default_storage_engine']
[ "def", "_mysql_storage_engine", "(", "self", ")", ":", "return", "self", ".", "connection", ".", "mysql_server_data", "[", "'default_storage_engine'", "]" ]
[ 106, 4 ]
[ 108, 74 ]
python
en
['en', 'en', 'en']
True
DatabaseFeatures.allows_auto_pk_0
(self)
Autoincrement primary key can be set to 0 if it doesn't generate new autoincrement values.
Autoincrement primary key can be set to 0 if it doesn't generate new autoincrement values.
def allows_auto_pk_0(self):
    """
    Autoincrement primary key can be set to 0 if it doesn't generate new
    autoincrement values.
    """
    return 'NO_AUTO_VALUE_ON_ZERO' in self.connection.sql_mode
[ "def", "allows_auto_pk_0", "(", "self", ")", ":", "return", "'NO_AUTO_VALUE_ON_ZERO'", "in", "self", ".", "connection", ".", "sql_mode" ]
[ 111, 4 ]
[ 116, 66 ]
python
en
['en', 'error', 'th']
False
DatabaseFeatures.can_introspect_foreign_keys
(self)
Confirm support for introspected foreign keys
Confirm support for introspected foreign keys
def can_introspect_foreign_keys(self):
    "Confirm support for introspected foreign keys"
    return self._mysql_storage_engine != 'MyISAM'
[ "def", "can_introspect_foreign_keys", "(", "self", ")", ":", "return", "self", ".", "_mysql_storage_engine", "!=", "'MyISAM'" ]
[ 123, 4 ]
[ 125, 53 ]
python
en
['en', 'en', 'en']
True
DatabaseFeatures.supports_transactions
(self)
All storage engines except MyISAM support transactions.
All storage engines except MyISAM support transactions.
def supports_transactions(self):
    """
    All storage engines except MyISAM support transactions.
    """
    return self._mysql_storage_engine != 'MyISAM'
[ "def", "supports_transactions", "(", "self", ")", ":", "return", "self", ".", "_mysql_storage_engine", "!=", "'MyISAM'" ]
[ 202, 4 ]
[ 206, 53 ]
python
en
['en', 'error', 'th']
False
NTLMConnectionPool.__init__
(self, user, pw, authurl, *args, **kwargs)
authurl is a random URL on the server that is protected by NTLM. user is the Windows user, probably in the DOMAIN\\username format. pw is the password for the user.
authurl is a random URL on the server that is protected by NTLM. user is the Windows user, probably in the DOMAIN\\username format. pw is the password for the user.
def __init__(self, user, pw, authurl, *args, **kwargs):
    """
    authurl is a random URL on the server that is protected by NTLM.
    user is the Windows user, probably in the DOMAIN\\username format.
    pw is the password for the user.
    """
    super(NTLMConnectionPool, self).__init__(*args, **kwargs)
    self.authurl = authurl
    self.rawuser = user
    user_parts = user.split("\\", 1)
    self.domain = user_parts[0].upper()
    self.user = user_parts[1]
    self.pw = pw
[ "def", "__init__", "(", "self", ",", "user", ",", "pw", ",", "authurl", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "super", "(", "NTLMConnectionPool", ",", "self", ")", ".", "__init__", "(", "*", "args", ",", "*", "*", "kwargs", ")", "self", ".", "authurl", "=", "authurl", "self", ".", "rawuser", "=", "user", "user_parts", "=", "user", ".", "split", "(", "\"\\\\\"", ",", "1", ")", "self", ".", "domain", "=", "user_parts", "[", "0", "]", ".", "upper", "(", ")", "self", ".", "user", "=", "user_parts", "[", "1", "]", "self", ".", "pw", "=", "pw" ]
[ 33, 4 ]
[ 45, 20 ]
python
en
['en', 'error', 'th']
False
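A usage sketch for the record above. NTLMConnectionPool lives in urllib3.contrib.ntlmpool (present in urllib3 1.x, removed in 2.0); the host and credentials below are hypothetical.

# The DOMAIN\username string is split on the first backslash, exactly as
# in __init__ above:
user = "CORP\\alice"
domain, username = user.split("\\", 1)
print(domain.upper(), username)  # CORP alice

# from urllib3.contrib.ntlmpool import NTLMConnectionPool
# pool = NTLMConnectionPool(user, "s3cret", authurl="/protected",
#                           host="intranet.example.com", port=443)
# resp = pool.urlopen("GET", "/protected")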
DatabaseValidation.check_field_type
(self, field, field_type)
MySQL has the following field length restriction: No character (varchar) fields can have a length exceeding 255 characters if they have a unique index on them. MySQL doesn't support a database index on some data types.
MySQL has the following field length restriction: No character (varchar) fields can have a length exceeding 255 characters if they have a unique index on them. MySQL doesn't support a database index on some data types.
def check_field_type(self, field, field_type):
    """
    MySQL has the following field length restriction:
    No character (varchar) fields can have a length exceeding 255
    characters if they have a unique index on them.
    MySQL doesn't support a database index on some data types.
    """
    errors = []
    if (field_type.startswith('varchar') and field.unique and
            (field.max_length is None or int(field.max_length) > 255)):
        errors.append(
            checks.Warning(
                '%s may not allow unique CharFields to have a max_length '
                '> 255.' % self.connection.display_name,
                obj=field,
                hint=(
                    'See: https://docs.djangoproject.com/en/%s/ref/'
                    'databases/#mysql-character-fields' % get_docs_version()
                ),
                id='mysql.W003',
            )
        )

    if field.db_index and field_type.lower() in self.connection._limited_data_types:
        errors.append(
            checks.Warning(
                '%s does not support a database index on %s columns.'
                % (self.connection.display_name, field_type),
                hint=(
                    "An index won't be created. Silence this warning if "
                    "you don't care about it."
                ),
                obj=field,
                id='fields.W162',
            )
        )
    return errors
[ "def", "check_field_type", "(", "self", ",", "field", ",", "field_type", ")", ":", "errors", "=", "[", "]", "if", "(", "field_type", ".", "startswith", "(", "'varchar'", ")", "and", "field", ".", "unique", "and", "(", "field", ".", "max_length", "is", "None", "or", "int", "(", "field", ".", "max_length", ")", ">", "255", ")", ")", ":", "errors", ".", "append", "(", "checks", ".", "Warning", "(", "'%s may not allow unique CharFields to have a max_length '", "'> 255.'", "%", "self", ".", "connection", ".", "display_name", ",", "obj", "=", "field", ",", "hint", "=", "(", "'See: https://docs.djangoproject.com/en/%s/ref/'", "'databases/#mysql-character-fields'", "%", "get_docs_version", "(", ")", ")", ",", "id", "=", "'mysql.W003'", ",", ")", ")", "if", "field", ".", "db_index", "and", "field_type", ".", "lower", "(", ")", "in", "self", ".", "connection", ".", "_limited_data_types", ":", "errors", ".", "append", "(", "checks", ".", "Warning", "(", "'%s does not support a database index on %s columns.'", "%", "(", "self", ".", "connection", ".", "display_name", ",", "field_type", ")", ",", "hint", "=", "(", "\"An index won't be created. Silence this warning if \"", "\"you don't care about it.\"", ")", ",", "obj", "=", "field", ",", "id", "=", "'fields.W162'", ",", ")", ")", "return", "errors" ]
[ 32, 4 ]
[ 68, 21 ]
python
en
['en', 'error', 'th']
False
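To see the first warning in the record fire, it is enough to declare a unique CharField longer than 255 characters on a MySQL-backed project; a hedged sketch (the model and field names are hypothetical, and the check runs through Django's system-check framework, e.g. manage.py check, inside a configured project).

from django.db import models

class Document(models.Model):
    # On MySQL/MariaDB this trips check_field_type's first branch:
    # a unique varchar with max_length > 255.
    slug = models.CharField(max_length=300, unique=True)

# $ python manage.py check
# ...(mysql.W003) MySQL may not allow unique CharFields to have a
#    max_length > 255.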
features_and_labels
(row_data)
Splits features and labels from feature dictionary. Args: row_data: Dictionary of CSV column names and tensor values. Returns: Dictionary of feature tensors and label tensor.
Splits features and labels from feature dictionary.
def features_and_labels(row_data):
    """Splits features and labels from feature dictionary.

    Args:
        row_data: Dictionary of CSV column names and tensor values.
    Returns:
        Dictionary of feature tensors and label tensor.
    """
    label = row_data.pop(LABEL_COLUMN)

    return row_data, label
[ "def", "features_and_labels", "(", "row_data", ")", ":", "label", "=", "row_data", ".", "pop", "(", "LABEL_COLUMN", ")", "return", "row_data", ",", "label" ]
[ 37, 0 ]
[ 47, 26 ]
python
en
['en', 'en', 'en']
True
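A self-contained demonstration of the split; LABEL_COLUMN is defined elsewhere in the module and is assumed here to be 'weight_pounds' (the usual label in this babyweight example), and the helper is restated inline so the snippet runs on its own.

import tensorflow as tf

LABEL_COLUMN = 'weight_pounds'  # assumption: module-level constant

def features_and_labels(row_data):
    # Same logic as the record above: pop the label out of the row dict.
    label = row_data.pop(LABEL_COLUMN)
    return row_data, label

row = {
    'mother_age': tf.constant([28.0]),
    'gestation_weeks': tf.constant([39.0]),
    'weight_pounds': tf.constant([7.5]),
}
features, label = features_and_labels(dict(row))
print(sorted(features))  # ['gestation_weeks', 'mother_age']
print(label.numpy())     # [7.5]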
load_dataset
(pattern, batch_size=1, mode='eval')
Loads dataset using the tf.data API from CSV files. Args: pattern: str, file pattern to glob into list of files. batch_size: int, the number of examples per batch. mode: 'train' | 'eval' to determine if training or evaluating. Returns: `Dataset` object.
Loads dataset using the tf.data API from CSV files.
def load_dataset(pattern, batch_size=1, mode='eval'):
    """Loads dataset using the tf.data API from CSV files.

    Args:
        pattern: str, file pattern to glob into list of files.
        batch_size: int, the number of examples per batch.
        mode: 'train' | 'eval' to determine if training or evaluating.
    Returns:
        `Dataset` object.
    """
    print("mode = {}".format(mode))
    # Make a CSV dataset
    dataset = tf.data.experimental.make_csv_dataset(
        file_pattern=pattern,
        batch_size=batch_size,
        column_names=CSV_COLUMNS,
        column_defaults=DEFAULTS)

    # Map dataset to features and label
    dataset = dataset.map(map_func=features_and_labels)  # features, label

    # Shuffle and repeat for training
    if mode == 'train':
        dataset = dataset.shuffle(buffer_size=1000).repeat()

    # Take advantage of multi-threading; 1=AUTOTUNE
    dataset = dataset.prefetch(buffer_size=1)

    return dataset
[ "def", "load_dataset", "(", "pattern", ",", "batch_size", "=", "1", ",", "mode", "=", "'eval'", ")", ":", "print", "(", "\"mode = {}\"", ".", "format", "(", "mode", ")", ")", "# Make a CSV dataset", "dataset", "=", "tf", ".", "data", ".", "experimental", ".", "make_csv_dataset", "(", "file_pattern", "=", "pattern", ",", "batch_size", "=", "batch_size", ",", "column_names", "=", "CSV_COLUMNS", ",", "column_defaults", "=", "DEFAULTS", ")", "# Map dataset to features and label", "dataset", "=", "dataset", ".", "map", "(", "map_func", "=", "features_and_labels", ")", "# features, label", "# Shuffle and repeat for training", "if", "mode", "==", "'train'", ":", "dataset", "=", "dataset", ".", "shuffle", "(", "buffer_size", "=", "1000", ")", ".", "repeat", "(", ")", "# Take advantage of multi-threading; 1=AUTOTUNE", "dataset", "=", "dataset", ".", "prefetch", "(", "buffer_size", "=", "1", ")", "return", "dataset" ]
[ 50, 0 ]
[ 78, 18 ]
python
en
['en', 'en', 'en']
True
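A usage sketch; the file patterns are hypothetical and must match the CSV_COLUMNS/DEFAULTS declared elsewhere in the module:

trainds = load_dataset("train*.csv", batch_size=32, mode="train")
evalds = load_dataset("eval*.csv", batch_size=1000)

for features, label in trainds.take(1):
    print(label.shape)  # (32,) -- one label per example in the batch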
create_input_layers
()
Creates dictionary of input layers for each feature. Returns: Dictionary of `tf.Keras.layers.Input` layers for each feature.
Creates dictionary of input layers for each feature.
def create_input_layers(): """Creates dictionary of input layers for each feature. Returns: Dictionary of `tf.Keras.layers.Input` layers for each feature. """ deep_inputs = { colname: tf.keras.layers.Input( name=colname, shape=(), dtype="float32") for colname in ["mother_age", "gestation_weeks"] } wide_inputs = { colname: tf.keras.layers.Input( name=colname, shape=(), dtype="string") for colname in ["is_male", "plurality"] } inputs = {**wide_inputs, **deep_inputs} return inputs
[ "def", "create_input_layers", "(", ")", ":", "deep_inputs", "=", "{", "colname", ":", "tf", ".", "keras", ".", "layers", ".", "Input", "(", "name", "=", "colname", ",", "shape", "=", "(", ")", ",", "dtype", "=", "\"float32\"", ")", "for", "colname", "in", "[", "\"mother_age\"", ",", "\"gestation_weeks\"", "]", "}", "wide_inputs", "=", "{", "colname", ":", "tf", ".", "keras", ".", "layers", ".", "Input", "(", "name", "=", "colname", ",", "shape", "=", "(", ")", ",", "dtype", "=", "\"string\"", ")", "for", "colname", "in", "[", "\"is_male\"", ",", "\"plurality\"", "]", "}", "inputs", "=", "{", "*", "*", "wide_inputs", ",", "*", "*", "deep_inputs", "}", "return", "inputs" ]
[ 81, 0 ]
[ 101, 17 ]
python
en
['en', 'en', 'en']
True
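A quick check of what the function returns, assuming it is in scope:

inputs = create_input_layers()
print(sorted(inputs))
# ['gestation_weeks', 'is_male', 'mother_age', 'plurality']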
categorical_fc
(name, values)
Helper function to wrap categorical feature by indicator column. Args: name: str, name of feature. values: list, list of strings of categorical values. Returns: Categorical and indicator column of categorical feature.
Helper function to wrap categorical feature by indicator column.
def categorical_fc(name, values): """Helper function to wrap categorical feature by indicator column. Args: name: str, name of feature. values: list, list of strings of categorical values. Returns: Categorical and indicator column of categorical feature. """ cat_column = tf.feature_column.categorical_column_with_vocabulary_list( key=name, vocabulary_list=values) ind_column = tf.feature_column.indicator_column( categorical_column=cat_column) return cat_column, ind_column
[ "def", "categorical_fc", "(", "name", ",", "values", ")", ":", "cat_column", "=", "tf", ".", "feature_column", ".", "categorical_column_with_vocabulary_list", "(", "key", "=", "name", ",", "vocabulary_list", "=", "values", ")", "ind_column", "=", "tf", ".", "feature_column", ".", "indicator_column", "(", "categorical_column", "=", "cat_column", ")", "return", "cat_column", ",", "ind_column" ]
[ 104, 0 ]
[ 118, 33 ]
python
en
['en', 'en', 'en']
True
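Usage sketch: the categorical column feeds crossing and embedding, while the indicator column feeds DenseFeatures directly:

cat_male, ind_male = categorical_fc("is_male", ["True", "False", "Unknown"])
# cat_male -> tf.feature_column.crossed_column() / embedding_column()
# ind_male -> tf.keras.layers.DenseFeatures()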
create_feature_columns
(nembeds)
Creates wide and deep dictionaries of feature columns from inputs. Args: nembeds: int, number of dimensions to embed categorical column down to. Returns: Wide and deep dictionaries of feature columns.
Creates wide and deep dictionaries of feature columns from inputs.
def create_feature_columns(nembeds): """Creates wide and deep dictionaries of feature columns from inputs. Args: nembeds: int, number of dimensions to embed categorical column down to. Returns: Wide and deep dictionaries of feature columns. """ deep_fc = { colname: tf.feature_column.numeric_column(key=colname) for colname in ["mother_age", "gestation_weeks"] } wide_fc = {} is_male, wide_fc["is_male"] = categorical_fc( "is_male", ["True", "False", "Unknown"]) plurality, wide_fc["plurality"] = categorical_fc( "plurality", ["Single(1)", "Twins(2)", "Triplets(3)", "Quadruplets(4)", "Quintuplets(5)", "Multiple(2+)"]) # Bucketize the float fields. This makes them wide age_buckets = tf.feature_column.bucketized_column( source_column=deep_fc["mother_age"], boundaries=np.arange(15, 45, 1).tolist()) wide_fc["age_buckets"] = tf.feature_column.indicator_column( categorical_column=age_buckets) gestation_buckets = tf.feature_column.bucketized_column( source_column=deep_fc["gestation_weeks"], boundaries=np.arange(17, 47, 1).tolist()) wide_fc["gestation_buckets"] = tf.feature_column.indicator_column( categorical_column=gestation_buckets) # Cross all the wide columns, have to do the crossing before we one-hot crossed = tf.feature_column.crossed_column( keys=[age_buckets, gestation_buckets], hash_bucket_size=1000) deep_fc["crossed_embeds"] = tf.feature_column.embedding_column( categorical_column=crossed, dimension=nembeds) return wide_fc, deep_fc
[ "def", "create_feature_columns", "(", "nembeds", ")", ":", "deep_fc", "=", "{", "colname", ":", "tf", ".", "feature_column", ".", "numeric_column", "(", "key", "=", "colname", ")", "for", "colname", "in", "[", "\"mother_age\"", ",", "\"gestation_weeks\"", "]", "}", "wide_fc", "=", "{", "}", "is_male", ",", "wide_fc", "[", "\"is_male\"", "]", "=", "categorical_fc", "(", "\"is_male\"", ",", "[", "\"True\"", ",", "\"False\"", ",", "\"Unknown\"", "]", ")", "plurality", ",", "wide_fc", "[", "\"plurality\"", "]", "=", "categorical_fc", "(", "\"plurality\"", ",", "[", "\"Single(1)\"", ",", "\"Twins(2)\"", ",", "\"Triplets(3)\"", ",", "\"Quadruplets(4)\"", ",", "\"Quintuplets(5)\"", ",", "\"Multiple(2+)\"", "]", ")", "# Bucketize the float fields. This makes them wide", "age_buckets", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "source_column", "=", "deep_fc", "[", "\"mother_age\"", "]", ",", "boundaries", "=", "np", ".", "arange", "(", "15", ",", "45", ",", "1", ")", ".", "tolist", "(", ")", ")", "wide_fc", "[", "\"age_buckets\"", "]", "=", "tf", ".", "feature_column", ".", "indicator_column", "(", "categorical_column", "=", "age_buckets", ")", "gestation_buckets", "=", "tf", ".", "feature_column", ".", "bucketized_column", "(", "source_column", "=", "deep_fc", "[", "\"gestation_weeks\"", "]", ",", "boundaries", "=", "np", ".", "arange", "(", "17", ",", "47", ",", "1", ")", ".", "tolist", "(", ")", ")", "wide_fc", "[", "\"gestation_buckets\"", "]", "=", "tf", ".", "feature_column", ".", "indicator_column", "(", "categorical_column", "=", "gestation_buckets", ")", "# Cross all the wide columns, have to do the crossing before we one-hot", "crossed", "=", "tf", ".", "feature_column", ".", "crossed_column", "(", "keys", "=", "[", "age_buckets", ",", "gestation_buckets", "]", ",", "hash_bucket_size", "=", "1000", ")", "deep_fc", "[", "\"crossed_embeds\"", "]", "=", "tf", ".", "feature_column", ".", "embedding_column", "(", "categorical_column", "=", "crossed", ",", "dimension", "=", "nembeds", ")", "return", "wide_fc", ",", "deep_fc" ]
[ 121, 0 ]
[ 160, 27 ]
python
en
['en', 'en', 'en']
True
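A sketch of the resulting dictionaries, assuming the helpers above are in scope:

wide_fc, deep_fc = create_feature_columns(nembeds=3)
print(sorted(wide_fc))  # ['age_buckets', 'gestation_buckets', 'is_male', 'plurality']
print(sorted(deep_fc))  # ['crossed_embeds', 'gestation_weeks', 'mother_age']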
get_model_outputs
(wide_inputs, deep_inputs, dnn_hidden_units)
Creates model architecture and returns outputs. Args: wide_inputs: Dense tensor used as inputs to wide side of model. deep_inputs: Dense tensor used as inputs to deep side of model. dnn_hidden_units: List of integers where length is number of hidden layers and ith element is the number of neurons at ith layer. Returns: Dense tensor output from the model.
Creates model architecture and returns outputs.
def get_model_outputs(wide_inputs, deep_inputs, dnn_hidden_units): """Creates model architecture and returns outputs. Args: wide_inputs: Dense tensor used as inputs to wide side of model. deep_inputs: Dense tensor used as inputs to deep side of model. dnn_hidden_units: List of integers where length is number of hidden layers and ith element is the number of neurons at ith layer. Returns: Dense tensor output from the model. """ # Hidden layers for the deep side layers = [int(x) for x in dnn_hidden_units] deep = deep_inputs for layerno, numnodes in enumerate(layers): deep = tf.keras.layers.Dense( units=numnodes, activation="relu", name="dnn_{}".format(layerno+1))(deep) deep_out = deep # Linear model for the wide side wide_out = tf.keras.layers.Dense( units=10, activation="relu", name="linear")(wide_inputs) # Concatenate the two sides both = tf.keras.layers.concatenate( inputs=[deep_out, wide_out], name="both") # Final output is a linear activation because this is regression output = tf.keras.layers.Dense( units=1, activation="linear", name="weight")(both) return output
[ "def", "get_model_outputs", "(", "wide_inputs", ",", "deep_inputs", ",", "dnn_hidden_units", ")", ":", "# Hidden layers for the deep side", "layers", "=", "[", "int", "(", "x", ")", "for", "x", "in", "dnn_hidden_units", "]", "deep", "=", "deep_inputs", "for", "layerno", ",", "numnodes", "in", "enumerate", "(", "layers", ")", ":", "deep", "=", "tf", ".", "keras", ".", "layers", ".", "Dense", "(", "units", "=", "numnodes", ",", "activation", "=", "\"relu\"", ",", "name", "=", "\"dnn_{}\"", ".", "format", "(", "layerno", "+", "1", ")", ")", "(", "deep", ")", "deep_out", "=", "deep", "# Linear model for the wide side", "wide_out", "=", "tf", ".", "keras", ".", "layers", ".", "Dense", "(", "units", "=", "10", ",", "activation", "=", "\"relu\"", ",", "name", "=", "\"linear\"", ")", "(", "wide_inputs", ")", "# Concatenate the two sides", "both", "=", "tf", ".", "keras", ".", "layers", ".", "concatenate", "(", "inputs", "=", "[", "deep_out", ",", "wide_out", "]", ",", "name", "=", "\"both\"", ")", "# Final output is a linear activation because this is regression", "output", "=", "tf", ".", "keras", ".", "layers", ".", "Dense", "(", "units", "=", "1", ",", "activation", "=", "\"linear\"", ",", "name", "=", "\"weight\"", ")", "(", "both", ")", "return", "output" ]
[ 163, 0 ]
[ 196, 17 ]
python
en
['en', 'en', 'en']
True
rmse
(y_true, y_pred)
Calculates RMSE evaluation metric. Args: y_true: tensor, true labels. y_pred: tensor, predicted labels. Returns: Tensor with value of RMSE between true and predicted labels.
Calculates RMSE evaluation metric.
def rmse(y_true, y_pred): """Calculates RMSE evaluation metric. Args: y_true: tensor, true labels. y_pred: tensor, predicted labels. Returns: Tensor with value of RMSE between true and predicted labels. """ return tf.sqrt(tf.reduce_mean(tf.square(y_pred - y_true)))
[ "def", "rmse", "(", "y_true", ",", "y_pred", ")", ":", "return", "tf", ".", "sqrt", "(", "tf", ".", "reduce_mean", "(", "tf", ".", "square", "(", "y_pred", "-", "y_true", ")", ")", ")" ]
[ 199, 0 ]
[ 208, 62 ]
python
en
['en', 'en', 'en']
True
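A worked check of the metric on a tiny example:

import tensorflow as tf

y_true = tf.constant([1.0, 2.0])
y_pred = tf.constant([1.0, 4.0])
print(rmse(y_true, y_pred).numpy())  # sqrt(mean([0.0, 4.0])) = sqrt(2) ~ 1.4142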
build_wide_deep_model
(dnn_hidden_units=[64, 32], nembeds=3)
Builds wide and deep model using Keras Functional API. Returns: `tf.keras.models.Model` object.
Builds wide and deep model using Keras Functional API.
def build_wide_deep_model(dnn_hidden_units=[64, 32], nembeds=3): """Builds wide and deep model using Keras Functional API. Returns: `tf.keras.models.Model` object. """ # Create input layers inputs = create_input_layers() # Create feature columns for both wide and deep wide_fc, deep_fc = create_feature_columns(nembeds) # The constructor for DenseFeatures takes a list of numeric columns # The Functional API in Keras requires: LayerConstructor()(inputs) wide_inputs = tf.keras.layers.DenseFeatures( feature_columns=wide_fc.values(), name="wide_inputs")(inputs) deep_inputs = tf.keras.layers.DenseFeatures( feature_columns=deep_fc.values(), name="deep_inputs")(inputs) # Get output of model given inputs output = get_model_outputs(wide_inputs, deep_inputs, dnn_hidden_units) # Build model and compile it all together model = tf.keras.models.Model(inputs=inputs, outputs=output) model.compile(optimizer="adam", loss="mse", metrics=[rmse, "mse"]) return model
[ "def", "build_wide_deep_model", "(", "dnn_hidden_units", "=", "[", "64", ",", "32", "]", ",", "nembeds", "=", "3", ")", ":", "# Create input layers", "inputs", "=", "create_input_layers", "(", ")", "# Create feature columns for both wide and deep", "wide_fc", ",", "deep_fc", "=", "create_feature_columns", "(", "nembeds", ")", "# The constructor for DenseFeatures takes a list of numeric columns", "# The Functional API in Keras requires: LayerConstructor()(inputs)", "wide_inputs", "=", "tf", ".", "keras", ".", "layers", ".", "DenseFeatures", "(", "feature_columns", "=", "wide_fc", ".", "values", "(", ")", ",", "name", "=", "\"wide_inputs\"", ")", "(", "inputs", ")", "deep_inputs", "=", "tf", ".", "keras", ".", "layers", ".", "DenseFeatures", "(", "feature_columns", "=", "deep_fc", ".", "values", "(", ")", ",", "name", "=", "\"deep_inputs\"", ")", "(", "inputs", ")", "# Get output of model given inputs", "output", "=", "get_model_outputs", "(", "wide_inputs", ",", "deep_inputs", ",", "dnn_hidden_units", ")", "# Build model and compile it all together", "model", "=", "tf", ".", "keras", ".", "models", ".", "Model", "(", "inputs", "=", "inputs", ",", "outputs", "=", "output", ")", "model", ".", "compile", "(", "optimizer", "=", "\"adam\"", ",", "loss", "=", "\"mse\"", ",", "metrics", "=", "[", "rmse", ",", "\"mse\"", "]", ")", "return", "model" ]
[ 211, 0 ]
[ 237, 16 ]
python
en
['en', 'en', 'en']
True
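Putting the pieces together; the commented fit() call assumes datasets built with load_dataset() above:

model = build_wide_deep_model(dnn_hidden_units=[64, 32], nembeds=3)
model.summary()  # wide and deep branches merge in the "both" layer
# model.fit(trainds, validation_data=evalds, epochs=5)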
sorted_walk
(dir)
Do os.walk in a reproducible way, independent of nondeterministic filesystem readdir order
Do os.walk in a reproducible way, independent of nondeterministic filesystem readdir order
def sorted_walk(dir):
    """Do os.walk in a reproducible way,
    independent of nondeterministic filesystem readdir order
    """
    for base, dirs, files in os.walk(dir):
        dirs.sort()
        files.sort()
        yield base, dirs, files
[ "def", "sorted_walk", "(", "dir", ")", ":", "for", "base", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "dir", ")", ":", "dirs", ".", "sort", "(", ")", "files", ".", "sort", "(", ")", "yield", "base", ",", "dirs", ",", "files" ]
[ 42, 0 ]
[ 49, 31 ]
python
en
['en', 'gl', 'en']
True
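Usage sketch (the directory name is hypothetical); the output is identical on every run and every filesystem:

for base, dirs, files in sorted_walk("src"):
    print(base, files)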
walk_egg
(egg_dir)
Walk an unpacked egg's contents, skipping the metadata directory
Walk an unpacked egg's contents, skipping the metadata directory
def walk_egg(egg_dir): """Walk an unpacked egg's contents, skipping the metadata directory""" walker = sorted_walk(egg_dir) base, dirs, files = next(walker) if 'EGG-INFO' in dirs: dirs.remove('EGG-INFO') yield base, dirs, files for bdf in walker: yield bdf
[ "def", "walk_egg", "(", "egg_dir", ")", ":", "walker", "=", "sorted_walk", "(", "egg_dir", ")", "base", ",", "dirs", ",", "files", "=", "next", "(", "walker", ")", "if", "'EGG-INFO'", "in", "dirs", ":", "dirs", ".", "remove", "(", "'EGG-INFO'", ")", "yield", "base", ",", "dirs", ",", "files", "for", "bdf", "in", "walker", ":", "yield", "bdf" ]
[ 357, 0 ]
[ 365, 17 ]
python
en
['en', 'en', 'en']
True
scan_module
(egg_dir, base, name, stubs)
Check whether module possibly uses unsafe-for-zipfile stuff
Check whether module possibly uses unsafe-for-zipfile stuff
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff"""

    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 3):
        skip = 8  # skip magic & date
    elif sys.version_info < (3, 7):
        skip = 12  # skip magic & date & file size
    else:
        skip = 16  # skip magic & reserved? & date & file size
    f = open(filename, 'rb')
    f.read(skip)
    code = marshal.load(f)
    f.close()
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
            'getinnerframes', 'getouterframes', 'stack', 'trace'
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
[ "def", "scan_module", "(", "egg_dir", ",", "base", ",", "name", ",", "stubs", ")", ":", "filename", "=", "os", ".", "path", ".", "join", "(", "base", ",", "name", ")", "if", "filename", "[", ":", "-", "1", "]", "in", "stubs", ":", "return", "True", "# Extension module", "pkg", "=", "base", "[", "len", "(", "egg_dir", ")", "+", "1", ":", "]", ".", "replace", "(", "os", ".", "sep", ",", "'.'", ")", "module", "=", "pkg", "+", "(", "pkg", "and", "'.'", "or", "''", ")", "+", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "0", "]", "if", "sys", ".", "version_info", "<", "(", "3", ",", "3", ")", ":", "skip", "=", "8", "# skip magic & date", "elif", "sys", ".", "version_info", "<", "(", "3", ",", "7", ")", ":", "skip", "=", "12", "# skip magic & date & file size", "else", ":", "skip", "=", "16", "# skip magic & reserved? & date & file size", "f", "=", "open", "(", "filename", ",", "'rb'", ")", "f", ".", "read", "(", "skip", ")", "code", "=", "marshal", ".", "load", "(", "f", ")", "f", ".", "close", "(", ")", "safe", "=", "True", "symbols", "=", "dict", ".", "fromkeys", "(", "iter_symbols", "(", "code", ")", ")", "for", "bad", "in", "[", "'__file__'", ",", "'__path__'", "]", ":", "if", "bad", "in", "symbols", ":", "log", ".", "warn", "(", "\"%s: module references %s\"", ",", "module", ",", "bad", ")", "safe", "=", "False", "if", "'inspect'", "in", "symbols", ":", "for", "bad", "in", "[", "'getsource'", ",", "'getabsfile'", ",", "'getsourcefile'", ",", "'getfile'", "'getsourcelines'", ",", "'findsource'", ",", "'getcomments'", ",", "'getframeinfo'", ",", "'getinnerframes'", ",", "'getouterframes'", ",", "'stack'", ",", "'trace'", "]", ":", "if", "bad", "in", "symbols", ":", "log", ".", "warn", "(", "\"%s: module MAY be using inspect.%s\"", ",", "module", ",", "bad", ")", "safe", "=", "False", "return", "safe" ]
[ 405, 0 ]
[ 438, 15 ]
python
en
['en', 'en', 'en']
True
iter_symbols
(code)
Yield names and strings used by `code` and its nested code objects
Yield names and strings used by `code` and its nested code objects
def iter_symbols(code): """Yield names and strings used by `code` and its nested code objects""" for name in code.co_names: yield name for const in code.co_consts: if isinstance(const, six.string_types): yield const elif isinstance(const, CodeType): for name in iter_symbols(const): yield name
[ "def", "iter_symbols", "(", "code", ")", ":", "for", "name", "in", "code", ".", "co_names", ":", "yield", "name", "for", "const", "in", "code", ".", "co_consts", ":", "if", "isinstance", "(", "const", ",", "six", ".", "string_types", ")", ":", "yield", "const", "elif", "isinstance", "(", "const", ",", "CodeType", ")", ":", "for", "name", "in", "iter_symbols", "(", "const", ")", ":", "yield", "name" ]
[ 441, 0 ]
[ 450, 26 ]
python
en
['en', 'en', 'en']
True
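A small sketch of what this yields, assuming six is importable as in the module above:

code = compile("import inspect\nprint(__file__)", "<example>", "exec")
print(sorted(set(iter_symbols(code))))
# ['__file__', 'inspect', 'print'] -- names that scan_module() checks for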
make_zipfile
(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w')
Create a zip file from all the files under 'base_dir'. The archive is written to 'zip_filename' using Python's "zipfile" module; when 'dry_run' is set, the files are only listed and nothing is written. Returns the name of the output zip file.
Create a zip file from all the files under 'base_dir'. The archive is written to 'zip_filename' using Python's "zipfile" module; when 'dry_run' is set, the files are only listed and nothing is written. Returns the name of the output zip file.
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
                 mode='w'):
    """Create a zip file from all the files under 'base_dir'.  The archive
    is written to 'zip_filename' using Python's "zipfile" module; when
    'dry_run' is set, the files are only listed and nothing is written.
    Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1:]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'", p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        z = zipfile.ZipFile(zip_filename, mode, compression=compression)
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(z, dirname, files)
        z.close()
    else:
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
[ "def", "make_zipfile", "(", "zip_filename", ",", "base_dir", ",", "verbose", "=", "0", ",", "dry_run", "=", "0", ",", "compress", "=", "True", ",", "mode", "=", "'w'", ")", ":", "import", "zipfile", "mkpath", "(", "os", ".", "path", ".", "dirname", "(", "zip_filename", ")", ",", "dry_run", "=", "dry_run", ")", "log", ".", "info", "(", "\"creating '%s' and adding '%s' to it\"", ",", "zip_filename", ",", "base_dir", ")", "def", "visit", "(", "z", ",", "dirname", ",", "names", ")", ":", "for", "name", "in", "names", ":", "path", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "name", ")", ")", "if", "os", ".", "path", ".", "isfile", "(", "path", ")", ":", "p", "=", "path", "[", "len", "(", "base_dir", ")", "+", "1", ":", "]", "if", "not", "dry_run", ":", "z", ".", "write", "(", "path", ",", "p", ")", "log", ".", "debug", "(", "\"adding '%s'\"", ",", "p", ")", "compression", "=", "zipfile", ".", "ZIP_DEFLATED", "if", "compress", "else", "zipfile", ".", "ZIP_STORED", "if", "not", "dry_run", ":", "z", "=", "zipfile", ".", "ZipFile", "(", "zip_filename", ",", "mode", ",", "compression", "=", "compression", ")", "for", "dirname", ",", "dirs", ",", "files", "in", "sorted_walk", "(", "base_dir", ")", ":", "visit", "(", "z", ",", "dirname", ",", "files", ")", "z", ".", "close", "(", ")", "else", ":", "for", "dirname", ",", "dirs", ",", "files", "in", "sorted_walk", "(", "base_dir", ")", ":", "visit", "(", "None", ",", "dirname", ",", "files", ")", "return", "zip_filename" ]
[ 470, 0 ]
[ 501, 23 ]
python
en
['en', 'en', 'en']
True
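Usage sketch with hypothetical paths: pack an unpacked egg tree into a single archive (an egg is just a zip file):

make_zipfile("dist/example-1.0-py3.egg", "build/bdist/egg", compress=True)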
bdist_egg.call_command
(self, cmdname, **kw)
Invoke reinitialized command `cmdname` with keyword args
Invoke reinitialized command `cmdname` with keyword args
def call_command(self, cmdname, **kw): """Invoke reinitialized command `cmdname` with keyword args""" for dirname in INSTALL_DIRECTORY_ATTRS: kw.setdefault(dirname, self.bdist_dir) kw.setdefault('skip_build', self.skip_build) kw.setdefault('dry_run', self.dry_run) cmd = self.reinitialize_command(cmdname, **kw) self.run_command(cmdname) return cmd
[ "def", "call_command", "(", "self", ",", "cmdname", ",", "*", "*", "kw", ")", ":", "for", "dirname", "in", "INSTALL_DIRECTORY_ATTRS", ":", "kw", ".", "setdefault", "(", "dirname", ",", "self", ".", "bdist_dir", ")", "kw", ".", "setdefault", "(", "'skip_build'", ",", "self", ".", "skip_build", ")", "kw", ".", "setdefault", "(", "'dry_run'", ",", "self", ".", "dry_run", ")", "cmd", "=", "self", ".", "reinitialize_command", "(", "cmdname", ",", "*", "*", "kw", ")", "self", ".", "run_command", "(", "cmdname", ")", "return", "cmd" ]
[ 150, 4 ]
[ 158, 18 ]
python
en
['en', 'en', 'en']
True
bdist_egg.copy_metadata_to
(self, target_dir)
Copy metadata (egg info) to the target_dir
Copy metadata (egg info) to the target_dir
def copy_metadata_to(self, target_dir): "Copy metadata (egg info) to the target_dir" # normalize the path (so that a forward-slash in egg_info will # match using startswith below) norm_egg_info = os.path.normpath(self.egg_info) prefix = os.path.join(norm_egg_info, '') for path in self.ei_cmd.filelist.files: if path.startswith(prefix): target = os.path.join(target_dir, path[len(prefix):]) ensure_directory(target) self.copy_file(path, target)
[ "def", "copy_metadata_to", "(", "self", ",", "target_dir", ")", ":", "# normalize the path (so that a forward-slash in egg_info will", "# match using startswith below)", "norm_egg_info", "=", "os", ".", "path", ".", "normpath", "(", "self", ".", "egg_info", ")", "prefix", "=", "os", ".", "path", ".", "join", "(", "norm_egg_info", ",", "''", ")", "for", "path", "in", "self", ".", "ei_cmd", ".", "filelist", ".", "files", ":", "if", "path", ".", "startswith", "(", "prefix", ")", ":", "target", "=", "os", ".", "path", ".", "join", "(", "target_dir", ",", "path", "[", "len", "(", "prefix", ")", ":", "]", ")", "ensure_directory", "(", "target", ")", "self", ".", "copy_file", "(", "path", ",", "target", ")" ]
[ 313, 4 ]
[ 323, 44 ]
python
en
['en', 'pt', 'en']
True
bdist_egg.get_ext_outputs
(self)
Get a list of relative paths to C extensions in the output distro
Get a list of relative paths to C extensions in the output distro
def get_ext_outputs(self): """Get a list of relative paths to C extensions in the output distro""" all_outputs = [] ext_outputs = [] paths = {self.bdist_dir: ''} for base, dirs, files in sorted_walk(self.bdist_dir): for filename in files: if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: all_outputs.append(paths[base] + filename) for filename in dirs: paths[os.path.join(base, filename)] = (paths[base] + filename + '/') if self.distribution.has_ext_modules(): build_cmd = self.get_finalized_command('build_ext') for ext in build_cmd.extensions: if isinstance(ext, Library): continue fullname = build_cmd.get_ext_fullname(ext.name) filename = build_cmd.get_ext_filename(fullname) if not os.path.basename(filename).startswith('dl-'): if os.path.exists(os.path.join(self.bdist_dir, filename)): ext_outputs.append(filename) return all_outputs, ext_outputs
[ "def", "get_ext_outputs", "(", "self", ")", ":", "all_outputs", "=", "[", "]", "ext_outputs", "=", "[", "]", "paths", "=", "{", "self", ".", "bdist_dir", ":", "''", "}", "for", "base", ",", "dirs", ",", "files", "in", "sorted_walk", "(", "self", ".", "bdist_dir", ")", ":", "for", "filename", "in", "files", ":", "if", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", ".", "lower", "(", ")", "in", "NATIVE_EXTENSIONS", ":", "all_outputs", ".", "append", "(", "paths", "[", "base", "]", "+", "filename", ")", "for", "filename", "in", "dirs", ":", "paths", "[", "os", ".", "path", ".", "join", "(", "base", ",", "filename", ")", "]", "=", "(", "paths", "[", "base", "]", "+", "filename", "+", "'/'", ")", "if", "self", ".", "distribution", ".", "has_ext_modules", "(", ")", ":", "build_cmd", "=", "self", ".", "get_finalized_command", "(", "'build_ext'", ")", "for", "ext", "in", "build_cmd", ".", "extensions", ":", "if", "isinstance", "(", "ext", ",", "Library", ")", ":", "continue", "fullname", "=", "build_cmd", ".", "get_ext_fullname", "(", "ext", ".", "name", ")", "filename", "=", "build_cmd", ".", "get_ext_filename", "(", "fullname", ")", "if", "not", "os", ".", "path", ".", "basename", "(", "filename", ")", ".", "startswith", "(", "'dl-'", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "self", ".", "bdist_dir", ",", "filename", ")", ")", ":", "ext_outputs", ".", "append", "(", "filename", ")", "return", "all_outputs", ",", "ext_outputs" ]
[ 325, 4 ]
[ 351, 39 ]
python
en
['en', 'en', 'en']
True
Resolution._push_new_state
(self)
Push a new state into history. This new state will be used to hold resolution results of the next coming round.
Push a new state into history.
def _push_new_state(self): """Push a new state into history. This new state will be used to hold resolution results of the next coming round. """ base = self._states[-1] state = State( mapping=base.mapping.copy(), criteria=base.criteria.copy(), ) self._states.append(state)
[ "def", "_push_new_state", "(", "self", ")", ":", "base", "=", "self", ".", "_states", "[", "-", "1", "]", "state", "=", "State", "(", "mapping", "=", "base", ".", "mapping", ".", "copy", "(", ")", ",", "criteria", "=", "base", ".", "criteria", ".", "copy", "(", ")", ",", ")", "self", ".", "_states", ".", "append", "(", "state", ")" ]
[ 123, 4 ]
[ 134, 34 ]
python
en
['en', 'en', 'en']
True
Resolution._backtrack
(self)
Perform backtracking. When we enter here, the stack is like this:: [ state Z ] [ state Y ] [ state X ] .... earlier states are irrelevant. 1. No pins worked for Z, so it does not have a pin. 2. We want to reset state Y to unpinned, and pin another candidate. 3. State X holds what state Y was before the pin, but does not have the incompatibility information gathered in state Y. Each iteration of the loop will: 1. Discard Z. 2. Discard Y but remember its incompatibility information gathered previously, and the failure we're dealing with right now. 3. Push a new state Y' based on X, and apply the incompatibility information from Y to Y'. 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' the new Z and go back to step 2. 4b. If the incompatibilities apply cleanly, end backtracking.
Perform backtracking.
def _backtrack(self): """Perform backtracking. When we enter here, the stack is like this:: [ state Z ] [ state Y ] [ state X ] .... earlier states are irrelevant. 1. No pins worked for Z, so it does not have a pin. 2. We want to reset state Y to unpinned, and pin another candidate. 3. State X holds what state Y was before the pin, but does not have the incompatibility information gathered in state Y. Each iteration of the loop will: 1. Discard Z. 2. Discard Y but remember its incompatibility information gathered previously, and the failure we're dealing with right now. 3. Push a new state Y' based on X, and apply the incompatibility information from Y to Y'. 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' the new Z and go back to step 2. 4b. If the incompatibilities apply cleanly, end backtracking. """ while len(self._states) >= 3: # Remove the state that triggered backtracking. del self._states[-1] # Retrieve the last candidate pin and known incompatibilities. broken_state = self._states.pop() name, candidate = broken_state.mapping.popitem() incompatibilities_from_broken = [ (k, list(v.incompatibilities)) for k, v in broken_state.criteria.items() ] # Also mark the newly known incompatibility. incompatibilities_from_broken.append((name, [candidate])) self._r.backtracking(candidate=candidate) # Create a new state from the last known-to-work one, and apply # the previously gathered incompatibility information. def _patch_criteria(): for k, incompatibilities in incompatibilities_from_broken: if not incompatibilities: continue try: criterion = self.state.criteria[k] except KeyError: continue matches = self._p.find_matches( identifier=k, requirements=IteratorMapping( self.state.criteria, operator.methodcaller("iter_requirement"), ), incompatibilities=IteratorMapping( self.state.criteria, operator.attrgetter("incompatibilities"), {k: incompatibilities}, ), ) candidates = build_iter_view(matches) if not candidates: return False incompatibilities.extend(criterion.incompatibilities) self.state.criteria[k] = Criterion( candidates=candidates, information=list(criterion.information), incompatibilities=incompatibilities, ) return True self._push_new_state() success = _patch_criteria() # It works! Let's work on this new state. if success: return True # State does not work after applying known incompatibilities. # Try the still previous state. # No way to backtrack anymore. return False
[ "def", "_backtrack", "(", "self", ")", ":", "while", "len", "(", "self", ".", "_states", ")", ">=", "3", ":", "# Remove the state that triggered backtracking.", "del", "self", ".", "_states", "[", "-", "1", "]", "# Retrieve the last candidate pin and known incompatibilities.", "broken_state", "=", "self", ".", "_states", ".", "pop", "(", ")", "name", ",", "candidate", "=", "broken_state", ".", "mapping", ".", "popitem", "(", ")", "incompatibilities_from_broken", "=", "[", "(", "k", ",", "list", "(", "v", ".", "incompatibilities", ")", ")", "for", "k", ",", "v", "in", "broken_state", ".", "criteria", ".", "items", "(", ")", "]", "# Also mark the newly known incompatibility.", "incompatibilities_from_broken", ".", "append", "(", "(", "name", ",", "[", "candidate", "]", ")", ")", "self", ".", "_r", ".", "backtracking", "(", "candidate", "=", "candidate", ")", "# Create a new state from the last known-to-work one, and apply", "# the previously gathered incompatibility information.", "def", "_patch_criteria", "(", ")", ":", "for", "k", ",", "incompatibilities", "in", "incompatibilities_from_broken", ":", "if", "not", "incompatibilities", ":", "continue", "try", ":", "criterion", "=", "self", ".", "state", ".", "criteria", "[", "k", "]", "except", "KeyError", ":", "continue", "matches", "=", "self", ".", "_p", ".", "find_matches", "(", "identifier", "=", "k", ",", "requirements", "=", "IteratorMapping", "(", "self", ".", "state", ".", "criteria", ",", "operator", ".", "methodcaller", "(", "\"iter_requirement\"", ")", ",", ")", ",", "incompatibilities", "=", "IteratorMapping", "(", "self", ".", "state", ".", "criteria", ",", "operator", ".", "attrgetter", "(", "\"incompatibilities\"", ")", ",", "{", "k", ":", "incompatibilities", "}", ",", ")", ",", ")", "candidates", "=", "build_iter_view", "(", "matches", ")", "if", "not", "candidates", ":", "return", "False", "incompatibilities", ".", "extend", "(", "criterion", ".", "incompatibilities", ")", "self", ".", "state", ".", "criteria", "[", "k", "]", "=", "Criterion", "(", "candidates", "=", "candidates", ",", "information", "=", "list", "(", "criterion", ".", "information", ")", ",", "incompatibilities", "=", "incompatibilities", ",", ")", "return", "True", "self", ".", "_push_new_state", "(", ")", "success", "=", "_patch_criteria", "(", ")", "# It works! Let's work on this new state.", "if", "success", ":", "return", "True", "# State does not work after applying known incompatibilities.", "# Try the still previous state.", "# No way to backtrack anymore.", "return", "False" ]
[ 241, 4 ]
[ 328, 20 ]
python
en
['en', 'en', 'en']
False
Resolver.resolve
(self, requirements, max_rounds=100)
Take a collection of constraints, spit out the resolution result.

The return value is a representation of the final resolution result. It
is a tuple subclass with three public members:

* `mapping`: A dict of resolved candidates. Each key is an identifier
    of a requirement (as returned by the provider's `identify` method),
    and the value is the resolved candidate.
* `graph`: A `DirectedGraph` instance representing the dependency tree.
    The vertices are keys of `mapping`, and each edge represents *why*
    a particular package is included. A special vertex `None` is
    included to represent parents of user-supplied requirements.
* `criteria`: A dict of "criteria" that hold detailed information on
    how edges in the graph are derived. Each key is an identifier of a
    requirement, and the value is a `Criterion` instance.

The following exceptions may be raised if a resolution cannot be found:

* `ResolutionImpossible`: A resolution cannot be found for the given
    combination of requirements. The `causes` attribute of the
    exception is a list of (requirement, parent), giving the
    requirements that could not be satisfied.
* `ResolutionTooDeep`: The dependency tree is too deeply nested and
    the resolver gave up. This is usually caused by a circular
    dependency, but you can try to resolve this by increasing the
    `max_rounds` argument.
Take a collection of constraints, spit out the resolution result.
def resolve(self, requirements, max_rounds=100):
    """Take a collection of constraints, spit out the resolution result.

    The return value is a representation of the final resolution result. It
    is a tuple subclass with three public members:

    * `mapping`: A dict of resolved candidates. Each key is an identifier
        of a requirement (as returned by the provider's `identify` method),
        and the value is the resolved candidate.
    * `graph`: A `DirectedGraph` instance representing the dependency tree.
        The vertices are keys of `mapping`, and each edge represents *why*
        a particular package is included. A special vertex `None` is
        included to represent parents of user-supplied requirements.
    * `criteria`: A dict of "criteria" that hold detailed information on
        how edges in the graph are derived. Each key is an identifier of a
        requirement, and the value is a `Criterion` instance.

    The following exceptions may be raised if a resolution cannot be found:

    * `ResolutionImpossible`: A resolution cannot be found for the given
        combination of requirements. The `causes` attribute of the
        exception is a list of (requirement, parent), giving the
        requirements that could not be satisfied.
    * `ResolutionTooDeep`: The dependency tree is too deeply nested and
        the resolver gave up. This is usually caused by a circular
        dependency, but you can try to resolve this by increasing the
        `max_rounds` argument.
    """
    resolution = Resolution(self.provider, self.reporter)
    state = resolution.resolve(requirements, max_rounds=max_rounds)
    return _build_result(state)
[ "def", "resolve", "(", "self", ",", "requirements", ",", "max_rounds", "=", "100", ")", ":", "resolution", "=", "Resolution", "(", "self", ".", "provider", ",", "self", ".", "reporter", ")", "state", "=", "resolution", ".", "resolve", "(", "requirements", ",", "max_rounds", "=", "max_rounds", ")", "return", "_build_result", "(", "state", ")" ]
[ 442, 4 ]
[ 472, 35 ]
python
en
['en', 'en', 'en']
True
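A hedged sketch of driving the resolver; MyProvider and my_requirements are hypothetical (resolvelib only defines the provider interface), while BaseReporter is the library's built-in no-op reporter:

from resolvelib import BaseReporter, Resolver  # vendored by pip as pip._vendor.resolvelib

resolver = Resolver(MyProvider(), BaseReporter())
result = resolver.resolve(my_requirements, max_rounds=200)
for identifier, candidate in result.mapping.items():
    print(identifier, "->", candidate)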
load_cdll
(name, macos10_16_path)
Loads a CDLL by name, falling back to a known path on macOS 10.16+
Loads a CDLL by name, falling back to a known path on macOS 10.16+
def load_cdll(name, macos10_16_path):
    """Loads a CDLL by name, falling back to a known path on macOS 10.16+"""
    try:
        # Big Sur is technically 11 but we use 10.16 due to the Big Sur
        # beta being labeled as 10.16.
        if version_info >= (10, 16):
            path = macos10_16_path
        else:
            path = find_library(name)
        if not path:
            raise OSError  # Caught and reraised as 'ImportError'
        return CDLL(path, use_errno=True)
    except OSError:
        raise_from(ImportError("The library %s failed to load" % name), None)
[ "def", "load_cdll", "(", "name", ",", "macos10_16_path", ")", ":", "try", ":", "# Big Sur is technically 11 but we use 10.16 due to the Big Sur", "# beta being labeled as 10.16.", "if", "version_info", ">=", "(", "10", ",", "16", ")", ":", "path", "=", "macos10_16_path", "else", ":", "path", "=", "find_library", "(", "name", ")", "if", "not", "path", ":", "raise", "OSError", "# Caught and reraised as 'ImportError'", "return", "CDLL", "(", "path", ",", "use_errno", "=", "True", ")", "except", "OSError", ":", "raise_from", "(", "ImportError", "(", "\"The library %s failed to load\"", "%", "name", ")", ",", "None", ")" ]
[ 64, 0 ]
[ 77, 77 ]
python
en
['en', 'en', 'en']
True
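Usage sketch mirroring how macOS framework bindings typically call this; the fallback path is the fixed Big Sur location used when find_library() comes up empty:

Security = load_cdll(
    "Security",
    "/System/Library/Frameworks/Security.framework/Security",
)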
init_logging
(quiet, loglevel, configdir)
Initializes the console and file handlers for the logging module.
Initializes the console and file handlers for the logging module.
def init_logging(quiet, loglevel, configdir): "Initializes the console and file handlers for the logging module." logger = logging.getLogger() # Set the loglevel. if loglevel > 3: loglevel = 3 levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] logger.setLevel(levels[loglevel]) logformat = "%(asctime)-14s %(levelname)-7s %(name)-5s %(message)s" formatter = logging.Formatter(logformat) try: logfile = os.path.join(configdir, "gmxmail.log") file_handler = logging.FileHandler(logfile) file_handler.setFormatter(formatter) logger.addHandler(file_handler) log.info("Added logging file handler.") except IOError: log.info("Could not attach file handler.") # By default, we log to both file and stdout, unless quiet is enabled. if not quiet: console_handler = logging.StreamHandler(sys.stdout) console_handler.setFormatter(formatter) logger.addHandler(console_handler) log.info("Added logging console handler.")
[ "def", "init_logging", "(", "quiet", ",", "loglevel", ",", "configdir", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", ")", "# Set the loglevel.", "if", "loglevel", ">", "3", ":", "loglevel", "=", "3", "levels", "=", "[", "logging", ".", "ERROR", ",", "logging", ".", "WARN", ",", "logging", ".", "INFO", ",", "logging", ".", "DEBUG", "]", "logger", ".", "setLevel", "(", "levels", "[", "loglevel", "]", ")", "logformat", "=", "\"%(asctime)-14s %(levelname)-7s %(name)-5s %(message)s\"", "formatter", "=", "logging", ".", "Formatter", "(", "logformat", ")", "try", ":", "logfile", "=", "os", ".", "path", ".", "join", "(", "configdir", ",", "\"gmxmail.log\"", ")", "file_handler", "=", "logging", ".", "FileHandler", "(", "logfile", ")", "file_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "file_handler", ")", "log", ".", "info", "(", "\"Added logging file handler.\"", ")", "except", "IOError", ":", "log", ".", "info", "(", "\"Could not attach file handler.\"", ")", "# By default, we log to both file and stdout, unless quiet is enabled.", "if", "not", "quiet", ":", "console_handler", "=", "logging", ".", "StreamHandler", "(", "sys", ".", "stdout", ")", "console_handler", ".", "setFormatter", "(", "formatter", ")", "logger", ".", "addHandler", "(", "console_handler", ")", "log", ".", "info", "(", "\"Added logging console handler.\"", ")" ]
[ 40, 0 ]
[ 68, 50 ]
python
en
['en', 'en', 'en']
True
get_configdir
()
Determines where to store our logs and read our config file from.
Determines where to store our logs and read our config file from.
def get_configdir(): "Determines where to store our logs and read our config file from." configdir = os.path.dirname(os.path.realpath(__file__)) # We are here. home = os.path.join(os.path.expanduser("~"), ".gmxmail") homeconfig = os.path.join(os.path.expanduser("~"), ".config/gmxmail") if os.path.isdir(homeconfig): configdir = homeconfig elif os.path.isdir(home): configdir = home return configdir
[ "def", "get_configdir", "(", ")", ":", "configdir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "realpath", "(", "__file__", ")", ")", "# We are here.", "home", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", ",", "\".gmxmail\"", ")", "homeconfig", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "\"~\"", ")", ",", "\".config/gmxmail\"", ")", "if", "os", ".", "path", ".", "isdir", "(", "homeconfig", ")", ":", "configdir", "=", "homeconfig", "elif", "os", ".", "path", ".", "isdir", "(", "home", ")", ":", "configdir", "=", "home", "return", "configdir" ]
[ 71, 0 ]
[ 81, 20 ]
python
en
['en', 'en', 'en']
True
main
()
Entry point for gmxmail. Parse command-line arguments and instantiate the MailHandler.
Entry point for gmxmail. Parse command-line arguments and instantiate the MailHandler.
def main():
    "Entry point for gmxmail. Parse command-line arguments and instantiate the MailHandler."
    args = docopt(__doc__, version="0.1")

    configdir = get_configdir()

    init_logging(args["--quiet"], args["-v"], configdir)

    # Loglevels: 10 (Debug), 20 (Info), 30 (Warn), 40 (Error)
    log.info("Loglevel is {}".format(logging.getLogger().getEffectiveLevel()))

    m = MailHandler(args["--acc"], args["--user"], configdir)

    if args["send"]:
        m.send_mail(args["<recipient>"], args["<head>"], args["<message>"],
                    args["--sign"], args["--encrypt"], args["--key"],
                    args["--dryrun"])
    else:
        m.get_mail()
[ "def", "main", "(", ")", ":", "args", "=", "docopt", "(", "__doc__", ",", "version", "=", "\"0.1\"", ")", "configdir", "=", "get_configdir", "(", ")", "init_logging", "(", "args", "[", "\"--quiet\"", "]", ",", "args", "[", "\"-v\"", "]", ",", "configdir", ")", "# Loglevels: 10 (Debug), 20 (Info), 30 (Warn), 40 (Error)", "log", ".", "info", "(", "\"Loglevel is {}\"", ".", "format", "(", "logging", ".", "getLogger", "(", ")", ".", "getEffectiveLevel", "(", ")", ")", ")", "m", "=", "MailHandler", "(", "args", "[", "\"--acc\"", "]", ",", "args", "[", "\"--user\"", "]", ",", "configdir", ")", "if", "args", "[", "\"send\"", "]", ":", "m", ".", "send_mail", "(", "args", "[", "\"<recipient>\"", "]", ",", "args", "[", "\"<head>\"", "]", ",", "args", "[", "\"<message>\"", "]", ",", "args", "[", "\"--sign\"", "]", ",", "args", "[", "\"--encrypt\"", "]", ",", "args", "[", "\"--key\"", "]", ",", "args", "[", "\"--dryrun\"", "]", ")", "else", ":", "m", ".", "get_mail", "(", ")" ]
[ 84, 0 ]
[ 103, 20 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.ensure_timezone
(self)
Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not.
Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not.
def ensure_timezone(self): """ Ensure the connection's timezone is set to `self.timezone_name` and return whether it changed or not. """ return False
[ "def", "ensure_timezone", "(", "self", ")", ":", "return", "False" ]
[ 109, 4 ]
[ 114, 20 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.timezone
(self)
Return a tzinfo of the database connection time zone. This is only used when time zone support is enabled. When a datetime is read from the database, it is always returned in this time zone. When the database backend supports time zones, it doesn't matter which time zone Django uses, as long as aware datetimes are used everywhere. Other users connecting to the database can choose their own time zone. When the database backend doesn't support time zones, the time zone Django uses may be constrained by the requirements of other users of the database.
Return a tzinfo of the database connection time zone.
def timezone(self): """ Return a tzinfo of the database connection time zone. This is only used when time zone support is enabled. When a datetime is read from the database, it is always returned in this time zone. When the database backend supports time zones, it doesn't matter which time zone Django uses, as long as aware datetimes are used everywhere. Other users connecting to the database can choose their own time zone. When the database backend doesn't support time zones, the time zone Django uses may be constrained by the requirements of other users of the database. """ if not settings.USE_TZ: return None elif self.settings_dict['TIME_ZONE'] is None: return timezone.utc else: return pytz.timezone(self.settings_dict['TIME_ZONE'])
[ "def", "timezone", "(", "self", ")", ":", "if", "not", "settings", ".", "USE_TZ", ":", "return", "None", "elif", "self", ".", "settings_dict", "[", "'TIME_ZONE'", "]", "is", "None", ":", "return", "timezone", ".", "utc", "else", ":", "return", "pytz", ".", "timezone", "(", "self", ".", "settings_dict", "[", "'TIME_ZONE'", "]", ")" ]
[ 117, 4 ]
[ 137, 65 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.timezone_name
(self)
Name of the time zone of the database connection.
Name of the time zone of the database connection.
def timezone_name(self): """ Name of the time zone of the database connection. """ if not settings.USE_TZ: return settings.TIME_ZONE elif self.settings_dict['TIME_ZONE'] is None: return 'UTC' else: return self.settings_dict['TIME_ZONE']
[ "def", "timezone_name", "(", "self", ")", ":", "if", "not", "settings", ".", "USE_TZ", ":", "return", "settings", ".", "TIME_ZONE", "elif", "self", ".", "settings_dict", "[", "'TIME_ZONE'", "]", "is", "None", ":", "return", "'UTC'", "else", ":", "return", "self", ".", "settings_dict", "[", "'TIME_ZONE'", "]" ]
[ 140, 4 ]
[ 149, 50 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.get_connection_params
(self)
Return a dict of parameters suitable for get_new_connection.
Return a dict of parameters suitable for get_new_connection.
def get_connection_params(self): """Return a dict of parameters suitable for get_new_connection.""" raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_connection_params() method')
[ "def", "get_connection_params", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseDatabaseWrapper may require a get_connection_params() method'", ")" ]
[ 165, 4 ]
[ 167, 115 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.get_new_connection
(self, conn_params)
Open a connection to the database.
Open a connection to the database.
def get_new_connection(self, conn_params): """Open a connection to the database.""" raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a get_new_connection() method')
[ "def", "get_new_connection", "(", "self", ",", "conn_params", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseDatabaseWrapper may require a get_new_connection() method'", ")" ]
[ 169, 4 ]
[ 171, 112 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.init_connection_state
(self)
Initialize the database connection settings.
Initialize the database connection settings.
def init_connection_state(self): """Initialize the database connection settings.""" raise NotImplementedError('subclasses of BaseDatabaseWrapper may require an init_connection_state() method')
[ "def", "init_connection_state", "(", "self", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseDatabaseWrapper may require an init_connection_state() method'", ")" ]
[ 173, 4 ]
[ 175, 116 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.create_cursor
(self, name=None)
Create a cursor. Assume that a connection is established.
Create a cursor. Assume that a connection is established.
def create_cursor(self, name=None): """Create a cursor. Assume that a connection is established.""" raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a create_cursor() method')
[ "def", "create_cursor", "(", "self", ",", "name", "=", "None", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseDatabaseWrapper may require a create_cursor() method'", ")" ]
[ 177, 4 ]
[ 179, 107 ]
python
en
['en', 'en', 'en']
True
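A toy sketch of how a concrete backend fills in the four hooks above, using sqlite3 purely for illustration; a real backend (e.g. django.db.backends.sqlite3) implements much more:

import sqlite3

class ToyDatabaseWrapper(BaseDatabaseWrapper):
    def get_connection_params(self):
        return {"database": self.settings_dict["NAME"]}

    def get_new_connection(self, conn_params):
        return sqlite3.connect(**conn_params)

    def init_connection_state(self):
        pass  # a real backend sets autocommit, timezone, etc. here

    def create_cursor(self, name=None):
        return self.connection.cursor()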
BaseDatabaseWrapper.connect
(self)
Connect to the database. Assume that the connection is closed.
Connect to the database. Assume that the connection is closed.
def connect(self): """Connect to the database. Assume that the connection is closed.""" # Check for invalid configurations. self.check_settings() # In case the previous connection was closed while in an atomic block self.in_atomic_block = False self.savepoint_ids = [] self.needs_rollback = False # Reset parameters defining when to close the connection max_age = self.settings_dict['CONN_MAX_AGE'] self.close_at = None if max_age is None else time.monotonic() + max_age self.closed_in_transaction = False self.errors_occurred = False # Establish the connection conn_params = self.get_connection_params() self.connection = self.get_new_connection(conn_params) self.set_autocommit(self.settings_dict['AUTOCOMMIT']) self.init_connection_state() connection_created.send(sender=self.__class__, connection=self) self.run_on_commit = []
[ "def", "connect", "(", "self", ")", ":", "# Check for invalid configurations.", "self", ".", "check_settings", "(", ")", "# In case the previous connection was closed while in an atomic block", "self", ".", "in_atomic_block", "=", "False", "self", ".", "savepoint_ids", "=", "[", "]", "self", ".", "needs_rollback", "=", "False", "# Reset parameters defining when to close the connection", "max_age", "=", "self", ".", "settings_dict", "[", "'CONN_MAX_AGE'", "]", "self", ".", "close_at", "=", "None", "if", "max_age", "is", "None", "else", "time", ".", "monotonic", "(", ")", "+", "max_age", "self", ".", "closed_in_transaction", "=", "False", "self", ".", "errors_occurred", "=", "False", "# Establish the connection", "conn_params", "=", "self", ".", "get_connection_params", "(", ")", "self", ".", "connection", "=", "self", ".", "get_new_connection", "(", "conn_params", ")", "self", ".", "set_autocommit", "(", "self", ".", "settings_dict", "[", "'AUTOCOMMIT'", "]", ")", "self", ".", "init_connection_state", "(", ")", "connection_created", ".", "send", "(", "sender", "=", "self", ".", "__class__", ",", "connection", "=", "self", ")", "self", ".", "run_on_commit", "=", "[", "]" ]
[ 184, 4 ]
[ 204, 31 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.ensure_connection
(self)
Guarantee that a connection to the database is established.
Guarantee that a connection to the database is established.
def ensure_connection(self): """Guarantee that a connection to the database is established.""" if self.connection is None: with self.wrap_database_errors: self.connect()
[ "def", "ensure_connection", "(", "self", ")", ":", "if", "self", ".", "connection", "is", "None", ":", "with", "self", ".", "wrap_database_errors", ":", "self", ".", "connect", "(", ")" ]
[ 214, 4 ]
[ 218, 30 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper._prepare_cursor
(self, cursor)
Validate the connection is usable and perform database cursor wrapping.
Validate the connection is usable and perform database cursor wrapping.
def _prepare_cursor(self, cursor): """ Validate the connection is usable and perform database cursor wrapping. """ self.validate_thread_sharing() if self.queries_logged: wrapped_cursor = self.make_debug_cursor(cursor) else: wrapped_cursor = self.make_cursor(cursor) return wrapped_cursor
[ "def", "_prepare_cursor", "(", "self", ",", "cursor", ")", ":", "self", ".", "validate_thread_sharing", "(", ")", "if", "self", ".", "queries_logged", ":", "wrapped_cursor", "=", "self", ".", "make_debug_cursor", "(", "cursor", ")", "else", ":", "wrapped_cursor", "=", "self", ".", "make_cursor", "(", "cursor", ")", "return", "wrapped_cursor" ]
[ 222, 4 ]
[ 231, 29 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.cursor
(self)
Create a cursor, opening a connection if necessary.
Create a cursor, opening a connection if necessary.
def cursor(self): """Create a cursor, opening a connection if necessary.""" return self._cursor()
[ "def", "cursor", "(", "self", ")", ":", "return", "self", ".", "_cursor", "(", ")" ]
[ 256, 4 ]
[ 258, 29 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.commit
(self)
Commit a transaction and reset the dirty flag.
Commit a transaction and reset the dirty flag.
def commit(self): """Commit a transaction and reset the dirty flag.""" self.validate_thread_sharing() self.validate_no_atomic_block() self._commit() # A successful commit means that the database connection works. self.errors_occurred = False self.run_commit_hooks_on_set_autocommit_on = True
[ "def", "commit", "(", "self", ")", ":", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "validate_no_atomic_block", "(", ")", "self", ".", "_commit", "(", ")", "# A successful commit means that the database connection works.", "self", ".", "errors_occurred", "=", "False", "self", ".", "run_commit_hooks_on_set_autocommit_on", "=", "True" ]
[ 261, 4 ]
[ 268, 57 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.rollback
(self)
Roll back a transaction and reset the dirty flag.
Roll back a transaction and reset the dirty flag.
def rollback(self): """Roll back a transaction and reset the dirty flag.""" self.validate_thread_sharing() self.validate_no_atomic_block() self._rollback() # A successful rollback means that the database connection works. self.errors_occurred = False self.needs_rollback = False self.run_on_commit = []
[ "def", "rollback", "(", "self", ")", ":", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "validate_no_atomic_block", "(", ")", "self", ".", "_rollback", "(", ")", "# A successful rollback means that the database connection works.", "self", ".", "errors_occurred", "=", "False", "self", ".", "needs_rollback", "=", "False", "self", ".", "run_on_commit", "=", "[", "]" ]
[ 271, 4 ]
[ 279, 31 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.close
(self)
Close the connection to the database.
Close the connection to the database.
def close(self): """Close the connection to the database.""" self.validate_thread_sharing() self.run_on_commit = [] # Don't call validate_no_atomic_block() to avoid making it difficult # to get rid of a connection in an invalid state. The next connect() # will reset the transaction state anyway. if self.closed_in_transaction or self.connection is None: return try: self._close() finally: if self.in_atomic_block: self.closed_in_transaction = True self.needs_rollback = True else: self.connection = None
[ "def", "close", "(", "self", ")", ":", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "run_on_commit", "=", "[", "]", "# Don't call validate_no_atomic_block() to avoid making it difficult", "# to get rid of a connection in an invalid state. The next connect()", "# will reset the transaction state anyway.", "if", "self", ".", "closed_in_transaction", "or", "self", ".", "connection", "is", "None", ":", "return", "try", ":", "self", ".", "_close", "(", ")", "finally", ":", "if", "self", ".", "in_atomic_block", ":", "self", ".", "closed_in_transaction", "=", "True", "self", ".", "needs_rollback", "=", "True", "else", ":", "self", ".", "connection", "=", "None" ]
[ 282, 4 ]
[ 299, 38 ]
python
en
['en', 'en', 'en']
True
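
One observable consequence of close() is that the wrapper reconnects lazily: closing is safe outside an atomic block because the next cursor() call re-opens the connection. A small sketch, assuming the default database alias:

from django.db import connection

connection.close()  # drops the underlying connection (outside atomic blocks)
with connection.cursor() as cursor:
    cursor.execute("SELECT 1")  # triggers a fresh connect under the hood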
BaseDatabaseWrapper.savepoint
(self)
Create a savepoint inside the current transaction. Return an identifier for the savepoint that will be used for the subsequent rollback or commit. Do nothing if savepoints are not supported.
Create a savepoint inside the current transaction. Return an identifier for the savepoint that will be used for the subsequent rollback or commit. Do nothing if savepoints are not supported.
def savepoint(self):
    """
    Create a savepoint inside the current transaction. Return an
    identifier for the savepoint that will be used for the subsequent
    rollback or commit. Do nothing if savepoints are not supported.
    """
    if not self._savepoint_allowed():
        return

    thread_ident = _thread.get_ident()
    tid = str(thread_ident).replace('-', '')

    self.savepoint_state += 1
    sid = "s%s_x%d" % (tid, self.savepoint_state)

    self.validate_thread_sharing()
    self._savepoint(sid)

    return sid
[ "def", "savepoint", "(", "self", ")", ":", "if", "not", "self", ".", "_savepoint_allowed", "(", ")", ":", "return", "thread_ident", "=", "_thread", ".", "get_ident", "(", ")", "tid", "=", "str", "(", "thread_ident", ")", ".", "replace", "(", "'-'", ",", "''", ")", "self", ".", "savepoint_state", "+=", "1", "sid", "=", "\"s%s_x%d\"", "%", "(", "tid", ",", "self", ".", "savepoint_state", ")", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "_savepoint", "(", "sid", ")", "return", "sid" ]
[ 322, 4 ]
[ 340, 18 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.savepoint_rollback
(self, sid)
Roll back to a savepoint. Do nothing if savepoints are not supported.
Roll back to a savepoint. Do nothing if savepoints are not supported.
def savepoint_rollback(self, sid):
    """
    Roll back to a savepoint. Do nothing if savepoints are not supported.
    """
    if not self._savepoint_allowed():
        return

    self.validate_thread_sharing()
    self._savepoint_rollback(sid)

    # Remove any callbacks registered while this savepoint was active.
    self.run_on_commit = [
        (sids, func) for (sids, func) in self.run_on_commit if sid not in sids
    ]
[ "def", "savepoint_rollback", "(", "self", ",", "sid", ")", ":", "if", "not", "self", ".", "_savepoint_allowed", "(", ")", ":", "return", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "_savepoint_rollback", "(", "sid", ")", "# Remove any callbacks registered while this savepoint was active.", "self", ".", "run_on_commit", "=", "[", "(", "sids", ",", "func", ")", "for", "(", "sids", ",", "func", ")", "in", "self", ".", "run_on_commit", "if", "sid", "not", "in", "sids", "]" ]
[ 343, 4 ]
[ 356, 9 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.savepoint_commit
(self, sid)
Release a savepoint. Do nothing if savepoints are not supported.
Release a savepoint. Do nothing if savepoints are not supported.
def savepoint_commit(self, sid):
    """
    Release a savepoint. Do nothing if savepoints are not supported.
    """
    if not self._savepoint_allowed():
        return

    self.validate_thread_sharing()
    self._savepoint_commit(sid)
[ "def", "savepoint_commit", "(", "self", ",", "sid", ")", ":", "if", "not", "self", ".", "_savepoint_allowed", "(", ")", ":", "return", "self", ".", "validate_thread_sharing", "(", ")", "self", ".", "_savepoint_commit", "(", "sid", ")" ]
[ 359, 4 ]
[ 367, 35 ]
python
en
['en', 'error', 'th']
False
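
The three savepoint methods above (savepoint, savepoint_rollback, savepoint_commit) combine into a partial-rollback pattern inside an open transaction. A minimal sketch, assuming autocommit is already off so savepoints are allowed; the SQL and table name are placeholders:

from django.db import connection

def try_insert(cursor):
    sid = connection.savepoint()  # returns None if savepoints are unsupported
    try:
        cursor.execute("INSERT INTO demo_table (name) VALUES (%s)", ["x"])  # placeholder
    except Exception:
        # Undo only the work since the savepoint; the outer transaction survives.
        connection.savepoint_rollback(sid)
    else:
        connection.savepoint_commit(sid)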
BaseDatabaseWrapper.clean_savepoints
(self)
Reset the counter used to generate unique savepoint ids in this thread.
Reset the counter used to generate unique savepoint ids in this thread.
def clean_savepoints(self):
    """
    Reset the counter used to generate unique savepoint ids in this thread.
    """
    self.savepoint_state = 0
[ "def", "clean_savepoints", "(", "self", ")", ":", "self", ".", "savepoint_state", "=", "0" ]
[ 370, 4 ]
[ 374, 32 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper._set_autocommit
(self, autocommit)
Backend-specific implementation to enable or disable autocommit.
Backend-specific implementation to enable or disable autocommit.
def _set_autocommit(self, autocommit):
    """
    Backend-specific implementation to enable or disable autocommit.
    """
    raise NotImplementedError('subclasses of BaseDatabaseWrapper may require a _set_autocommit() method')
[ "def", "_set_autocommit", "(", "self", ",", "autocommit", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseDatabaseWrapper may require a _set_autocommit() method'", ")" ]
[ 378, 4 ]
[ 382, 109 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.get_autocommit
(self)
Get the autocommit state.
Get the autocommit state.
def get_autocommit(self):
    """Get the autocommit state."""
    self.ensure_connection()
    return self.autocommit
[ "def", "get_autocommit", "(", "self", ")", ":", "self", ".", "ensure_connection", "(", ")", "return", "self", ".", "autocommit" ]
[ 386, 4 ]
[ 389, 30 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.set_autocommit
(self, autocommit, force_begin_transaction_with_broken_autocommit=False)
Enable or disable autocommit.

The usual way to start a transaction is to turn autocommit off. SQLite does not properly start a transaction when disabling autocommit. To avoid this buggy behavior and to actually enter a new transaction, an explicit BEGIN is required. Using force_begin_transaction_with_broken_autocommit=True will issue an explicit BEGIN with SQLite. This option will be ignored for other backends.
Enable or disable autocommit.
def set_autocommit(self, autocommit, force_begin_transaction_with_broken_autocommit=False):
    """
    Enable or disable autocommit.

    The usual way to start a transaction is to turn autocommit off.
    SQLite does not properly start a transaction when disabling
    autocommit. To avoid this buggy behavior and to actually enter a new
    transaction, an explicit BEGIN is required. Using
    force_begin_transaction_with_broken_autocommit=True will issue an
    explicit BEGIN with SQLite. This option will be ignored for other
    backends.
    """
    self.validate_no_atomic_block()
    self.ensure_connection()

    start_transaction_under_autocommit = (
        force_begin_transaction_with_broken_autocommit and not autocommit and
        hasattr(self, '_start_transaction_under_autocommit')
    )

    if start_transaction_under_autocommit:
        self._start_transaction_under_autocommit()
    else:
        self._set_autocommit(autocommit)

    self.autocommit = autocommit

    if autocommit and self.run_commit_hooks_on_set_autocommit_on:
        self.run_and_clear_commit_hooks()
        self.run_commit_hooks_on_set_autocommit_on = False
[ "def", "set_autocommit", "(", "self", ",", "autocommit", ",", "force_begin_transaction_with_broken_autocommit", "=", "False", ")", ":", "self", ".", "validate_no_atomic_block", "(", ")", "self", ".", "ensure_connection", "(", ")", "start_transaction_under_autocommit", "=", "(", "force_begin_transaction_with_broken_autocommit", "and", "not", "autocommit", "and", "hasattr", "(", "self", ",", "'_start_transaction_under_autocommit'", ")", ")", "if", "start_transaction_under_autocommit", ":", "self", ".", "_start_transaction_under_autocommit", "(", ")", "else", ":", "self", ".", "_set_autocommit", "(", "autocommit", ")", "self", ".", "autocommit", "=", "autocommit", "if", "autocommit", "and", "self", ".", "run_commit_hooks_on_set_autocommit_on", ":", "self", ".", "run_and_clear_commit_hooks", "(", ")", "self", ".", "run_commit_hooks_on_set_autocommit_on", "=", "False" ]
[ 391, 4 ]
[ 420, 62 ]
python
en
['en', 'error', 'th']
False
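
get_autocommit() and set_autocommit() pair naturally into a save-and-restore helper. A sketch under the assumption that no atomic block is active (set_autocommit() raises otherwise); with_autocommit_off is a hypothetical name:

from django.db import connection

def with_autocommit_off(fn):
    # Run fn inside an explicit transaction, restoring the previous
    # autocommit state afterwards.
    previous = connection.get_autocommit()  # also ensures a connection exists
    connection.set_autocommit(False)
    try:
        result = fn()
        connection.commit()
        return result
    except Exception:
        connection.rollback()
        raise
    finally:
        connection.set_autocommit(previous)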
BaseDatabaseWrapper.get_rollback
(self)
Get the "needs rollback" flag -- for *advanced use* only.
Get the "needs rollback" flag -- for *advanced use* only.
def get_rollback(self):
    """Get the "needs rollback" flag -- for *advanced use* only."""
    if not self.in_atomic_block:
        raise TransactionManagementError(
            "The rollback flag doesn't work outside of an 'atomic' block.")
    return self.needs_rollback
[ "def", "get_rollback", "(", "self", ")", ":", "if", "not", "self", ".", "in_atomic_block", ":", "raise", "TransactionManagementError", "(", "\"The rollback flag doesn't work outside of an 'atomic' block.\"", ")", "return", "self", ".", "needs_rollback" ]
[ 422, 4 ]
[ 427, 34 ]
python
en
['en', 'en', 'en']
True
BaseDatabaseWrapper.set_rollback
(self, rollback)
Set or unset the "needs rollback" flag -- for *advanced use* only.
Set or unset the "needs rollback" flag -- for *advanced use* only.
def set_rollback(self, rollback):
    """
    Set or unset the "needs rollback" flag -- for *advanced use* only.
    """
    if not self.in_atomic_block:
        raise TransactionManagementError(
            "The rollback flag doesn't work outside of an 'atomic' block.")
    self.needs_rollback = rollback
[ "def", "set_rollback", "(", "self", ",", "rollback", ")", ":", "if", "not", "self", ".", "in_atomic_block", ":", "raise", "TransactionManagementError", "(", "\"The rollback flag doesn't work outside of an 'atomic' block.\"", ")", "self", ".", "needs_rollback", "=", "rollback" ]
[ 429, 4 ]
[ 436, 38 ]
python
en
['en', 'error', 'th']
False
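
set_rollback() is exposed publicly as transaction.set_rollback() and is the documented way to force a rollback at the end of an atomic block without raising an exception. A sketch; is_consistent is a hypothetical validation function:

from django.db import transaction

def save_all_or_nothing(objs):
    with transaction.atomic():
        for obj in objs:
            obj.save()
        if not all(is_consistent(obj) for obj in objs):  # hypothetical check
            # Mark the block as needing rollback; atomic rolls back on
            # exit instead of committing, and no exception propagates.
            transaction.set_rollback(True)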
BaseDatabaseWrapper.validate_no_atomic_block
(self)
Raise an error if an atomic block is active.
Raise an error if an atomic block is active.
def validate_no_atomic_block(self):
    """Raise an error if an atomic block is active."""
    if self.in_atomic_block:
        raise TransactionManagementError(
            "This is forbidden when an 'atomic' block is active.")
[ "def", "validate_no_atomic_block", "(", "self", ")", ":", "if", "self", ".", "in_atomic_block", ":", "raise", "TransactionManagementError", "(", "\"This is forbidden when an 'atomic' block is active.\"", ")" ]
[ 438, 4 ]
[ 442, 70 ]
python
en
['en', 'lb', 'en']
True
BaseDatabaseWrapper.constraint_checks_disabled
(self)
Disable foreign key constraint checking.
Disable foreign key constraint checking.
def constraint_checks_disabled(self):
    """
    Disable foreign key constraint checking.
    """
    disabled = self.disable_constraint_checking()
    try:
        yield
    finally:
        if disabled:
            self.enable_constraint_checking()
[ "def", "constraint_checks_disabled", "(", "self", ")", ":", "disabled", "=", "self", ".", "disable_constraint_checking", "(", ")", "try", ":", "yield", "finally", ":", "if", "disabled", ":", "self", ".", "enable_constraint_checking", "(", ")" ]
[ 453, 4 ]
[ 462, 49 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.disable_constraint_checking
(self)
Backends can implement as needed to temporarily disable foreign key constraint checking. Should return True if the constraints were disabled and will need to be reenabled.
Backends can implement as needed to temporarily disable foreign key constraint checking. Should return True if the constraints were disabled and will need to be reenabled.
def disable_constraint_checking(self):
    """
    Backends can implement as needed to temporarily disable foreign key
    constraint checking. Should return True if the constraints were
    disabled and will need to be reenabled.
    """
    return False
[ "def", "disable_constraint_checking", "(", "self", ")", ":", "return", "False" ]
[ 464, 4 ]
[ 470, 20 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseWrapper.enable_constraint_checking
(self)
Backends can implement as needed to re-enable foreign key constraint checking.
Backends can implement as needed to re-enable foreign key constraint checking.
def enable_constraint_checking(self):
    """
    Backends can implement as needed to re-enable foreign key constraint
    checking.
    """
    pass
[ "def", "enable_constraint_checking", "(", "self", ")", ":", "pass" ]
[ 472, 4 ]
[ 477, 12 ]
python
en
['en', 'error', 'th']
False
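
The three constraint-checking entries above form one small API: constraint_checks_disabled() is used as a context manager (Django applies contextlib.contextmanager in the surrounding class; the decorator falls outside this extracted snippet) and brackets the block with disable_constraint_checking() / enable_constraint_checking(). A sketch of loading rows in an order that would otherwise violate foreign keys; the statements are placeholders:

from django.db import connection

def load_out_of_order(statements):
    # statements: iterable of (sql, params) pairs, hypothetical input
    with connection.constraint_checks_disabled():
        with connection.cursor() as cursor:
            for sql, params in statements:
                cursor.execute(sql, params)
    # On backends that support it, verify integrity afterwards.
    connection.check_constraints()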