desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
@staticmethod
def get_refactor(ctx):
    """Create a module-to-package refactoring for the current context.

    :return ModuleToPackage:
    """
    project = ctx.project
    resource = ctx.resource
    return ModuleToPackage(project, resource)
|
@staticmethod
def get_changes(refactor, input_str, in_hierarchy=False):
    """Get changes.

    ``input_str`` and ``in_hierarchy`` are accepted for interface
    compatibility but not used by this refactoring.

    :return Changes:
    """
    return refactor.get_changes()
|
@staticmethod
def get_input_str(refactor, ctx):
    """Prompt the user for the destination.

    :return str:
    """
    return env.user_input('Enter destination:')
|
@staticmethod
def get_refactor(ctx):
    """Create a move refactoring at the cursor position.

    An offset of 0 is normalized to ``None`` — presumably so the whole
    module is moved instead of a single element (confirm against rope).

    :return Rename:
    """
    _, offset = env.get_offset_params()
    offset = offset or None
    return move.create_move(ctx.project, ctx.resource, offset)
|
@staticmethod
def get_input_str(refactor, ctx):
    """Ask the user for a new signature, prefilled with the current one.

    :return str:
    """
    current = refactor.get_args()
    prefill = ', '.join(arg[0] for arg in current)
    return env.user_input('Change the signature:', prefill)
|
@staticmethod
def get_refactor(ctx):
    """Create a change-signature refactoring at the cursor offset.

    :return change_signature.ChangeSignature:
    """
    _, offset = env.get_offset_params()
    return change_signature.ChangeSignature(ctx.project, ctx.resource, offset)
|
def get_changes(self, refactor, input_string, in_hierarchy=False):
    """Compute the rope changes for a signature edit.

    ``input_string`` is the user-edited argument list; it is diffed
    against the refactoring's current arguments to build removals,
    additions, and a final reorder.

    :return Rope.changes:
    """
    # Strip whitespace and parens, leaving a bare comma-separated list.
    args = re.sub('[\\s\\(\\)]+', '', input_string).split(',')
    olds = [arg[0] for arg in refactor.get_args()]
    changers = []
    # Remove arguments that disappeared from the user's list.  `olds`
    # is updated in step so the recorded indices track the shrinking list.
    for arg in [a for a in olds if (a not in args)]:
        changers.append(change_signature.ArgumentRemover(olds.index(arg)))
        olds.remove(arg)
    order = []
    # Add new arguments at their requested position and record the final
    # index of every surviving name for the reorder step.
    for (index, arg) in enumerate(args):
        if (arg not in olds):
            changers.append(change_signature.ArgumentAdder(index, arg))
            olds.insert(index, arg)
        order.append(olds.index(arg))
    changers.append(change_signature.ArgumentReorderer(order, autodef='None'))
    return refactor.get_changes(changers, in_hierarchy=in_hierarchy)
|
def __init__(self, kind, *args, **kwargs):
    """Remember the kind of element to generate, then delegate up."""
    self.kind = kind
    super(GenerateElementRefactoring, self).__init__(*args, **kwargs)
|
def get_refactor(self, ctx):
    """Create a generate-element refactoring for ``self.kind``.

    :return Rename:
    """
    _, offset = env.get_offset_params()
    return generate.create_generate(self.kind, ctx.project, ctx.resource, offset)
|
def get_changes(self, refactor, input_str, in_hierarchy=False):
    """Return the changes computed by *refactor* (input is ignored).

    :return Rope.changes:
    """
    return refactor.get_changes()
|
@property
def filename(self):
    """Deprecated alias for `source'."""
    warnings.warn(u"The 'filename' attribute will be removed in future versions. Use 'source' instead.", DeprecationWarning, stacklevel=2)
    return self.source
|
@filename.setter
def filename(self, value):
    """Deprecated alias for `source' (setter)."""
    warnings.warn(u"The 'filename' attribute will be removed in future versions. Use 'source' instead.", DeprecationWarning, stacklevel=2)
    self.source = value
|
def sections(self):
    """Return all section names, excluding [DEFAULT]."""
    return [name for name in self._sections]
|
def add_section(self, section):
    """Create a new section in the configuration.

    :raises ValueError: if *section* is the DEFAULT section name
    :raises DuplicateSectionError: if the section already exists
    """
    if (section == self.default_section):
        raise ValueError((u'Invalid section name: %r' % section))
    if (section in self._sections):
        raise DuplicateSectionError(section)
    # Register both the backing dict and the mapping-protocol proxy.
    self._sections[section] = self._dict()
    self._proxies[section] = SectionProxy(self, section)
|
def has_section(self, section):
    """Whether *section* is present; DEFAULT is never acknowledged."""
    return section in self._sections
|
def options(self, section):
    """Return option names for *section*, merged with the defaults.

    :raises NoSectionError: when the section does not exist
    """
    try:
        merged = self._sections[section].copy()
    except KeyError:
        raise from_none(NoSectionError(section))
    merged.update(self._defaults)
    return list(merged.keys())
|
def read(self, filenames, encoding=None):
    """Read and parse one filename or a list of filenames.

    Files that cannot be opened are silently skipped so callers can pass
    a list of candidate configuration-file locations.

    :return list: the filenames that were successfully read
    """
    if (PY2 and isinstance(filenames, bytes)):
        # Bytestring filename on Python 2: deprecated, but still wrapped.
        warnings.warn(u'You passed a bytestring as `filenames`. This will not work on Python 3. Use `cp.read_file()` or switch to using Unicode strings across the board.', DeprecationWarning, stacklevel=2)
        filenames = [filenames]
    elif isinstance(filenames, str):
        filenames = [filenames]
    successes = []
    for name in filenames:
        try:
            with open(name, encoding=encoding) as handle:
                self._read(handle, name)
        except IOError:
            continue
        successes.append(name)
    return successes
|
def read_file(self, f, source=None):
    """Like read() but for a file-like (iterable of lines) object.

    *source* names the file being read; when omitted it is taken from
    ``f.name``, falling back to '<???>'.
    """
    if source is None:
        source = getattr(f, 'name', u'<???>')
    self._read(f, source)
|
def read_string(self, string, source=u'<string>'):
    """Read configuration from *string*."""
    buffer = io.StringIO(string)
    self.read_file(buffer, source)
|
def read_dict(self, dictionary, source=u'<dict>'):
    """Read configuration from a dictionary of sections.

    Keys are section names, values are dicts of options; both are
    stringified.  Ordering is preserved when the mapping preserves it.
    In strict mode, duplicate sections/options raise.
    """
    seen = set()
    for (section, keys) in dictionary.items():
        section = str(section)
        try:
            self.add_section(section)
        except (DuplicateSectionError, ValueError):
            # Re-raise only for a true duplicate in strict mode; the
            # DEFAULT section legitimately trips ValueError here.
            if self._strict and (section in seen):
                raise
        seen.add(section)
        for (key, value) in keys.items():
            key = self.optionxform(str(key))
            if value is not None:
                value = str(value)
            if self._strict and ((section, key) in seen):
                raise DuplicateOptionError(section, key, source)
            seen.add((section, key))
            self.set(section, key, value)
|
def readfp(self, fp, filename=None):
    """Deprecated, use read_file instead."""
    warnings.warn(u"This method will be removed in future versions. Use 'parser.read_file()' instead.", DeprecationWarning, stacklevel=2)
    self.read_file(fp, source=filename)
|
def get(self, section, option, **kwargs):
    """Get an option value for a given section.

    Keyword-only arguments: `raw', `vars' and `fallback'.  Lookup order
    is `vars' (if given), the section, then DEFAULTSECT.  `fallback'
    (which may be None) is returned when the section or option is
    missing; otherwise NoSectionError/NoOptionError is raised.
    Interpolation is applied unless `raw' is true.  DEFAULT is special.
    """
    raw = kwargs.get(u'raw', False)
    vars = kwargs.get(u'vars', None)
    fallback = kwargs.get(u'fallback', _UNSET)
    try:
        lookup = self._unify_values(section, vars)
    except NoSectionError:
        if fallback is _UNSET:
            raise
        return fallback
    option = self.optionxform(option)
    try:
        value = lookup[option]
    except KeyError:
        if fallback is _UNSET:
            raise NoOptionError(option, section)
        return fallback
    if raw or (value is None):
        return value
    return self._interpolation.before_get(self, section, option, value, lookup)
|
def items(self, section=_UNSET, raw=False, vars=None):
    """Return (name, value) pairs for *section* (or all sections).

    Interpolations are expanded unless `raw' is true; `vars' overrides
    pre-existing defaults.  DEFAULT is special.
    """
    if section is _UNSET:
        return super(RawConfigParser, self).items()
    merged = self._defaults.copy()
    try:
        merged.update(self._sections[section])
    except KeyError:
        if section != self.default_section:
            raise NoSectionError(section)
    if vars:
        for (key, value) in vars.items():
            merged[self.optionxform(key)] = value
    if raw:
        getter = (lambda option: merged[option])
    else:
        getter = (lambda option: self._interpolation.before_get(self, section, option, merged[option], merged))
    return [(option, getter(option)) for option in merged.keys()]
|
def popitem(self):
    """Remove and return the first non-DEFAULT section.

    :return tuple: (section_name, section_proxy)
    :raises KeyError: when no removable section remains
    """
    for name in self.sections():
        proxy = self[name]
        del self[name]
        return (name, proxy)
    raise KeyError
|
def has_option(self, section, option):
    """Whether *option* exists in *section* (or in the defaults).

    A falsy or DEFAULT *section* checks the defaults only; a missing
    section yields False rather than raising.
    """
    if (not section) or (section == self.default_section):
        return self.optionxform(option) in self._defaults
    if section not in self._sections:
        return False
    key = self.optionxform(option)
    return (key in self._sections[section]) or (key in self._defaults)
|
def set(self, section, option, value=None):
    """Set an option; truthy values go through interpolation checking."""
    if value:
        value = self._interpolation.before_set(self, section, option, value)
    if (not section) or (section == self.default_section):
        target = self._defaults
    else:
        try:
            target = self._sections[section]
        except KeyError:
            raise from_none(NoSectionError(section))
    target[self.optionxform(option)] = value
|
def write(self, fp, space_around_delimiters=True):
    """Write an .ini-format representation of the configuration.

    When *space_around_delimiters* is true, the key/value delimiter is
    padded with single spaces.
    """
    delim = self._delimiters[0]
    if space_around_delimiters:
        delim = u' {0} '.format(delim)
    if self._defaults:
        self._write_section(fp, self.default_section, self._defaults.items(), delim)
    for section in self._sections:
        self._write_section(fp, section, self._sections[section].items(), delim)
|
def _write_section(self, fp, section_name, section_items, delimiter):
    """Write one [section] header plus its key/value lines to *fp*."""
    fp.write(u'[{0}]\n'.format(section_name))
    for (key, value) in section_items:
        value = self._interpolation.before_write(self, section_name, key, value)
        if (value is None) and self._allow_no_value:
            rendered = u''
        else:
            # NOTE(review): ' DCTB ' looks like a dataset-mangled tab for
            # continuation lines — kept byte-for-byte.
            rendered = delimiter + str(value).replace(u'\n', u'\n DCTB ')
        fp.write(u'{0}{1}\n'.format(key, rendered))
    fp.write(u'\n')
|
def remove_option(self, section, option):
    """Remove *option* from *section*; return whether it existed.

    :raises NoSectionError: when the section does not exist
    """
    if (not section) or (section == self.default_section):
        target = self._defaults
    else:
        try:
            target = self._sections[section]
        except KeyError:
            raise from_none(NoSectionError(section))
    key = self.optionxform(option)
    if key not in target:
        return False
    del target[key]
    return True
|
def remove_section(self, section):
    """Delete *section* and its proxy; return whether it existed."""
    if section not in self._sections:
        return False
    del self._sections[section]
    del self._proxies[section]
    return True
|
def _read(self, fp, fpname):
    """Parse a sectioned configuration file.

    Sections are headers in square brackets followed by key/value
    options delimited by '=' or ':' (by default).  Values may span
    multiple lines when continuation lines are indented deeper than the
    first line; depending on parser mode, blank lines are part of
    multiline values or ignored.  Comments use the configured prefixes.

    :raises DuplicateSectionError/DuplicateOptionError: in strict mode
    :raises MissingSectionHeaderError: for options before any section
    """
    elements_added = set()
    cursect = None          # current section's backing dict
    sectname = None
    optname = None          # last option, target of continuation lines
    lineno = 0
    indent_level = 0
    e = None                # first parsing error, re-raised at the end
    for (lineno, line) in enumerate(fp, start=1):
        # Locate the earliest *inline* comment prefix preceded by
        # whitespace (or at column 0).
        comment_start = sys.maxsize
        inline_prefixes = dict(((p, (-1)) for p in self._inline_comment_prefixes))
        while ((comment_start == sys.maxsize) and inline_prefixes):
            next_prefixes = {}
            for (prefix, index) in inline_prefixes.items():
                index = line.find(prefix, (index + 1))
                if (index == (-1)):
                    continue
                next_prefixes[prefix] = index
                if ((index == 0) or ((index > 0) and line[(index - 1)].isspace())):
                    comment_start = min(comment_start, index)
            inline_prefixes = next_prefixes
        # Full-line comments override any inline position.
        for prefix in self._comment_prefixes:
            if line.strip().startswith(prefix):
                comment_start = 0
                break
        if (comment_start == sys.maxsize):
            comment_start = None
        value = line[:comment_start].strip()
        if (not value):
            # Blank line: either part of a multiline value or a break.
            if self._empty_lines_in_values:
                if ((comment_start is None) and (cursect is not None) and optname and (cursect[optname] is not None)):
                    cursect[optname].append(u'')
            else:
                # Sentinel: no continuation may follow a blank line.
                indent_level = sys.maxsize
            continue
        first_nonspace = self.NONSPACECRE.search(line)
        cur_indent_level = (first_nonspace.start() if first_nonspace else 0)
        if ((cursect is not None) and optname and (cur_indent_level > indent_level)):
            # Deeper indent: continuation of the previous option value.
            cursect[optname].append(value)
        else:
            indent_level = cur_indent_level
            mo = self.SECTCRE.match(value)
            if mo:
                # A [section] header.
                sectname = mo.group(u'header')
                if (sectname in self._sections):
                    if (self._strict and (sectname in elements_added)):
                        raise DuplicateSectionError(sectname, fpname, lineno)
                    cursect = self._sections[sectname]
                    elements_added.add(sectname)
                elif (sectname == self.default_section):
                    cursect = self._defaults
                else:
                    cursect = self._dict()
                    self._sections[sectname] = cursect
                    self._proxies[sectname] = SectionProxy(self, sectname)
                    elements_added.add(sectname)
                # No continuation may belong to the previous option.
                optname = None
            elif (cursect is None):
                raise MissingSectionHeaderError(fpname, lineno, line)
            else:
                mo = self._optcre.match(value)
                if mo:
                    (optname, vi, optval) = mo.group(u'option', u'vi', u'value')
                    if (not optname):
                        e = self._handle_error(e, fpname, lineno, line)
                    optname = self.optionxform(optname.rstrip())
                    if (self._strict and ((sectname, optname) in elements_added)):
                        raise DuplicateOptionError(sectname, optname, fpname, lineno)
                    elements_added.add((sectname, optname))
                    if (optval is not None):
                        optval = optval.strip()
                        # Values are kept as lists of lines until
                        # _join_multiline_values() runs.
                        cursect[optname] = [optval]
                    else:
                        # Valueless option (allow_no_value mode).
                        cursect[optname] = None
                else:
                    # Neither a section, a continuation nor an option.
                    e = self._handle_error(e, fpname, lineno, line)
    if e:
        raise e
    self._join_multiline_values()
|
def _unify_values(self, section, vars):
    """Chain lookups: `vars' first, then *section*, then the defaults.

    :raises NoSectionError: for an unknown, non-DEFAULT section
    """
    section_map = {}
    try:
        section_map = self._sections[section]
    except KeyError:
        if section != self.default_section:
            raise NoSectionError(section)
    var_map = {}
    if vars:
        for (key, value) in vars.items():
            if value is not None:
                value = str(value)
            var_map[self.optionxform(key)] = value
    return _ChainMap(var_map, section_map, self._defaults)
|
def _convert_to_boolean(self, value):
    """Translate *value* to a boolean via BOOLEAN_STATES.

    :raises ValueError: when the value is not a recognized boolean
    """
    key = value.lower()
    if key not in self.BOOLEAN_STATES:
        raise ValueError((u'Not a boolean: %s' % value))
    return self.BOOLEAN_STATES[key]
|
def _validate_value_types(self, **kwargs):
    """Raise TypeError for non-string section/option/value.

    A None value is tolerated only when valueless options are allowed.
    On Python 2, bytestrings are decoded as UTF-8 with a deprecation
    warning.  Used by the mapping protocol and ConfigParser.set(), not
    by the classic RawConfigParser.set().

    :return tuple: (section, option, value)
    """
    section = kwargs.get(u'section', u'')
    option = kwargs.get(u'option', u'')
    value = kwargs.get(u'value', u'')
    if (PY2 and (bytes in (type(section), type(option), type(value)))):
        warnings.warn(u'You passed a bytestring. Implicitly decoding as UTF-8 string. This will not work on Python 3. Please switch to using Unicode strings across the board.', DeprecationWarning, stacklevel=2)
        if isinstance(section, bytes):
            section = section.decode(u'utf8')
        if isinstance(option, bytes):
            option = option.decode(u'utf8')
        if isinstance(value, bytes):
            value = value.decode(u'utf8')
    if not isinstance(section, str):
        raise TypeError(u'section names must be strings')
    if not isinstance(option, str):
        raise TypeError(u'option keys must be strings')
    if ((not self._allow_no_value) or value) and (not isinstance(value, str)):
        raise TypeError(u'option values must be strings')
    return (section, option, value)
|
def set(self, section, option, value=None):
    """Set an option, validating type and interpolation syntax.

    Extends RawConfigParser.set.
    """
    (_, option, value) = self._validate_value_types(option=option, value=value)
    super(ConfigParser, self).set(section, option, value)
|
def add_section(self, section):
    """Create a new section, validating that the name is a string.

    Extends RawConfigParser.add_section.
    """
    (section, _, _) = self._validate_value_types(section=section)
    super(ConfigParser, self).add_section(section)
|
def __init__(self, parser, name):
    """Create a view on section *name* of *parser*.

    For every converter registered on the parser, a matching
    ``get<conv>`` accessor bound to this section is exposed.
    """
    self._parser = parser
    self._name = name
    for conv in parser.converters:
        accessor = u'get' + conv
        setattr(self, accessor, functools.partial(self.get, _impl=getattr(parser, accessor)))
|
def get(self, option, fallback=None, **kwargs):
    """Get an option value from this section.

    Returns *fallback* (default None) when the option is missing.  An
    internal ``_impl`` keyword selects the underlying getter; it
    defaults to the parser's own ``get``.
    """
    kwargs.setdefault(u'raw', False)
    kwargs.setdefault(u'vars', None)
    impl = kwargs.pop(u'_impl', None) or self._parser.get
    return impl(self._name, option, fallback=fallback, **kwargs)
|
def __init__(self, *maps):
    """Set *maps* as the lookup chain; default to one empty dict."""
    self.maps = list(maps) or [{}]
|
@classmethod
def fromkeys(cls, iterable, *args):
    """Build a ChainMap over a single dict made from *iterable*."""
    return cls(dict.fromkeys(iterable, *args))
|
def copy(self):
    """New ChainMap (or subclass) with a copy of maps[0] and refs to the rest."""
    head = self.maps[0].copy()
    return self.__class__(head, *self.maps[1:])
|
def new_child(self):
    """New ChainMap with a fresh empty dict in front of all current maps."""
    return self.__class__({}, *self.maps)
|
@property
def parents(self):
    """A ChainMap over every map except the first."""
    return self.__class__(*self.maps[1:])
|
def popitem(self):
    """Pop an item pair from maps[0]; raise KeyError when it is empty."""
    try:
        return self.maps[0].popitem()
    except KeyError:
        raise KeyError(u'No keys found in the first mapping.')
|
def pop(self, key, *args):
    """Pop *key* from maps[0]; KeyError if absent and no default given."""
    first = self.maps[0]
    try:
        return first.pop(key, *args)
    except KeyError:
        raise KeyError(u'Key not found in the first mapping: {!r}'.format(key))
|
def clear(self):
    """Empty maps[0]; later maps are left intact."""
    self.maps[0].clear()
|
def is_package(self, fullname):
    """Return True if the named module is a package (has __path__).

    Needed to produce correct spec objects on Python 3.4 (see PEP 451).
    """
    module = self.__get_module(fullname)
    return hasattr(module, '__path__')
|
def get_code(self, fullname):
    """Return None; required because is_package is implemented.

    The module is still resolved so a missing module raises here.
    """
    self.__get_module(fullname)
    return None
|
def module_build(self, module, modname=None):
    """Build an astroid module from a living module instance.

    Prefers rebuilding from the matching .py source next to the
    module's __file__; falls back to introspection of the live object.
    """
    node = None
    path = getattr(module, '__file__', None)
    if path is not None:
        base, ext = os.path.splitext(modutils._path_from_filename(path))
        if (ext in ('.py', '.pyc', '.pyo')) and os.path.exists(base + '.py'):
            node = self.file_build(base + '.py', modname)
    if node is None:
        # No usable source: introspect the live module object instead.
        node = self.inspect_build(module, modname=modname, path=path)
    if self._apply_transforms:
        node = self._manager.visit_transforms(node)
    return node
|
def file_build(self, path, modname=None):
    """Build astroid from a Python source file (i.e. from an ast).

    *path* is expected to be a Python source file.  Open/encoding
    failures are re-raised (via util.reraise) as AstroidBuildingError
    or AstroidSyntaxError.
    """
    try:
        (stream, encoding, data) = open_source_file(path)
    except IOError as exc:
        util.reraise(exceptions.AstroidBuildingError('Unable to load file {path}:\n{error}', modname=modname, path=path, error=exc))
    except (SyntaxError, LookupError) as exc:
        util.reraise(exceptions.AstroidSyntaxError('Python 3 encoding specification error or unknown encoding:\n{error}', modname=modname, path=path, error=exc))
    except UnicodeError:
        util.reraise(exceptions.AstroidBuildingError('Wrong or no encoding specified for (unknown).', filename=path))
    with stream:
        if (modname is None):
            # Derive the dotted module name from the path when possible.
            try:
                modname = '.'.join(modutils.modpath_from_file(path))
            except ImportError:
                modname = os.path.splitext(os.path.basename(path))[0]
        module = self._data_build(data, modname, path)
        return self._post_build(module, encoding)
|
def string_build(self, data, modname='', path=None):
    """Build an astroid module from a source code string."""
    tree = self._data_build(data, modname, path)
    tree.file_bytes = data.encode('utf-8')
    return self._post_build(tree, 'utf-8')
|
def _post_build(self, module, encoding):
    """Handle encoding and delayed nodes after a module has been built."""
    module.file_encoding = encoding
    self._manager.cache_module(module)
    for from_node in module._import_from_nodes:
        if (from_node.modname == '__future__'):
            # Record future-imports on the module for later stages.
            for (symbol, _) in from_node.names:
                module.future_imports.add(symbol)
        self.add_from_names_to_locals(from_node)
    # Handle delayed assattr nodes once the whole module is known.
    for delayed in module._delayed_assattr:
        self.delayed_assattr(delayed)
    if self._apply_transforms:
        module = self._manager.visit_transforms(module)
    return module
|
def _data_build(self, data, modname, path):
    """Build a tree node from *data* and add some information.

    Parse failures are re-raised as AstroidSyntaxError.
    """
    try:
        node = _parse((data + '\n'))
    except (TypeError, ValueError, SyntaxError) as exc:
        util.reraise(exceptions.AstroidSyntaxError('Parsing Python code failed:\n{error}', source=data, modname=modname, path=path, error=exc))
    if (path is not None):
        node_file = os.path.abspath(path)
    else:
        node_file = '<?>'
    if modname.endswith('.__init__'):
        # Strip the trailing '.__init__' (9 characters): a package.
        modname = modname[:(-9)]
        package = True
    else:
        package = ((path is not None) and (os.path.splitext(os.path.basename(path))[0] == '__init__'))
    builder = rebuilder.TreeRebuilder(self._manager)
    module = builder.visit_module(node, modname, node_file, package)
    # Carry the rebuilder's bookkeeping over for _post_build().
    module._import_from_nodes = builder._import_from_nodes
    module._delayed_assattr = builder._delayed_assattr
    return module
|
def add_from_names_to_locals(self, node):
    """Store imported names into the parent's locals.

    Locals are re-sorted (by source line) when coming from a delayed
    node.
    """
    _key_func = (lambda node: node.fromlineno)
    def sort_locals(my_list):
        my_list.sort(key=_key_func)
    for (name, asname) in node.names:
        if (name == '*'):
            # Wildcard import: register every public name of the module.
            try:
                imported = node.do_import_module()
            except exceptions.AstroidBuildingError:
                continue
            for name in imported.public_names():
                node.parent.set_local(name, node)
                sort_locals(node.parent.scope().locals[name])
        else:
            node.parent.set_local((asname or name), node)
            sort_locals(node.parent.scope().locals[(asname or name)])
|
def delayed_assattr(self, node):
    """Visit an AssAttr node.

    Adds the attribute name to the inferred owner's locals or
    instance-attribute dictionaries, handling member definition.
    Inference errors are silently ignored (best effort).
    """
    try:
        frame = node.frame()
        for inferred in node.expr.infer():
            if (inferred is util.Uninferable):
                continue
            try:
                if (inferred.__class__ is bases.Instance):
                    # Work on the proxied class, not the Instance wrapper.
                    inferred = inferred._proxied
                    iattrs = inferred.instance_attrs
                    if (not _can_assign_attr(inferred, node.attrname)):
                        continue
                elif isinstance(inferred, bases.Instance):
                    # Instance subclasses (Const, ...) are skipped.
                    continue
                elif inferred.is_function:
                    iattrs = inferred.instance_attrs
                else:
                    iattrs = inferred.locals
            except AttributeError:
                continue
            values = iattrs.setdefault(node.attrname, [])
            if (node in values):
                continue
            if ((frame.name == '__init__') and values and (values[0].frame().name != '__init__')):
                # Assignments made in __init__ take precedence: front-insert.
                values.insert(0, node)
            else:
                values.append(node)
    except exceptions.InferenceError:
        pass
|
def qname(self):
    """Return the 'qualified' name of the node, e.g. module.class.name."""
    if self.parent is None:
        return self.name
    parent_qname = self.parent.frame().qname()
    return ('%s.%s' % (parent_qname, self.name))
|
def frame(self):
    """This node is itself a frame (Module/FunctionDef/ClassDef)."""
    return self
|
def scope(self):
    """This node is itself a scope (it defines a new namespace)."""
    return self
|
def _scope_lookup(self, node, name, offset=0):
    """Interface for the scope lookup: find statements binding *name*.

    Falls back to the parent scope (module scope for non-function
    parents), and to builtins at the root.
    """
    try:
        stmts = node._filter_stmts(self.locals[name], self, offset)
    except KeyError:
        stmts = ()
    if stmts:
        return (self, stmts)
    if not self.parent:
        return builtin_lookup(name)
    enclosing = self.parent.scope()
    if not enclosing.is_function:
        enclosing = enclosing.root()
    return enclosing.scope_lookup(node, name)
|
def set_local(self, name, stmt):
    """Record *stmt* as a definition of *name* in locals.

    Definitions accumulate: earlier bindings for the same name are kept.
    """
    self.locals.setdefault(name, []).append(stmt)
|
def _append_node(self, child):
    """Append *child* to the body and link it into the tree."""
    self.body.append(child)
    child.parent = self
|
def add_local_node(self, child_node, name=None):
    """Append a locals-altering child node.

    '__class__' is special-cased: it is registered in locals but not
    appended to the body.
    """
    if name != '__class__':
        self._append_node(child_node)
    self.set_local(name or child_node.name, child_node)
|
def __getitem__(self, item):
    """Dict-interface lookup: first node bound to *item* in locals.

    :param item: the name of the locally defined object
    :raises KeyError: if the name is not defined
    """
    return self.locals[item][0]
|
def __iter__(self):
    """Dict-interface iteration over the locally defined names."""
    return iter(self.keys())
|
def keys(self):
    """Dict-interface list of the locally defined names."""
    return [name for name in self.locals]
|
def values(self):
    """Dict-interface list of the first node bound to each local name."""
    return [self[name] for name in self.keys()]
|
def items(self):
    """Dict-interface list of (name, node) pairs for the local names."""
    return list(zip(self.keys(), self.values()))
|
def stream(self):
    """Return a stream over the underlying file or bytes."""
    return self._get_stream()
|
def close(self):
    """Deprecated no-op kept for compatibility; only emits a warning."""
    warnings.warn("close method is deprecated and it is slated for removal in astroid 1.6, along with 'file_stream' property. Its behaviour is replaced by managing each file stream returned by the 'stream' method.", PendingDeprecationWarning, stacklevel=2)
|
def block_range(self, lineno):
    """Return the block line numbers (fromlineno, tolineno).

    The block always starts from the beginning, whatever *lineno* is.
    """
    return (self.fromlineno, self.tolineno)
|
def igetattr(self, name, context=None):
    """Inferred getattr: infer the statements bound to *name*.

    Attribute lookup failures are re-raised as InferenceError.
    """
    ctx = contextmod.copy_context(context)
    ctx.lookupname = name
    try:
        return bases._infer_stmts(self.getattr(name, ctx), ctx, frame=self)
    except exceptions.AttributeInferenceError as error:
        util.reraise(exceptions.InferenceError(error.message, target=self, attribute=name, context=ctx))
|
def fully_defined(self):
    """True when built from a .py file (complete representation, with code)."""
    source = self.file
    return (source is not None) and source.endswith('.py')
|
def statement(self):
    """A module is considered its own statement node."""
    return self
|
def previous_sibling(self):
    """A module has no sibling; always None."""
    return None
|
def next_sibling(self):
    """A module has no sibling; always None."""
    return None
|
def import_module(self, modname, relative_only=False, level=None):
    """Import *modname*, resolving relative imports against self."""
    if relative_only and level is None:
        level = 0
    absolute = self.relative_to_absolute_name(modname, level)
    try:
        return MANAGER.ast_from_module_name(absolute)
    except exceptions.AstroidBuildingError:
        # Fall back to the plain name unless a relative import was required.
        if relative_only:
            raise
    return MANAGER.ast_from_module_name(modname)
|
def relative_to_absolute_name(self, modname, level):
    """Return the absolute module name for a relative import.

    The relative import can be implicit (level None) or explicit.

    :raises TooManyLevelsError: when *level* climbs above the package root
    """
    if self.absolute_import_activated() and level is None:
        return modname
    if level:
        # A package's own name already counts as one level.
        prefix_level = (level - 1) if self.package else level
        if prefix_level and (self.name.count('.') < prefix_level):
            raise exceptions.TooManyLevelsError(level=prefix_level, name=self.name)
        package_name = self.name.rsplit('.', prefix_level)[0]
    elif self.package:
        package_name = self.name
    else:
        package_name = self.name.rsplit('.', 1)[0]
    if not package_name:
        return modname
    if not modname:
        return package_name
    return ('%s.%s' % (package_name, modname))
|
def wildcard_import_names(self):
    """Return the names imported when this module is wildcard-imported.

    '__builtins__' (added by the current CPython implementation of
    wildcard imports) is not included.  Falls back to all public names
    when '__all__' is missing or cannot be inferred.
    """
    # The fallback: every name not starting with an underscore.
    default = [name for name in self.keys() if (not name.startswith('_'))]
    try:
        all_values = self['__all__']
    except KeyError:
        return default
    try:
        explicit = next(all_values.assigned_stmts())
    except exceptions.InferenceError:
        return default
    except AttributeError:
        # __all__ is not an assignment target we can follow.
        return default
    inferred = []
    try:
        explicit = next(explicit.infer())
    except exceptions.InferenceError:
        return default
    if (not isinstance(explicit, (node_classes.Tuple, node_classes.List))):
        return default
    str_const = (lambda node: (isinstance(node, node_classes.Const) and isinstance(node.value, six.string_types)))
    for node in explicit.elts:
        if str_const(node):
            inferred.append(node.value)
        else:
            # Non-literal element: try to infer it down to a string.
            try:
                inferred_node = next(node.infer())
            except exceptions.InferenceError:
                continue
            if str_const(inferred_node):
                inferred.append(inferred_node.value)
    return inferred
|
def public_names(self):
    """Return the names publicly available in this module."""
    return [name for name in self.keys() if not name.startswith('_')]
|
def argnames(self):
    """Return the list of argument names, including vararg/kwarg."""
    names = _rec_get_names(self.args.args) if self.args.args else []
    if self.args.vararg:
        names.append(self.args.vararg)
    if self.args.kwarg:
        names.append(self.args.kwarg)
    return names
|
def infer_call_result(self, caller, context=None):
    """Infer what calling this function returns (inference on the body)."""
    return self.body.infer(context)
|
@decorators_mod.cachedproperty
def extra_decorators(self):
    """Get the extra decorators applied through assignment.

    Covers ``method = staticmethod(method)``-style rebinding inside the
    enclosing class; returns the Call nodes used for decoration.
    """
    frame = self.parent.frame()
    if (not isinstance(frame, ClassDef)):
        return []
    decorators = []
    for assign in frame.nodes_of_class(node_classes.Assign):
        # Only `name = callable(...)` assignments are considered.
        if (isinstance(assign.value, node_classes.Call) and isinstance(assign.value.func, node_classes.Name)):
            for assign_node in assign.targets:
                if (not isinstance(assign_node, node_classes.AssignName)):
                    # Tuple or attribute targets are not supported.
                    continue
                if (assign_node.name != self.name):
                    continue
                try:
                    meth = frame[self.name]
                except KeyError:
                    continue
                else:
                    # The rebinding must target this very function and
                    # happen directly in the class body.
                    if (isinstance(meth, FunctionDef) and (assign_node.frame() == frame)):
                        decorators.append(assign.value)
    return decorators
|
@decorators_mod.cachedproperty
def type(self):
    """Get the function type for this node.

    Possible values: 'method', 'function', 'staticmethod', 'classmethod'
    — derived from the enclosing frame and the decorators (explicit or
    assignment-based).
    """
    builtin_descriptors = {'classmethod', 'staticmethod'}
    # Assignment-style decoration (`f = staticmethod(f)`) wins first.
    for decorator in self.extra_decorators:
        if (decorator.func.name in builtin_descriptors):
            return decorator.func.name
    frame = self.parent.frame()
    type_name = 'function'
    if isinstance(frame, ClassDef):
        if (self.name == '__new__'):
            return 'classmethod'
        elif ((sys.version_info >= (3, 6)) and (self.name == '__init_subclass__')):
            # __init_subclass__ is an implicit classmethod since 3.6.
            return 'classmethod'
        else:
            type_name = 'method'
    if (not self.decorators):
        return type_name
    for node in self.decorators.nodes:
        if isinstance(node, node_classes.Name):
            if (node.name in builtin_descriptors):
                return node.name
        if isinstance(node, node_classes.Call):
            # Parametrized decorator: infer what the call returns.
            try:
                current = next(node.func.infer())
            except exceptions.InferenceError:
                continue
            _type = _infer_decorator_callchain(current)
            if (_type is not None):
                return _type
        try:
            for inferred in node.infer():
                _type = _infer_decorator_callchain(inferred)
                if (_type is not None):
                    return _type
                if (not isinstance(inferred, ClassDef)):
                    continue
                # Class decorator: check ancestry for the builtin
                # descriptor classes.
                for ancestor in inferred.ancestors():
                    if (not isinstance(ancestor, ClassDef)):
                        continue
                    if ancestor.is_subtype_of(('%s.classmethod' % BUILTINS)):
                        return 'classmethod'
                    elif ancestor.is_subtype_of(('%s.staticmethod' % BUILTINS)):
                        return 'staticmethod'
        except exceptions.InferenceError:
            pass
    return type_name
|
def block_range(self, lineno):
    """Return block line numbers.

    The range always starts from the "def" position, whatever the given
    lineno is.
    """
    start = self.fromlineno
    end = self.tolineno
    return (start, end)
|
def getattr(self, name, context=None):
    """Look up *name* on the function object itself.

    This method doesn't look in the instance_attrs dictionary of
    instances since that's done by an Instance proxy at inference time.

    :raises AttributeInferenceError: if the name is found neither in the
        instance attributes nor in the special attributes.
    """
    own_attrs = self.instance_attrs
    if name in own_attrs:
        return own_attrs[name]
    specials = self.special_attributes
    if name in specials:
        return [specials.lookup(name)]
    raise exceptions.AttributeInferenceError(target=self, attribute=name)
|
def igetattr(self, name, context=None):
    """Inferred getattr, which returns an iterator of inferred statements."""
    try:
        return bases._infer_stmts(self.getattr(name, context), context, frame=self)
    except exceptions.AttributeInferenceError as error:
        # Translate the lookup failure into an InferenceError, preserving
        # the original traceback via util.reraise.
        wrapped = exceptions.InferenceError(error.message, target=self, attribute=name, context=context)
        util.reraise(wrapped)
|
def is_method(self):
    """Return true if the function node should be considered as a method."""
    # A plain function can never be a method, whatever its frame is.
    if self.type == 'function':
        return False
    return isinstance(self.parent.frame(), ClassDef)
|
@decorators_mod.cached
def decoratornames(self):
    """Return the qualified names of this function's decorators as a set.

    Both syntactic decorators (``@name``) and assignment-style extra
    decorators are taken into account; decorators that cannot be
    inferred are silently skipped.
    """
    qnames = set()
    decorator_nodes = []
    if self.decorators is not None:
        decorator_nodes.extend(self.decorators.nodes)
    decorator_nodes.extend(self.extra_decorators)
    for decorator in decorator_nodes:
        try:
            for inferred in decorator.infer():
                qnames.add(inferred.qname())
        except exceptions.InferenceError:
            continue
    return qnames
|
def is_bound(self):
    """Return true if the function is bound to an Instance or a class."""
    # Only classmethods are considered bound by this model.
    bound_types = ('classmethod',)
    return self.type in bound_types
|
def is_abstract(self, pass_is_abstract=True):
    """Returns True if the method is abstract.

    A method is considered abstract if
     - the only statement is 'raise NotImplementedError', or
     - the only statement is 'pass' and pass_is_abstract is True, or
     - the method is annotated with abc.astractproperty/abc.abstractmethod
    """
    if self.decorators:
        for node in self.decorators.nodes:
            try:
                inferred = next(node.infer())
            except exceptions.InferenceError:
                continue
            if (inferred and (inferred.qname() in ('abc.abstractproperty', 'abc.abstractmethod'))):
                return True
    for child_node in self.body:
        if isinstance(child_node, node_classes.Raise):
            if child_node.raises_not_implemented():
                return True
        # NOTE(review): this loop always returns on the FIRST statement, so
        # only a single-statement body is effectively examined here.
        return (pass_is_abstract and isinstance(child_node, node_classes.Pass))
    # Empty body; presumably unreachable for a parsed def — TODO confirm.
    # Also note: returns None (not False) here when pass_is_abstract is False.
    if pass_is_abstract:
        return True
|
def is_generator(self):
    """Return a truthy value if this is a generator function.

    The result is the first yield node found in this function's own body
    (nested functions and lambdas are skipped), or False when there is
    none.
    """
    yield_types = (node_classes.Yield, node_classes.YieldFrom)
    yields = self.nodes_of_class(yield_types, skip_klass=(FunctionDef, Lambda))
    return next(yields, False)
|
def infer_call_result(self, caller, context=None):
    """Infer what a function is returning when called."""
    if self.is_generator():
        # Calling a generator function yields a Generator object, not the
        # values it would produce.
        result = bases.Generator(self)
        (yield result)
        return
    # Special-case six.with_metaclass(metaclass, *bases): fabricate the
    # hidden temporary class that the call would create.
    if ((self.name == 'with_metaclass') and (len(self.args.args) == 1) and (self.args.vararg is not None)):
        metaclass = next(caller.args[0].infer(context))
        if isinstance(metaclass, ClassDef):
            c = ClassDef('temporary_class', None)
            c.hide = True
            c.parent = self
            class_bases = [next(b.infer(context)) for b in caller.args[1:]]
            # Drop base classes that could not be inferred.
            c.bases = [base for base in class_bases if (base != util.Uninferable)]
            c._metaclass = metaclass
            (yield c)
            return
    # General case: yield the inferred value of every return statement in
    # this function's own body (nested defs are skipped).
    returns = self.nodes_of_class(node_classes.Return, skip_klass=FunctionDef)
    for returnnode in returns:
        if (returnnode.value is None):
            # A bare ``return`` produces the constant None.
            (yield node_classes.Const(None))
        else:
            try:
                for inferred in returnnode.value.infer(context):
                    (yield inferred)
            except exceptions.InferenceError:
                (yield util.Uninferable)
|
def block_range(self, lineno):
    """Return block line numbers.

    The range always starts from the "class" position, whatever the
    given lineno is.
    """
    span = (self.fromlineno, self.tolineno)
    return span
|
def infer_call_result(self, caller, context=None):
    """Infer what a class is returning when called.

    A three-argument call on a subtype of ``type`` is a dynamic class
    creation; anything else produces an instance of this class.
    """
    is_type_call = self.is_subtype_of(('%s.type' % (BUILTINS,)), context) and (len(caller.args) == 3)
    if is_type_call:
        yield self._infer_type_call(caller, context)
    else:
        yield bases.Instance(self)
|
@property
def basenames(self):
    """Get the list of parent class names, as they appear in the class definition."""
    names = []
    for base_node in self.bases:
        names.append(base_node.as_string())
    return names
|
def ancestors(self, recurs=True, context=None):
    """Return an iterator on the node base classes in a prefixed
    depth first order.

    :param recurs:
        boolean indicating if it should recurse or return direct
        ancestors only
    """
    # Guard against inheritance cycles: start with ourselves already seen.
    yielded = set([self])
    if (context is None):
        context = contextmod.InferenceContext()
    if six.PY3:
        if ((not self.bases) and (self.qname() != 'builtins.object')):
            # On Python 3 every class implicitly inherits from object.
            (yield builtin_lookup('object')[1][0])
            return
    for stmt in self.bases:
        with context.restore_path():
            try:
                for baseobj in stmt.infer(context):
                    if (not isinstance(baseobj, ClassDef)):
                        if isinstance(baseobj, bases.Instance):
                            # Unwrap instances to their proxied class.
                            baseobj = baseobj._proxied
                        else:
                            continue
                    if (not baseobj.hide):
                        if (baseobj in yielded):
                            continue
                        yielded.add(baseobj)
                        (yield baseobj)
                    if recurs:
                        for grandpa in baseobj.ancestors(recurs=True, context=context):
                            if (grandpa is self):
                                # This class is an ancestor of itself through a
                                # cycle; stop walking this branch.
                                break
                            if (grandpa in yielded):
                                continue
                            yielded.add(grandpa)
                            (yield grandpa)
            except exceptions.InferenceError:
                # A base that cannot be inferred is simply skipped.
                continue
|
def local_attr_ancestors(self, name, context=None):
    """Return an iterator on astroid representation of parent classes
    which have <name> defined in their locals.
    """
    if self.newstyle and all(n.newstyle for n in self.ancestors(context)):
        # Fully new-style hierarchies can use the linearized MRO, minus
        # the class itself; fall back to plain ancestors on MRO errors.
        try:
            search_order = self.mro(context)[1:]
        except exceptions.MroError:
            search_order = self.ancestors(context=context)
    else:
        search_order = self.ancestors(context=context)
    for ancestor in search_order:
        if name in ancestor:
            yield ancestor
|
def instance_attr_ancestors(self, name, context=None):
    """Return an iterator on astroid representation of parent classes
    which have <name> defined in their instance attribute dictionary.
    """
    for ancestor in self.ancestors(context=context):
        if name not in ancestor.instance_attrs:
            continue
        yield ancestor
|
def local_attr(self, name, context=None):
    """Return the list of assign nodes associated to name in this class
    locals or in its parents.

    :raises AttributeInferenceError:
        if no attribute with this name has been found in this class or
        its parent classes
    """
    result = []
    if name in self.locals:
        result = self.locals[name]
    else:
        # Fall back to the first ancestor that defines the name locally.
        class_node = next(self.local_attr_ancestors(name, context), ())
        if class_node:
            result = class_node.locals[name]
    # Deletions of the attribute do not count as definitions.
    result = [n for n in result if not isinstance(n, node_classes.DelAttr)]
    if not result:
        raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context)
    return result
|
def instance_attr(self, name, context=None):
    """Return the astroid nodes associated to name in this class instance
    attributes dictionary and in its parents.

    :raises AttributeInferenceError:
        if no attribute with this name has been found in this class or
        its parent classes
    """
    values = list(self.instance_attrs.get(name, []))
    # Collect the same attribute from every ancestor that defines it.
    for class_node in self.instance_attr_ancestors(name, context):
        values.extend(class_node.instance_attrs[name])
    # Deletions of the attribute do not count as definitions.
    values = [n for n in values if not isinstance(n, node_classes.DelAttr)]
    if not values:
        raise exceptions.AttributeInferenceError(target=self, attribute=name, context=context)
    return values
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.