index | package | name | docstring | code | signature |
---|---|---|---|---|---|
730,084 | mmengine.fileio.file_client | join_path | Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result of concatenation.
| def join_path(self, filepath: Union[str, Path],
*filepaths: Union[str, Path]) -> str:
r"""Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result of concatenation.
"""
return self.client.join_path(filepath, *filepaths)
| (self, filepath: Union[str, pathlib.Path], *filepaths: Union[str, pathlib.Path]) -> str |
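Since ``join_path`` only forwards to the active backend, a short usage sketch shows the end-to-end behavior. This is a minimal sketch assuming mmengine is installed and that the ``'disk'`` backend name maps to the local backend; the paths are illustrative.

```python
from mmengine.fileio import FileClient

client = FileClient(backend='disk')
# Delegates to the backend's join_path; for the disk backend this is
# os.path.join under the hood.
path = client.join_path('/data', 'images', 'train.txt')
print(path)  # '/data/images/train.txt' on POSIX systems
```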
730,085 | mmengine.fileio.file_client | list_dir_or_file | Scan a directory to find the directories or files of interest, in
arbitrary order.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
Args:
dir_path (str | Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix
that we are interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the
directory. Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
| def list_dir_or_file(self,
dir_path: Union[str, Path],
list_dir: bool = True,
list_file: bool = True,
suffix: Optional[Union[str, Tuple[str]]] = None,
recursive: bool = False) -> Iterator[str]:
"""Scan a directory to find the interested directories or files in
arbitrary order.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
Args:
dir_path (str | Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix
that we are interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the
directory. Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
"""
yield from self.client.list_dir_or_file(dir_path, list_dir, list_file,
suffix, recursive)
| (self, dir_path: Union[str, pathlib.Path], list_dir: bool = True, list_file: bool = True, suffix: Union[str, Tuple[str], NoneType] = None, recursive: bool = False) -> Iterator[str] |
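A usage sketch for ``list_dir_or_file``, assuming the local disk backend; the directory path is illustrative. Paths are yielded relative to ``dir_path``, and the local backend only accepts ``suffix`` together with ``list_dir=False``.

```python
from mmengine.fileio import FileClient

client = FileClient(backend='disk')
# Recursively yield the .txt files under dir_path, relative to dir_path.
for rel_path in client.list_dir_or_file('/data/anns',
                                        list_dir=False,
                                        suffix='.txt',
                                        recursive=True):
    print(rel_path)
```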
730,086 | mmengine.fileio.file_client | parse_uri_prefix | Parse the prefix of a uri.
Args:
uri (str | Path): Uri to be parsed that contains the file prefix.
Examples:
>>> FileClient.parse_uri_prefix('s3://path/of/your/file')
's3'
Returns:
str | None: Return the prefix of uri if the uri contains '://' else
``None``.
| @staticmethod
def parse_uri_prefix(uri: Union[str, Path]) -> Optional[str]:
"""Parse the prefix of a uri.
Args:
uri (str | Path): Uri to be parsed that contains the file prefix.
Examples:
>>> FileClient.parse_uri_prefix('s3://path/of/your/file')
's3'
Returns:
str | None: Return the prefix of uri if the uri contains '://' else
``None``.
"""
assert is_filepath(uri)
uri = str(uri)
if '://' not in uri:
return None
else:
prefix, _ = uri.split('://')
# In the case of PetrelBackend, the prefix may contain the cluster
# name like clusterName:s3
if ':' in prefix:
_, prefix = prefix.split(':')
return prefix
| (uri: Union[str, pathlib.Path]) -> Optional[str] |
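The cluster-name branch in the code above can be exercised directly. These assertions follow from the implementation shown; the URIs are illustrative.

```python
from mmengine.fileio import FileClient

assert FileClient.parse_uri_prefix('s3://bucket/key') == 's3'
# PetrelBackend-style URIs carry a cluster name before the scheme;
# only the scheme part is returned.
assert FileClient.parse_uri_prefix('cluster1:s3://bucket/key') == 's3'
# A plain path without '://' yields None.
assert FileClient.parse_uri_prefix('/local/path') is None
```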
730,087 | mmengine.fileio.file_client | put | Write data to a given ``filepath`` with 'wb' mode.
Note:
``put`` should create a directory if the directory of ``filepath``
does not exist.
Args:
obj (bytes): Data to be written.
filepath (str or Path): Path to write data.
| def put(self, obj: bytes, filepath: Union[str, Path]) -> None:
"""Write data to a given ``filepath`` with 'wb' mode.
Note:
``put`` should create a directory if the directory of ``filepath``
does not exist.
Args:
obj (bytes): Data to be written.
filepath (str or Path): Path to write data.
"""
self.client.put(obj, filepath)
| (self, obj: bytes, filepath: Union[str, pathlib.Path]) -> NoneType |
730,088 | mmengine.fileio.file_client | put_text | Write data to a given ``filepath`` with 'w' mode.
Note:
``put_text`` should create a directory if the directory of
``filepath`` does not exist.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str, optional): The encoding format used to open the
`filepath`. Defaults to 'utf-8'.
| def put_text(self, obj: str, filepath: Union[str, Path]) -> None:
"""Write data to a given ``filepath`` with 'w' mode.
Note:
``put_text`` should create a directory if the directory of
``filepath`` does not exist.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str, optional): The encoding format used to open the
`filepath`. Defaults to 'utf-8'.
"""
self.client.put_text(obj, filepath)
| (self, obj: str, filepath: Union[str, pathlib.Path]) -> NoneType |
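A combined sketch for the ``put`` and ``put_text`` rows above, assuming the local disk backend; both calls create missing parent directories before writing, and the target paths are illustrative.

```python
from mmengine.fileio import FileClient

client = FileClient(backend='disk')
# Binary write with 'wb' mode; parent directories are created on demand.
client.put(b'raw bytes', '/tmp/demo/blob.bin')
# Text write with 'w' mode, utf-8 encoded by default.
client.put_text('hello world', '/tmp/demo/note.txt')
```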
730,089 | mmengine.fileio.file_client | remove | Remove a file.
Args:
filepath (str, Path): Path to be removed.
| def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str, Path): Path to be removed.
"""
self.client.remove(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> NoneType |
730,090 | mmengine.fileio.backends.http_backend | HTTPBackend | HTTP and HTTPS storage backend. | class HTTPBackend(BaseStorageBackend):
"""HTTP and HTTPS storage backend."""
def get(self, filepath: str) -> bytes:
"""Read bytes from a given ``filepath``.
Args:
filepath (str): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = HTTPBackend()
>>> backend.get('http://path/of/file')
b'hello world'
"""
return urlopen(filepath).read()
def get_text(self, filepath, encoding='utf-8') -> str:
"""Read text from a given ``filepath``.
Args:
filepath (str): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = HTTPBackend()
>>> backend.get_text('http://path/of/file')
'hello world'
"""
return urlopen(filepath).read().decode(encoding)
@contextmanager
def get_local_path(
self, filepath: str) -> Generator[Union[str, Path], None, None]:
"""Download a file from ``filepath`` to a local temporary directory,
and return the temporary path.
``get_local_path`` is decorated by :meth:`contextlib.contextmanager`. It
can be called with the ``with`` statement, and when exiting from the
``with`` statement, the temporary path will be released.
Args:
filepath (str): Download a file from ``filepath``.
Yields:
Iterable[str]: Only yield one temporary path.
Examples:
>>> backend = HTTPBackend()
>>> # After exiting from the ``with`` clause,
>>> # the path will be removed
>>> with backend.get_local_path('http://path/of/file') as path:
... # do something here
"""
try:
f = tempfile.NamedTemporaryFile(delete=False)
f.write(self.get(filepath))
f.close()
yield f.name
finally:
os.remove(f.name)
| () |
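``get_local_path`` downloads into a ``NamedTemporaryFile`` and removes it on exit, as the ``try``/``finally`` above shows. A sketch of that lifecycle; the URL is a placeholder and network access is assumed.

```python
import os
from mmengine.fileio.backends import HTTPBackend

backend = HTTPBackend()
with backend.get_local_path('http://example.com/file.bin') as path:
    assert os.path.exists(path)   # the temporary copy exists in the block
assert not os.path.exists(path)   # and is removed once the block exits
```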
730,091 | mmengine.fileio.backends.http_backend | get | Read bytes from a given ``filepath``.
Args:
filepath (str): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = HTTPBackend()
>>> backend.get('http://path/of/file')
b'hello world'
| def get(self, filepath: str) -> bytes:
"""Read bytes from a given ``filepath``.
Args:
filepath (str): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = HTTPBackend()
>>> backend.get('http://path/of/file')
b'hello world'
"""
return urlopen(filepath).read()
| (self, filepath: str) -> bytes |
730,092 | mmengine.fileio.backends.http_backend | get_local_path | Download a file from ``filepath`` to a local temporary directory,
and return the temporary path.
``get_local_path`` is decorated by :meth:`contextlib.contextmanager`. It
can be called with the ``with`` statement, and when exiting from the
``with`` statement, the temporary path will be released.
Args:
filepath (str): Download a file from ``filepath``.
Yields:
Iterable[str]: Only yield one temporary path.
Examples:
>>> backend = HTTPBackend()
>>> # After exiting from the ``with`` clause,
>>> # the path will be removed
>>> with backend.get_local_path('http://path/of/file') as path:
... # do something here
| null | (self, filepath: str) -> Generator[Union[str, pathlib.Path], NoneType, NoneType] |
730,093 | mmengine.fileio.backends.http_backend | get_text | Read text from a given ``filepath``.
Args:
filepath (str): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = HTTPBackend()
>>> backend.get_text('http://path/of/file')
'hello world'
| def get_text(self, filepath, encoding='utf-8') -> str:
"""Read text from a given ``filepath``.
Args:
filepath (str): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = HTTPBackend()
>>> backend.get_text('http://path/of/file')
'hello world'
"""
return urlopen(filepath).read().decode(encoding)
| (self, filepath, encoding='utf-8') -> str |
730,094 | mmengine.fileio.file_client | HardDiskBackend | Raw hard disks storage backend. | class HardDiskBackend(LocalBackend):
"""Raw hard disks storage backend."""
def __init__(self) -> None:
print_log(
'"HardDiskBackend" is the alias of "LocalBackend" '
'and the former will be deprecated in future.',
logger='current',
level=logging.WARNING)
@property
def name(self):
return self.__class__.__name__
| () -> None |
730,095 | mmengine.fileio.file_client | __init__ | null | def __init__(self) -> None:
print_log(
'"HardDiskBackend" is the alias of "LocalBackend" '
'and the former will be deprecated in future.',
logger='current',
level=logging.WARNING)
| (self) -> NoneType |
730,096 | mmengine.fileio.backends.local_backend | copy_if_symlink_fails | Create a symbolic link pointing to src named dst.
If creating a symbolic link pointing to src fails, src is copied
directly to dst instead.
Args:
src (str or Path): Create a symbolic link pointing to src.
dst (str or Path): Create a symbolic link named dst.
Returns:
bool: Return True if a symbolic link pointing to src is successfully
created. Otherwise, return False.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> backend.copy_if_symlink_fails(src, dst)
True
>>> src = '/path/of/dir'
>>> dst = '/path1/of/dir1'
>>> backend.copy_if_symlink_fails(src, dst)
True
| def copy_if_symlink_fails(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> bool:
"""Create a symbolic link pointing to src named dst.
If creating a symbolic link pointing to src fails, src is copied
directly to dst instead.
Args:
src (str or Path): Create a symbolic link pointing to src.
dst (str or Path): Create a symbolic link named dst.
Returns:
bool: Return True if a symbolic link pointing to src is successfully
created. Otherwise, return False.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> backend.copy_if_symlink_fails(src, dst)
True
>>> src = '/path/of/dir'
>>> dst = '/path1/of/dir1'
>>> backend.copy_if_symlink_fails(src, dst)
True
"""
try:
os.symlink(src, dst)
return True
except Exception:
if self.isfile(src):
self.copyfile(src, dst)
else:
self.copytree(src, dst)
return False
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> bool |
730,097 | mmengine.fileio.backends.local_backend | copyfile | Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to '/path1/of/dir/file'
>>> backend.copyfile(src, dst)
'/path1/of/dir/file'
| def copyfile(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to '/path1/of/dir/file'
>>> backend.copyfile(src, dst)
'/path1/of/dir/file'
"""
return shutil.copy(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,098 | mmengine.fileio.backends.local_backend | copyfile_from_local | Copy a local file src to dst and return the destination file. Same
as :meth:`copyfile`.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_from_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to
>>> backend.copyfile_from_local(src, dst)
'/path1/of/dir/file'
| def copyfile_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a local file src to dst and return the destination file. Same
as :meth:`copyfile`.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_from_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to
>>> backend.copyfile_from_local(src, dst)
'/path1/of/dir/file'
"""
return self.copyfile(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,099 | mmengine.fileio.backends.local_backend | copyfile_to_local | Copy the file src to local dst and return the destination file. Same
as :meth:`copyfile`.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_to_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to
>>> backend.copyfile_to_local(src, dst)
'/path1/of/dir/file'
| def copyfile_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy the file src to local dst and return the destination file. Same
as :meth:`copyfile`.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_to_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to
>>> backend.copyfile_to_local(src, dst)
'/path1/of/dir/file'
"""
return self.copyfile(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,100 | mmengine.fileio.backends.local_backend | copytree | Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix and dst must not already exist.
TODO: Whether to support dirs_exist_ok parameter.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree(src, dst)
'/path/of/dir2'
| def copytree(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix and dst must not already exist.
TODO: Whether to support dirs_exist_ok parameter.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree(src, dst)
'/path/of/dir2'
"""
return shutil.copytree(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,101 | mmengine.fileio.backends.local_backend | copytree_from_local | Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory. Same as
:meth:`copytree`.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_from_local(src, dst)
'/path/of/dir2'
| def copytree_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory. Same as
:meth:`copytree`.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_from_local(src, dst)
'/path/of/dir2'
"""
return self.copytree(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,102 | mmengine.fileio.backends.local_backend | copytree_to_local | Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
backend_args (dict, optional): Arguments to instantiate the
prefix of uri corresponding backend. Defaults to None.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_from_local(src, dst)
'/path/of/dir2'
| def copytree_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
backend_args (dict, optional): Arguments to instantiate the
prefix of uri corresponding backend. Defaults to None.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_from_local(src, dst)
'/path/of/dir2'
"""
return self.copytree(src, dst)
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,103 | mmengine.fileio.backends.local_backend | exists | Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.exists(filepath)
True
| def exists(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.exists(filepath)
True
"""
return osp.exists(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
730,104 | mmengine.fileio.backends.local_backend | get | Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get(filepath)
b'hello world'
| def get(self, filepath: Union[str, Path]) -> bytes:
"""Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get(filepath)
b'hello world'
"""
with open(filepath, 'rb') as f:
value = f.read()
return value
| (self, filepath: Union[str, pathlib.Path]) -> bytes |
730,105 | mmengine.fileio.backends.local_backend | get_local_path | Only for unified API and do nothing.
Args:
filepath (str or Path): Path to be read data.
backend_args (dict, optional): Arguments to instantiate the
corresponding backend. Defaults to None.
Examples:
>>> backend = LocalBackend()
>>> with backend.get_local_path('s3://bucket/abc.jpg') as path:
... # do something here
| @contextmanager
def get_local_path(
self, filepath: Union[str, Path]
) -> Generator[Union[str, Path], None, None]:
"""Only for unified API and do nothing.
Args:
filepath (str or Path): Path to be read data.
backend_args (dict, optional): Arguments to instantiate the
corresponding backend. Defaults to None.
Examples:
>>> backend = LocalBackend()
>>> with backend.get_local_path('s3://bucket/abc.jpg') as path:
...     # do something here
"""
yield filepath
| (self, filepath: Union[str, pathlib.Path]) -> Generator[Union[str, pathlib.Path], NoneType, NoneType] |
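Because a local file is already on disk, ``get_local_path`` here is a no-op context manager. A minimal sketch; the path is illustrative.

```python
from mmengine.fileio.backends import LocalBackend

backend = LocalBackend()
# The local backend simply yields the input path; nothing is copied
# and nothing is cleaned up afterwards.
with backend.get_local_path('/path/of/file') as path:
    print(path)  # '/path/of/file'
```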
730,106 | mmengine.fileio.backends.local_backend | get_text | Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get_text(filepath)
'hello world'
| def get_text(self,
filepath: Union[str, Path],
encoding: str = 'utf-8') -> str:
"""Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get_text(filepath)
'hello world'
"""
with open(filepath, encoding=encoding) as f:
text = f.read()
return text
| (self, filepath: Union[str, pathlib.Path], encoding: str = 'utf-8') -> str |
730,107 | mmengine.fileio.backends.local_backend | isdir | Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/dir'
>>> backend.isdir(filepath)
True
| def isdir(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/dir'
>>> backend.isdir(filepath)
True
"""
return osp.isdir(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
730,108 | mmengine.fileio.backends.local_backend | isfile | Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.isfile(filepath)
True
| def isfile(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.isfile(filepath)
True
"""
return osp.isfile(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
730,109 | mmengine.fileio.backends.local_backend | join_path | Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result of concatenation.
Examples:
>>> backend = LocalBackend()
>>> filepath1 = '/path/of/dir1'
>>> filepath2 = 'dir2'
>>> filepath3 = 'path/of/file'
>>> backend.join_path(filepath1, filepath2, filepath3)
'/path/of/dir1/dir2/path/of/file'
| def join_path(self, filepath: Union[str, Path],
*filepaths: Union[str, Path]) -> str:
r"""Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result of concatenation.
Examples:
>>> backend = LocalBackend()
>>> filepath1 = '/path/of/dir1'
>>> filepath2 = 'dir2'
>>> filepath3 = 'path/of/file'
>>> backend.join_path(filepath1, filepath2, filepath3)
'/path/of/dir1/dir2/path/of/file'
"""
# TODO, if filepath or filepaths are Path, should return Path
return osp.join(filepath, *filepaths)
| (self, filepath: Union[str, pathlib.Path], *filepaths: Union[str, pathlib.Path]) -> str |
730,110 | mmengine.fileio.backends.local_backend | list_dir_or_file | Scan a directory to find the directories or files of interest, in
arbitrary order.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
Args:
dir_path (str or Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix that we are
interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the directory.
Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = LocalBackend()
>>> dir_path = '/path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directory recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
| def list_dir_or_file(self,
dir_path: Union[str, Path],
list_dir: bool = True,
list_file: bool = True,
suffix: Optional[Union[str, Tuple[str]]] = None,
recursive: bool = False) -> Iterator[str]:
"""Scan a directory to find the interested directories or files in
arbitrary order.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
Args:
dir_path (str or Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix that we are
interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the directory.
Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = LocalBackend()
>>> dir_path = '/path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directory recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
""" # noqa: E501
if list_dir and suffix is not None:
raise TypeError('`suffix` should be None when `list_dir` is True')
if (suffix is not None) and not isinstance(suffix, (str, tuple)):
raise TypeError('`suffix` must be a string or tuple of strings')
root = dir_path
def _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive):
for entry in os.scandir(dir_path):
if not entry.name.startswith('.') and entry.is_file():
rel_path = osp.relpath(entry.path, root)
if (suffix is None
or rel_path.endswith(suffix)) and list_file:
yield rel_path
elif osp.isdir(entry.path):
if list_dir:
rel_dir = osp.relpath(entry.path, root)
yield rel_dir
if recursive:
yield from _list_dir_or_file(entry.path, list_dir,
list_file, suffix,
recursive)
return _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive)
| (self, dir_path: Union[str, pathlib.Path], list_dir: bool = True, list_file: bool = True, suffix: Union[str, Tuple[str], NoneType] = None, recursive: bool = False) -> Iterator[str] |
730,111 | mmengine.fileio.backends.local_backend | put | Write bytes to a given ``filepath`` with 'wb' mode.
Note:
``put`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (bytes): Data to be written.
filepath (str or Path): Path to write data.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put(b'hello world', filepath)
| def put(self, obj: bytes, filepath: Union[str, Path]) -> None:
"""Write bytes to a given ``filepath`` with 'wb' mode.
Note:
``put`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (bytes): Data to be written.
filepath (str or Path): Path to write data.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put(b'hello world', filepath)
"""
mmengine.mkdir_or_exist(osp.dirname(filepath))
with open(filepath, 'wb') as f:
f.write(obj)
| (self, obj: bytes, filepath: Union[str, pathlib.Path]) -> NoneType |
730,112 | mmengine.fileio.backends.local_backend | put_text | Write text to a given ``filepath`` with 'w' mode.
Note:
``put_text`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put_text('hello world', filepath)
| def put_text(self,
obj: str,
filepath: Union[str, Path],
encoding: str = 'utf-8') -> None:
"""Write text to a given ``filepath`` with 'w' mode.
Note:
``put_text`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put_text('hello world', filepath)
"""
mmengine.mkdir_or_exist(osp.dirname(filepath))
with open(filepath, 'w', encoding=encoding) as f:
f.write(obj)
| (self, obj: str, filepath: Union[str, pathlib.Path], encoding: str = 'utf-8') -> NoneType |
730,113 | mmengine.fileio.backends.local_backend | remove | Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
FileNotFoundError: If filepath does not exist, a FileNotFoundError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.remove(filepath)
| def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
FileNotFoundError: If filepath does not exist, a FileNotFoundError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.remove(filepath)
"""
if not self.exists(filepath):
raise FileNotFoundError(f'filepath {filepath} does not exist')
if self.isdir(filepath):
raise IsADirectoryError('filepath should be a file')
os.remove(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> NoneType |
730,114 | mmengine.fileio.backends.local_backend | rmtree | Recursively delete a directory tree.
Args:
dir_path (str or Path): A directory to be removed.
Examples:
>>> dir_path = '/path/of/dir'
>>> backend.rmtree(dir_path)
| def rmtree(self, dir_path: Union[str, Path]) -> None:
"""Recursively delete a directory tree.
Args:
dir_path (str or Path): A directory to be removed.
Examples:
>>> dir_path = '/path/of/dir'
>>> backend.rmtree(dir_path)
"""
shutil.rmtree(dir_path)
| (self, dir_path: Union[str, pathlib.Path]) -> NoneType |
730,115 | mmengine.logging.history_buffer | HistoryBuffer | Unified storage format for different log types.
``HistoryBuffer`` records the history of log for further statistics.
Examples:
>>> history_buffer = HistoryBuffer()
>>> # Update history_buffer.
>>> history_buffer.update(1)
>>> history_buffer.update(2)
>>> history_buffer.min() # minimum of (1, 2)
1
>>> history_buffer.max() # maximum of (1, 2)
2
>>> history_buffer.mean() # mean of (1, 2)
1.5
>>> history_buffer.statistics('mean') # access method by string.
1.5
Args:
log_history (Sequence): History logs. Defaults to [].
count_history (Sequence): Counts of history logs. Defaults to [].
max_length (int): The max length of history logs. Defaults to 1000000.
| class HistoryBuffer:
"""Unified storage format for different log types.
``HistoryBuffer`` records the history of log for further statistics.
Examples:
>>> history_buffer = HistoryBuffer()
>>> # Update history_buffer.
>>> history_buffer.update(1)
>>> history_buffer.update(2)
>>> history_buffer.min() # minimum of (1, 2)
1
>>> history_buffer.max() # maximum of (1, 2)
2
>>> history_buffer.mean() # mean of (1, 2)
1.5
>>> history_buffer.statistics('mean') # access method by string.
1.5
Args:
log_history (Sequence): History logs. Defaults to [].
count_history (Sequence): Counts of history logs. Defaults to [].
max_length (int): The max length of history logs. Defaults to 1000000.
"""
_statistics_methods: dict = dict()
def __init__(self,
log_history: Sequence = [],
count_history: Sequence = [],
max_length: int = 1000000):
self.max_length = max_length
self._set_default_statistics()
assert len(log_history) == len(count_history), \
'The lengths of log_history and count_history should be equal'
if len(log_history) > max_length:
warnings.warn(f'The length of history buffer({len(log_history)}) '
f'exceeds the max_length({max_length}), the first '
'few elements will be ignored.')
self._log_history = np.array(log_history[-max_length:])
self._count_history = np.array(count_history[-max_length:])
else:
self._log_history = np.array(log_history)
self._count_history = np.array(count_history)
def _set_default_statistics(self) -> None:
"""Register default statistic methods: min, max, current and mean."""
self._statistics_methods.setdefault('min', HistoryBuffer.min)
self._statistics_methods.setdefault('max', HistoryBuffer.max)
self._statistics_methods.setdefault('current', HistoryBuffer.current)
self._statistics_methods.setdefault('mean', HistoryBuffer.mean)
def update(self, log_val: Union[int, float], count: int = 1) -> None:
"""update the log history.
If the length of the buffer exceeds ``self._max_length``, the oldest
element will be removed from the buffer.
Args:
log_val (int or float): The value of log.
count (int): The accumulation times of log, defaults to 1.
``count`` will be used in smooth statistics.
"""
if (not isinstance(log_val, (int, float))
or not isinstance(count, (int, float))):
raise TypeError(f'log_val must be int or float but got '
f'{type(log_val)}, count must be int or float but got '
f'{type(count)}')
self._log_history = np.append(self._log_history, log_val)
self._count_history = np.append(self._count_history, count)
if len(self._log_history) > self.max_length:
self._log_history = self._log_history[-self.max_length:]
self._count_history = self._count_history[-self.max_length:]
@property
def data(self) -> Tuple[np.ndarray, np.ndarray]:
"""Get the ``_log_history`` and ``_count_history``.
Returns:
Tuple[np.ndarray, np.ndarray]: History logs and the counts of
the history logs.
"""
return self._log_history, self._count_history
@classmethod
def register_statistics(cls, method: Callable) -> Callable:
"""Register custom statistics method to ``_statistics_methods``.
The registered method can be called by ``history_buffer.statistics``
with corresponding method name and arguments.
Examples:
>>> @HistoryBuffer.register_statistics
>>> def weighted_mean(self, window_size, weight):
>>> assert len(weight) == window_size
>>> return (self._log_history[-window_size:] *
>>> np.array(weight)).sum() / \
>>> self._count_history[-window_size:].sum()
>>> log_buffer = HistoryBuffer([1, 2], [1, 1])
>>> log_buffer.statistics('weighted_mean', 2, [2, 1])
2
Args:
method (Callable): Custom statistics method.
Returns:
Callable: Original custom statistics method.
"""
method_name = method.__name__
assert method_name not in cls._statistics_methods, \
'method_name cannot be registered twice!'
cls._statistics_methods[method_name] = method
return method
def statistics(self, method_name: str, *arg, **kwargs) -> Any:
"""Access statistics method by name.
Args:
method_name (str): Name of method.
Returns:
Any: Depends on corresponding method.
"""
if method_name not in self._statistics_methods:
raise KeyError(f'{method_name} has not been registered in '
'HistoryBuffer._statistics_methods')
method = self._statistics_methods[method_name]
# Provide self arguments for registered functions.
return method(self, *arg, **kwargs)
def mean(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the mean of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global mean value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: Mean value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
logs_sum = self._log_history[-window_size:].sum()
counts_sum = self._count_history[-window_size:].sum()
return logs_sum / counts_sum
def max(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the maximum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global maximum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The maximum value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
return self._log_history[-window_size:].max()
def min(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the minimum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global minimum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The minimum value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
return self._log_history[-window_size:].min()
def current(self) -> np.ndarray:
"""Return the recently updated values in log histories.
Returns:
np.ndarray: Recently updated values in log histories.
"""
if len(self._log_history) == 0:
raise ValueError('HistoryBuffer._log_history is an empty array! '
'please call update first')
return self._log_history[-1]
def __getstate__(self) -> dict:
"""Make ``_statistics_methods`` can be resumed.
Returns:
dict: State dict including statistics_methods.
"""
self.__dict__.update(statistics_methods=self._statistics_methods)
return self.__dict__
def __setstate__(self, state):
"""Try to load ``_statistics_methods`` from state.
Args:
state (dict): State dict.
"""
statistics_methods = state.pop('statistics_methods', {})
self._set_default_statistics()
self._statistics_methods.update(statistics_methods)
self.__dict__.update(state)
| (log_history: Sequence = [], count_history: Sequence = [], max_length: int = 1000000) |
730,116 | mmengine.logging.history_buffer | __getstate__ | Ensure ``_statistics_methods`` can be resumed.
Returns:
dict: State dict including statistics_methods.
| def __getstate__(self) -> dict:
"""Make ``_statistics_methods`` can be resumed.
Returns:
dict: State dict including statistics_methods.
"""
self.__dict__.update(statistics_methods=self._statistics_methods)
return self.__dict__
| (self) -> dict |
730,117 | mmengine.logging.history_buffer | __init__ | null | def __init__(self,
log_history: Sequence = [],
count_history: Sequence = [],
max_length: int = 1000000):
self.max_length = max_length
self._set_default_statistics()
assert len(log_history) == len(count_history), \
'The lengths of log_history and count_history should be equal'
if len(log_history) > max_length:
warnings.warn(f'The length of history buffer({len(log_history)}) '
f'exceeds the max_length({max_length}), the first '
'few elements will be ignored.')
self._log_history = np.array(log_history[-max_length:])
self._count_history = np.array(count_history[-max_length:])
else:
self._log_history = np.array(log_history)
self._count_history = np.array(count_history)
| (self, log_history: Sequence = [], count_history: Sequence = [], max_length: int = 1000000) |
730,118 | mmengine.logging.history_buffer | __setstate__ | Try to load ``_statistics_methods`` from state.
Args:
state (dict): State dict.
| def __setstate__(self, state):
"""Try to load ``_statistics_methods`` from state.
Args:
state (dict): State dict.
"""
statistics_methods = state.pop('statistics_methods', {})
self._set_default_statistics()
self._statistics_methods.update(statistics_methods)
self.__dict__.update(state)
| (self, state) |
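``__getstate__`` and ``__setstate__`` exist so registered statistics methods survive serialization. A round-trip sketch:

```python
import pickle
from mmengine.logging import HistoryBuffer

buf = HistoryBuffer()
buf.update(1)
buf.update(2)
# The state hooks keep _statistics_methods usable after unpickling.
restored = pickle.loads(pickle.dumps(buf))
print(restored.statistics('mean'))  # 1.5
```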
730,119 | mmengine.logging.history_buffer | _set_default_statistics | Register default statistic methods: min, max, current and mean. | def _set_default_statistics(self) -> None:
"""Register default statistic methods: min, max, current and mean."""
self._statistics_methods.setdefault('min', HistoryBuffer.min)
self._statistics_methods.setdefault('max', HistoryBuffer.max)
self._statistics_methods.setdefault('current', HistoryBuffer.current)
self._statistics_methods.setdefault('mean', HistoryBuffer.mean)
| (self) -> NoneType |
730,120 | mmengine.logging.history_buffer | current | Return the recently updated values in log histories.
Returns:
np.ndarray: Recently updated values in log histories.
| def current(self) -> np.ndarray:
"""Return the recently updated values in log histories.
Returns:
np.ndarray: Recently updated values in log histories.
"""
if len(self._log_history) == 0:
raise ValueError('HistoryBuffer._log_history is an empty array! '
'please call update first')
return self._log_history[-1]
| (self) -> numpy.ndarray |
730,121 | mmengine.logging.history_buffer | max | Return the maximum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global maximum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The maximum value within the window.
| def max(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the maximum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global maximum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The maximum value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
return self._log_history[-window_size:].max()
| (self, window_size: Optional[int] = None) -> numpy.ndarray |
730,122 | mmengine.logging.history_buffer | mean | Return the mean of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global mean value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: Mean value within the window.
| def mean(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the mean of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global mean value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: Mean value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
logs_sum = self._log_history[-window_size:].sum()
counts_sum = self._count_history[-window_size:].sum()
return logs_sum / counts_sum
| (self, window_size: Optional[int] = None) -> numpy.ndarray |
730,123 | mmengine.logging.history_buffer | min | Return the minimum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global minimum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The minimum value within the window.
| def min(self, window_size: Optional[int] = None) -> np.ndarray:
"""Return the minimum value of the latest ``window_size`` values in log
histories.
If ``window_size is None`` or ``window_size > len(self._log_history)``,
return the global minimum value of history logs.
Args:
window_size (int, optional): Size of statistics window.
Returns:
np.ndarray: The minimum value within the window.
"""
if window_size is not None:
assert isinstance(window_size, int), \
'The type of window size should be int, but got ' \
f'{type(window_size)}'
else:
window_size = len(self._log_history)
return self._log_history[-window_size:].min()
| (self, window_size: Optional[int] = None) -> numpy.ndarray |
730,124 | mmengine.logging.history_buffer | statistics | Access statistics method by name.
Args:
method_name (str): Name of method.
Returns:
Any: Depends on corresponding method.
| def statistics(self, method_name: str, *arg, **kwargs) -> Any:
"""Access statistics method by name.
Args:
method_name (str): Name of method.
Returns:
Any: Depends on corresponding method.
"""
if method_name not in self._statistics_methods:
raise KeyError(f'{method_name} has not been registered in '
'HistoryBuffer._statistics_methods')
method = self._statistics_methods[method_name]
# Provide self arguments for registered functions.
return method(self, *arg, **kwargs)
| (self, method_name: str, *arg, **kwargs) -> Any |
730,125 | mmengine.logging.history_buffer | update | Update the log history.
If the length of the buffer exceeds ``self.max_length``, the oldest
element will be removed from the buffer.
Args:
log_val (int or float): The value of log.
count (int): The accumulation times of log, defaults to 1.
``count`` will be used in smooth statistics.
| def update(self, log_val: Union[int, float], count: int = 1) -> None:
"""update the log history.
If the length of the buffer exceeds ``self._max_length``, the oldest
element will be removed from the buffer.
Args:
log_val (int or float): The value of log.
count (int): The accumulation times of log, defaults to 1.
``count`` will be used in smooth statistics.
"""
if (not isinstance(log_val, (int, float))
or not isinstance(count, (int, float))):
raise TypeError(f'log_val must be int or float but got '
f'{type(log_val)}, count must be int or float but got '
f'{type(count)}')
self._log_history = np.append(self._log_history, log_val)
self._count_history = np.append(self._count_history, count)
if len(self._log_history) > self.max_length:
self._log_history = self._log_history[-self.max_length:]
self._count_history = self._count_history[-self.max_length:]
| (self, log_val: Union[int, float], count: int = 1) -> NoneType |
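The ``count`` argument turns ``mean`` into a weighted average: each logged value is treated as a sum over ``count`` samples. A worked sketch:

```python
from mmengine.logging import HistoryBuffer

buf = HistoryBuffer()
buf.update(10, count=5)  # e.g. a loss summed over 5 samples
buf.update(2, count=1)
# mean() divides the summed values by the summed counts:
print(buf.mean())  # (10 + 2) / (5 + 1) = 2.0
```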
730,126 | mmengine.fileio.handlers.json_handler | JsonHandler | null | class JsonHandler(BaseFileHandler):
def load_from_fileobj(self, file):
return json.load(file)
def dump_to_fileobj(self, obj, file, **kwargs):
kwargs.setdefault('default', set_default)
json.dump(obj, file, **kwargs)
def dump_to_str(self, obj, **kwargs):
kwargs.setdefault('default', set_default)
return json.dumps(obj, **kwargs)
| () |
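``JsonHandler`` wires ``set_default`` into ``json.dump``/``json.dumps`` as the ``default`` hook; in mmengine, ``set_default`` serializes otherwise non-serializable types such as sets and NumPy scalars. A minimal round-trip sketch:

```python
import io
from mmengine.fileio.handlers import JsonHandler

handler = JsonHandler()
# The set is converted by set_default; it comes back as a list.
s = handler.dump_to_str({'a': 1, 'tags': {1, 2}})
print(handler.load_from_fileobj(io.StringIO(s)))  # {'a': 1, 'tags': [1, 2]}
```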
730,127 | mmengine.fileio.handlers.json_handler | dump_to_fileobj | null | def dump_to_fileobj(self, obj, file, **kwargs):
kwargs.setdefault('default', set_default)
json.dump(obj, file, **kwargs)
| (self, obj, file, **kwargs) |
730,129 | mmengine.fileio.handlers.json_handler | dump_to_str | null | def dump_to_str(self, obj, **kwargs):
kwargs.setdefault('default', set_default)
return json.dumps(obj, **kwargs)
| (self, obj, **kwargs) |
730,130 | mmengine.fileio.handlers.json_handler | load_from_fileobj | null | def load_from_fileobj(self, file):
return json.load(file)
| (self, file) |
730,132 | mmengine.fileio.backends.lmdb_backend | LmdbBackend | Lmdb storage backend.
Args:
db_path (str): Lmdb database path.
readonly (bool): Lmdb environment parameter. If True, disallow any
write operations. Defaults to True.
lock (bool): Lmdb environment parameter. If False, when concurrent
access occurs, do not lock the database. Defaults to False.
readahead (bool): Lmdb environment parameter. If False, disable the OS
filesystem readahead mechanism, which may improve random read
performance when a database is larger than RAM. Defaults to False.
**kwargs: Keyword arguments passed to `lmdb.open`.
Attributes:
db_path (str): Lmdb database path.
| class LmdbBackend(BaseStorageBackend):
"""Lmdb storage backend.
Args:
db_path (str): Lmdb database path.
readonly (bool): Lmdb environment parameter. If True, disallow any
write operations. Defaults to True.
lock (bool): Lmdb environment parameter. If False, when concurrent
access occurs, do not lock the database. Defaults to False.
readahead (bool): Lmdb environment parameter. If False, disable the OS
filesystem readahead mechanism, which may improve random read
performance when a database is larger than RAM. Defaults to False.
**kwargs: Keyword arguments passed to `lmdb.open`.
Attributes:
db_path (str): Lmdb database path.
"""
def __init__(self,
db_path,
readonly=True,
lock=False,
readahead=False,
**kwargs):
try:
import lmdb # noqa: F401
except ImportError:
raise ImportError(
'Please run "pip install lmdb" to enable LmdbBackend.')
self.db_path = str(db_path)
self.readonly = readonly
self.lock = lock
self.readahead = readahead
self.kwargs = kwargs
self._client = None
def get(self, filepath: Union[str, Path]) -> bytes:
"""Get values according to the filepath.
Args:
filepath (str or Path): Here, filepath is the lmdb key.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LmdbBackend('path/to/lmdb')
>>> backend.get('key')
b'hello world'
"""
if self._client is None:
self._client = self._get_client()
filepath = str(filepath)
with self._client.begin(write=False) as txn:
value_buf = txn.get(filepath.encode('ascii'))
return value_buf
def get_text(self, filepath, encoding=None):
raise NotImplementedError
def _get_client(self):
import lmdb
return lmdb.open(
self.db_path,
readonly=self.readonly,
lock=self.lock,
readahead=self.readahead,
**self.kwargs)
def __del__(self):
if self._client is not None:
self._client.close()
| (db_path, readonly=True, lock=False, readahead=False, **kwargs) |
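A read-only lookup sketch for ``LmdbBackend``; it requires the optional ``lmdb`` dependency and an existing database, and the path and key are illustrative.

```python
from mmengine.fileio.backends import LmdbBackend

# Requires `pip install lmdb`; the environment is opened lazily on the
# first get() call.
backend = LmdbBackend('/path/to/lmdb')
value = backend.get('sample_0001')  # bytes, or None if the key is absent
```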
730,133 | mmengine.fileio.backends.lmdb_backend | __del__ | null | def __del__(self):
if self._client is not None:
self._client.close()
| (self) |
730,134 | mmengine.fileio.backends.lmdb_backend | __init__ | null | def __init__(self,
db_path,
readonly=True,
lock=False,
readahead=False,
**kwargs):
try:
import lmdb # noqa: F401
except ImportError:
raise ImportError(
'Please run "pip install lmdb" to enable LmdbBackend.')
self.db_path = str(db_path)
self.readonly = readonly
self.lock = lock
self.readahead = readahead
self.kwargs = kwargs
self._client = None
| (self, db_path, readonly=True, lock=False, readahead=False, **kwargs) |
730,135 | mmengine.fileio.backends.lmdb_backend | _get_client | null | def _get_client(self):
import lmdb
return lmdb.open(
self.db_path,
readonly=self.readonly,
lock=self.lock,
readahead=self.readahead,
**self.kwargs)
| (self) |
730,136 | mmengine.fileio.backends.lmdb_backend | get | Get values according to the filepath.
Args:
filepath (str or Path): Here, filepath is the lmdb key.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LmdbBackend('path/to/lmdb')
>>> backend.get('key')
b'hello world'
| def get(self, filepath: Union[str, Path]) -> bytes:
"""Get values according to the filepath.
Args:
filepath (str or Path): Here, filepath is the lmdb key.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LmdbBackend('path/to/lmdb')
>>> backend.get('key')
b'hello world'
"""
if self._client is None:
self._client = self._get_client()
filepath = str(filepath)
with self._client.begin(write=False) as txn:
value_buf = txn.get(filepath.encode('ascii'))
return value_buf
| (self, filepath: Union[str, pathlib.Path]) -> bytes |
730,137 | mmengine.fileio.backends.lmdb_backend | get_text | null | def get_text(self, filepath, encoding=None):
raise NotImplementedError
| (self, filepath, encoding=None) |
730,138 | mmengine.fileio.backends.local_backend | LocalBackend | Raw local storage backend. | class LocalBackend(BaseStorageBackend):
"""Raw local storage backend."""
_allow_symlink = True
def get(self, filepath: Union[str, Path]) -> bytes:
"""Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get(filepath)
b'hello world'
"""
with open(filepath, 'rb') as f:
value = f.read()
return value
def get_text(self,
filepath: Union[str, Path],
encoding: str = 'utf-8') -> str:
"""Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.get_text(filepath)
'hello world'
"""
with open(filepath, encoding=encoding) as f:
text = f.read()
return text
def put(self, obj: bytes, filepath: Union[str, Path]) -> None:
"""Write bytes to a given ``filepath`` with 'wb' mode.
Note:
``put`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (bytes): Data to be written.
filepath (str or Path): Path to write data.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put(b'hello world', filepath)
"""
mmengine.mkdir_or_exist(osp.dirname(filepath))
with open(filepath, 'wb') as f:
f.write(obj)
def put_text(self,
obj: str,
filepath: Union[str, Path],
encoding: str = 'utf-8') -> None:
"""Write text to a given ``filepath`` with 'w' mode.
Note:
``put_text`` will create a directory if the directory of
``filepath`` does not exist.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.put_text('hello world', filepath)
"""
mmengine.mkdir_or_exist(osp.dirname(filepath))
with open(filepath, 'w', encoding=encoding) as f:
f.write(obj)
def exists(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.exists(filepath)
True
"""
return osp.exists(filepath)
def isdir(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/dir'
>>> backend.isdir(filepath)
True
"""
return osp.isdir(filepath)
def isfile(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.isfile(filepath)
True
"""
return osp.isfile(filepath)
def join_path(self, filepath: Union[str, Path],
*filepaths: Union[str, Path]) -> str:
r"""Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result of concatenation.
Examples:
>>> backend = LocalBackend()
>>> filepath1 = '/path/of/dir1'
>>> filepath2 = 'dir2'
>>> filepath3 = 'path/of/file'
>>> backend.join_path(filepath1, filepath2, filepath3)
'/path/of/dir1/dir2/path/of/file'
"""
# TODO, if filepath or filepaths are Path, should return Path
return osp.join(filepath, *filepaths)
@contextmanager
def get_local_path(
self,
filepath: Union[str, Path],
) -> Generator[Union[str, Path], None, None]:
"""Only for unified API and do nothing.
Args:
filepath (str or Path): Path to be read data.
backend_args (dict, optional): Arguments to instantiate the
corresponding backend. Defaults to None.
Examples:
>>> backend = LocalBackend()
>>> with backend.get_local_path('s3://bucket/abc.jpg') as path:
... # do something here
"""
yield filepath
def copyfile(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to '/path1/of/dir/file'
>>> backend.copyfile(src, dst)
'/path1/of/dir/file'
"""
return shutil.copy(src, dst)
def copytree(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix and dst must not already exist.
TODO: Whether to support dirs_exist_ok parameter.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree(src, dst)
'/path/of/dir2'
"""
return shutil.copytree(src, dst)
def copyfile_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a local file src to dst and return the destination file. Same
as :meth:`copyfile`.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_from_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to '/path1/of/dir/file'
>>> backend.copyfile_from_local(src, dst)
'/path1/of/dir/file'
"""
return self.copyfile(src, dst)
def copytree_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory. Same as
:meth:`copytree`.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_from_local(src, dst)
'/path/of/dir2'
"""
return self.copytree(src, dst)
def copyfile_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy the file src to local dst and return the destination file. Same
as :meth:`copyfile`.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = LocalBackend()
>>> # dst is a file
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> # src will be copied to '/path1/of/file1'
>>> backend.copyfile_to_local(src, dst)
'/path1/of/file1'
>>> # dst is a directory
>>> dst = '/path1/of/dir'
>>> # src will be copied to '/path1/of/dir/file'
>>> backend.copyfile_to_local(src, dst)
'/path1/of/dir/file'
"""
return self.copyfile(src, dst)
def copytree_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
Returns:
str: The destination directory.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/dir1'
>>> dst = '/path/of/dir2'
>>> backend.copytree_to_local(src, dst)
'/path/of/dir2'
"""
return self.copytree(src, dst)
def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
FileNotFoundError: If filepath does not exist, a FileNotFoundError
will be raised.
Examples:
>>> backend = LocalBackend()
>>> filepath = '/path/of/file'
>>> backend.remove(filepath)
"""
if not self.exists(filepath):
raise FileNotFoundError(f'filepath {filepath} does not exist')
if self.isdir(filepath):
raise IsADirectoryError('filepath should be a file')
os.remove(filepath)
def rmtree(self, dir_path: Union[str, Path]) -> None:
"""Recursively delete a directory tree.
Args:
dir_path (str or Path): A directory to be removed.
Examples:
>>> backend = LocalBackend()
>>> dir_path = '/path/of/dir'
>>> backend.rmtree(dir_path)
"""
shutil.rmtree(dir_path)
def copy_if_symlink_fails(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> bool:
"""Create a symbolic link pointing to src named dst.
If creating the symbolic link fails, src is copied directly to dst
instead.
Args:
src (str or Path): Create a symbolic link pointing to src.
dst (str or Path): Create a symbolic link named dst.
Returns:
bool: Return True if successfully create a symbolic link pointing
to src. Otherwise, return False.
Examples:
>>> backend = LocalBackend()
>>> src = '/path/of/file'
>>> dst = '/path1/of/file1'
>>> backend.copy_if_symlink_fails(src, dst)
True
>>> src = '/path/of/dir'
>>> dst = '/path1/of/dir1'
>>> backend.copy_if_symlink_fails(src, dst)
True
"""
try:
os.symlink(src, dst)
return True
except Exception:
if self.isfile(src):
self.copyfile(src, dst)
else:
self.copytree(src, dst)
return False
def list_dir_or_file(self,
dir_path: Union[str, Path],
list_dir: bool = True,
list_file: bool = True,
suffix: Optional[Union[str, Tuple[str]]] = None,
recursive: bool = False) -> Iterator[str]:
"""Scan a directory to find the interested directories or files in
arbitrary order.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
Args:
dir_path (str or Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix that we are
interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the directory.
Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = LocalBackend()
>>> dir_path = '/path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directory recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
""" # noqa: E501
if list_dir and suffix is not None:
raise TypeError('`suffix` should be None when `list_dir` is True')
if (suffix is not None) and not isinstance(suffix, (str, tuple)):
raise TypeError('`suffix` must be a string or tuple of strings')
root = dir_path
def _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive):
for entry in os.scandir(dir_path):
if not entry.name.startswith('.') and entry.is_file():
rel_path = osp.relpath(entry.path, root)
if (suffix is None
or rel_path.endswith(suffix)) and list_file:
yield rel_path
elif osp.isdir(entry.path):
if list_dir:
rel_dir = osp.relpath(entry.path, root)
yield rel_dir
if recursive:
yield from _list_dir_or_file(entry.path, list_dir,
list_file, suffix,
recursive)
return _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive)
| () |
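A short, hedged sketch of round-tripping data through LocalBackend; the directory is created with tempfile, so the paths are real but throwaway.
import tempfile
from mmengine.fileio import LocalBackend

backend = LocalBackend()
tmp_dir = tempfile.mkdtemp()
filepath = backend.join_path(tmp_dir, 'subdir', 'demo.txt')

backend.put_text('hello world', filepath)          # creates 'subdir' on demand
assert backend.get_text(filepath) == 'hello world'
assert backend.isfile(filepath)

# Paths are yielded relative to tmp_dir.
for rel_path in backend.list_dir_or_file(tmp_dir, list_dir=False, recursive=True):
    print(rel_path)                                # e.g. 'subdir/demo.txt'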
730,158 | mmengine.logging.logger | MMLogger | Formatted logger used to record messages.
``MMLogger`` can create formatted logger to log message with different
log levels and get instance in the same way as ``ManagerMixin``.
``MMLogger`` has the following features:
- Distributed log storage, ``MMLogger`` can choose whether to save log of
different ranks according to `log_file`.
- Message with different log levels will have different colors and format
when displayed on terminal.
Note:
- The `name` of logger and the ``instance_name`` of ``MMLogger`` could
be different. We can only get ``MMLogger`` instance by
``MMLogger.get_instance`` but not ``logging.getLogger``. This feature
ensures ``MMLogger`` will not be influenced by third-party logging
config.
- Different from ``logging.Logger``, ``MMLogger`` will not log warning
or error message without ``Handler``.
Examples:
>>> logger = MMLogger.get_instance(name='MMLogger',
>>> logger_name='Logger')
>>> # Although logger has name attribute just like `logging.Logger`
>>> # We cannot get logger instance by `logging.getLogger`.
>>> assert logger.name == 'Logger'
>>> assert logger.instance_name == 'MMLogger'
>>> assert id(logger) != id(logging.getLogger('Logger'))
>>> # Get a logger that does not store logs.
>>> logger1 = MMLogger.get_instance('logger1')
>>> # Get a logger that only saves rank0 logs.
>>> logger2 = MMLogger.get_instance('logger2', log_file='out.log')
>>> # Get a logger that saves logs of multiple ranks.
>>> logger3 = MMLogger.get_instance('logger3', log_file='out.log',
>>> distributed=True)
Args:
name (str): Global instance name.
logger_name (str): ``name`` attribute of ``Logging.Logger`` instance.
If `logger_name` is not defined, defaults to 'mmengine'.
log_file (str, optional): The log filename. If specified, a
``FileHandler`` will be added to the logger. Defaults to None.
log_level (str): The log level of the handler. Defaults to
'INFO'. If log level is 'DEBUG', distributed logs will be saved
during distributed training.
file_mode (str): The file mode used to open log file. Defaults to 'w'.
distributed (bool): Whether to save distributed logs. Defaults to
False.
file_handler_cfg (dict, optional): Configuration of file handler.
Defaults to None. If ``file_handler_cfg`` is not specified,
``logging.FileHandler`` will be used by default. If it is
specified, the ``type`` key should be set. It can be
``RotatingFileHandler``, ``TimedRotatingFileHandler``,
``WatchedFileHandler`` or other file handlers, and the remaining
fields will be used to build the handler.
Examples:
>>> file_handler_cfg = dict(
>>> type='TimedRotatingFileHandler',
>>> when='MIDNIGHT',
>>> interval=1,
>>> backupCount=365)
`New in version 0.9.0.`
| class MMLogger(Logger, ManagerMixin):
"""Formatted logger used to record messages.
``MMLogger`` can create formatted logger to log message with different
log levels and get instance in the same way as ``ManagerMixin``.
``MMLogger`` has the following features:
- Distributed log storage, ``MMLogger`` can choose whether to save log of
different ranks according to `log_file`.
- Message with different log levels will have different colors and format
when displayed on terminal.
Note:
- The `name` of logger and the ``instance_name`` of ``MMLogger`` could
be different. We can only get ``MMLogger`` instance by
``MMLogger.get_instance`` but not ``logging.getLogger``. This feature
ensures ``MMLogger`` will not be influenced by third-party logging
config.
- Different from ``logging.Logger``, ``MMLogger`` will not log warning
or error message without ``Handler``.
Examples:
>>> logger = MMLogger.get_instance(name='MMLogger',
>>> logger_name='Logger')
>>> # Although logger has name attribute just like `logging.Logger`
>>> # We cannot get logger instance by `logging.getLogger`.
>>> assert logger.name == 'Logger'
>>> assert logger.instance_name == 'MMLogger'
>>> assert id(logger) != id(logging.getLogger('Logger'))
>>> # Get a logger that does not store logs.
>>> logger1 = MMLogger.get_instance('logger1')
>>> # Get a logger that only saves rank0 logs.
>>> logger2 = MMLogger.get_instance('logger2', log_file='out.log')
>>> # Get a logger that saves logs of multiple ranks.
>>> logger3 = MMLogger.get_instance('logger3', log_file='out.log',
>>> distributed=True)
Args:
name (str): Global instance name.
logger_name (str): ``name`` attribute of ``Logging.Logger`` instance.
If `logger_name` is not defined, defaults to 'mmengine'.
log_file (str, optional): The log filename. If specified, a
``FileHandler`` will be added to the logger. Defaults to None.
log_level (str): The log level of the handler. Defaults to
'INFO'. If log level is 'DEBUG', distributed logs will be saved
during distributed training.
file_mode (str): The file mode used to open log file. Defaults to 'w'.
distributed (bool): Whether to save distributed logs. Defaults to
False.
file_handler_cfg (dict, optional): Configuration of file handler.
Defaults to None. If ``file_handler_cfg`` is not specified,
``logging.FileHandler`` will be used by default. If it is
specified, the ``type`` key should be set. It can be
``RotatingFileHandler``, ``TimedRotatingFileHandler``,
``WatchedFileHandler`` or other file handlers, and the remaining
fields will be used to build the handler.
Examples:
>>> file_handler_cfg = dict(
>>> type='TimedRotatingFileHandler',
>>> when='MIDNIGHT',
>>> interval=1,
>>> backupCount=365)
`New in version 0.9.0.`
"""
def __init__(self,
name: str,
logger_name='mmengine',
log_file: Optional[str] = None,
log_level: Union[int, str] = 'INFO',
file_mode: str = 'w',
distributed=False,
file_handler_cfg: Optional[dict] = None):
Logger.__init__(self, logger_name)
ManagerMixin.__init__(self, name)
# Get rank in DDP mode.
if isinstance(log_level, str):
log_level = logging._nameToLevel[log_level]
global_rank = _get_rank()
device_id = _get_device_id()
# Config stream_handler. If `rank != 0`, stream_handler can only
# export ERROR logs.
stream_handler = logging.StreamHandler(stream=sys.stdout)
# `StreamHandler` record month, day, hour, minute, and second
# timestamp.
stream_handler.setFormatter(
MMFormatter(color=True, datefmt='%m/%d %H:%M:%S'))
# Only rank0 `StreamHandler` will log messages below error level.
if global_rank == 0:
stream_handler.setLevel(log_level)
else:
stream_handler.setLevel(logging.ERROR)
stream_handler.addFilter(FilterDuplicateWarning(logger_name))
self.handlers.append(stream_handler)
if log_file is not None:
world_size = _get_world_size()
is_distributed = (log_level <= logging.DEBUG
or distributed) and world_size > 1
if is_distributed:
filename, suffix = osp.splitext(osp.basename(log_file))
hostname = _get_host_info()
if hostname:
filename = (f'{filename}_{hostname}_device{device_id}_'
f'rank{global_rank}{suffix}')
else:
# Omit hostname if it is empty
filename = (f'{filename}_device{device_id}_'
f'rank{global_rank}{suffix}')
log_file = osp.join(osp.dirname(log_file), filename)
# Save multi-ranks logs if distributed is True. The logs of rank0
# will always be saved.
if global_rank == 0 or is_distributed:
if file_handler_cfg is not None:
assert 'type' in file_handler_cfg
file_handler_type = file_handler_cfg.pop('type')
file_handlers_map = _get_logging_file_handlers()
if file_handler_type in file_handlers_map:
file_handler_cls = file_handlers_map[file_handler_type]
file_handler_cfg.setdefault('filename', log_file)
file_handler = file_handler_cls(**file_handler_cfg)
else:
raise ValueError('`logging.handlers` does not '
f'contain {file_handler_type}')
else:
# Here, the default behavior of the official
# logger is 'a'. Thus, we provide an interface to
# change the file mode to the default behavior.
# `FileHandler` is not supported to have colors,
# otherwise it will appear garbled.
file_handler = logging.FileHandler(log_file, file_mode)
# `StreamHandler` record year, month, day hour, minute,
# and second timestamp. file_handler will only record logs
# without color to avoid garbled code saved in files.
file_handler.setFormatter(
MMFormatter(color=False, datefmt='%Y/%m/%d %H:%M:%S'))
file_handler.setLevel(log_level)
file_handler.addFilter(FilterDuplicateWarning(logger_name))
self.handlers.append(file_handler)
self._log_file = log_file
@property
def log_file(self):
return self._log_file
@classmethod
def get_current_instance(cls) -> 'MMLogger':
"""Get latest created ``MMLogger`` instance.
:obj:`MMLogger` can call :meth:`get_current_instance` before any
instance has been created, and return a logger with the instance name
"mmengine".
Returns:
MMLogger: Configured logger instance.
"""
if not cls._instance_dict:
cls.get_instance('mmengine')
return super().get_current_instance()
def callHandlers(self, record: LogRecord) -> None:
"""Pass a record to all relevant handlers.
Override ``callHandlers`` method in ``logging.Logger`` to avoid
multiple warning messages in DDP mode. Loop through the handlers of
the logger instance only; unlike the base implementation, handlers of
parent loggers are not consulted, so an unhandled record is not output.
Args:
record (LogRecord): A ``LogRecord`` instance contains logged
message.
"""
for handler in self.handlers:
if record.levelno >= handler.level:
handler.handle(record)
def setLevel(self, level):
"""Set the logging level of this logger.
If ``logging.Logger.setLevel`` is called, all ``logging.Logger``
instances managed by ``logging.Manager`` will clear their caches. Since
``MMLogger`` is no longer managed by ``logging.Manager``,
``MMLogger`` overrides this method to clear the caches of all
``MMLogger`` instances managed by :obj:`ManagerMixin`.
level must be an int or a str.
"""
self.level = logging._checkLevel(level)
_accquire_lock()
# The same logic as `logging.Manager._clear_cache`.
for logger in MMLogger._instance_dict.values():
logger._cache.clear()
_release_lock()
| (name: str, logger_name='mmengine', log_file: Optional[str] = None, log_level: Union[int, str] = 'INFO', file_mode: str = 'w', distributed=False, file_handler_cfg: Optional[dict] = None) |
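A hedged sketch of the typical MMLogger workflow described above: create a named instance once, then retrieve the same object anywhere by name. The log file path is hypothetical.
from mmengine.logging import MMLogger

logger = MMLogger.get_instance('demo', log_file='/tmp/demo.log', log_level='INFO')
logger.info('training started')            # colored on stdout, plain in the file
assert MMLogger.get_instance('demo') is logger
print(logger.log_file)                     # resolved path of the log file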
730,159 | mmengine.logging.logger | __init__ | null | def __init__(self,
name: str,
logger_name='mmengine',
log_file: Optional[str] = None,
log_level: Union[int, str] = 'INFO',
file_mode: str = 'w',
distributed=False,
file_handler_cfg: Optional[dict] = None):
Logger.__init__(self, logger_name)
ManagerMixin.__init__(self, name)
# Get rank in DDP mode.
if isinstance(log_level, str):
log_level = logging._nameToLevel[log_level]
global_rank = _get_rank()
device_id = _get_device_id()
# Config stream_handler. If `rank != 0`, stream_handler can only
# export ERROR logs.
stream_handler = logging.StreamHandler(stream=sys.stdout)
# `StreamHandler` record month, day, hour, minute, and second
# timestamp.
stream_handler.setFormatter(
MMFormatter(color=True, datefmt='%m/%d %H:%M:%S'))
# Only rank0 `StreamHandler` will log messages below error level.
if global_rank == 0:
stream_handler.setLevel(log_level)
else:
stream_handler.setLevel(logging.ERROR)
stream_handler.addFilter(FilterDuplicateWarning(logger_name))
self.handlers.append(stream_handler)
if log_file is not None:
world_size = _get_world_size()
is_distributed = (log_level <= logging.DEBUG
or distributed) and world_size > 1
if is_distributed:
filename, suffix = osp.splitext(osp.basename(log_file))
hostname = _get_host_info()
if hostname:
filename = (f'{filename}_{hostname}_device{device_id}_'
f'rank{global_rank}{suffix}')
else:
# Omit hostname if it is empty
filename = (f'{filename}_device{device_id}_'
f'rank{global_rank}{suffix}')
log_file = osp.join(osp.dirname(log_file), filename)
# Save multi-ranks logs if distributed is True. The logs of rank0
# will always be saved.
if global_rank == 0 or is_distributed:
if file_handler_cfg is not None:
assert 'type' in file_handler_cfg
file_handler_type = file_handler_cfg.pop('type')
file_handlers_map = _get_logging_file_handlers()
if file_handler_type in file_handlers_map:
file_handler_cls = file_handlers_map[file_handler_type]
file_handler_cfg.setdefault('filename', log_file)
file_handler = file_handler_cls(**file_handler_cfg)
else:
raise ValueError('`logging.handlers` does not '
f'contain {file_handler_type}')
else:
# Here, the default behavior of the official
# logger is 'a'. Thus, we provide an interface to
# change the file mode to the default behavior.
# `FileHandler` is not supported to have colors,
# otherwise it will appear garbled.
file_handler = logging.FileHandler(log_file, file_mode)
# `StreamHandler` record year, month, day hour, minute,
# and second timestamp. file_handler will only record logs
# without color to avoid garbled code saved in files.
file_handler.setFormatter(
MMFormatter(color=False, datefmt='%Y/%m/%d %H:%M:%S'))
file_handler.setLevel(log_level)
file_handler.addFilter(FilterDuplicateWarning(logger_name))
self.handlers.append(file_handler)
self._log_file = log_file
| (self, name: str, logger_name='mmengine', log_file: Optional[str] = None, log_level: Union[int, str] = 'INFO', file_mode: str = 'w', distributed=False, file_handler_cfg: Optional[dict] = None) |
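The distributed branch of __init__ above rewrites the log filename to embed host, device and rank. A standalone sketch of that string manipulation, with assumed runtime values:
import os.path as osp

log_file = '/work_dir/out.log'                    # hypothetical input
hostname, device_id, global_rank = 'node1', 0, 2  # assumed runtime values

filename, suffix = osp.splitext(osp.basename(log_file))
filename = f'{filename}_{hostname}_device{device_id}_rank{global_rank}{suffix}'
print(osp.join(osp.dirname(log_file), filename))  # /work_dir/out_node1_device0_rank2.log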
730,165 | mmengine.logging.logger | callHandlers | Pass a record to all relevant handlers.
Override ``callHandlers`` method in ``logging.Logger`` to avoid
multiple warning messages in DDP mode. Loop through the handlers of
the logger instance only; unlike the base implementation, handlers of
parent loggers are not consulted, so an unhandled record is not output.
Args:
record (LogRecord): A ``LogRecord`` instance contains logged
message.
| def callHandlers(self, record: LogRecord) -> None:
"""Pass a record to all relevant handlers.
Override ``callHandlers`` method in ``logging.Logger`` to avoid
multiple warning messages in DDP mode. Loop through the handlers of
the logger instance only; unlike the base implementation, handlers of
parent loggers are not consulted, so an unhandled record is not output.
Args:
record (LogRecord): A ``LogRecord`` instance contains logged
message.
"""
for handler in self.handlers:
if record.levelno >= handler.level:
handler.handle(record)
| (self, record: logging.LogRecord) -> NoneType |
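Because the override iterates only over the instance's own handlers, a record below every handler's level is dropped instead of propagating to parent loggers. A minimal standard-library illustration of the same loop:
import logging
import sys

logger = logging.Logger('demo')                  # bare Logger, no hierarchy walk
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARNING)
logger.addHandler(handler)

record = logger.makeRecord('demo', logging.INFO, __file__, 0, 'info msg', (), None)
for h in logger.handlers:                        # same loop as the override
    if record.levelno >= h.level:
        h.handle(record)                         # skipped here: INFO < WARNING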
730,183 | mmengine.logging.logger | setLevel | Set the logging level of this logger.
If ``logging.Logger.setLevel`` is called, all ``logging.Logger``
instances managed by ``logging.Manager`` will clear their caches. Since
``MMLogger`` is no longer managed by ``logging.Manager``,
``MMLogger`` overrides this method to clear the caches of all
``MMLogger`` instances managed by :obj:`ManagerMixin`.
level must be an int or a str.
| def setLevel(self, level):
"""Set the logging level of this logger.
If ``logging.Logger.setLevel`` is called, all ``logging.Logger``
instances managed by ``logging.Manager`` will clear their caches. Since
``MMLogger`` is no longer managed by ``logging.Manager``,
``MMLogger`` overrides this method to clear the caches of all
``MMLogger`` instances managed by :obj:`ManagerMixin`.
level must be an int or a str.
"""
self.level = logging._checkLevel(level)
_accquire_lock()
# The same logic as `logging.Manager._clear_cache`.
for logger in MMLogger._instance_dict.values():
logger._cache.clear()
_release_lock()
| (self, level) |
730,186 | mmengine.utils.manager | ManagerMeta | The metaclass for global accessible class.
The subclasses inheriting from ``ManagerMeta`` will manage their
own ``_instance_dict`` and root instances. The constructors of subclasses
must contain the ``name`` argument.
Examples:
>>> class SubClass1(metaclass=ManagerMeta):
>>> def __init__(self, *args, **kwargs):
>>> pass
AssertionError: <class '__main__.SubClass1'>.__init__ must have the
name argument.
>>> class SubClass2(metaclass=ManagerMeta):
>>> def __init__(self, name):
>>> pass
>>> # valid format.
| class ManagerMeta(type):
"""The metaclass for global accessible class.
The subclasses inheriting from ``ManagerMeta`` will manage their
own ``_instance_dict`` and root instances. The constructors of subclasses
must contain the ``name`` argument.
Examples:
>>> class SubClass1(metaclass=ManagerMeta):
>>> def __init__(self, *args, **kwargs):
>>> pass
AssertionError: <class '__main__.SubClass1'>.__init__ must have the
name argument.
>>> class SubClass2(metaclass=ManagerMeta):
>>> def __init__(self, name):
>>> pass
>>> # valid format.
"""
def __init__(cls, *args):
cls._instance_dict = OrderedDict()
params = inspect.getfullargspec(cls)
params_names = params[0] if params[0] else []
assert 'name' in params_names, f'{cls} must have the `name` argument'
super().__init__(*args)
| (*args) |
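A self-contained sketch of the metaclass contract, restating ManagerMeta from the record above: a subclass whose constructor lacks a `name` argument fails at class-creation time, because inspect.getfullargspec is run on the class inside the metaclass __init__.
import inspect
from collections import OrderedDict

class ManagerMeta(type):                         # restated from the record above
    def __init__(cls, *args):
        cls._instance_dict = OrderedDict()
        params = inspect.getfullargspec(cls)
        assert 'name' in (params[0] or []), f'{cls} must have the `name` argument'
        super().__init__(*args)

class Good(metaclass=ManagerMeta):               # passes the check
    def __init__(self, name):
        self.name = name

try:
    class Bad(metaclass=ManagerMeta):            # raises when the class is built
        def __init__(self, value):
            self.value = value
except AssertionError as e:
    print(e)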
730,187 | mmengine.utils.manager | __init__ | null | def __init__(cls, *args):
cls._instance_dict = OrderedDict()
params = inspect.getfullargspec(cls)
params_names = params[0] if params[0] else []
assert 'name' in params_names, f'{cls} must have the `name` argument'
super().__init__(*args)
| (cls, *args) |
730,188 | mmengine.utils.manager | ManagerMixin | ``ManagerMixin`` is the base class for classes that have global access
requirements.
The subclasses inheriting from ``ManagerMixin`` can get their
global instances.
Examples:
>>> class GlobalAccessible(ManagerMixin):
>>> def __init__(self, name=''):
>>> super().__init__(name)
>>>
>>> GlobalAccessible.get_instance('name')
>>> instance_1 = GlobalAccessible.get_instance('name')
>>> instance_2 = GlobalAccessible.get_instance('name')
>>> assert id(instance_1) == id(instance_2)
Args:
name (str): Name of the instance. Defaults to ''.
| class ManagerMixin(metaclass=ManagerMeta):
"""``ManagerMixin`` is the base class for classes that have global access
requirements.
The subclasses inheriting from ``ManagerMixin`` can get their
global instances.
Examples:
>>> class GlobalAccessible(ManagerMixin):
>>> def __init__(self, name=''):
>>> super().__init__(name)
>>>
>>> GlobalAccessible.get_instance('name')
>>> instance_1 = GlobalAccessible.get_instance('name')
>>> instance_2 = GlobalAccessible.get_instance('name')
>>> assert id(instance_1) == id(instance_2)
Args:
name (str): Name of the instance. Defaults to ''.
"""
def __init__(self, name: str = '', **kwargs):
assert isinstance(name, str) and name, \
'name argument must be a non-empty string.'
self._instance_name = name
@classmethod
def get_instance(cls: Type[T], name: str, **kwargs) -> T:
"""Get subclass instance by name if the name exists.
If corresponding name instance has not been created, ``get_instance``
will create an instance, otherwise ``get_instance`` will return the
corresponding instance.
Examples
>>> instance1 = GlobalAccessible.get_instance('name1')
>>> # Create name1 instance.
>>> instance1.instance_name
name1
>>> instance2 = GlobalAccessible.get_instance('name1')
>>> # Get name1 instance.
>>> assert id(instance1) == id(instance2)
Args:
name (str): Name of the instance.
Returns:
object: The instance corresponding to ``name``.
"""
_accquire_lock()
assert isinstance(name, str), \
f'type of name should be str, but got {type(name)}'
instance_dict = cls._instance_dict # type: ignore
# Get the instance by name.
if name not in instance_dict:
instance = cls(name=name, **kwargs) # type: ignore
instance_dict[name] = instance # type: ignore
elif kwargs:
warnings.warn(
f'{cls} instance named {name} has already been created, '
'so any extra arguments passed to `get_instance` are '
'ignored.')
# Get latest instantiated instance or root instance.
_release_lock()
return instance_dict[name]
@classmethod
def get_current_instance(cls):
"""Get latest created instance.
Before calling ``get_current_instance``, the subclass must have called
``get_instance(xxx)`` at least once.
Examples
>>> instance = GlobalAccessible.get_current_instance()
RuntimeError: Before calling GlobalAccessible.get_current_instance(), you should call get_instance(name=xxx) at least once.
>>> instance = GlobalAccessible.get_instance('name1')
>>> instance.instance_name
name1
>>> instance = GlobalAccessible.get_current_instance()
>>> instance.instance_name
name1
Returns:
object: Latest created instance.
"""
_accquire_lock()
if not cls._instance_dict:
raise RuntimeError(
f'Before calling {cls.__name__}.get_current_instance(), you '
'should call get_instance(name=xxx) at least once.')
name = next(iter(reversed(cls._instance_dict)))
_release_lock()
return cls._instance_dict[name]
@classmethod
def check_instance_created(cls, name: str) -> bool:
"""Check whether the name corresponding instance exists.
Args:
name (str): Name of instance.
Returns:
bool: Whether the name corresponding instance exists.
"""
return name in cls._instance_dict
@property
def instance_name(self) -> str:
"""Get the name of instance.
Returns:
str: Name of instance.
"""
return self._instance_name
| (name: str = '', **kwargs) |
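A hedged sketch of the global-instance pattern with a hypothetical subclass; get_instance creates the object on first access and returns the cached one afterwards.
from mmengine.utils import ManagerMixin

class GlobalCounter(ManagerMixin):               # hypothetical subclass
    def __init__(self, name: str = '', **kwargs):
        super().__init__(name)
        self.count = 0

c1 = GlobalCounter.get_instance('main')          # created on first access
c1.count += 1
c2 = GlobalCounter.get_instance('main')          # cached object returned
assert c1 is c2 and c2.count == 1
assert GlobalCounter.get_current_instance() is c1
assert GlobalCounter.check_instance_created('main')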
730,189 | mmengine.utils.manager | __init__ | null | def __init__(self, name: str = '', **kwargs):
assert isinstance(name, str) and name, \
'name argument must be a non-empty string.'
self._instance_name = name
| (self, name: str = '', **kwargs) |
730,190 | mmengine.fileio.backends.memcached_backend | MemcachedBackend | Memcached storage backend.
Attributes:
server_list_cfg (str): Config file for memcached server list.
client_cfg (str): Config file for memcached client.
sys_path (str, optional): Additional path to be appended to `sys.path`.
Defaults to None.
| class MemcachedBackend(BaseStorageBackend):
"""Memcached storage backend.
Attributes:
server_list_cfg (str): Config file for memcached server list.
client_cfg (str): Config file for memcached client.
sys_path (str, optional): Additional path to be appended to `sys.path`.
Defaults to None.
"""
def __init__(self, server_list_cfg, client_cfg, sys_path=None):
if sys_path is not None:
import sys
sys.path.append(sys_path)
try:
import mc
except ImportError:
raise ImportError(
'Please install memcached to enable MemcachedBackend.')
self.server_list_cfg = server_list_cfg
self.client_cfg = client_cfg
self._client = mc.MemcachedClient.GetInstance(self.server_list_cfg,
self.client_cfg)
# mc.pyvector serves as a pointer which points to a memory cache
self._mc_buffer = mc.pyvector()
def get(self, filepath: Union[str, Path]):
"""Get values according to the filepath.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> server_list_cfg = '/path/of/server_list.conf'
>>> client_cfg = '/path/of/mc.conf'
>>> backend = MemcachedBackend(server_list_cfg, client_cfg)
>>> backend.get('/path/of/file')
b'hello world'
"""
filepath = str(filepath)
import mc
self._client.Get(filepath, self._mc_buffer)
value_buf = mc.ConvertBuffer(self._mc_buffer)
return value_buf
def get_text(self, filepath, encoding=None):
raise NotImplementedError
| (server_list_cfg, client_cfg, sys_path=None) |
730,191 | mmengine.fileio.backends.memcached_backend | __init__ | null | def __init__(self, server_list_cfg, client_cfg, sys_path=None):
if sys_path is not None:
import sys
sys.path.append(sys_path)
try:
import mc
except ImportError:
raise ImportError(
'Please install memcached to enable MemcachedBackend.')
self.server_list_cfg = server_list_cfg
self.client_cfg = client_cfg
self._client = mc.MemcachedClient.GetInstance(self.server_list_cfg,
self.client_cfg)
# mc.pyvector serves as a pointer which points to a memory cache
self._mc_buffer = mc.pyvector()
| (self, server_list_cfg, client_cfg, sys_path=None) |
730,192 | mmengine.fileio.backends.memcached_backend | get | Get values according to the filepath.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> server_list_cfg = '/path/of/server_list.conf'
>>> client_cfg = '/path/of/mc.conf'
>>> backend = MemcachedBackend(server_list_cfg, client_cfg)
>>> backend.get('/path/of/file')
b'hello world'
| def get(self, filepath: Union[str, Path]):
"""Get values according to the filepath.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Expected bytes object.
Examples:
>>> server_list_cfg = '/path/of/server_list.conf'
>>> client_cfg = '/path/of/mc.conf'
>>> backend = MemcachedBackend(server_list_cfg, client_cfg)
>>> backend.get('/path/of/file')
b'hello world'
"""
filepath = str(filepath)
import mc
self._client.Get(filepath, self._mc_buffer)
value_buf = mc.ConvertBuffer(self._mc_buffer)
return value_buf
| (self, filepath: Union[str, pathlib.Path]) |
730,194 | mmengine.logging.message_hub | MessageHub | Message hub for component interaction. MessageHub is created and
accessed in the same way as ManagerMixin.
``MessageHub`` will record log information and runtime information. The
log information refers to the learning rate, loss, etc. of the model
during training phase, which will be stored as ``HistoryBuffer``. The
runtime information refers to the iteration count, meta information of
the runner, etc., which will be overwritten by the next update.
Args:
name (str): Name of message hub used to get corresponding instance
globally.
log_scalars (dict, optional): Each key-value pair in the
dictionary is the name of the log information such as "loss", "lr",
"metric" and their corresponding values. The type of value must be
HistoryBuffer. Defaults to None.
runtime_info (dict, optional): Each key-value pair in the
dictionary is the name of the runtime information and their
corresponding values. Defaults to None.
resumed_keys (dict, optional): Each key-value pair in the
dictionary decides whether the key in :attr:`_log_scalars` and
:attr:`_runtime_info` will be serialized.
Note:
Key in :attr:`_resumed_keys` belongs to :attr:`_log_scalars` or
:attr:`_runtime_info`. The corresponding value cannot be set
repeatedly.
Examples:
>>> # create empty `MessageHub`.
>>> message_hub1 = MessageHub('name')
>>> log_scalars = dict(loss=HistoryBuffer())
>>> runtime_info = dict(task='task')
>>> resumed_keys = dict(loss=True)
>>> # create `MessageHub` from data.
>>> message_hub2 = MessageHub(
>>> name='name',
>>> log_scalars=log_scalars,
>>> runtime_info=runtime_info,
>>> resumed_keys=resumed_keys)
| class MessageHub(ManagerMixin):
"""Message hub for component interaction. MessageHub is created and
accessed in the same way as ManagerMixin.
``MessageHub`` will record log information and runtime information. The
log information refers to the learning rate, loss, etc. of the model
during training phase, which will be stored as ``HistoryBuffer``. The
runtime information refers to the iteration count, meta information of
the runner, etc., which will be overwritten by the next update.
Args:
name (str): Name of message hub used to get corresponding instance
globally.
log_scalars (dict, optional): Each key-value pair in the
dictionary is the name of the log information such as "loss", "lr",
"metric" and their corresponding values. The type of value must be
HistoryBuffer. Defaults to None.
runtime_info (dict, optional): Each key-value pair in the
dictionary is the name of the runtime information and their
corresponding values. Defaults to None.
resumed_keys (dict, optional): Each key-value pair in the
dictionary decides whether the key in :attr:`_log_scalars` and
:attr:`_runtime_info` will be serialized.
Note:
Key in :attr:`_resumed_keys` belongs to :attr:`_log_scalars` or
:attr:`_runtime_info`. The corresponding value cannot be set
repeatedly.
Examples:
>>> # create empty `MessageHub`.
>>> message_hub1 = MessageHub('name')
>>> log_scalars = dict(loss=HistoryBuffer())
>>> runtime_info = dict(task='task')
>>> resumed_keys = dict(loss=True)
>>> # create `MessageHub` from data.
>>> message_hub2 = MessageHub(
>>> name='name',
>>> log_scalars=log_scalars,
>>> runtime_info=runtime_info,
>>> resumed_keys=resumed_keys)
"""
def __init__(self,
name: str,
log_scalars: Optional[dict] = None,
runtime_info: Optional[dict] = None,
resumed_keys: Optional[dict] = None):
super().__init__(name)
self._log_scalars = self._parse_input('log_scalars', log_scalars)
self._runtime_info = self._parse_input('runtime_info', runtime_info)
self._resumed_keys = self._parse_input('resumed_keys', resumed_keys)
for value in self._log_scalars.values():
assert isinstance(value, HistoryBuffer), \
("The type of log_scalars'value must be HistoryBuffer, but "
f'got {type(value)}')
for key in self._resumed_keys.keys():
assert key in self._log_scalars or key in self._runtime_info, \
('Key in `resumed_keys` must be contained in `log_scalars` or '
f'`runtime_info`, but got {key}')
@classmethod
def get_current_instance(cls) -> 'MessageHub':
"""Get latest created ``MessageHub`` instance.
:obj:`MessageHub` can call :meth:`get_current_instance` before any
instance has been created, and return a message hub with the instance
name "mmengine".
Returns:
MessageHub: Empty ``MessageHub`` instance.
"""
if not cls._instance_dict:
cls.get_instance('mmengine')
return super().get_current_instance()
def update_scalar(self,
key: str,
value: Union[int, float, np.ndarray, 'torch.Tensor'],
count: int = 1,
resumed: bool = True) -> None:
"""Update :attr:_log_scalars.
Update the ``HistoryBuffer`` in :attr:`_log_scalars`. If a ``HistoryBuffer``
for the corresponding key has been created, ``value`` and ``count`` are
passed to ``HistoryBuffer.update``. Otherwise, ``update_scalar``
will create a ``HistoryBuffer`` with ``value`` and ``count`` via the
constructor of ``HistoryBuffer``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> # create loss `HistoryBuffer` with value=1, count=1
>>> message_hub.update_scalar('loss', 1)
>>> # update loss `HistoryBuffer` with value
>>> message_hub.update_scalar('loss', 3)
>>> message_hub.update_scalar('loss', 3, resumed=False)
AssertionError: loss used to be true, but got false now. resumed
keys cannot be modified repeatedly.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Args:
key (str): Key of ``HistoryBuffer``.
value (torch.Tensor or np.ndarray or int or float): Value of log.
count (int): Accumulation
times of the log. Defaults to 1. `count` will be used in smoothed
statistics.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed. Defaults to True.
"""
self._set_resumed_keys(key, resumed)
checked_value = self._get_valid_value(value)
assert isinstance(count, int), (
f'The type of count must be int, but got {type(count)}: {count}')
if key in self._log_scalars:
self._log_scalars[key].update(checked_value, count)
else:
self._log_scalars[key] = HistoryBuffer([checked_value], [count])
def update_scalars(self, log_dict: dict, resumed: bool = True) -> None:
"""Update :attr:`_log_scalars` with a dict.
``update_scalars`` iterates through each pair of log_dict key-value,
and calls ``update_scalar``. If type of value is dict, the value should
be ``dict(value=xxx) or dict(value=xxx, count=xxx)``. Item in
``log_dict`` has the same resume option.
Note:
The ``resumed`` argument needs to be consistent for the same
``log_dict``.
Args:
log_dict (dict): Used for batch updating :attr:`_log_scalars`.
resumed (bool): Whether all ``HistoryBuffer`` referred in
log_dict should be resumed. Defaults to True.
Examples:
>>> message_hub = MessageHub.get_instance('mmengine')
>>> log_dict = dict(a=1, b=2, c=3)
>>> message_hub.update_scalars(log_dict)
>>> # The default count of `a`, `b` and `c` is 1.
>>> log_dict = dict(a=1, b=2, c=dict(value=1, count=2))
>>> message_hub.update_scalars(log_dict)
>>> # The count of `c` is 2.
"""
assert isinstance(log_dict, dict), ('`log_dict` must be a dict, '
f'but got {type(log_dict)}')
for log_name, log_val in log_dict.items():
if isinstance(log_val, dict):
assert 'value' in log_val, \
f'value must be defined in {log_val}'
count = self._get_valid_value(log_val.get('count', 1))
value = log_val['value']
else:
count = 1
value = log_val
assert isinstance(count,
int), ('The type of count must be int, but got '
f'{type(count)}: {count}')
self.update_scalar(log_name, value, count, resumed)
def update_info(self, key: str, value: Any, resumed: bool = True) -> None:
"""Update runtime information.
The key corresponding runtime information will be overwritten each
time calling ``update_info``.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info('iter', 100)
Args:
key (str): Key of runtime information.
value (Any): Value of runtime information.
resumed (bool): Whether the corresponding runtime information
could be resumed. Defaults to True.
"""
self._set_resumed_keys(key, resumed)
self._runtime_info[key] = value
def pop_info(self, key: str, default: Optional[Any] = None) -> Any:
"""Remove runtime information by key. If the key does not exist, this
method will return the default value.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The runtime information if the key exists.
"""
return self._runtime_info.pop(key, default)
def update_info_dict(self, info_dict: dict, resumed: bool = True) -> None:
"""Update runtime information with dictionary.
The key corresponding runtime information will be overwritten each
time calling ``update_info``.
Note:
The ``resumed`` argument needs to be consistent for the same
``info_dict``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info_dict({'iter': 100})
Args:
info_dict (dict): Runtime information dictionary.
resumed (bool): Whether the corresponding runtime information
could be resumed. Defaults to True.
"""
assert isinstance(info_dict, dict), ('`info_dict` must be a dict, '
f'but got {type(info_dict)}')
for key, value in info_dict.items():
self.update_info(key, value, resumed=resumed)
def _set_resumed_keys(self, key: str, resumed: bool) -> None:
"""Set corresponding resumed keys.
This method is called by ``update_scalar``, ``update_scalars`` and
``update_info`` to set the corresponding key to True or False in
:attr:`_resumed_keys`.
Args:
key (str): Key of :attr:`_log_scalars` or :attr:`_runtime_info`.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed.
"""
if key not in self._resumed_keys:
self._resumed_keys[key] = resumed
else:
assert self._resumed_keys[key] == resumed, \
f'{key} used to be {self._resumed_keys[key]}, but got ' \
f'{resumed} now. resumed keys cannot be modified repeatedly.'
@property
def log_scalars(self) -> OrderedDict:
"""Get all ``HistoryBuffer`` instances.
Note:
Considering the large memory footprint of history buffers after
training, this property returns a reference to the underlying
buffers rather than a copy.
Returns:
OrderedDict: All ``HistoryBuffer`` instances.
"""
return self._log_scalars
@property
def runtime_info(self) -> OrderedDict:
"""Get all runtime information.
Returns:
OrderedDict: All runtime information (returned by reference).
"""
return self._runtime_info
def get_scalar(self, key: str) -> HistoryBuffer:
"""Get ``HistoryBuffer`` instance by key.
Note:
Considering the large memory footprint of history buffers in the
post-training phase, :meth:`get_scalar` will return a reference to the
history buffer rather than a copy.
Args:
key (str): Key of ``HistoryBuffer``.
Returns:
HistoryBuffer: Corresponding ``HistoryBuffer`` instance if the
key exists.
"""
if key not in self.log_scalars:
raise KeyError(f'{key} is not found in MessageHub.log_scalars: '
f'instance name is: {self.instance_name}')
return self.log_scalars[key]
def get_info(self, key: str, default: Optional[Any] = None) -> Any:
"""Get runtime information by key. If the key does not exist, this
method will return default information.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The corresponding runtime information if the key exists
(returned by reference, not a copy).
"""
if key not in self.runtime_info:
return default
else:
# TODO: There are restrictions on objects that can be saved
# return copy.deepcopy(self._runtime_info[key])
return self._runtime_info[key]
def _get_valid_value(
self,
value: Union['torch.Tensor', np.ndarray, np.number, int, float],
) -> Union[int, float]:
"""Convert value to python built-in type.
Args:
value (torch.Tensor or np.ndarray or np.number or int or float):
value of log.
Returns:
float or int: python built-in type value.
"""
if isinstance(value, (np.ndarray, np.number)):
assert value.size == 1
value = value.item()
elif isinstance(value, (int, float)):
value = value
else:
# check whether value is torch.Tensor but don't want
# to import torch in this file
assert hasattr(value, 'numel') and value.numel() == 1
value = value.item()
return value # type: ignore
def state_dict(self) -> dict:
"""Returns a dictionary containing log scalars, runtime information and
resumed keys, which should be resumed.
The returned ``state_dict`` can be loaded by :meth:`load_state_dict`.
Returns:
dict: A dictionary contains ``log_scalars``, ``runtime_info`` and
``resumed_keys``.
"""
saved_scalars = OrderedDict()
saved_info = OrderedDict()
for key, value in self._log_scalars.items():
if self._resumed_keys.get(key, False):
saved_scalars[key] = copy.deepcopy(value)
for key, value in self._runtime_info.items():
if self._resumed_keys.get(key, False):
try:
saved_info[key] = copy.deepcopy(value)
except: # noqa: E722
print_log(
f'{key} in message_hub cannot be copied, '
f'just return its reference. ',
logger='current',
level=logging.WARNING)
saved_info[key] = value
return dict(
log_scalars=saved_scalars,
runtime_info=saved_info,
resumed_keys=self._resumed_keys)
def load_state_dict(self, state_dict: Union['MessageHub', dict]) -> None:
"""Loads log scalars, runtime information and resumed keys from
``state_dict`` or ``message_hub``.
If ``state_dict`` is a dictionary returned by :meth:`state_dict`, it
will only make copies of data which should be resumed from the source
``message_hub``.
If ``state_dict`` is a ``message_hub`` instance, it will make copies of
all data from the source message_hub. We suggest loading data from
``dict`` rather than a ``MessageHub`` instance.
Args:
state_dict (dict or MessageHub): A dictionary containing the keys
``log_scalars``, ``runtime_info`` and ``resumed_keys``, or a
MessageHub instance.
"""
if isinstance(state_dict, dict):
for key in ('log_scalars', 'runtime_info', 'resumed_keys'):
assert key in state_dict, (
'The loaded `state_dict` of `MessageHub` must contain '
f'key: `{key}`')
# The old `MessageHub` could save non-HistoryBuffer `log_scalars`,
# therefore the loaded `log_scalars` needs to be filtered.
for key, value in state_dict['log_scalars'].items():
if not isinstance(value, HistoryBuffer):
print_log(
f'{key} in message_hub is not HistoryBuffer, '
f'just skip resuming it.',
logger='current',
level=logging.WARNING)
continue
self.log_scalars[key] = value
for key, value in state_dict['runtime_info'].items():
try:
self._runtime_info[key] = copy.deepcopy(value)
except: # noqa: E722
print_log(
f'{key} in message_hub cannot be copied, '
f'just return its reference.',
logger='current',
level=logging.WARNING)
self._runtime_info[key] = value
for key, value in state_dict['resumed_keys'].items():
if key not in set(self.log_scalars.keys()) | \
set(self._runtime_info.keys()):
print_log(
f'resumed key: {key} is not defined in message_hub, '
f'just skip resuming this key.',
logger='current',
level=logging.WARNING)
continue
elif not value:
print_log(
f'Although resumed key: {key} is False, {key} '
'will still be loaded this time. This key will '
'not be saved by the next calling of '
'`MessageHub.state_dict()`',
logger='current',
level=logging.WARNING)
self._resumed_keys[key] = value
# Since some checkpoints saved serialized `message_hub` instance,
# `load_state_dict` support loading `message_hub` instance for
# compatibility
else:
self._log_scalars = copy.deepcopy(state_dict._log_scalars)
self._runtime_info = copy.deepcopy(state_dict._runtime_info)
self._resumed_keys = copy.deepcopy(state_dict._resumed_keys)
def _parse_input(self, name: str, value: Any) -> OrderedDict:
"""Parse input value.
Args:
name (str): name of input value.
value (Any): Input value.
Returns:
dict: Parsed input value.
"""
if value is None:
return OrderedDict()
elif isinstance(value, dict):
return OrderedDict(value)
else:
raise TypeError(f'{name} should be a dict or `None`, but '
f'got {type(value)}')
| (name: str, log_scalars: Optional[dict] = None, runtime_info: Optional[dict] = None, resumed_keys: Optional[dict] = None) |
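A hedged end-to-end sketch of the hub: log scalars accumulate into HistoryBuffer, runtime info is overwritten in place, and state_dict only serializes resumable keys.
from mmengine.logging import MessageHub

hub = MessageHub.get_instance('demo_hub')
hub.update_scalar('loss', 0.9)
hub.update_scalar('loss', 0.7)                   # appended to the same buffer
hub.update_info('iter', 2, resumed=False)        # overwritten on each call

print(hub.get_scalar('loss').mean())             # running mean, 0.8
print(hub.get_info('iter'))                      # 2
state = hub.state_dict()
assert 'iter' not in state['runtime_info']       # resumed=False keys are skipped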
730,195 | mmengine.logging.message_hub | __init__ | null | def __init__(self,
name: str,
log_scalars: Optional[dict] = None,
runtime_info: Optional[dict] = None,
resumed_keys: Optional[dict] = None):
super().__init__(name)
self._log_scalars = self._parse_input('log_scalars', log_scalars)
self._runtime_info = self._parse_input('runtime_info', runtime_info)
self._resumed_keys = self._parse_input('resumed_keys', resumed_keys)
for value in self._log_scalars.values():
assert isinstance(value, HistoryBuffer), \
("The type of log_scalars'value must be HistoryBuffer, but "
f'got {type(value)}')
for key in self._resumed_keys.keys():
assert key in self._log_scalars or key in self._runtime_info, \
('Key in `resumed_keys` must be contained in `log_scalars` or '
f'`runtime_info`, but got {key}')
| (self, name: str, log_scalars: Optional[dict] = None, runtime_info: Optional[dict] = None, resumed_keys: Optional[dict] = None) |
730,196 | mmengine.logging.message_hub | _get_valid_value | Convert value to python built-in type.
Args:
value (torch.Tensor or np.ndarray or np.number or int or float):
value of log.
Returns:
float or int: python built-in type value.
| def _get_valid_value(
self,
value: Union['torch.Tensor', np.ndarray, np.number, int, float],
) -> Union[int, float]:
"""Convert value to python built-in type.
Args:
value (torch.Tensor or np.ndarray or np.number or int or float):
value of log.
Returns:
float or int: python built-in type value.
"""
if isinstance(value, (np.ndarray, np.number)):
assert value.size == 1
value = value.item()
elif isinstance(value, (int, float)):
value = value
else:
# check whether value is torch.Tensor but don't want
# to import torch in this file
assert hasattr(value, 'numel') and value.numel() == 1
value = value.item()
return value # type: ignore
| (self, value: Union[ForwardRef('torch.Tensor'), numpy.ndarray, numpy.number, int, float]) -> Union[int, float] |
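A sketch of the conversions performed by _get_valid_value: one-element numpy values collapse to Python scalars, built-ins pass through, and anything else must behave like a one-element tensor.
import numpy as np
from mmengine.logging import MessageHub

hub = MessageHub.get_instance('demo_values')
assert hub._get_valid_value(np.float32(0.5)) == 0.5   # np.number -> float
assert hub._get_valid_value(np.array([3])) == 3       # one-element ndarray
assert hub._get_valid_value(7) == 7                   # built-ins pass through
# Any other object must expose numel() == 1 and item(), like a torch scalar.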
730,197 | mmengine.logging.message_hub | _parse_input | Parse input value.
Args:
name (str): name of input value.
value (Any): Input value.
Returns:
dict: Parsed input value.
| def _parse_input(self, name: str, value: Any) -> OrderedDict:
"""Parse input value.
Args:
name (str): name of input value.
value (Any): Input value.
Returns:
dict: Parsed input value.
"""
if value is None:
return OrderedDict()
elif isinstance(value, dict):
return OrderedDict(value)
else:
raise TypeError(f'{name} should be a dict or `None`, but '
f'got {type(value)}')
| (self, name: str, value: Any) -> collections.OrderedDict |
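A sketch of the normalisation in _parse_input: None becomes an empty OrderedDict, dicts are wrapped, and everything else is rejected.
from mmengine.logging import MessageHub

hub = MessageHub.get_instance('demo_parse')
assert hub._parse_input('runtime_info', None) == {}           # empty OrderedDict
assert hub._parse_input('runtime_info', {'iter': 1})['iter'] == 1
try:
    hub._parse_input('runtime_info', [('iter', 1)])           # list -> TypeError
except TypeError as e:
    print(e)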
730,198 | mmengine.logging.message_hub | _set_resumed_keys | Set corresponding resumed keys.
This method is called by ``update_scalar``, ``update_scalars`` and
``update_info`` to set the corresponding key to True or False in
:attr:`_resumed_keys`.
Args:
key (str): Key of :attr:`_log_scalars` or :attr:`_runtime_info`.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed.
| def _set_resumed_keys(self, key: str, resumed: bool) -> None:
"""Set corresponding resumed keys.
This method is called by ``update_scalar``, ``update_scalars`` and
``update_info`` to set the corresponding key to True or False in
:attr:`_resumed_keys`.
Args:
key (str): Key of :attr:`_log_scalars` or :attr:`_runtime_info`.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed.
"""
if key not in self._resumed_keys:
self._resumed_keys[key] = resumed
else:
assert self._resumed_keys[key] == resumed, \
f'{key} used to be {self._resumed_keys[key]}, but got ' \
f'{resumed} now. resumed keys cannot be modified repeatedly.'
| (self, key: str, resumed: bool) -> NoneType |
730,199 | mmengine.logging.message_hub | get_info | Get runtime information by key. If the key does not exist, this
method will return default information.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The corresponding runtime information if the key exists
(returned by reference, not a copy).
| def get_info(self, key: str, default: Optional[Any] = None) -> Any:
"""Get runtime information by key. If the key does not exist, this
method will return the default value.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The corresponding runtime information if the key exists.
"""
if key not in self.runtime_info:
return default
else:
# TODO: There are restrictions on objects that can be saved
# return copy.deepcopy(self._runtime_info[key])
return self._runtime_info[key]
| (self, key: str, default: Optional[Any] = None) -> Any |
730,200 | mmengine.logging.message_hub | get_scalar | Get ``HistoryBuffer`` instance by key.
Note:
Considering the large memory footprint of history buffers in the
post-training, :meth:`get_scalar` will return a reference to the
history buffer rather than a copy.
Args:
key (str): Key of ``HistoryBuffer``.
Returns:
HistoryBuffer: Corresponding ``HistoryBuffer`` instance if the
key exists.
| def get_scalar(self, key: str) -> HistoryBuffer:
"""Get ``HistoryBuffer`` instance by key.
Note:
Considering the large memory footprint of history buffers in the
post-training, :meth:`get_scalar` will return a reference to the
history buffer rather than a copy.
Args:
key (str): Key of ``HistoryBuffer``.
Returns:
HistoryBuffer: Corresponding ``HistoryBuffer`` instance if the
key exists.
"""
if key not in self.log_scalars:
raise KeyError(f'{key} is not found in MessageHub.log_scalars: '
f'instance name is: {MessageHub.instance_name}')
return self.log_scalars[key]
| (self, key: str) -> mmengine.logging.history_buffer.HistoryBuffer |
730,201 | mmengine.logging.message_hub | load_state_dict | Loads log scalars, runtime information and resumed keys from
``state_dict`` or ``message_hub``.
If ``state_dict`` is a dictionary returned by :meth:`state_dict`, it
will only make copies of data which should be resumed from the source
``message_hub``.
If ``state_dict`` is a ``message_hub`` instance, it will make copies of
all data from the source message_hub. We suggest loading data from
a ``dict`` rather than a ``MessageHub`` instance.
Args:
state_dict (dict or MessageHub): A dictionary containing keys
``log_scalars``, ``runtime_info`` and ``resumed_keys``, or a
MessageHub instance.
| def load_state_dict(self, state_dict: Union['MessageHub', dict]) -> None:
"""Loads log scalars, runtime information and resumed keys from
``state_dict`` or ``message_hub``.
If ``state_dict`` is a dictionary returned by :meth:`state_dict`, it
will only make copies of data which should be resumed from the source
``message_hub``.
If ``state_dict`` is a ``message_hub`` instance, it will make copies of
all data from the source message_hub. We suggest loading data from
a ``dict`` rather than a ``MessageHub`` instance.
Args:
state_dict (dict or MessageHub): A dictionary containing keys
``log_scalars``, ``runtime_info`` and ``resumed_keys``, or a
MessageHub instance.
"""
if isinstance(state_dict, dict):
for key in ('log_scalars', 'runtime_info', 'resumed_keys'):
assert key in state_dict, (
'The loaded `state_dict` of `MessageHub` must contain '
f'key: `{key}`')
# The old `MessageHub` could save non-HistoryBuffer `log_scalars`,
# therefore the loaded `log_scalars` needs to be filtered.
for key, value in state_dict['log_scalars'].items():
if not isinstance(value, HistoryBuffer):
print_log(
f'{key} in message_hub is not HistoryBuffer, '
f'just skip resuming it.',
logger='current',
level=logging.WARNING)
continue
self.log_scalars[key] = value
for key, value in state_dict['runtime_info'].items():
try:
self._runtime_info[key] = copy.deepcopy(value)
except: # noqa: E722
print_log(
f'{key} in message_hub cannot be copied, '
f'just return its reference.',
logger='current',
level=logging.WARNING)
self._runtime_info[key] = value
for key, value in state_dict['resumed_keys'].items():
if key not in set(self.log_scalars.keys()) | \
set(self._runtime_info.keys()):
print_log(
f'resumed key: {key} is not defined in message_hub, '
f'just skip resuming this key.',
logger='current',
level=logging.WARNING)
continue
elif not value:
print_log(
f'Although resumed key: {key} is False, {key} '
'will still be loaded this time. This key will '
'not be saved by the next calling of '
'`MessageHub.state_dict()`',
logger='current',
level=logging.WARNING)
self._resumed_keys[key] = value
# Since some checkpoints saved serialized `message_hub` instance,
# `load_state_dict` support loading `message_hub` instance for
# compatibility
else:
self._log_scalars = copy.deepcopy(state_dict._log_scalars)
self._runtime_info = copy.deepcopy(state_dict._runtime_info)
self._resumed_keys = copy.deepcopy(state_dict._resumed_keys)
| (self, state_dict: Union[mmengine.logging.message_hub.MessageHub, dict]) -> NoneType |
730,202 | mmengine.logging.message_hub | pop_info | Remove runtime information by key. If the key does not exist, this
method will return the default value.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The runtime information if the key exists.
| def pop_info(self, key: str, default: Optional[Any] = None) -> Any:
"""Remove runtime information by key. If the key does not exist, this
method will return the default value.
Args:
key (str): Key of runtime information.
default (Any, optional): The default returned value for the
given key.
Returns:
Any: The runtime information if the key exists.
"""
return self._runtime_info.pop(key, default)
| (self, key: str, default: Optional[Any] = None) -> Any |
730,203 | mmengine.logging.message_hub | state_dict | Returns a dictionary containing log scalars, runtime information and
resumed keys, which should be resumed.
The returned ``state_dict`` can be loaded by :meth:`load_state_dict`.
Returns:
dict: A dictionary contains ``log_scalars``, ``runtime_info`` and
``resumed_keys``.
| def state_dict(self) -> dict:
"""Returns a dictionary containing log scalars, runtime information and
resumed keys, which should be resumed.
The returned ``state_dict`` can be loaded by :meth:`load_state_dict`.
Returns:
dict: A dictionary contains ``log_scalars``, ``runtime_info`` and
``resumed_keys``.
"""
saved_scalars = OrderedDict()
saved_info = OrderedDict()
for key, value in self._log_scalars.items():
if self._resumed_keys.get(key, False):
saved_scalars[key] = copy.deepcopy(value)
for key, value in self._runtime_info.items():
if self._resumed_keys.get(key, False):
try:
saved_info[key] = copy.deepcopy(value)
except: # noqa: E722
print_log(
f'{key} in message_hub cannot be copied, '
f'just return its reference. ',
logger='current',
level=logging.WARNING)
saved_info[key] = value
return dict(
log_scalars=saved_scalars,
runtime_info=saved_info,
resumed_keys=self._resumed_keys)
| (self) -> dict |
730,204 | mmengine.logging.message_hub | update_info | Update runtime information.
The runtime information corresponding to the key will be overwritten
each time ``update_info`` is called.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info('iter', 100)
Args:
key (str): Key of runtime information.
value (Any): Value of runtime information.
resumed (bool): Whether the corresponding runtime information
could be resumed.
| def update_info(self, key: str, value: Any, resumed: bool = True) -> None:
"""Update runtime information.
The runtime information corresponding to the key will be overwritten
each time ``update_info`` is called.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info('iter', 100)
Args:
key (str): Key of runtime information.
value (Any): Value of runtime information.
resumed (bool): Whether the corresponding runtime information
could be resumed.
"""
self._set_resumed_keys(key, resumed)
self._runtime_info[key] = value
| (self, key: str, value: Any, resumed: bool = True) -> NoneType |
730,205 | mmengine.logging.message_hub | update_info_dict | Update runtime information with dictionary.
The runtime information corresponding to each key will be overwritten
each time ``update_info_dict`` is called.
Note:
The ``resumed`` argument needs to be consistent for the same
``info_dict``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info_dict({'iter': 100})
Args:
info_dict (dict): Runtime information dictionary.
resumed (bool): Whether the corresponding runtime information
could be resumed.
| def update_info_dict(self, info_dict: dict, resumed: bool = True) -> None:
"""Update runtime information with dictionary.
The runtime information corresponding to each key will be overwritten
each time ``update_info_dict`` is called.
Note:
The ``resumed`` argument needs to be consistent for the same
``info_dict``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> message_hub.update_info_dict({'iter': 100})
Args:
info_dict (dict): Runtime information dictionary.
resumed (bool): Whether the corresponding runtime information
could be resumed.
"""
assert isinstance(info_dict, dict), ('`info_dict` must be a dict, '
f'but got {type(info_dict)}')
for key, value in info_dict.items():
self.update_info(key, value, resumed=resumed)
| (self, info_dict: dict, resumed: bool = True) -> NoneType |
730,206 | mmengine.logging.message_hub | update_scalar | Update :attr:`_log_scalars`.
Update ``HistoryBuffer`` in :attr:`_log_scalars`. If the ``HistoryBuffer``
of the corresponding key has been created, ``value`` and ``count`` are the
arguments of ``HistoryBuffer.update``. Otherwise, ``update_scalar`` will
create a ``HistoryBuffer`` with ``value`` and ``count`` via the
constructor of ``HistoryBuffer``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> # create loss `HistoryBuffer` with value=1, count=1
>>> message_hub.update_scalar('loss', 1)
>>> # update loss `HistoryBuffer` with value
>>> message_hub.update_scalar('loss', 3)
>>> message_hub.update_scalar('loss', 3, resumed=False)
AssertionError: loss used to be True, but got False now. resumed
keys cannot be modified repeatedly.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Args:
key (str): Key of ``HistoryBuffer``.
value (torch.Tensor or np.ndarray or int or float): Value of log.
count (torch.Tensor or np.ndarray or int or float): Accumulation
times of log, defaults to 1. `count` will be used in smooth
statistics.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed. Defaults to True.
| def update_scalar(self,
key: str,
value: Union[int, float, np.ndarray, 'torch.Tensor'],
count: int = 1,
resumed: bool = True) -> None:
"""Update :attr:_log_scalars.
Update ``HistoryBuffer`` in :attr:`_log_scalars`. If corresponding key
``HistoryBuffer`` has been created, ``value`` and ``count`` is the
argument of ``HistoryBuffer.update``, Otherwise, ``update_scalar``
will create an ``HistoryBuffer`` with value and count via the
constructor of ``HistoryBuffer``.
Examples:
>>> message_hub = MessageHub(name='name')
>>> # create loss `HistoryBuffer` with value=1, count=1
>>> message_hub.update_scalar('loss', 1)
>>> # update loss `HistoryBuffer` with value
>>> message_hub.update_scalar('loss', 3)
>>> message_hub.update_scalar('loss', 3, resumed=False)
AssertionError: loss used to be True, but got False now. resumed
keys cannot be modified repeatedly.
Note:
The ``resumed`` argument needs to be consistent for the same
``key``.
Args:
key (str): Key of ``HistoryBuffer``.
value (torch.Tensor or np.ndarray or int or float): Value of log.
count (torch.Tensor or np.ndarray or int or float): Accumulation
times of log, defaults to 1. `count` will be used in smooth
statistics.
resumed (bool): Whether the corresponding ``HistoryBuffer``
could be resumed. Defaults to True.
"""
self._set_resumed_keys(key, resumed)
checked_value = self._get_valid_value(value)
assert isinstance(count, int), (
f'The type of count must be int, but got {type(count)}: {count}')
if key in self._log_scalars:
self._log_scalars[key].update(checked_value, count)
else:
self._log_scalars[key] = HistoryBuffer([checked_value], [count])
| (self, key: str, value: Union[int, float, numpy.ndarray, ForwardRef('torch.Tensor')], count: int = 1, resumed: bool = True) -> None |
730,207 | mmengine.logging.message_hub | update_scalars | Update :attr:`_log_scalars` with a dict.
``update_scalars`` iterates through each pair of log_dict key-value,
and calls ``update_scalar``. If type of value is dict, the value should
be ``dict(value=xxx) or dict(value=xxx, count=xxx)``. Item in
``log_dict`` has the same resume option.
Note:
The ``resumed`` argument needs to be consistent for the same
``log_dict``.
Args:
log_dict (dict): Used for batch updating :attr:`_log_scalars`.
resumed (bool): Whether all ``HistoryBuffer`` referred to in
``log_dict`` should be resumed. Defaults to True.
Examples:
>>> message_hub = MessageHub.get_instance('mmengine')
>>> log_dict = dict(a=1, b=2, c=3)
>>> message_hub.update_scalars(log_dict)
>>> # The default count of `a`, `b` and `c` is 1.
>>> log_dict = dict(a=1, b=2, c=dict(value=1, count=2))
>>> message_hub.update_scalars(log_dict)
>>> # The count of `c` is 2.
| def update_scalars(self, log_dict: dict, resumed: bool = True) -> None:
"""Update :attr:`_log_scalars` with a dict.
``update_scalars`` iterates through each key-value pair of ``log_dict``
and calls ``update_scalar``. If the type of a value is dict, the value
should be ``dict(value=xxx)`` or ``dict(value=xxx, count=xxx)``. Items in
``log_dict`` share the same resume option.
Note:
The ``resumed`` argument needs to be consistent for the same
``log_dict``.
Args:
log_dict (dict): Used for batch updating :attr:`_log_scalars`.
resumed (bool): Whether all ``HistoryBuffer`` referred to in
``log_dict`` should be resumed. Defaults to True.
Examples:
>>> message_hub = MessageHub.get_instance('mmengine')
>>> log_dict = dict(a=1, b=2, c=3)
>>> message_hub.update_scalars(log_dict)
>>> # The default count of `a`, `b` and `c` is 1.
>>> log_dict = dict(a=1, b=2, c=dict(value=1, count=2))
>>> message_hub.update_scalars(log_dict)
>>> # The count of `c` is 2.
"""
assert isinstance(log_dict, dict), ('`log_dict` must be a dict, '
f'but got {type(log_dict)}')
for log_name, log_val in log_dict.items():
if isinstance(log_val, dict):
assert 'value' in log_val, \
f'value must be defined in {log_val}'
count = self._get_valid_value(log_val.get('count', 1))
value = log_val['value']
else:
count = 1
value = log_val
assert isinstance(count, int), (
'The type of count must be int, but got '
f'{type(count)}: {count}')
self.update_scalar(log_name, value, count, resumed)
| (self, log_dict: dict, resumed: bool = True) -> NoneType |
730,208 | mmengine.fileio.backends.petrel_backend | PetrelBackend | Petrel storage backend (for internal usage).
PetrelBackend supports reading and writing data to multiple clusters.
If the file path contains the cluster name, PetrelBackend will read data
from specified cluster or write data to it. Otherwise, PetrelBackend will
access the default cluster.
Args:
path_mapping (dict, optional): Path mapping dict from local path to
Petrel path. When ``path_mapping={'src': 'dst'}``, ``src`` in
``filepath`` will be replaced by ``dst``. Defaults to None.
enable_mc (bool, optional): Whether to enable memcached support.
Defaults to True.
conf_path (str, optional): Config path of Petrel client. Default: None.
`New in version 0.3.3`.
Examples:
>>> backend = PetrelBackend()
>>> filepath1 = 'petrel://path/of/file'
>>> filepath2 = 'cluster-name:petrel://path/of/file'
>>> backend.get(filepath1) # get data from default cluster
>>> backend.get(filepath2) # get data from 'cluster-name' cluster
| class PetrelBackend(BaseStorageBackend):
"""Petrel storage backend (for internal usage).
PetrelBackend supports reading and writing data to multiple clusters.
If the file path contains the cluster name, PetrelBackend will read data
from specified cluster or write data to it. Otherwise, PetrelBackend will
access the default cluster.
Args:
path_mapping (dict, optional): Path mapping dict from local path to
Petrel path. When ``path_mapping={'src': 'dst'}``, ``src`` in
``filepath`` will be replaced by ``dst``. Defaults to None.
enable_mc (bool, optional): Whether to enable memcached support.
Defaults to True.
conf_path (str, optional): Config path of Petrel client. Default: None.
`New in version 0.3.3`.
Examples:
>>> backend = PetrelBackend()
>>> filepath1 = 'petrel://path/of/file'
>>> filepath2 = 'cluster-name:petrel://path/of/file'
>>> backend.get(filepath1) # get data from default cluster
>>> backend.get(filepath2) # get data from 'cluster-name' cluster
"""
def __init__(self,
path_mapping: Optional[dict] = None,
enable_mc: bool = True,
conf_path: Optional[str] = None):
try:
from petrel_client import client
except ImportError:
raise ImportError('Please install petrel_client to enable '
'PetrelBackend.')
self._client = client.Client(conf_path=conf_path, enable_mc=enable_mc)
assert isinstance(path_mapping, dict) or path_mapping is None
self.path_mapping = path_mapping
def _map_path(self, filepath: Union[str, Path]) -> str:
"""Map ``filepath`` to a string path whose prefix will be replaced by
:attr:`self.path_mapping`.
Args:
filepath (str or Path): Path to be mapped.
"""
filepath = str(filepath)
if self.path_mapping is not None:
for k, v in self.path_mapping.items():
filepath = filepath.replace(k, v, 1)
return filepath
def _format_path(self, filepath: str) -> str:
"""Convert a ``filepath`` to standard format of petrel oss.
If the ``filepath`` is concatenated by ``os.path.join``, in a Windows
environment, the ``filepath`` will be the format of
's3://bucket_name\\image.jpg'. By invoking :meth:`_format_path`, the
above ``filepath`` will be converted to 's3://bucket_name/image.jpg'.
Args:
filepath (str): Path to be formatted.
"""
return re.sub(r'\\+', '/', filepath)
def _replace_prefix(self, filepath: Union[str, Path]) -> str:
filepath = str(filepath)
return filepath.replace('petrel://', 's3://')
def get(self, filepath: Union[str, Path]) -> bytes:
"""Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Return bytes read from filepath.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get(filepath)
b'hello world'
"""
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
value = self._client.Get(filepath)
return value
def get_text(
self,
filepath: Union[str, Path],
encoding: str = 'utf-8',
) -> str:
"""Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get_text(filepath)
'hello world'
"""
return str(self.get(filepath), encoding=encoding)
def put(self, obj: bytes, filepath: Union[str, Path]) -> None:
"""Write bytes to a given ``filepath``.
Args:
obj (bytes): Data to be saved.
filepath (str or Path): Path to write data.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.put(b'hello world', filepath)
"""
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.put(filepath, obj)
def put_text(
self,
obj: str,
filepath: Union[str, Path],
encoding: str = 'utf-8',
) -> None:
"""Write text to a given ``filepath``.
Args:
obj (str): Data to be written.
filepath (str or Path): Path to write data.
encoding (str): The encoding format used to encode the ``obj``.
Defaults to 'utf-8'.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.put_text('hello world', filepath)
"""
self.put(bytes(obj, encoding=encoding), filepath)
def exists(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.exists(filepath)
True
"""
if not (has_method(self._client, 'contains')
and has_method(self._client, 'isdir')):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `contains` and `isdir` methods, please use a higher '
'version or dev branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.contains(filepath) or self._client.isdir(filepath)
def isdir(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/dir'
>>> backend.isdir(filepath)
True
"""
if not has_method(self._client, 'isdir'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `isdir` method, please use a higher version or dev'
' branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.isdir(filepath)
def isfile(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.isfile(filepath)
True
"""
if not has_method(self._client, 'contains'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `contains` method, please use a higher version or '
'dev branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.contains(filepath)
def join_path(
self,
filepath: Union[str, Path],
*filepaths: Union[str, Path],
) -> str:
r"""Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result after concatenation.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.join_path(filepath, 'another/path')
'petrel://path/of/file/another/path'
>>> backend.join_path(filepath, '/another/path')
'petrel://path/of/file/another/path'
"""
filepath = self._format_path(self._map_path(filepath))
if filepath.endswith('/'):
filepath = filepath[:-1]
formatted_paths = [filepath]
for path in filepaths:
formatted_path = self._format_path(self._map_path(path))
formatted_paths.append(formatted_path.lstrip('/'))
return '/'.join(formatted_paths)
@contextmanager
def get_local_path(
self,
filepath: Union[str, Path],
) -> Generator[Union[str, Path], None, None]:
"""Download a file from ``filepath`` to a local temporary directory,
and return the temporary path.
``get_local_path`` is decorated by :meth:`contextlib.contextmanager`. It
can be called with the ``with`` statement, and when exiting from the
``with`` statement, the temporary path will be released.
Args:
filepath (str or Path): Download a file from ``filepath``.
Yields:
Iterable[str]: Only yield one temporary path.
Examples:
>>> backend = PetrelBackend()
>>> # After exiting from the ``with`` clause,
>>> # the path will be removed
>>> filepath = 'petrel://path/of/file'
>>> with backend.get_local_path(filepath) as path:
... # do something here
"""
assert self.isfile(filepath)
try:
f = tempfile.NamedTemporaryFile(delete=False)
f.write(self.get(filepath))
f.close()
yield f.name
finally:
os.remove(f.name)
def copyfile(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile(src, dst)
'petrel://path/of/dir/file'
"""
src = self._format_path(self._map_path(src))
dst = self._format_path(self._map_path(dst))
if self.isdir(dst):
dst = self.join_path(dst, src.split('/')[-1])
if src == dst:
raise SameFileError('src and dst should not be same')
self.put(self.get(src), dst)
return dst
def copytree(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree(src, dst)
'petrel://path/of/dir1'
"""
src = self._format_path(self._map_path(src))
dst = self._format_path(self._map_path(dst))
if self.exists(dst):
raise FileExistsError('dst should not exist')
for path in self.list_dir_or_file(src, list_dir=False, recursive=True):
src_path = self.join_path(src, path)
dst_path = self.join_path(dst, path)
self.put(self.get(src_path), dst_path)
return dst
def copyfile_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Upload a local file src to dst and return the destination file.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'path/of/your/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/dir/file'
"""
dst = self._format_path(self._map_path(dst))
if self.isdir(dst):
dst = self.join_path(dst, osp.basename(src))
with open(src, 'rb') as f:
self.put(f.read(), dst)
return dst
def copytree_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'path/of/your/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree_from_local(src, dst)
'petrel://path/of/dir1'
"""
dst = self._format_path(self._map_path(dst))
if self.exists(dst):
raise FileExistsError('dst should not exist')
src = str(src)
for cur_dir, _, files in os.walk(src):
for f in files:
src_path = osp.join(cur_dir, f)
dst_path = self.join_path(dst, src_path.replace(src, ''))
self.copyfile_from_local(src_path, dst_path)
return dst
def copyfile_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> Union[str, Path]:
"""Copy the file src to local dst and return the destination file.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'path/of/your/file'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/file'
>>> # dst is a directory
>>> dst = 'path/of/your/dir'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/dir/file'
"""
if osp.isdir(dst):
basename = osp.basename(src)
if isinstance(dst, str):
dst = osp.join(dst, basename)
else:
assert isinstance(dst, Path)
dst = dst / basename
with open(dst, 'wb') as f:
f.write(self.get(src))
return dst
def copytree_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> Union[str, Path]:
"""Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
Returns:
str: The destination directory.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'path/of/your/dir'
>>> backend.copytree_to_local(src, dst)
'path/of/your/dir'
"""
for path in self.list_dir_or_file(src, list_dir=False, recursive=True):
dst_path = osp.join(dst, path)
mmengine.mkdir_or_exist(osp.dirname(dst_path))
with open(dst_path, 'wb') as f:
f.write(self.get(self.join_path(src, path)))
return dst
def remove(self, filepath: Union[str, Path]) -> None:
"""Remove a file.
Args:
filepath (str or Path): Path to be removed.
Raises:
FileNotFoundError: If filepath does not exist, a FileNotFoundError
will be raised.
IsADirectoryError: If filepath is a directory, an IsADirectoryError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.remove(filepath)
"""
if not has_method(self._client, 'delete'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `delete` method, please use a higher version or dev '
'branch instead.')
if not self.exists(filepath):
raise FileNotFoundError(f'filepath {filepath} does not exist')
if self.isdir(filepath):
raise IsADirectoryError('filepath should be a file')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.delete(filepath)
def rmtree(self, dir_path: Union[str, Path]) -> None:
"""Recursively delete a directory tree.
Args:
dir_path (str or Path): A directory to be removed.
Examples:
>>> backend = PetrelBackend()
>>> dir_path = 'petrel://path/of/dir'
>>> backend.rmtree(dir_path)
"""
for path in self.list_dir_or_file(
dir_path, list_dir=False, recursive=True):
filepath = self.join_path(dir_path, path)
self.remove(filepath)
def copy_if_symlink_fails(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> bool:
"""Create a symbolic link pointing to src named dst.
Directly copy src to dst because PetrelBackend does not support creating
a symbolic link.
Args:
src (str or Path): A file or directory to be copied.
dst (str or Path): Copy a file or directory to dst.
Returns:
bool: Return False because PetrelBackend does not support creating
a symbolic link.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/your/file'
>>> backend.copy_if_symlink_fails(src, dst)
False
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/your/dir'
>>> backend.copy_if_symlink_fails(src, dst)
False
"""
if self.isfile(src):
self.copyfile(src, dst)
else:
self.copytree(src, dst)
return False
def list_dir_or_file(self,
dir_path: Union[str, Path],
list_dir: bool = True,
list_file: bool = True,
suffix: Optional[Union[str, Tuple[str]]] = None,
recursive: bool = False) -> Iterator[str]:
"""Scan a directory to find the interested directories or files in
arbitrary order.
Note:
Petrel has no concept of directories but it simulates the directory
hierarchy in the filesystem through public prefixes. In addition,
if the returned path ends with '/', it means the path is a public
prefix which is a logical directory.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
In addition, the returned path of a directory will not contain the
suffix '/' which is consistent with other backends.
Args:
dir_path (str | Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix
that we are interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the
directory. Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = PetrelBackend()
>>> dir_path = 'petrel://path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directory recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
""" # noqa: E501
if not has_method(self._client, 'list'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `list` method, please use a higher version or dev'
' branch instead.')
dir_path = self._map_path(dir_path)
dir_path = self._format_path(dir_path)
dir_path = self._replace_prefix(dir_path)
if list_dir and suffix is not None:
raise TypeError(
'`list_dir` should be False when `suffix` is not None')
if (suffix is not None) and not isinstance(suffix, (str, tuple)):
raise TypeError('`suffix` must be a string or tuple of strings')
# Petrel's simulated directory hierarchy assumes that directory paths
# should end with `/`
if not dir_path.endswith('/'):
dir_path += '/'
root = dir_path
def _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive):
for path in self._client.list(dir_path):
# the `self.isdir` is not used here to determine whether path
# is a directory, because `self.isdir` relies on
# `self._client.list`
if path.endswith('/'): # a directory path
next_dir_path = self.join_path(dir_path, path)
if list_dir:
# get the relative path and exclude the last
# character '/'
rel_dir = next_dir_path[len(root):-1]
yield rel_dir
if recursive:
yield from _list_dir_or_file(next_dir_path, list_dir,
list_file, suffix,
recursive)
else: # a file path
absolute_path = self.join_path(dir_path, path)
rel_path = absolute_path[len(root):]
if (suffix is None
or rel_path.endswith(suffix)) and list_file:
yield rel_path
return _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive)
def generate_presigned_url(self,
url: str,
client_method: str = 'get_object',
expires_in: int = 3600) -> str:
"""Generate the presigned url of video stream which can be passed to
mmcv.VideoReader. Now only works on the Petrel backend.
Note:
Now only works on the Petrel backend.
Args:
url (str): Url of video stream.
client_method (str): Method of client, 'get_object' or
'put_object'. Default: 'get_object'.
expires_in (int): expires, in seconds. Default: 3600.
Returns:
str: Generated presigned url.
"""
return self._client.generate_presigned_url(url, client_method,
expires_in)
| (path_mapping: Optional[dict] = None, enable_mc: bool = True, conf_path: Optional[str] = None) |
730,209 | mmengine.fileio.backends.petrel_backend | __init__ | null | def __init__(self,
path_mapping: Optional[dict] = None,
enable_mc: bool = True,
conf_path: Optional[str] = None):
try:
from petrel_client import client
except ImportError:
raise ImportError('Please install petrel_client to enable '
'PetrelBackend.')
self._client = client.Client(conf_path=conf_path, enable_mc=enable_mc)
assert isinstance(path_mapping, dict) or path_mapping is None
self.path_mapping = path_mapping
| (self, path_mapping: Optional[dict] = None, enable_mc: bool = True, conf_path: Optional[str] = None) |
730,210 | mmengine.fileio.backends.petrel_backend | _format_path | Convert a ``filepath`` to standard format of petrel oss.
If the ``filepath`` is concatenated by ``os.path.join``, in a Windows
environment, the ``filepath`` will be the format of
's3://bucket_name\image.jpg'. By invoking :meth:`_format_path`, the
above ``filepath`` will be converted to 's3://bucket_name/image.jpg'.
Args:
filepath (str): Path to be formatted.
| def _format_path(self, filepath: str) -> str:
"""Convert a ``filepath`` to standard format of petrel oss.
If the ``filepath`` is concatenated by ``os.path.join``, in a Windows
environment, the ``filepath`` will be the format of
's3://bucket_name\\image.jpg'. By invoking :meth:`_format_path`, the
above ``filepath`` will be converted to 's3://bucket_name/image.jpg'.
Args:
filepath (str): Path to be formatted.
"""
return re.sub(r'\\+', '/', filepath)
| (self, filepath: str) -> str |
730,211 | mmengine.fileio.backends.petrel_backend | _map_path | Map ``filepath`` to a string path whose prefix will be replaced by
:attr:`self.path_mapping`.
Args:
filepath (str or Path): Path to be mapped.
| def _map_path(self, filepath: Union[str, Path]) -> str:
"""Map ``filepath`` to a string path whose prefix will be replaced by
:attr:`self.path_mapping`.
Args:
filepath (str or Path): Path to be mapped.
"""
filepath = str(filepath)
if self.path_mapping is not None:
for k, v in self.path_mapping.items():
filepath = filepath.replace(k, v, 1)
return filepath
| (self, filepath: Union[str, pathlib.Path]) -> str |
730,212 | mmengine.fileio.backends.petrel_backend | _replace_prefix | null | def _replace_prefix(self, filepath: Union[str, Path]) -> str:
filepath = str(filepath)
return filepath.replace('petrel://', 's3://')
| (self, filepath: Union[str, pathlib.Path]) -> str |
730,213 | mmengine.fileio.backends.petrel_backend | copy_if_symlink_fails | Create a symbolic link pointing to src named dst.
Directly copy src to dst because PetrelBackend does not support creating
a symbolic link.
Args:
src (str or Path): A file or directory to be copied.
dst (str or Path): Copy a file or directory to dst.
Returns:
bool: Return False because PetrelBackend does not support creating
a symbolic link.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/your/file'
>>> backend.copy_if_symlink_fails(src, dst)
False
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/your/dir'
>>> backend.copy_if_symlink_fails(src, dst)
False
| def copy_if_symlink_fails(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> bool:
"""Create a symbolic link pointing to src named dst.
Directly copy src to dst because PetrelBackend does not support creating
a symbolic link.
Args:
src (str or Path): A file or directory to be copied.
dst (str or Path): Copy a file or directory to dst.
Returns:
bool: Return False because PetrelBackend does not support creating
a symbolic link.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/your/file'
>>> backend.copy_if_symlink_fails(src, dst)
False
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/your/dir'
>>> backend.copy_if_symlink_fails(src, dst)
False
"""
if self.isfile(src):
self.copyfile(src, dst)
else:
self.copytree(src, dst)
return False
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> bool |
730,214 | mmengine.fileio.backends.petrel_backend | copyfile | Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile(src, dst)
'petrel://path/of/dir/file'
| def copyfile(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Copy a file src to dst and return the destination file.
src and dst should have the same prefix. If dst specifies a directory,
the file will be copied into dst using the base filename from src. If
dst specifies a file that already exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: The destination file.
Raises:
SameFileError: If src and dst are the same file, a SameFileError
will be raised.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile(src, dst)
'petrel://path/of/dir/file'
"""
src = self._format_path(self._map_path(src))
dst = self._format_path(self._map_path(dst))
if self.isdir(dst):
dst = self.join_path(dst, src.split('/')[-1])
if src == dst:
raise SameFileError('src and dst should not be same')
self.put(self.get(src), dst)
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,215 | mmengine.fileio.backends.petrel_backend | copyfile_from_local | Upload a local file src to dst and return the destination file.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'path/of/your/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/dir/file'
| def copyfile_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Upload a local file src to dst and return the destination file.
Args:
src (str or Path): A local file to be copied.
dst (str or Path): Copy file to dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'path/of/your/file'
>>> dst = 'petrel://path/of/file1'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/file1'
>>> # dst is a directory
>>> dst = 'petrel://path/of/dir'
>>> backend.copyfile_from_local(src, dst)
'petrel://path/of/dir/file'
"""
dst = self._format_path(self._map_path(dst))
if self.isdir(dst):
dst = self.join_path(dst, osp.basename(src))
with open(src, 'rb') as f:
self.put(f.read(), dst)
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,216 | mmengine.fileio.backends.petrel_backend | copyfile_to_local | Copy the file src to local dst and return the destination file.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'path/of/your/file'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/file'
>>> # dst is a directory
>>> dst = 'path/of/your/dir'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/dir/file'
| def copyfile_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> Union[str, Path]:
"""Copy the file src to local dst and return the destination file.
If dst specifies a directory, the file will be copied into dst using
the base filename from src. If dst specifies a file that already
exists, it will be replaced.
Args:
src (str or Path): A file to be copied.
dst (str or Path): Copy file to local dst.
Returns:
str: If dst specifies a directory, the file will be copied into dst
using the base filename from src.
Examples:
>>> backend = PetrelBackend()
>>> # dst is a file
>>> src = 'petrel://path/of/file'
>>> dst = 'path/of/your/file'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/file'
>>> # dst is a directory
>>> dst = 'path/of/your/dir'
>>> backend.copyfile_to_local(src, dst)
'path/of/your/dir/file'
"""
if osp.isdir(dst):
basename = osp.basename(src)
if isinstance(dst, str):
dst = osp.join(dst, basename)
else:
assert isinstance(dst, Path)
dst = dst / basename
with open(dst, 'wb') as f:
f.write(self.get(src))
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> Union[str, pathlib.Path] |
730,217 | mmengine.fileio.backends.petrel_backend | copytree | Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree(src, dst)
'petrel://path/of/dir1'
| def copytree(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
src and dst should have the same prefix.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree(src, dst)
'petrel://path/of/dir1'
"""
src = self._format_path(self._map_path(src))
dst = self._format_path(self._map_path(dst))
if self.exists(dst):
raise FileExistsError('dst should not exist')
for path in self.list_dir_or_file(src, list_dir=False, recursive=True):
src_path = self.join_path(src, path)
dst_path = self.join_path(dst, path)
self.put(self.get(src_path), dst_path)
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,218 | mmengine.fileio.backends.petrel_backend | copytree_from_local | Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'path/of/your/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree_from_local(src, dst)
'petrel://path/of/dir1'
| def copytree_from_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> str:
"""Recursively copy an entire directory tree rooted at src to a
directory named dst and return the destination directory.
Args:
src (str or Path): A local directory to be copied.
dst (str or Path): Copy directory to dst.
Returns:
str: The destination directory.
Raises:
FileExistsError: If dst had already existed, a FileExistsError will
be raised.
Examples:
>>> backend = PetrelBackend()
>>> src = 'path/of/your/dir'
>>> dst = 'petrel://path/of/dir1'
>>> backend.copytree_from_local(src, dst)
'petrel://path/of/dir1'
"""
dst = self._format_path(self._map_path(dst))
if self.exists(dst):
raise FileExistsError('dst should not exist')
src = str(src)
for cur_dir, _, files in os.walk(src):
for f in files:
src_path = osp.join(cur_dir, f)
dst_path = self.join_path(dst, src_path.replace(src, ''))
self.copyfile_from_local(src_path, dst_path)
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> str |
730,219 | mmengine.fileio.backends.petrel_backend | copytree_to_local | Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
Returns:
str: The destination directory.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'path/of/your/dir'
>>> backend.copytree_to_local(src, dst)
'path/of/your/dir'
| def copytree_to_local(
self,
src: Union[str, Path],
dst: Union[str, Path],
) -> Union[str, Path]:
"""Recursively copy an entire directory tree rooted at src to a local
directory named dst and return the destination directory.
Args:
src (str or Path): A directory to be copied.
dst (str or Path): Copy directory to local dst.
Returns:
str: The destination directory.
Examples:
>>> backend = PetrelBackend()
>>> src = 'petrel://path/of/dir'
>>> dst = 'path/of/your/dir'
>>> backend.copytree_to_local(src, dst)
'path/of/your/dir'
"""
for path in self.list_dir_or_file(src, list_dir=False, recursive=True):
dst_path = osp.join(dst, path)
mmengine.mkdir_or_exist(osp.dirname(dst_path))
with open(dst_path, 'wb') as f:
f.write(self.get(self.join_path(src, path)))
return dst
| (self, src: Union[str, pathlib.Path], dst: Union[str, pathlib.Path]) -> Union[str, pathlib.Path] |
730,220 | mmengine.fileio.backends.petrel_backend | exists | Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.exists(filepath)
True
| def exists(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path exists.
Args:
filepath (str or Path): Path to be checked whether exists.
Returns:
bool: Return ``True`` if ``filepath`` exists, ``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.exists(filepath)
True
"""
if not (has_method(self._client, 'contains')
and has_method(self._client, 'isdir')):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `contains` and `isdir` methods, please use a higher '
'version or dev branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.contains(filepath) or self._client.isdir(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
730,221 | mmengine.fileio.backends.petrel_backend | generate_presigned_url | Generate the presigned url of video stream which can be passed to
mmcv.VideoReader. Now only works on the Petrel backend.
Note:
Now only works on the Petrel backend.
Args:
url (str): Url of video stream.
client_method (str): Method of client, 'get_object' or
'put_object'. Default: 'get_object'.
expires_in (int): expires, in seconds. Default: 3600.
Returns:
str: Generated presigned url.
| def generate_presigned_url(self,
url: str,
client_method: str = 'get_object',
expires_in: int = 3600) -> str:
"""Generate the presigned url of video stream which can be passed to
mmcv.VideoReader. Now only works on the Petrel backend.
Note:
Now only works on the Petrel backend.
Args:
url (str): Url of video stream.
client_method (str): Method of client, 'get_object' or
'put_object'. Default: 'get_object'.
expires_in (int): expires, in seconds. Default: 3600.
Returns:
str: Generated presigned url.
"""
return self._client.generate_presigned_url(url, client_method,
expires_in)
| (self, url: str, client_method: str = 'get_object', expires_in: int = 3600) -> str |
730,222 | mmengine.fileio.backends.petrel_backend | get | Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Return bytes read from filepath.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get(filepath)
b'hello world'
| def get(self, filepath: Union[str, Path]) -> bytes:
"""Read bytes from a given ``filepath`` with 'rb' mode.
Args:
filepath (str or Path): Path to read data.
Returns:
bytes: Return bytes read from filepath.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get(filepath)
b'hello world'
"""
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
value = self._client.Get(filepath)
return value
| (self, filepath: Union[str, pathlib.Path]) -> bytes |
730,223 | mmengine.fileio.backends.petrel_backend | get_local_path | Download a file from ``filepath`` to a local temporary directory,
and return the temporary path.
``get_local_path`` is decorated by :meth:`contextlib.contextmanager`. It
can be called with the ``with`` statement, and when exiting from the
``with`` statement, the temporary path will be released.
Args:
filepath (str or Path): Download a file from ``filepath``.
Yields:
Iterable[str]: Only yield one temporary path.
Examples:
>>> backend = PetrelBackend()
>>> # After exiting from the ``with`` clause,
>>> # the path will be removed
>>> filepath = 'petrel://path/of/file'
>>> with backend.get_local_path(filepath) as path:
... # do something here
| @contextmanager
def get_local_path(
self,
filepath: Union[str, Path],
) -> Generator[Union[str, Path], None, None]:
"""Download a file from ``filepath`` to a local temporary directory,
and return the temporary path.
``get_local_path`` is decorated by :meth:`contextlib.contextmanager`. It
can be called with the ``with`` statement, and when exiting from the
``with`` statement, the temporary path will be released.
Args:
filepath (str or Path): Download a file from ``filepath``.
Yields:
Iterable[str]: Only yield one temporary path.
Examples:
>>> backend = PetrelBackend()
>>> # After exiting from the ``with`` clause,
>>> # the path will be removed
>>> filepath = 'petrel://path/of/file'
>>> with backend.get_local_path(filepath) as path:
... # do something here
"""
assert self.isfile(filepath)
try:
f = tempfile.NamedTemporaryFile(delete=False)
f.write(self.get(filepath))
f.close()
yield f.name
finally:
os.remove(f.name)
| (self, filepath: Union[str, pathlib.Path]) -> Generator[Union[str, pathlib.Path], NoneType, NoneType] |
730,224 | mmengine.fileio.backends.petrel_backend | get_text | Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text reading from ``filepath``.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get_text(filepath)
'hello world'
| def get_text(
self,
filepath: Union[str, Path],
encoding: str = 'utf-8',
) -> str:
"""Read text from a given ``filepath`` with 'r' mode.
Args:
filepath (str or Path): Path to read data.
encoding (str): The encoding format used to open the ``filepath``.
Defaults to 'utf-8'.
Returns:
str: Expected text read from ``filepath``.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.get_text(filepath)
'hello world'
"""
return str(self.get(filepath), encoding=encoding)
| (self, filepath: Union[str, pathlib.Path], encoding: str = 'utf-8') -> str |
730,225 | mmengine.fileio.backends.petrel_backend | isdir | Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/dir'
>>> backend.isdir(filepath)
True
| def isdir(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a directory.
Args:
filepath (str or Path): Path to be checked whether it is a
directory.
Returns:
bool: Return ``True`` if ``filepath`` points to a directory,
``False`` otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/dir'
>>> backend.isdir(filepath)
True
"""
if not has_method(self._client, 'isdir'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `isdir` method, please use a higher version or dev'
' branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.isdir(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
730,226 | mmengine.fileio.backends.petrel_backend | isfile | Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.isfile(filepath)
True
| def isfile(self, filepath: Union[str, Path]) -> bool:
"""Check whether a file path is a file.
Args:
filepath (str or Path): Path to be checked whether it is a file.
Returns:
bool: Return ``True`` if ``filepath`` points to a file, ``False``
otherwise.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.isfile(filepath)
True
"""
if not has_method(self._client, 'contains'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `contains` method, please use a higher version or '
'dev branch instead.')
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
return self._client.contains(filepath)
| (self, filepath: Union[str, pathlib.Path]) -> bool |
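Since both checks depend on optional SDK methods (``isdir`` and ``contains``), a defensive caller might probe a path as sketched below; the path is a hypothetical placeholder.

# Hedged sketch: requires an SDK version exposing ``contains``/``isdir``.
from mmengine.fileio.backends import PetrelBackend

backend = PetrelBackend()
path = 'petrel://bucket/config/schedule.json'  # hypothetical placeholder
if backend.isfile(path):
    text = backend.get_text(path)
elif backend.isdir(path):
    print('path is a simulated directory, not an object')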
730,227 | mmengine.fileio.backends.petrel_backend | join_path | Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result after concatenation.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.join_path(filepath, 'another/path')
'petrel://path/of/file/another/path'
>>> backend.join_path(filepath, '/another/path')
'petrel://path/of/file/another/path'
| def join_path(
self,
filepath: Union[str, Path],
*filepaths: Union[str, Path],
) -> str:
r"""Concatenate all file paths.
Join one or more filepath components intelligently. The return value
is the concatenation of filepath and any members of \*filepaths.
Args:
filepath (str or Path): Path to be concatenated.
Returns:
str: The result after concatenation.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.join_path(filepath, 'another/path')
'petrel://path/of/file/another/path'
>>> backend.join_path(filepath, '/another/path')
'petrel://path/of/file/another/path'
"""
filepath = self._format_path(self._map_path(filepath))
if filepath.endswith('/'):
filepath = filepath[:-1]
formatted_paths = [filepath]
for path in filepaths:
formatted_path = self._format_path(self._map_path(path))
formatted_paths.append(formatted_path.lstrip('/'))
return '/'.join(formatted_paths)
| (self, filepath: Union[str, pathlib.Path], *filepaths: Union[str, pathlib.Path]) -> str |
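The normalization above (a trailing '/' removed from the base, leading '/' stripped from components) can be mirrored by a simplified standalone helper; this sketch is illustrative only and not part of the backend.

# Simplified mirror of the join semantics above (illustrative only).
def join_like_petrel(base: str, *parts: str) -> str:
    pieces = [base[:-1] if base.endswith('/') else base]
    pieces += [p.lstrip('/') for p in parts]
    return '/'.join(pieces)

assert join_like_petrel('petrel://b/dir/', '/a', 'b.txt') == 'petrel://b/dir/a/b.txt'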
730,228 | mmengine.fileio.backends.petrel_backend | list_dir_or_file | Scan a directory to find the interested directories or files in
arbitrary order.
Note:
Petrel has no concept of directories but it simulates the directory
hierarchy in the filesystem through public prefixes. In addition,
if the returned path ends with '/', it means the path is a public
prefix which is a logical directory.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
In addition, the returned directory path will not contain the trailing
'/', which is consistent with other backends.
Args:
dir_path (str | Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix
that we are interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the
directory. Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = PetrelBackend()
>>> dir_path = 'petrel://path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directories recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
| def list_dir_or_file(self,
dir_path: Union[str, Path],
list_dir: bool = True,
list_file: bool = True,
suffix: Optional[Union[str, Tuple[str]]] = None,
recursive: bool = False) -> Iterator[str]:
"""Scan a directory to find the interested directories or files in
arbitrary order.
Note:
Petrel has no concept of directories but it simulates the directory
hierarchy in the filesystem through public prefixes. In addition,
if the returned path ends with '/', it means the path is a public
prefix which is a logical directory.
Note:
:meth:`list_dir_or_file` returns the path relative to ``dir_path``.
In addition, the returned directory path will not contain the trailing
'/', which is consistent with other backends.
Args:
dir_path (str | Path): Path of the directory.
list_dir (bool): List the directories. Defaults to True.
list_file (bool): List the path of files. Defaults to True.
suffix (str or tuple[str], optional): File suffix
that we are interested in. Defaults to None.
recursive (bool): If set to True, recursively scan the
directory. Defaults to False.
Yields:
Iterable[str]: A relative path to ``dir_path``.
Examples:
>>> backend = PetrelBackend()
>>> dir_path = 'petrel://path/of/dir'
>>> # list those files and directories in current directory
>>> for file_path in backend.list_dir_or_file(dir_path):
... print(file_path)
>>> # only list files
>>> for file_path in backend.list_dir_or_file(dir_path, list_dir=False):
... print(file_path)
>>> # only list directories
>>> for file_path in backend.list_dir_or_file(dir_path, list_file=False):
... print(file_path)
>>> # only list files ending with specified suffixes
>>> for file_path in backend.list_dir_or_file(dir_path, suffix='.txt'):
... print(file_path)
>>> # list all files and directories recursively
>>> for file_path in backend.list_dir_or_file(dir_path, recursive=True):
... print(file_path)
""" # noqa: E501
if not has_method(self._client, 'list'):
raise NotImplementedError(
'Current version of Petrel Python SDK has not supported '
'the `list` method, please use a higher version or dev'
' branch instead.')
dir_path = self._map_path(dir_path)
dir_path = self._format_path(dir_path)
dir_path = self._replace_prefix(dir_path)
if list_dir and suffix is not None:
raise TypeError(
'`list_dir` should be False when `suffix` is not None')
if (suffix is not None) and not isinstance(suffix, (str, tuple)):
raise TypeError('`suffix` must be a string or tuple of strings')
# Petrel's simulated directory hierarchy assumes that directory paths
# should end with `/`
if not dir_path.endswith('/'):
dir_path += '/'
root = dir_path
def _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive):
for path in self._client.list(dir_path):
# the `self.isdir` is not used here to determine whether path
# is a directory, because `self.isdir` relies on
# `self._client.list`
if path.endswith('/'): # a directory path
next_dir_path = self.join_path(dir_path, path)
if list_dir:
# get the relative path and exclude the last
# character '/'
rel_dir = next_dir_path[len(root):-1]
yield rel_dir
if recursive:
yield from _list_dir_or_file(next_dir_path, list_dir,
list_file, suffix,
recursive)
else: # a file path
absolute_path = self.join_path(dir_path, path)
rel_path = absolute_path[len(root):]
if (suffix is None
or rel_path.endswith(suffix)) and list_file:
yield rel_path
return _list_dir_or_file(dir_path, list_dir, list_file, suffix,
recursive)
| (self, dir_path: Union[str, pathlib.Path], list_dir: bool = True, list_file: bool = True, suffix: Union[str, Tuple[str], NoneType] = None, recursive: bool = False) -> Iterator[str] |
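Combining the listing with ``get`` gives a simple streaming loop; note that ``list_dir`` must be False whenever ``suffix`` is set, as enforced above. The prefix below is a hypothetical placeholder.

# Hedged sketch: hypothetical prefix; ``suffix`` requires ``list_dir=False``.
from mmengine.fileio.backends import PetrelBackend

backend = PetrelBackend()
prefix = 'petrel://bucket/corpus'
for rel in backend.list_dir_or_file(prefix, list_dir=False,
                                    suffix='.txt', recursive=True):
    data = backend.get(backend.join_path(prefix, rel))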
730,229 | mmengine.fileio.backends.petrel_backend | put | Write bytes to a given ``filepath``.
Args:
obj (bytes): Data to be saved.
filepath (str or Path): Path to write data.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.put(b'hello world', filepath)
| def put(self, obj: bytes, filepath: Union[str, Path]) -> None:
"""Write bytes to a given ``filepath``.
Args:
obj (bytes): Data to be saved.
filepath (str or Path): Path to write data.
Examples:
>>> backend = PetrelBackend()
>>> filepath = 'petrel://path/of/file'
>>> backend.put(b'hello world', filepath)
"""
filepath = self._map_path(filepath)
filepath = self._format_path(filepath)
filepath = self._replace_prefix(filepath)
self._client.put(filepath, obj)
| (self, obj: bytes, filepath: Union[str, pathlib.Path]) -> NoneType |
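A hedged round-trip sketch pairing ``put`` with ``get`` (placeholder path, assumes write access to the bucket):

# Hedged sketch: hypothetical target; assumes write permission on the bucket.
from mmengine.fileio.backends import PetrelBackend

backend = PetrelBackend()
target = 'petrel://bucket/tmp/hello.bin'
backend.put(b'hello world', target)
assert backend.get(target) == b'hello world'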